Compare commits
1 commit
v3.42.0 ... renovate/g

Author | SHA1 | Date
---|---|---
 | 2951913ad3 |

48  .github/workflows/beta_release.yml  (vendored)
@@ -8,9 +8,6 @@ concurrency:
 group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
 cancel-in-progress: true
 
-permissions:
-contents: write
-
 jobs:
 changelog:
 strategy:
@@ -32,17 +29,12 @@ jobs:
 ref: tags/beta
 sha: ${{ github.sha }}
 
-- name: Delete beta tag
-run: git tag -d beta
-continue-on-error: true
-
 - name: changelog # or changelogithub@0.12 if ensure the stable result
 id: changelog
 run: |
 git tag -l
 npx changelogithub --output CHANGELOG.md
 # npx changelogen@latest --output CHANGELOG.md
-
 - name: Upload assets
 uses: softprops/action-gh-release@v2
 with:
@@ -57,7 +49,7 @@ jobs:
 strategy:
 matrix:
 include:
-- target: '!(*musl*|*windows-arm64*|*android*|*freebsd*)' # xgo
+- target: '!(*musl*|*windows-arm64*|*android*)' # xgo
 hash: "md5"
 - target: 'linux-!(arm*)-musl*' #musl-not-arm
 hash: "md5-linux-musl"
@@ -67,9 +59,6 @@ jobs:
 hash: "md5-windows-arm64"
 - target: 'android-*' #android
 hash: "md5-android"
-- target: 'freebsd-*' #freebsd
-hash: "md5-freebsd"
-
 name: Beta Release
 runs-on: ubuntu-latest
 steps:
@@ -87,18 +76,12 @@ jobs:
 run: bash build.sh dev web
 
 - name: Build
+id: test-action
 uses: go-cross/cgo-actions@v1
 with:
 targets: ${{ matrix.target }}
 musl-target-format: $os-$musl-$arch
 out-dir: build
-x-flags: |
-github.com/alist-org/alist/v3/internal/conf.BuiltAt=$built_at
-github.com/alist-org/alist/v3/internal/conf.GoVersion=$go_version
-github.com/alist-org/alist/v3/internal/conf.GitAuthor=Xhofe
-github.com/alist-org/alist/v3/internal/conf.GitCommit=$git_commit
-github.com/alist-org/alist/v3/internal/conf.Version=$tag
-github.com/alist-org/alist/v3/internal/conf.WebVersion=dev
 
 - name: Compress
 run: |
@@ -117,23 +100,14 @@ jobs:
 name: Beta Release Desktop
 runs-on: ubuntu-latest
 steps:
-- name: Checkout repo
-uses: actions/checkout@v4
+- uses: peter-evans/create-or-update-comment@v4
 with:
-repository: alist-org/desktop-release
-ref: main
-persist-credentials: false
-fetch-depth: 0
-
-- name: Commit
-run: |
-git config --local user.email "bot@nn.ci"
-git config --local user.name "IlaBot"
-git commit --allow-empty -m "Trigger build for ${{ github.sha }}"
-
-- name: Push commit
-uses: ad-m/github-push-action@master
-with:
-github_token: ${{ secrets.MY_TOKEN }}
-branch: main
+issue-number: 69
+body: |
+/release-beta
+- triggered by ${{ github.actor }}
+- commit sha: ${{ github.sha }}
+- view files: https://github.com/alist-org/alist/tree/${{ github.sha }}
+reactions: 'rocket'
+token: ${{ secrets.MY_TOKEN }}
 repository: alist-org/desktop-release
126  .github/workflows/build_docker.yml  (vendored, new file)
@@ -0,0 +1,126 @@
+name: build_docker
+
+on:
+push:
+branches: [ main ]
+pull_request:
+branches: [ main ]
+
+concurrency:
+group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+cancel-in-progress: true
+
+jobs:
+build_docker:
+name: Build Docker
+runs-on: ubuntu-latest
+steps:
+- name: Checkout
+uses: actions/checkout@v4
+
+- name: Docker meta
+id: meta
+uses: docker/metadata-action@v5
+with:
+images: xhofe/alist
+tags: |
+type=schedule
+type=ref,event=branch
+type=ref,event=tag
+type=ref,event=pr
+type=raw,value=beta,enable={{is_default_branch}}
+
+- name: Docker meta with ffmpeg
+id: meta-ffmpeg
+uses: docker/metadata-action@v5
+with:
+images: xhofe/alist
+flavor: |
+suffix=-ffmpeg
+tags: |
+type=schedule
+type=ref,event=branch
+type=ref,event=tag
+type=ref,event=pr
+type=raw,value=beta,enable={{is_default_branch}}
+
+- uses: actions/setup-go@v5
+with:
+go-version: 'stable'
+
+- name: Cache Musl
+id: cache-musl
+uses: actions/cache@v4
+with:
+path: build/musl-libs
+key: docker-musl-libs
+
+- name: Download Musl Library
+if: steps.cache-musl.outputs.cache-hit != 'true'
+run: bash build.sh prepare docker-multiplatform
+
+- name: Build go binary
+run: bash build.sh dev docker-multiplatform
+
+- name: Set up QEMU
+uses: docker/setup-qemu-action@v3
+
+- name: Set up Docker Buildx
+uses: docker/setup-buildx-action@v3
+
+- name: Login to DockerHub
+if: github.event_name == 'push'
+uses: docker/login-action@v3
+with:
+username: xhofe
+password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+- name: Build and push
+id: docker_build
+uses: docker/build-push-action@v6
+with:
+context: .
+file: Dockerfile.ci
+push: ${{ github.event_name == 'push' }}
+tags: ${{ steps.meta.outputs.tags }}
+labels: ${{ steps.meta.outputs.labels }}
+platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+
+- name: Build and push with ffmpeg
+id: docker_build_ffmpeg
+uses: docker/build-push-action@v6
+with:
+context: .
+file: Dockerfile.ci
+push: ${{ github.event_name == 'push' }}
+tags: ${{ steps.meta-ffmpeg.outputs.tags }}
+labels: ${{ steps.meta-ffmpeg.outputs.labels }}
+build-args: INSTALL_FFMPEG=true
+platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+
+build_docker_with_aria2:
+needs: build_docker
+name: Build docker with aria2
+runs-on: ubuntu-latest
+if: github.event_name == 'push'
+steps:
+- name: Checkout repo
+uses: actions/checkout@v4
+with:
+repository: alist-org/with_aria2
+ref: main
+persist-credentials: false
+fetch-depth: 0
+
+- name: Commit
+run: |
+git config --local user.email "bot@nn.ci"
+git config --local user.name "IlaBot"
+git commit --allow-empty -m "Trigger build for ${{ github.sha }}"
+
+- name: Push commit
+uses: ad-m/github-push-action@master
+with:
+github_token: ${{ secrets.MY_TOKEN }}
+branch: main
+repository: alist-org/with_aria2
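The added build_docker workflow chains the repository's build.sh helper into docker/build-push-action. A rough local approximation of its build steps, assuming a checkout of the branch that ships build.sh with the docker-multiplatform target and Dockerfile.ci (the tag below is illustrative; the workflow derives tags via docker/metadata-action and pushes only on push events):

# fetch the musl cross toolchains (the workflow caches these under build/musl-libs)
bash build.sh prepare docker-multiplatform
# produce the per-platform binaries that Dockerfile.ci copies into the image
bash build.sh dev docker-multiplatform
# assemble and push a multi-arch image the same way the workflow does
docker buildx build --platform linux/amd64,linux/arm64 -f Dockerfile.ci -t xhofe/alist:beta --push .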
4  .github/workflows/changelog.yml  (vendored)
@@ -15,10 +15,6 @@ jobs:
 with:
 fetch-depth: 0
 
-- name: Delete beta tag
-run: git tag -d beta
-continue-on-error: true
-
 - run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
 env:
 GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
17  .github/workflows/release.yml  (vendored)
@@ -13,23 +13,6 @@ jobs:
 name: Release
 runs-on: ${{ matrix.platform }}
 steps:
-
-- name: Free Disk Space (Ubuntu)
-uses: jlumbroso/free-disk-space@main
-with:
-# this might remove tools that are actually needed,
-# if set to "true" but frees about 6 GB
-tool-cache: false
-
-# all of these default to true, but feel free to set to
-# "false" if necessary for your workflow
-android: true
-dotnet: true
-haskell: true
-large-packages: true
-docker-images: true
-swap-storage: true
-
 - name: Prerelease
 uses: irongut/EditRelease@v1.2.0
 with:
147  .github/workflows/release_docker.yml  (vendored)
@@ -4,34 +4,10 @@ on:
 push:
 tags:
 - 'v*'
-branches:
-- main
-pull_request:
-branches:
-- main
-
-concurrency:
-group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
-cancel-in-progress: true
-
-env:
-REGISTRY: 'xhofe/alist'
-REGISTRY_USERNAME: 'xhofe'
-REGISTRY_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
-ARTIFACT_NAME: 'binaries_docker_release'
-RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
-IMAGE_PUSH: ${{ github.event_name == 'push' }}
-IMAGE_IS_PROD: ${{ github.ref_type == 'tag' }}
-IMAGE_TAGS_BETA: |
-type=schedule
-type=ref,event=branch
-type=ref,event=tag
-type=ref,event=pr
-type=raw,value=beta,enable={{is_default_branch}}
 
 jobs:
-build_binary:
-name: Build Binaries for Docker Release
+release_docker:
+name: Release Docker
 runs-on: ubuntu-latest
 steps:
 - name: Checkout
@@ -46,59 +22,20 @@ jobs:
 uses: actions/cache@v4
 with:
 path: build/musl-libs
-key: docker-musl-libs-v2
+key: docker-musl-libs
 
 - name: Download Musl Library
 if: steps.cache-musl.outputs.cache-hit != 'true'
 run: bash build.sh prepare docker-multiplatform
 
-- name: Build go binary (beta)
-if: env.IMAGE_IS_PROD != 'true'
-run: bash build.sh beta docker-multiplatform
-
-- name: Build go binary (release)
-if: env.IMAGE_IS_PROD == 'true'
+- name: Build go binary
 run: bash build.sh release docker-multiplatform
 
-- name: Upload artifacts
-uses: actions/upload-artifact@v4
+- name: Docker meta
+id: meta
+uses: docker/metadata-action@v5
 with:
-name: ${{ env.ARTIFACT_NAME }}
-overwrite: true
-path: |
-build/
-!build/*.tgz
-!build/musl-libs/**
-
-release_docker:
-needs: build_binary
-name: Release Docker image
-runs-on: ubuntu-latest
-strategy:
-matrix:
-image: ["latest", "ffmpeg", "aria2", "aio"]
-include:
-- image: "latest"
-build_arg: ""
-tag_favor: ""
-- image: "ffmpeg"
-build_arg: INSTALL_FFMPEG=true
-tag_favor: "suffix=-ffmpeg,onlatest=true"
-- image: "aria2"
-build_arg: INSTALL_ARIA2=true
-tag_favor: "suffix=-aria2,onlatest=true"
-- image: "aio"
-build_arg: |
-INSTALL_FFMPEG=true
-INSTALL_ARIA2=true
-tag_favor: "suffix=-aio,onlatest=true"
-steps:
-- name: Checkout
-uses: actions/checkout@v4
-- uses: actions/download-artifact@v4
-with:
-name: ${{ env.ARTIFACT_NAME }}
-path: 'build/'
+images: xhofe/alist
 
 - name: Set up QEMU
 uses: docker/setup-qemu-action@v3
@@ -107,22 +44,10 @@ jobs:
 uses: docker/setup-buildx-action@v3
 
 - name: Login to DockerHub
-if: env.IMAGE_PUSH == 'true'
 uses: docker/login-action@v3
 with:
-logout: true
-username: ${{ env.REGISTRY_USERNAME }}
-password: ${{ env.REGISTRY_PASSWORD }}
-
-- name: Docker meta
-id: meta
-uses: docker/metadata-action@v5
-with:
-images: ${{ env.REGISTRY }}
-tags: ${{ env.IMAGE_IS_PROD == 'true' && '' || env.IMAGE_TAGS_BETA }}
-flavor: |
-${{ env.IMAGE_IS_PROD == 'true' && 'latest=true' || '' }}
-${{ matrix.tag_favor }}
+username: xhofe
+password: ${{ secrets.DOCKERHUB_TOKEN }}
 
 - name: Build and push
 id: docker_build
@@ -130,8 +55,54 @@ jobs:
 with:
 context: .
 file: Dockerfile.ci
-push: ${{ env.IMAGE_PUSH == 'true' }}
-build-args: ${{ matrix.build_arg }}
+push: true
 tags: ${{ steps.meta.outputs.tags }}
 labels: ${{ steps.meta.outputs.labels }}
-platforms: ${{ env.RELEASE_PLATFORMS }}
+platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+
+- name: Docker meta with ffmpeg
+id: meta-ffmpeg
+uses: docker/metadata-action@v5
+with:
+images: xhofe/alist
+flavor: |
+latest=true
+suffix=-ffmpeg,onlatest=true
+
+- name: Build and push with ffmpeg
+id: docker_build_ffmpeg
+uses: docker/build-push-action@v6
+with:
+context: .
+file: Dockerfile.ci
+push: true
+tags: ${{ steps.meta-ffmpeg.outputs.tags }}
+labels: ${{ steps.meta-ffmpeg.outputs.labels }}
+build-args: INSTALL_FFMPEG=true
+platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+
+release_docker_with_aria2:
+needs: release_docker
+name: Release docker with aria2
+runs-on: ubuntu-latest
+steps:
+- name: Checkout repo
+uses: actions/checkout@v4
+with:
+repository: alist-org/with_aria2
+ref: main
+persist-credentials: false
+fetch-depth: 0
+
+- name: Add tag
+run: |
+git config --local user.email "bot@nn.ci"
+git config --local user.name "IlaBot"
+git tag -a ${{ github.ref_name }} -m "release ${{ github.ref_name }}"
+
+- name: Push tags
+uses: ad-m/github-push-action@master
+with:
+github_token: ${{ secrets.MY_TOKEN }}
+branch: main
+repository: alist-org/with_aria2
34  .github/workflows/release_freebsd.yml  (vendored, deleted file)
@@ -1,34 +0,0 @@
-name: release_freebsd
-
-on:
-release:
-types: [ published ]
-
-jobs:
-release_freebsd:
-strategy:
-matrix:
-platform: [ ubuntu-latest ]
-go-version: [ '1.21' ]
-name: Release
-runs-on: ${{ matrix.platform }}
-steps:
-
-- name: Setup Go
-uses: actions/setup-go@v5
-with:
-go-version: ${{ matrix.go-version }}
-
-- name: Checkout
-uses: actions/checkout@v4
-with:
-fetch-depth: 0
-
-- name: Build
-run: |
-bash build.sh release freebsd
-
-- name: Upload assets
-uses: softprops/action-gh-release@v2
-with:
-files: build/compress/*
17  Dockerfile
@@ -10,7 +10,6 @@ RUN bash build.sh release docker
 FROM alpine:edge
 
 ARG INSTALL_FFMPEG=false
-ARG INSTALL_ARIA2=false
 LABEL MAINTAINER="i@nn.ci"
 
 WORKDIR /opt/alist/
@@ -19,25 +18,13 @@ RUN apk update && \
 apk upgrade --no-cache && \
 apk add --no-cache bash ca-certificates su-exec tzdata; \
 [ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
-[ "$INSTALL_ARIA2" = "true" ] && apk add --no-cache curl aria2 && \
-mkdir -p /opt/aria2/.aria2 && \
-wget https://github.com/P3TERX/aria2.conf/archive/refs/heads/master.tar.gz -O /tmp/aria-conf.tar.gz && \
-tar -zxvf /tmp/aria-conf.tar.gz -C /opt/aria2/.aria2 --strip-components=1 && rm -f /tmp/aria-conf.tar.gz && \
-sed -i 's|rpc-secret|#rpc-secret|g' /opt/aria2/.aria2/aria2.conf && \
-sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/aria2.conf && \
-sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/script.conf && \
-sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/aria2.conf && \
-sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/script.conf && \
-touch /opt/aria2/.aria2/aria2.session && \
-/opt/aria2/.aria2/tracker.sh ; \
 rm -rf /var/cache/apk/*
 
 COPY --from=builder /app/bin/alist ./
 COPY entrypoint.sh /entrypoint.sh
-RUN chmod +x /opt/alist/alist && \
-chmod +x /entrypoint.sh && /entrypoint.sh version
+RUN chmod +x /entrypoint.sh && /entrypoint.sh version
 
-ENV PUID=0 PGID=0 UMASK=022 RUN_ARIA2=${INSTALL_ARIA2}
+ENV PUID=0 PGID=0 UMASK=022
 VOLUME /opt/alist/data/
 EXPOSE 5244 5245
 CMD [ "/entrypoint.sh" ]
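Both sides of this Dockerfile diff gate optional packages behind build arguments; only the removed ("-") side keeps INSTALL_ARIA2 and the matching RUN_ARIA2 runtime variable. A hedged sketch of how those switches are exercised at build time (image tags are illustrative):

# plain image
docker build -t alist:local .
# with ffmpeg baked in (accepted by both sides of the diff)
docker build --build-arg INSTALL_FFMPEG=true -t alist:local-ffmpeg .
# with aria2 as well; only the "-" side of the Dockerfile understands this argument
docker build --build-arg INSTALL_FFMPEG=true --build-arg INSTALL_ARIA2=true -t alist:local-aio .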
@@ -2,7 +2,6 @@ FROM alpine:edge
 
 ARG TARGETPLATFORM
 ARG INSTALL_FFMPEG=false
-ARG INSTALL_ARIA2=false
 LABEL MAINTAINER="i@nn.ci"
 
 WORKDIR /opt/alist/
@@ -11,25 +10,13 @@ RUN apk update && \
 apk upgrade --no-cache && \
 apk add --no-cache bash ca-certificates su-exec tzdata; \
 [ "$INSTALL_FFMPEG" = "true" ] && apk add --no-cache ffmpeg; \
-[ "$INSTALL_ARIA2" = "true" ] && apk add --no-cache curl aria2 && \
-mkdir -p /opt/aria2/.aria2 && \
-wget https://github.com/P3TERX/aria2.conf/archive/refs/heads/master.tar.gz -O /tmp/aria-conf.tar.gz && \
-tar -zxvf /tmp/aria-conf.tar.gz -C /opt/aria2/.aria2 --strip-components=1 && rm -f /tmp/aria-conf.tar.gz && \
-sed -i 's|rpc-secret|#rpc-secret|g' /opt/aria2/.aria2/aria2.conf && \
-sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/aria2.conf && \
-sed -i 's|/root/.aria2|/opt/aria2/.aria2|g' /opt/aria2/.aria2/script.conf && \
-sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/aria2.conf && \
-sed -i 's|/root|/opt/aria2|g' /opt/aria2/.aria2/script.conf && \
-touch /opt/aria2/.aria2/aria2.session && \
-/opt/aria2/.aria2/tracker.sh ; \
 rm -rf /var/cache/apk/*
 
 COPY /build/${TARGETPLATFORM}/alist ./
 COPY entrypoint.sh /entrypoint.sh
-RUN chmod +x /opt/alist/alist && \
-chmod +x /entrypoint.sh && /entrypoint.sh version
+RUN chmod +x /entrypoint.sh && /entrypoint.sh version
 
-ENV PUID=0 PGID=0 UMASK=022 RUN_ARIA2=${INSTALL_ARIA2}
+ENV PUID=0 PGID=0 UMASK=022
 VOLUME /opt/alist/data/
 EXPOSE 5244 5245
 CMD [ "/entrypoint.sh" ]
@@ -39,7 +39,7 @@
 
 ---
 
-English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
+English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
 
 ## Features
 
@@ -58,7 +58,7 @@ English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing
 - [x] WebDav(Support OneDrive/SharePoint without API)
 - [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
 - [x] [Mediatrack](https://www.mediatrack.cn/)
-- [x] [139yun](https://yun.139.com/) (Personal, Family, Group)
+- [x] [139yun](https://yun.139.com/) (Personal, Family)
 - [x] [YandexDisk](https://disk.yandex.com/)
 - [x] [BaiduNetdisk](http://pan.baidu.com/)
 - [x] [Terabox](https://www.terabox.com/main)
@@ -98,7 +98,7 @@ English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing
 
 ## Document
 
-<https://alistgo.com/>
+<https://alist.nn.ci/>
 
 ## Demo
 
@@ -138,4 +138,4 @@ The `AList` is open-source software licensed under the AGPL-3.0 license.
 
 ---
 
-> [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
+> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
@@ -58,7 +58,7 @@
 - [x] WebDav(支持无API的OneDrive/SharePoint)
 - [x] Teambition([中国](https://www.teambition.com/ ),[国际](https://us.teambition.com/ ))
 - [x] [分秒帧](https://www.mediatrack.cn/)
-- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云,共享群组)
+- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)
 - [x] [Yandex.Disk](https://disk.yandex.com/)
 - [x] [百度网盘](http://pan.baidu.com/)
 - [x] [UC网盘](https://drive.uc.cn)
@@ -58,7 +58,7 @@
 - [x] WebDav(Support OneDrive/SharePoint without API)
 - [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
 - [x] [Mediatrack](https://www.mediatrack.cn/)
-- [x] [139yun](https://yun.139.com/) (Personal, Family, Group)
+- [x] [139yun](https://yun.139.com/) (Personal, Family)
 - [x] [YandexDisk](https://disk.yandex.com/)
 - [x] [BaiduNetdisk](http://pan.baidu.com/)
 - [x] [Terabox](https://www.terabox.com/main)
49  build.sh
@@ -7,11 +7,7 @@ gitCommit=$(git log --pretty=format:"%h" -1)
 if [ "$1" = "dev" ]; then
 version="dev"
 webVersion="dev"
-elif [ "$1" = "beta" ]; then
-version="beta"
-webVersion="dev"
 else
-git tag -d beta
 version=$(git describe --abbrev=0 --tags)
 webVersion=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
 fi
@@ -96,7 +92,7 @@ BuildDocker() {
 PrepareBuildDockerMusl() {
 mkdir -p build/musl-libs
 BASE="https://musl.cc/"
-FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross riscv64-linux-musl-cross powerpc64le-linux-musl-cross)
+FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
 for i in "${FILES[@]}"; do
 url="${BASE}${i}.tgz"
 lib_tgz="build/${i}.tgz"
@@ -115,8 +111,8 @@ BuildDockerMultiplatform() {
 docker_lflags="--extldflags '-static -fpic' $ldflags"
 export CGO_ENABLED=1
 
-OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x linux-riscv64 linux-ppc64le)
-CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc riscv64-linux-musl-gcc powerpc64le-linux-musl-gcc)
+OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
+CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
 for i in "${!OS_ARCHES[@]}"; do
 os_arch=${OS_ARCHES[$i]}
 cgo_cc=${CGO_ARGS[$i]}
@@ -236,29 +232,6 @@ BuildReleaseAndroid() {
 done
 }
 
-BuildReleaseFreeBSD() {
-rm -rf .git/
-mkdir -p "build/freebsd"
-OS_ARCHES=(amd64 arm64 i386)
-GO_ARCHES=(amd64 arm64 386)
-CGO_ARGS=(x86_64-unknown-freebsd14.1 aarch64-unknown-freebsd14.1 i386-unknown-freebsd14.1)
-for i in "${!OS_ARCHES[@]}"; do
-os_arch=${OS_ARCHES[$i]}
-cgo_cc="clang --target=${CGO_ARGS[$i]} --sysroot=/opt/freebsd/${os_arch}"
-echo building for freebsd-${os_arch}
-sudo mkdir -p "/opt/freebsd/${os_arch}"
-wget -q https://download.freebsd.org/releases/${os_arch}/14.1-RELEASE/base.txz
-sudo tar -xf ./base.txz -C /opt/freebsd/${os_arch}
-rm base.txz
-export GOOS=freebsd
-export GOARCH=${GO_ARCHES[$i]}
-export CC=${cgo_cc}
-export CGO_ENABLED=1
-export CGO_LDFLAGS="-fuse-ld=lld"
-go build -o ./build/$appName-freebsd-$os_arch -ldflags="$ldflags" -tags=jsoniter .
-done
-}
-
 MakeRelease() {
 cd build
 mkdir compress
@@ -277,11 +250,6 @@ MakeRelease() {
 tar -czvf compress/"$i".tar.gz alist
 rm -f alist
 done
-for i in $(find . -type f -name "$appName-freebsd-*"); do
-cp "$i" alist
-tar -czvf compress/"$i".tar.gz alist
-rm -f alist
-done
 for i in $(find . -type f -name "$appName-windows-*"); do
 cp "$i" alist.exe
 zip compress/$(echo $i | sed 's/\.[^.]*$//').zip alist.exe
@@ -304,12 +272,8 @@ if [ "$1" = "dev" ]; then
 else
 BuildDev
 fi
-elif [ "$1" = "release" -o "$1" = "beta" ]; then
-if [ "$1" = "beta" ]; then
-FetchWebDev
-else
-FetchWebRelease
-fi
+elif [ "$1" = "release" ]; then
+FetchWebRelease
 if [ "$2" = "docker" ]; then
 BuildDocker
 elif [ "$2" = "docker-multiplatform" ]; then
@@ -323,9 +287,6 @@ elif [ "$1" = "release" -o "$1" = "beta" ]; then
 elif [ "$2" = "android" ]; then
 BuildReleaseAndroid
 MakeRelease "md5-android.txt"
-elif [ "$2" = "freebsd" ]; then
-BuildReleaseFreeBSD
-MakeRelease "md5-freebsd.txt"
 elif [ "$2" = "web" ]; then
 echo "web only"
 else
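The build.sh hunks track the same feature set as the workflow changes: the "-" side of the diff accepts a beta mode and a freebsd target, while the "+" side only knows dev and release. Example invocations matching the branches visible above ("-"-side-only calls marked):

bash build.sh dev web                        # fetch dev web assets only (used by beta_release.yml)
bash build.sh release docker-multiplatform   # release binaries for the docker images
bash build.sh beta docker-multiplatform      # "-" side only: beta version string plus dev web assets
bash build.sh release freebsd                # "-" side only: cross-builds via BuildReleaseFreeBSD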
@@ -18,7 +18,6 @@ func Init() {
 bootstrap.InitDB()
 data.InitData()
 bootstrap.InitIndex()
-bootstrap.InitUpgradePatch()
 }
 
 func Release() {
54  cmd/kill.go
@@ -1,54 +0,0 @@
-package cmd
-
-import (
-log "github.com/sirupsen/logrus"
-"github.com/spf13/cobra"
-"os"
-)
-
-// KillCmd represents the kill command
-var KillCmd = &cobra.Command{
-Use: "kill",
-Short: "Force kill alist server process by daemon/pid file",
-Run: func(cmd *cobra.Command, args []string) {
-kill()
-},
-}
-
-func kill() {
-initDaemon()
-if pid == -1 {
-log.Info("Seems not have been started. Try use `alist start` to start server.")
-return
-}
-process, err := os.FindProcess(pid)
-if err != nil {
-log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
-return
-}
-err = process.Kill()
-if err != nil {
-log.Errorf("failed to kill process %d: %v", pid, err)
-} else {
-log.Info("killed process: ", pid)
-}
-err = os.Remove(pidFile)
-if err != nil {
-log.Errorf("failed to remove pid file")
-}
-pid = -1
-}
-
-func init() {
-RootCmd.AddCommand(KillCmd)
-
-// Here you will define your flags and configuration settings.
-
-// Cobra supports Persistent Flags which will work for this command
-// and all subcommands, e.g.:
-// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
-
-// Cobra supports local flags which will only run when this command
-// is called directly, e.g.:
-// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
-}
@@ -6,7 +6,6 @@ import (
 
 "github.com/alist-org/alist/v3/cmd/flags"
 _ "github.com/alist-org/alist/v3/drivers"
-_ "github.com/alist-org/alist/v3/internal/archive"
 _ "github.com/alist-org/alist/v3/internal/offline_download"
 "github.com/spf13/cobra"
 )
@@ -4,9 +4,6 @@ import (
 "context"
 "errors"
 "fmt"
-ftpserver "github.com/KirCute/ftpserverlib-pasvportmap"
-"github.com/KirCute/sftpd-alist"
-"github.com/alist-org/alist/v3/internal/fs"
 "net"
 "net/http"
 "os"
@@ -115,42 +112,6 @@ the address is defined in config file`,
 }
 }()
 }
-var ftpDriver *server.FtpMainDriver
-var ftpServer *ftpserver.FtpServer
-if conf.Conf.FTP.Listen != "" && conf.Conf.FTP.Enable {
-var err error
-ftpDriver, err = server.NewMainDriver()
-if err != nil {
-utils.Log.Fatalf("failed to start ftp driver: %s", err.Error())
-} else {
-utils.Log.Infof("start ftp server on %s", conf.Conf.FTP.Listen)
-go func() {
-ftpServer = ftpserver.NewFtpServer(ftpDriver)
-err = ftpServer.ListenAndServe()
-if err != nil {
-utils.Log.Fatalf("problem ftp server listening: %s", err.Error())
-}
-}()
-}
-}
-var sftpDriver *server.SftpDriver
-var sftpServer *sftpd.SftpServer
-if conf.Conf.SFTP.Listen != "" && conf.Conf.SFTP.Enable {
-var err error
-sftpDriver, err = server.NewSftpDriver()
-if err != nil {
-utils.Log.Fatalf("failed to start sftp driver: %s", err.Error())
-} else {
-utils.Log.Infof("start sftp server on %s", conf.Conf.SFTP.Listen)
-go func() {
-sftpServer = sftpd.NewSftpServer(sftpDriver)
-err = sftpServer.RunServer()
-if err != nil {
-utils.Log.Fatalf("problem sftp server listening: %s", err.Error())
-}
-}()
-}
-}
 // Wait for interrupt signal to gracefully shutdown the server with
 // a timeout of 1 second.
 quit := make(chan os.Signal, 1)
@@ -160,7 +121,6 @@ the address is defined in config file`,
 signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
 <-quit
 utils.Log.Println("Shutdown server...")
-fs.ArchiveContentUploadTaskManager.RemoveAll()
 Release()
 ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
 defer cancel()
@@ -192,25 +152,6 @@ the address is defined in config file`,
 }
 }()
 }
-if conf.Conf.FTP.Listen != "" && conf.Conf.FTP.Enable && ftpServer != nil && ftpDriver != nil {
-wg.Add(1)
-go func() {
-defer wg.Done()
-ftpDriver.Stop()
-if err := ftpServer.Stop(); err != nil {
-utils.Log.Fatal("FTP server shutdown err: ", err)
-}
-}()
-}
-if conf.Conf.SFTP.Listen != "" && conf.Conf.SFTP.Enable && sftpServer != nil && sftpDriver != nil {
-wg.Add(1)
-go func() {
-defer wg.Done()
-if err := sftpServer.Close(); err != nil {
-utils.Log.Fatal("SFTP server shutdown err: ", err)
-}
-}()
-}
 wg.Wait()
 utils.Log.Println("Server exit")
 },
@@ -1,10 +1,10 @@
-//go:build !windows
+/*
+Copyright © 2022 NAME HERE <EMAIL ADDRESS>
+*/
 package cmd
 
 import (
 "os"
-"syscall"
 
 log "github.com/sirupsen/logrus"
 "github.com/spf13/cobra"
@@ -30,11 +30,11 @@ func stop() {
 log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
 return
 }
-err = process.Signal(syscall.SIGTERM)
+err = process.Kill()
 if err != nil {
-log.Errorf("failed to terminate process %d: %v", pid, err)
+log.Errorf("failed to kill process %d: %v", pid, err)
 } else {
-log.Info("terminated process: ", pid)
+log.Info("killed process: ", pid)
 }
 err = os.Remove(pidFile)
 if err != nil {
@@ -1,34 +0,0 @@
-//go:build windows
-
-package cmd
-
-import (
-"github.com/spf13/cobra"
-)
-
-// StopCmd represents the stop command
-var StopCmd = &cobra.Command{
-Use: "stop",
-Short: "Same as the kill command",
-Run: func(cmd *cobra.Command, args []string) {
-stop()
-},
-}
-
-func stop() {
-kill()
-}
-
-func init() {
-RootCmd.AddCommand(StopCmd)
-
-// Here you will define your flags and configuration settings.
-
-// Cobra supports Persistent Flags which will work for this command
-// and all subcommands, e.g.:
-// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
-
-// Cobra supports local flags which will only run when this command
-// is called directly, e.g.:
-// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
-}
@@ -1,43 +0,0 @@
-package _115
-
-import (
-driver115 "github.com/SheltonZhu/115driver/pkg/driver"
-"github.com/alist-org/alist/v3/drivers/base"
-log "github.com/sirupsen/logrus"
-)
-
-var (
-md5Salt = "Qclm8MGWUv59TnrR0XPg"
-appVer = "27.0.5.7"
-)
-
-func (d *Pan115) getAppVersion() ([]driver115.AppVersion, error) {
-result := driver115.VersionResp{}
-resp, err := base.RestyClient.R().Get(driver115.ApiGetVersion)
-
-err = driver115.CheckErr(err, &result, resp)
-if err != nil {
-return nil, err
-}
-
-return result.Data.GetAppVersions(), nil
-}
-
-func (d *Pan115) getAppVer() string {
-// todo add some cache?
-vers, err := d.getAppVersion()
-if err != nil {
-log.Warnf("[115] get app version failed: %v", err)
-return appVer
-}
-for _, ver := range vers {
-if ver.AppName == "win" {
-return ver.Version
-}
-}
-return appVer
-}
-
-func (d *Pan115) initAppVer() {
-appVer = d.getAppVer()
-}
@@ -3,7 +3,6 @@ package _115
 import (
 "context"
 "strings"
-"sync"
 
 driver115 "github.com/SheltonZhu/115driver/pkg/driver"
 "github.com/alist-org/alist/v3/internal/driver"
@@ -17,9 +16,8 @@ import (
 type Pan115 struct {
 model.Storage
 Addition
 client *driver115.Pan115Client
 limiter *rate.Limiter
-appVerOnce sync.Once
 }
 
 func (d *Pan115) Config() driver.Config {
@@ -31,7 +29,6 @@ func (d *Pan115) GetAddition() driver.Additional {
 }
 
 func (d *Pan115) Init(ctx context.Context) error {
-d.appVerOnce.Do(d.initAppVer)
 if d.LimitRate > 0 {
 d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
 }
@@ -79,60 +76,28 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
 return link, nil
 }
 
-func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
+func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
 if err := d.WaitLimit(ctx); err != nil {
-return nil, err
+return err
 }
-result := driver115.MkdirResp{}
-form := map[string]string{
-"pid": parentDir.GetID(),
-"cname": dirName,
+if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
+return err
 }
-req := d.client.NewRequest().
-SetFormData(form).
-SetResult(&result).
-ForceContentType("application/json;charset=UTF-8")
-
-resp, err := req.Post(driver115.ApiDirAdd)
-
-err = driver115.CheckErr(err, &result, resp)
-if err != nil {
-return nil, err
-}
-f, err := d.getNewFile(result.FileID)
-if err != nil {
-return nil, nil
-}
-return f, nil
+return nil
 }
 
-func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
+func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
 if err := d.WaitLimit(ctx); err != nil {
-return nil, err
+return err
 }
-if err := d.client.Move(dstDir.GetID(), srcObj.GetID()); err != nil {
-return nil, err
-}
-f, err := d.getNewFile(srcObj.GetID())
-if err != nil {
-return nil, nil
-}
-return f, nil
+return d.client.Move(dstDir.GetID(), srcObj.GetID())
 }
 
-func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
+func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
 if err := d.WaitLimit(ctx); err != nil {
-return nil, err
+return err
 }
-if err := d.client.Rename(srcObj.GetID(), newName); err != nil {
-return nil, err
-}
-f, err := d.getNewFile((srcObj.GetID()))
-if err != nil {
-return nil, nil
-}
-return f, nil
+return d.client.Rename(srcObj.GetID(), newName)
 }
 
 func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
@@ -149,9 +114,9 @@ func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
 return d.client.Delete(obj.GetID())
 }
 
-func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
 if err := d.WaitLimit(ctx); err != nil {
-return nil, err
+return err
 }
 
 var (
@@ -160,10 +125,10 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 )
 
 if ok, err := d.client.UploadAvailable(); err != nil || !ok {
-return nil, err
+return err
 }
 if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
-return nil, driver115.ErrUploadTooLarge
+return driver115.ErrUploadTooLarge
 }
 //if digest, err = d.client.GetDigestResult(stream); err != nil {
 // return err
@@ -176,22 +141,22 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 }
 reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
 if err != nil {
-return nil, err
+return err
 }
 preHash, err := utils.HashReader(utils.SHA1, reader)
 if err != nil {
-return nil, err
+return err
 }
 preHash = strings.ToUpper(preHash)
 fullHash := stream.GetHash().GetHash(utils.SHA1)
 if len(fullHash) <= 0 {
 tmpF, err := stream.CacheFullInTempFile()
 if err != nil {
-return nil, err
+return err
 }
 fullHash, err = utils.HashFile(utils.SHA1, tmpF)
 if err != nil {
-return nil, err
+return err
 }
 }
 fullHash = strings.ToUpper(fullHash)
@@ -200,36 +165,20 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 // note that 115 add timeout for rapid-upload,
 // and "sig invalid" err is thrown even when the hash is correct after timeout.
 if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
-return nil, err
+return err
 }
 if matched, err := fastInfo.Ok(); err != nil {
-return nil, err
+return err
 } else if matched {
-f, err := d.getNewFileByPickCode(fastInfo.PickCode)
-if err != nil {
-return nil, nil
-}
-return f, nil
+return nil
 }
 
-var uploadResult *UploadResult
 // 闪传失败,上传
-if stream.GetSize() <= 10*utils.MB { // 文件大小小于10MB,改用普通模式上传
-if uploadResult, err = d.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID); err != nil {
-return nil, err
-}
-} else {
-// 分片上传
-if uploadResult, err = d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID); err != nil {
-return nil, err
-}
+if stream.GetSize() <= utils.KB { // 文件大小小于1KB,改用普通模式上传
+return d.client.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID)
 }
-file, err := d.getNewFile(uploadResult.Data.FileID)
-if err != nil {
-return nil, nil
-}
-return file, nil
+// 分片上传
+return d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID)
 }
 
 func (d *Pan115) OfflineList(ctx context.Context) ([]*driver115.OfflineTask, error) {
@@ -241,7 +190,7 @@ func (d *Pan115) OfflineList(ctx context.Context) ([]*driver115.OfflineTask, err
 }
 
 func (d *Pan115) OfflineDownload(ctx context.Context, uris []string, dstDir model.Obj) ([]string, error) {
-return d.client.AddOfflineTaskURIs(uris, dstDir.GetID(), driver115.WithAppVer(appVer))
+return d.client.AddOfflineTaskURIs(uris, dstDir.GetID())
 }
 
 func (d *Pan115) DeleteOfflineTasks(ctx context.Context, hashes []string, deleteFiles bool) error {
@@ -9,8 +9,8 @@ type Addition struct {
 Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
 QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
 QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
-PageSize int64 `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
-LimitRate float64 `json:"limit_rate" type:"float" default:"2" help:"limit all api request rate ([limit]r/1s)"`
+PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
+LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
 driver.RootID
 }
 
@@ -1,11 +1,10 @@
 package _115
 
 import (
-"time"
-
 "github.com/SheltonZhu/115driver/pkg/driver"
 "github.com/alist-org/alist/v3/internal/model"
 "github.com/alist-org/alist/v3/pkg/utils"
+"time"
 )
 
 var _ model.Obj = (*FileObj)(nil)
@@ -21,18 +20,3 @@ func (f *FileObj) CreateTime() time.Time {
 func (f *FileObj) GetHash() utils.HashInfo {
 return utils.NewHashInfo(utils.SHA1, f.Sha1)
 }
-
-type UploadResult struct {
-driver.BasicResp
-Data struct {
-PickCode string `json:"pick_code"`
-FileSize int `json:"file_size"`
-FileID string `json:"file_id"`
-ThumbURL string `json:"thumb_url"`
-Sha1 string `json:"sha1"`
-Aid int `json:"aid"`
-FileName string `json:"file_name"`
-Cid string `json:"cid"`
-IsVideo int `json:"is_video"`
-} `json:"data"`
-}
@ -2,14 +2,13 @@ package _115
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"crypto/md5"
|
|
||||||
"crypto/tls"
|
"crypto/tls"
|
||||||
"encoding/hex"
|
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/url"
|
"net/url"
|
||||||
|
"path/filepath"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
"sync"
|
"sync"
|
||||||
@ -21,17 +20,18 @@ import (
|
|||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
||||||
|
|
||||||
cipher "github.com/SheltonZhu/115driver/pkg/crypto/ec115"
|
|
||||||
crypto "github.com/SheltonZhu/115driver/pkg/crypto/m115"
|
|
||||||
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
|
||||||
|
crypto "github.com/gaoyb7/115drive-webdav/115"
|
||||||
|
"github.com/orzogc/fake115uploader/cipher"
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
)
|
)
|
||||||
|
|
||||||
// var UserAgent = driver115.UA115Browser
|
var UserAgent = driver115.UA115Desktop
|
||||||
|
|
||||||
func (d *Pan115) login() error {
|
func (d *Pan115) login() error {
|
||||||
var err error
|
var err error
|
||||||
opts := []driver115.Option{
|
opts := []driver115.Option{
|
||||||
driver115.UA(d.getUA()),
|
driver115.UA(UserAgent),
|
||||||
func(c *driver115.Pan115Client) {
|
func(c *driver115.Pan115Client) {
|
||||||
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||||
},
|
},
|
||||||
@ -45,7 +45,7 @@ func (d *Pan115) login() error {
|
|||||||
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
|
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
|
||||||
return errors.Wrap(err, "failed to login by qrcode")
|
return errors.Wrap(err, "failed to login by qrcode")
|
||||||
}
|
}
|
||||||
d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", cr.UID, cr.CID, cr.SEID, cr.KID)
|
d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
|
||||||
d.QRCodeToken = ""
|
d.QRCodeToken = ""
|
||||||
} else if d.Cookie != "" {
|
} else if d.Cookie != "" {
|
||||||
if err = cr.FromCookie(d.Cookie); err != nil {
|
if err = cr.FromCookie(d.Cookie); err != nil {
|
||||||
@ -63,7 +63,7 @@ func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
|
|||||||
if d.PageSize <= 0 {
|
if d.PageSize <= 0 {
|
||||||
d.PageSize = driver115.FileListLimit
|
d.PageSize = driver115.FileListLimit
|
||||||
}
|
}
|
||||||
files, err := d.client.ListWithLimit(fileId, d.PageSize, driver115.WithMultiUrls())
|
files, err := d.client.ListWithLimit(fileId, d.PageSize)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -73,42 +73,14 @@ func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
|
|||||||
return res, nil
|
return res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Pan115) getNewFile(fileId string) (*FileObj, error) {
|
const (
|
||||||
file, err := d.client.GetFile(fileId)
|
appVer = "2.0.3.6"
|
||||||
if err != nil {
|
)
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &FileObj{*file}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Pan115) getNewFileByPickCode(pickCode string) (*FileObj, error) {
|
func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
|
||||||
result := driver115.GetFileInfoResponse{}
|
|
||||||
req := d.client.NewRequest().
|
|
||||||
SetQueryParam("pick_code", pickCode).
|
|
||||||
ForceContentType("application/json;charset=UTF-8").
|
|
||||||
SetResult(&result)
|
|
||||||
resp, err := req.Get(driver115.ApiFileInfo)
|
|
||||||
if err := driver115.CheckErr(err, &result, resp); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if len(result.Files) == 0 {
|
|
||||||
return nil, errors.New("not get file info")
|
|
||||||
}
|
|
||||||
fileInfo := result.Files[0]
|
|
||||||
|
|
||||||
f := &FileObj{}
|
|
||||||
f.From(fileInfo)
|
|
||||||
return f, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Pan115) getUA() string {
|
|
||||||
return fmt.Sprintf("Mozilla/5.0 115Browser/%s", appVer)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
|
|
||||||
key := crypto.GenerateKey()
|
key := crypto.GenerateKey()
|
||||||
result := driver115.DownloadResp{}
|
result := driver115.DownloadResp{}
|
||||||
params, err := utils.Json.Marshal(map[string]string{"pick_code": pickCode})
|
params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@@ -116,13 +88,13 @@ func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
 data := crypto.Encode(params, key)

 bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
-reqUrl := fmt.Sprintf("%s?t=%s", driver115.AndroidApiDownloadGetUrl, driver115.Now().String())
+reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
 req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
 req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
-req.Header.Set("Cookie", d.Cookie)
+req.Header.Set("Cookie", c.Cookie)
 req.Header.Set("User-Agent", ua)

-resp, err := d.client.Client.GetClient().Do(req)
+resp, err := c.client.Client.GetClient().Do(req)
 if err != nil {
 return nil, err
 }
@@ -145,25 +117,19 @@ func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
 return nil, err
 }

-downloadInfo := struct {
-Url string `json:"url"`
-}{}
+downloadInfo := driver115.DownloadData{}
 if err := utils.Json.Unmarshal(bytes, &downloadInfo); err != nil {
 return nil, err
 }

-info := &driver115.DownloadInfo{}
-info.PickCode = pickCode
-info.Header = resp.Request.Header
-info.Url.Url = downloadInfo.Url
-return info, nil
-}
-
-func (c *Pan115) GenerateToken(fileID, preID, timeStamp, fileSize, signKey, signVal string) string {
-userID := strconv.FormatInt(c.client.UserID, 10)
-userIDMd5 := md5.Sum([]byte(userID))
-tokenMd5 := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal + userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
-return hex.EncodeToString(tokenMd5[:])
+for _, info := range downloadInfo {
+if info.FileSize < 0 {
+return nil, driver115.ErrDownloadEmpty
+}
+info.Header = resp.Request.Header
+return info, nil
+}
+return nil, driver115.ErrUnexpected
 }

 func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
@@ -195,7 +161,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri

 signKey, signVal := "", ""
 for retry := true; retry; {
-t := driver115.NowMilli()
+t := driver115.Now()

 if encodedToken, err = ecdhCipher.EncodeToken(t.ToInt64()); err != nil {
 return nil, err
@@ -206,7 +172,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri
 }

 form.Set("t", t.String())
-form.Set("token", d.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
+form.Set("token", d.client.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
 if signKey != "" && signVal != "" {
 form.Set("sign_key", signKey)
 form.Set("sign_val", signVal)
@@ -259,9 +225,6 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri

 length := end - start + 1
 reader, err := stream.RangeRead(http_range.Range{Start: start, Length: length})
-if err != nil {
-return "", err
-}
 hashStr, err := utils.HashReader(utils.SHA1, reader)
 if err != nil {
 return "", err
@@ -270,38 +233,8 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri
 return
 }

-// UploadByOSS use aliyun sdk to upload
-func (c *Pan115) UploadByOSS(params *driver115.UploadOSSParams, r io.Reader, dirID string) (*UploadResult, error) {
-ossToken, err := c.client.GetOSSToken()
-if err != nil {
-return nil, err
-}
-ossClient, err := oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret)
-if err != nil {
-return nil, err
-}
-bucket, err := ossClient.Bucket(params.Bucket)
-if err != nil {
-return nil, err
-}
-
-var bodyBytes []byte
-if err = bucket.PutObject(params.Object, r, append(
-driver115.OssOption(params, ossToken),
-oss.CallbackResult(&bodyBytes),
-)...); err != nil {
-return nil, err
-}
-
-var uploadResult UploadResult
-if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
-return nil, err
-}
-return &uploadResult, uploadResult.Err(string(bodyBytes))
-}
-
 // UploadByMultipart upload by mutipart blocks
-func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) (*UploadResult, error) {
+func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) error {
 var (
 chunks []oss.FileChunk
 parts []oss.UploadPart
@@ -309,13 +242,12 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 ossClient *oss.Client
 bucket *oss.Bucket
 ossToken *driver115.UploadOSSTokenResp
-bodyBytes []byte
 err error
 )

 tmpF, err := stream.CacheFullInTempFile()
 if err != nil {
-return nil, err
+return err
 }

 options := driver115.DefalutUploadMultipartOptions()
@@ -324,19 +256,17 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 f(options)
 }
 }
-// oss 启用Sequential必须按顺序上传
-options.ThreadsNum = 1

 if ossToken, err = d.client.GetOSSToken(); err != nil {
-return nil, err
+return err
 }

-if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret, oss.EnableMD5(true), oss.EnableCRC(true)); err != nil {
-return nil, err
+if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret); err != nil {
+return err
 }

 if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
-return nil, err
+return err
 }

 // ossToken一小时后就会失效,所以每50分钟重新获取一次
@@ -346,15 +276,14 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 timeout := time.NewTimer(options.Timeout)

 if chunks, err = SplitFile(fileSize); err != nil {
-return nil, err
+return err
 }

 if imur, err = bucket.InitiateMultipartUpload(params.Object,
 oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
 oss.UserAgentHeader(driver115.OSSUserAgent),
-oss.EnableSha1(), oss.Sequential(),
 ); err != nil {
-return nil, err
+return err
 }

 wg := sync.WaitGroup{}
@@ -396,7 +325,8 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
 continue
 }

-if part, err = bucket.UploadPart(imur, bytes.NewBuffer(buf), chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
+b := bytes.NewBuffer(buf)
+if part, err = bucket.UploadPart(imur, b, chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
 break
 }
 }
@@ -420,31 +350,25 @@ LOOP:
 case <-ticker.C:
 // 到时重新获取ossToken
 if ossToken, err = d.client.GetOSSToken(); err != nil {
-return nil, err
+return err
 }
 case <-quit:
 break LOOP
 case <-errCh:
-return nil, err
+return err
 case <-timeout.C:
-return nil, fmt.Errorf("time out")
+return fmt.Errorf("time out")
 }
 }

-// 不知道啥原因,oss那边分片上传不计算sha1,导致115服务器校验错误
-// params.Callback.Callback = strings.ReplaceAll(params.Callback.Callback, "${sha1}", params.SHA1)
-if _, err := bucket.CompleteMultipartUpload(imur, parts, append(
-driver115.OssOption(params, ossToken),
-oss.CallbackResult(&bodyBytes),
-)...); err != nil {
-return nil, err
+// EOF错误是xml的Unmarshal导致的,响应其实是json格式,所以实际上上传是成功的
+if _, err = bucket.CompleteMultipartUpload(imur, parts, driver115.OssOption(params, ossToken)...); err != nil && !errors.Is(err, io.EOF) {
+// 当文件名含有 &< 这两个字符之一时响应的xml解析会出现错误,实际上上传是成功的
+if filename := filepath.Base(stream.GetName()); !strings.ContainsAny(filename, "&<") {
+return err
+}
 }
-
-var uploadResult UploadResult
-if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
-return nil, err
-}
-return &uploadResult, uploadResult.Err(string(bodyBytes))
+return d.checkUploadStatus(dirID, params.SHA1)
 }

 func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
@@ -453,6 +377,27 @@ func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
 }
 }

+func (d *Pan115) checkUploadStatus(dirID, sha1 string) error {
+// 验证上传是否成功
+req := d.client.NewRequest().ForceContentType("application/json;charset=UTF-8")
+opts := []driver115.GetFileOptions{
+driver115.WithOrder(driver115.FileOrderByTime),
+driver115.WithShowDirEnable(false),
+driver115.WithAsc(false),
+driver115.WithLimit(500),
+}
+fResp, err := driver115.GetFiles(req, dirID, opts...)
+if err != nil {
+return err
+}
+for _, fileInfo := range fResp.Files {
+if fileInfo.Sha1 == sha1 {
+return nil
+}
+}
+return driver115.ErrUploadFailed
+}
+
 func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
 for i := int64(1); i < 10; i++ {
 if fileSize < i*utils.GB { // 文件大小小于iGB时分为i*1000片
@@ -9,8 +9,8 @@ type Addition struct {
 Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
 QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
 QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
-PageSize int64 `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
-LimitRate float64 `json:"limit_rate" type:"float" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
+PageSize int64 `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
+LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
 ShareCode string `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
 ReceiveCode string `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
 driver.RootID
@@ -18,7 +18,7 @@ type Addition struct {

 var config = driver.Config{
 Name: "115 Share",
-DefaultRoot: "0",
+DefaultRoot: "",
 // OnlyProxy: true,
 // OnlyLocal: true,
 CheckStatus: false,
@@ -96,7 +96,7 @@ func (d *Pan115Share) login() error {
 if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
 return errors.Wrap(err, "failed to login by qrcode")
 }
-d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s;KID=%s", cr.UID, cr.CID, cr.SEID, cr.KID)
+d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
 d.QRCodeToken = ""
 } else if d.Cookie != "" {
 if err = cr.FromCookie(d.Cookie); err != nil {
@@ -6,14 +6,13 @@ import (
 "encoding/base64"
 "encoding/hex"
 "fmt"
+"golang.org/x/time/rate"
 "io"
 "net/http"
 "net/url"
 "sync"
 "time"

-"golang.org/x/time/rate"
-
 "github.com/alist-org/alist/v3/drivers/base"
 "github.com/alist-org/alist/v3/internal/driver"
 "github.com/alist-org/alist/v3/internal/errs"
@@ -42,12 +41,12 @@ func (d *Pan123) GetAddition() driver.Additional {
 }

 func (d *Pan123) Init(ctx context.Context) error {
-_, err := d.Request(UserInfo, http.MethodGet, nil, nil)
+_, err := d.request(UserInfo, http.MethodGet, nil, nil)
 return err
 }

 func (d *Pan123) Drop(ctx context.Context) error {
-_, _ = d.Request(Logout, http.MethodPost, func(req *resty.Request) {
+_, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
 req.SetBody(base.Json{})
 }, nil)
 return nil
@@ -82,8 +81,7 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
 "size": f.Size,
 "type": f.Type,
 }
-resp, err := d.Request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
-
+resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data).SetHeaders(headers)
 }, nil)
 if err != nil {
@@ -136,7 +134,7 @@ func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
 "size": 0,
 "type": 1,
 }
-_, err := d.Request(Mkdir, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(Mkdir, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data)
 }, nil)
 return err
@@ -147,7 +145,7 @@ func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
 "fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
 "parentFileId": dstDir.GetID(),
 }
-_, err := d.Request(Move, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(Move, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data)
 }, nil)
 return err
@@ -159,7 +157,7 @@ func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) e
 "fileId": srcObj.GetID(),
 "fileName": newName,
 }
-_, err := d.Request(Rename, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(Rename, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data)
 }, nil)
 return err
@@ -176,7 +174,7 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
 "operation": true,
 "fileTrashInfoList": []File{f},
 }
-_, err := d.Request(Trash, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(Trash, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data)
 }, nil)
 return err
@@ -214,7 +212,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 "type": 0,
 }
 var resp UploadResp
-res, err := d.Request(UploadRequest, http.MethodPost, func(req *resty.Request) {
+res, err := d.request(UploadRequest, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data).SetContext(ctx)
 }, &resp)
 if err != nil {
@@ -249,7 +247,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 }
 _, err = uploader.UploadWithContext(ctx, input)
 }
-_, err = d.Request(UploadComplete, http.MethodPost, func(req *resty.Request) {
+_, err = d.request(UploadComplete, http.MethodPost, func(req *resty.Request) {
 req.SetBody(base.Json{
 "fileId": resp.Data.FileId,
 }).SetContext(ctx)
@@ -25,7 +25,7 @@ func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, star
 "StorageNode": upReq.Data.StorageNode,
 }
 var s3PreSignedUrls S3PreSignedURLs
-_, err := d.Request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data).SetContext(ctx)
 }, &s3PreSignedUrls)
 if err != nil {
@@ -44,7 +44,7 @@ func (d *Pan123) getS3Auth(ctx context.Context, upReq *UploadResp, start, end in
 "uploadId": upReq.Data.UploadId,
 }
 var s3PreSignedUrls S3PreSignedURLs
-_, err := d.Request(S3Auth, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(S3Auth, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data).SetContext(ctx)
 }, &s3PreSignedUrls)
 if err != nil {
@@ -63,7 +63,7 @@ func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.F
 "key": upReq.Data.Key,
 "uploadId": upReq.Data.UploadId,
 }
-_, err := d.Request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
+_, err := d.request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
 req.SetBody(data).SetContext(ctx)
 }, nil)
 return err
@@ -26,9 +26,8 @@ const (
 Api = "https://www.123pan.com/api"
 AApi = "https://www.123pan.com/a/api"
 BApi = "https://www.123pan.com/b/api"
-LoginApi = "https://login.123pan.com/api"
 MainApi = BApi
-SignIn = LoginApi + "/user/sign_in"
+SignIn = MainApi + "/user/sign_in"
 Logout = MainApi + "/user/logout"
 UserInfo = MainApi + "/user/info"
 FileList = MainApi + "/file/list/new"
@@ -194,9 +193,7 @@ func (d *Pan123) login() error {
 // return &authKey, nil
 //}

-func (d *Pan123) Request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
-isRetry := false
-do:
+func (d *Pan123) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
 req := base.RestyClient.R()
 req.SetHeaders(map[string]string{
 "origin": "https://www.123pan.com",
@@ -225,13 +222,12 @@ do:
 body := res.Body()
 code := utils.Json.Get(body, "code").ToInt()
 if code != 0 {
-if !isRetry && code == 401 {
+if code == 401 {
 err := d.login()
 if err != nil {
 return nil, err
 }
-isRetry = true
-goto do
+return d.request(url, method, callback, resp)
 }
 return nil, errors.New(jsoniter.Get(body, "message").ToString())
 }
@@ -263,7 +259,7 @@ func (d *Pan123) getFiles(ctx context.Context, parentId string, name string) ([]
 "operateType": "4",
 "inDirectSpace": "false",
 }
-_res, err := d.Request(FileList, http.MethodGet, func(req *resty.Request) {
+_res, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
 req.SetQueryParams(query)
 }, &resp)
 if err != nil {
@@ -4,14 +4,12 @@ import (
 "context"
 "encoding/base64"
 "fmt"
+"golang.org/x/time/rate"
 "net/http"
 "net/url"
 "sync"
 "time"

-"golang.org/x/time/rate"
-
-_123 "github.com/alist-org/alist/v3/drivers/123"
 "github.com/alist-org/alist/v3/drivers/base"
 "github.com/alist-org/alist/v3/internal/driver"
 "github.com/alist-org/alist/v3/internal/errs"
@@ -25,7 +23,6 @@ type Pan123Share struct {
 model.Storage
 Addition
 apiRateLimit sync.Map
-ref *_123.Pan123
 }

 func (d *Pan123Share) Config() driver.Config {
@@ -42,17 +39,7 @@ func (d *Pan123Share) Init(ctx context.Context) error {
 return nil
 }

-func (d *Pan123Share) InitReference(storage driver.Driver) error {
-refStorage, ok := storage.(*_123.Pan123)
-if ok {
-d.ref = refStorage
-return nil
-}
-return fmt.Errorf("ref: storage is not 123Pan")
-}
-
 func (d *Pan123Share) Drop(ctx context.Context) error {
-d.ref = nil
 return nil
 }

@@ -53,9 +53,6 @@ func GetApi(rawUrl string) string {
 }

 func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
-if d.ref != nil {
-return d.ref.Request(url, method, callback, resp)
-}
 req := base.RestyClient.R()
 req.SetHeaders(map[string]string{
 "origin": "https://www.123pan.com",
@@ -2,29 +2,28 @@ package _139

 import (
 "context"
+"encoding/base64"
 "fmt"
 "io"
 "net/http"
-"path"
 "strconv"
+"strings"
 "time"

 "github.com/alist-org/alist/v3/drivers/base"
 "github.com/alist-org/alist/v3/internal/driver"
 "github.com/alist-org/alist/v3/internal/errs"
 "github.com/alist-org/alist/v3/internal/model"
-"github.com/alist-org/alist/v3/pkg/cron"
 "github.com/alist-org/alist/v3/pkg/utils"
-"github.com/alist-org/alist/v3/pkg/utils/random"
+"github.com/alist-org/alist/v3/pkg/cron"
 log "github.com/sirupsen/logrus"
 )

 type Yun139 struct {
 model.Storage
 Addition
 cron *cron.Cron
 Account string
-ref *Yun139
 }

 func (d *Yun139) Config() driver.Config {
@@ -36,78 +35,56 @@ func (d *Yun139) GetAddition() driver.Additional {
 }

 func (d *Yun139) Init(ctx context.Context) error {
-if d.ref == nil {
-if d.Authorization == "" {
-return fmt.Errorf("authorization is empty")
-}
+if d.Authorization == "" {
+return fmt.Errorf("authorization is empty")
+}
+d.cron = cron.NewCron(time.Hour * 24 * 7)
+d.cron.Do(func() {
 err := d.refreshToken()
 if err != nil {
-return err
+log.Errorf("%+v", err)
 }
-d.cron = cron.NewCron(time.Hour * 12)
-d.cron.Do(func() {
-err := d.refreshToken()
-if err != nil {
-log.Errorf("%+v", err)
-}
-})
-}
+})
 switch d.Addition.Type {
 case MetaPersonalNew:
 if len(d.Addition.RootFolderID) == 0 {
 d.RootFolderID = "/"
 }
+return nil
 case MetaPersonal:
 if len(d.Addition.RootFolderID) == 0 {
 d.RootFolderID = "root"
 }
-case MetaGroup:
-if len(d.Addition.RootFolderID) == 0 {
-d.RootFolderID = d.CloudID
-}
+fallthrough
 case MetaFamily:
+decode, err := base64.StdEncoding.DecodeString(d.Authorization)
+if err != nil {
+return err
+}
+decodeStr := string(decode)
+splits := strings.Split(decodeStr, ":")
+if len(splits) < 2 {
+return fmt.Errorf("authorization is invalid, splits < 2")
+}
+d.Account = splits[1]
+_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
+"qryUserExternInfoReq": base.Json{
+"commonAccountInfo": base.Json{
+"account": d.Account,
+"accountType": 1,
+},
+},
+}, nil)
+return err
 default:
 return errs.NotImplement
 }
-// if d.ref != nil {
-// return nil
-// }
-// decode, err := base64.StdEncoding.DecodeString(d.Authorization)
-// if err != nil {
-// return err
-// }
-// decodeStr := string(decode)
-// splits := strings.Split(decodeStr, ":")
-// if len(splits) < 2 {
-// return fmt.Errorf("authorization is invalid, splits < 2")
-// }
-// d.Account = splits[1]
-// _, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
-// "qryUserExternInfoReq": base.Json{
-// "commonAccountInfo": base.Json{
-// "account": d.getAccount(),
-// "accountType": 1,
-// },
-// },
-// }, nil)
-// return err
-return nil
-}
-
-func (d *Yun139) InitReference(storage driver.Driver) error {
-refStorage, ok := storage.(*Yun139)
-if ok {
-d.ref = refStorage
-return nil
-}
-return errs.NotSupport
 }

 func (d *Yun139) Drop(ctx context.Context) error {
 if d.cron != nil {
 d.cron.Stop()
 }
-d.ref = nil
 return nil
 }

@@ -119,8 +96,6 @@ func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
 return d.getFiles(dir.GetID())
 case MetaFamily:
 return d.familyGetFiles(dir.GetID())
-case MetaGroup:
-return d.groupGetFiles(dir.GetID())
 default:
 return nil, errs.NotImplement
 }
@@ -133,11 +108,9 @@ func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
 case MetaPersonalNew:
 url, err = d.personalGetLink(file.GetID())
 case MetaPersonal:
-url, err = d.getLink(file.GetID())
+fallthrough
 case MetaFamily:
-url, err = d.familyGetLink(file.GetID(), file.GetPath())
-case MetaGroup:
-url, err = d.groupGetLink(file.GetID(), file.GetPath())
+url, err = d.getLink(file.GetID())
 default:
 return nil, errs.NotImplement
 }
@@ -166,7 +139,7 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
 "parentCatalogID": parentDir.GetID(),
 "newCatalogName": dirName,
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 },
@@ -177,26 +150,12 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
 data := base.Json{
 "cloudID": d.CloudID,
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 "docLibName": dirName,
-"path": path.Join(parentDir.GetPath(), parentDir.GetID()),
 }
-pathname := "/orchestration/familyCloud-rebuild/cloudCatalog/v1.0/createCloudDoc"
-_, err = d.post(pathname, data, nil)
-case MetaGroup:
-data := base.Json{
-"catalogName": dirName,
-"commonAccountInfo": base.Json{
-"account": d.getAccount(),
-"accountType": 1,
-},
-"groupID": d.CloudID,
-"parentFileId": parentDir.GetID(),
-"path": path.Join(parentDir.GetPath(), parentDir.GetID()),
-}
-pathname := "/orchestration/group-rebuild/catalog/v1.0/createGroupCatalog"
+pathname := "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
 _, err = d.post(pathname, data, nil)
 default:
 err = errs.NotImplement
@@ -217,34 +176,6 @@ func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj,
 return nil, err
 }
 return srcObj, nil
-case MetaGroup:
-var contentList []string
-var catalogList []string
-if srcObj.IsDir() {
-catalogList = append(catalogList, srcObj.GetID())
-} else {
-contentList = append(contentList, srcObj.GetID())
-}
-data := base.Json{
-"taskType": 3,
-"srcType": 2,
-"srcGroupID": d.CloudID,
-"destType": 2,
-"destGroupID": d.CloudID,
-"destPath": dstDir.GetPath(),
-"contentList": contentList,
-"catalogList": catalogList,
-"commonAccountInfo": base.Json{
-"account": d.getAccount(),
-"accountType": 1,
-},
-}
-pathname := "/orchestration/group-rebuild/task/v1.0/createBatchOprTask"
-_, err := d.post(pathname, data, nil)
-if err != nil {
-return nil, err
-}
-return srcObj, nil
 case MetaPersonal:
 var contentInfoList []string
 var catalogInfoList []string
@@ -263,7 +194,7 @@ func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj,
 "newCatalogID": dstDir.GetID(),
 },
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 },
@@ -298,7 +229,7 @@ func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) e
 "catalogID": srcObj.GetID(),
 "catalogName": newName,
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 }
@@ -308,72 +239,13 @@ func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) e
 "contentID": srcObj.GetID(),
 "contentName": newName,
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 }
 pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
 }
 _, err = d.post(pathname, data, nil)
-case MetaGroup:
-var data base.Json
-var pathname string
-if srcObj.IsDir() {
-data = base.Json{
-"groupID": d.CloudID,
-"modifyCatalogID": srcObj.GetID(),
-"modifyCatalogName": newName,
-"path": srcObj.GetPath(),
-"commonAccountInfo": base.Json{
-"account": d.getAccount(),
-"accountType": 1,
-},
-}
-pathname = "/orchestration/group-rebuild/catalog/v1.0/modifyGroupCatalog"
-} else {
-data = base.Json{
-"groupID": d.CloudID,
-"contentID": srcObj.GetID(),
-"contentName": newName,
-"path": srcObj.GetPath(),
-"commonAccountInfo": base.Json{
-"account": d.getAccount(),
-"accountType": 1,
-},
-}
-pathname = "/orchestration/group-rebuild/content/v1.0/modifyGroupContent"
-}
-_, err = d.post(pathname, data, nil)
-case MetaFamily:
-var data base.Json
-var pathname string
-if srcObj.IsDir() {
-// 网页接口不支持重命名家庭云文件夹
-// data = base.Json{
-// "catalogType": 3,
-// "catalogID": srcObj.GetID(),
-// "catalogName": newName,
-// "commonAccountInfo": base.Json{
-// "account": d.getAccount(),
-// "accountType": 1,
-// },
-// "path": srcObj.GetPath(),
-// }
-// pathname = "/orchestration/familyCloud-rebuild/photoContent/v1.0/modifyCatalogInfo"
-return errs.NotImplement
-} else {
-data = base.Json{
-"contentID": srcObj.GetID(),
-"contentName": newName,
-"commonAccountInfo": base.Json{
-"account": d.getAccount(),
-"accountType": 1,
-},
-"path": srcObj.GetPath(),
-}
-pathname = "/orchestration/familyCloud-rebuild/photoContent/v1.0/modifyContentInfo"
-}
-_, err = d.post(pathname, data, nil)
 default:
 err = errs.NotImplement
 }
@@ -409,7 +281,7 @@ func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
 "newCatalogID": dstDir.GetID(),
 },
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 },
@@ -431,28 +303,6 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
 pathname := "/hcy/recyclebin/batchTrash"
 _, err := d.personalPost(pathname, data, nil)
 return err
-case MetaGroup:
-var contentList []string
-var catalogList []string
-// 必须使用完整路径删除
-if obj.IsDir() {
-catalogList = append(catalogList, obj.GetPath())
-} else {
-contentList = append(contentList, path.Join(obj.GetPath(), obj.GetID()))
-}
-data := base.Json{
-"taskType": 2,
-"srcGroupID": d.CloudID,
-"contentList": contentList,
-"catalogList": catalogList,
-"commonAccountInfo": base.Json{
-"account": d.getAccount(),
-"accountType": 1,
-},
-}
-pathname := "/orchestration/group-rebuild/task/v1.0/createBatchOprTask"
-_, err := d.post(pathname, data, nil)
-return err
 case MetaPersonal:
 fallthrough
 case MetaFamily:
@@ -473,7 +323,7 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
 "catalogInfoList": catalogInfoList,
 },
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
 },
@@ -484,15 +334,13 @@ func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
 "catalogList": catalogInfoList,
 "contentList": contentInfoList,
 "commonAccountInfo": base.Json{
-"account": d.getAccount(),
+"account": d.Account,
 "accountType": 1,
 },
-"sourceCloudID": d.CloudID,
 "sourceCatalogType": 1002,
 "taskType": 2,
-"path": obj.GetPath(),
 }
-pathname = "/orchestration/familyCloud-rebuild/batchOprTask/v1.0/createBatchOprTask"
+pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
 }
 _, err := d.post(pathname, data, nil)
 return err
@@ -509,10 +357,7 @@ const (
 TB
 )

-func (d *Yun139) getPartSize(size int64) int64 {
-if d.CustomUploadPartSize != 0 {
-return d.CustomUploadPartSize
-}
+func getPartSize(size int64) int64 {
 // 网盘对于分片数量存在上限
 if size/GB > 30 {
 return 512 * MB
@@ -535,51 +380,24 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 return err
 }
 }
-partInfos := []PartInfo{}
-var partSize = d.getPartSize(stream.GetSize())
-part := (stream.GetSize() + partSize - 1) / partSize
-if part == 0 {
-part = 1
-}
-for i := int64(0); i < part; i++ {
-if utils.IsCanceled(ctx) {
-return ctx.Err()
-}
-start := i * partSize
-byteSize := stream.GetSize() - start
-if byteSize > partSize {
-byteSize = partSize
-}
-partNumber := i + 1
-partInfo := PartInfo{
-PartNumber: partNumber,
-PartSize: byteSize,
-ParallelHashCtx: ParallelHashCtx{
-PartOffset: start,
-},
-}
-partInfos = append(partInfos, partInfo)
-}
-
-// 筛选出前 100 个 partInfos
-firstPartInfos := partInfos
-if len(firstPartInfos) > 100 {
-firstPartInfos = firstPartInfos[:100]
-}
-
-// 创建任务,获取上传信息和前100个分片的上传地址
+// return errs.NotImplement
 data := base.Json{
 "contentHash": fullHash,
 "contentHashAlgorithm": "SHA256",
 "contentType": "application/octet-stream",
 "parallelUpload": false,
-"partInfos": firstPartInfos,
-"size": stream.GetSize(),
-"parentFileId": dstDir.GetID(),
-"name": stream.GetName(),
-"type": "file",
-"fileRenameMode": "auto_rename",
+"partInfos": []base.Json{{
+"parallelHashCtx": base.Json{
+"partOffset": 0,
+},
+"partNumber": 1,
+"partSize": stream.GetSize(),
+}},
+"size": stream.GetSize(),
+"parentFileId": dstDir.GetID(),
+"name": stream.GetName(),
+"type": "file",
+"fileRenameMode": "auto_rename",
 }
 pathname := "/hcy/file/create"
 var resp PersonalUploadResp
@ -588,156 +406,52 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
// 判断文件是否已存在
|
if resp.Data.Exist || resp.Data.RapidUpload {
|
||||||
// resp.Data.Exist: true 已存在同名文件且校验相同,云端不会重复增加文件,无需手动处理冲突
|
|
||||||
if resp.Data.Exist {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// 判断文件是否支持快传
|
// Progress
|
||||||
// resp.Data.RapidUpload: true 支持快传,但此处直接检测是否返回分片的上传地址
|
p := driver.NewProgress(stream.GetSize(), up)
|
||||||
// 快传的情况下同样需要手动处理冲突
|
|
||||||
if resp.Data.PartInfos != nil {
|
|
||||||
// 读取前100个分片的上传地址
|
|
||||||
uploadPartInfos := resp.Data.PartInfos
|
|
||||||
|
|
||||||
// 获取后续分片的上传地址
|
// Update Progress
|
||||||
for i := 101; i < len(partInfos); i += 100 {
|
r := io.TeeReader(stream, p)
|
||||||
end := i + 100
|
|
||||||
if end > len(partInfos) {
|
|
||||||
end = len(partInfos)
|
|
||||||
}
|
|
||||||
batchPartInfos := partInfos[i:end]
|
|
||||||
|
|
||||||
moredata := base.Json{
|
req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
|
||||||
"fileId": resp.Data.FileId,
|
if err != nil {
|
||||||
"uploadId": resp.Data.UploadId,
|
return err
|
||||||
"partInfos": batchPartInfos,
|
}
|
||||||
"commonAccountInfo": base.Json{
|
req = req.WithContext(ctx)
|
||||||
"account": d.getAccount(),
|
req.Header.Set("Content-Type", "application/octet-stream")
|
||||||
"accountType": 1,
|
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
|
||||||
},
|
req.Header.Set("Origin", "https://yun.139.com")
|
||||||
}
|
req.Header.Set("Referer", "https://yun.139.com/")
|
||||||
pathname := "/hcy/file/getUploadUrl"
|
req.ContentLength = stream.GetSize()
|
||||||
var moreresp PersonalUploadUrlResp
|
|
||||||
_, err = d.personalPost(pathname, moredata, &moreresp)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
uploadPartInfos = append(uploadPartInfos, moreresp.Data.PartInfos...)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Progress
|
res, err := base.HttpClient.Do(req)
|
||||||
p := driver.NewProgress(stream.GetSize(), up)
|
if err != nil {
|
||||||
|
return err
|
||||||
// 上传所有分片
|
|
||||||
for _, uploadPartInfo := range uploadPartInfos {
|
|
||||||
index := uploadPartInfo.PartNumber - 1
|
|
||||||
partSize := partInfos[index].PartSize
|
|
||||||
log.Debugf("[139] uploading part %+v/%+v", index, len(uploadPartInfos))
|
|
||||||
limitReader := io.LimitReader(stream, partSize)
|
|
||||||
|
|
||||||
// Update Progress
|
|
||||||
r := io.TeeReader(limitReader, p)
|
|
||||||
|
|
||||||
req, err := http.NewRequest("PUT", uploadPartInfo.UploadUrl, r)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
req = req.WithContext(ctx)
|
|
||||||
req.Header.Set("Content-Type", "application/octet-stream")
|
|
||||||
req.Header.Set("Content-Length", fmt.Sprint(partSize))
|
|
||||||
req.Header.Set("Origin", "https://yun.139.com")
|
|
||||||
req.Header.Set("Referer", "https://yun.139.com/")
|
|
||||||
req.ContentLength = partSize
|
|
||||||
|
|
||||||
res, err := base.HttpClient.Do(req)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
_ = res.Body.Close()
|
|
||||||
log.Debugf("[139] uploaded: %+v", res)
|
|
||||||
if res.StatusCode != http.StatusOK {
|
|
||||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
data = base.Json{
|
|
||||||
"contentHash": fullHash,
|
|
||||||
"contentHashAlgorithm": "SHA256",
|
|
||||||
"fileId": resp.Data.FileId,
|
|
||||||
"uploadId": resp.Data.UploadId,
|
|
||||||
}
|
|
||||||
_, err = d.personalPost("/hcy/file/complete", data, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// 处理冲突
|
_ = res.Body.Close()
|
||||||
if resp.Data.FileName != stream.GetName() {
|
log.Debugf("%+v", res)
|
||||||
log.Debugf("[139] conflict detected: %s != %s", resp.Data.FileName, stream.GetName())
|
if res.StatusCode != http.StatusOK {
|
||||||
// 给服务器一定时间处理数据,避免无法刷新文件列表
|
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||||
time.Sleep(time.Millisecond * 500)
|
}
|
||||||
// 刷新并获取文件列表
|
|
||||||
files, err := d.List(ctx, dstDir, model.ListArgs{Refresh: true})
|
data = base.Json{
|
||||||
if err != nil {
|
"contentHash": fullHash,
|
||||||
return err
|
"contentHashAlgorithm": "SHA256",
|
||||||
}
|
"fileId": resp.Data.FileId,
|
||||||
// 删除旧文件
|
"uploadId": resp.Data.UploadId,
|
||||||
for _, file := range files {
|
}
|
||||||
if file.GetName() == stream.GetName() {
|
_, err = d.personalPost("/hcy/file/complete", data, nil)
|
||||||
log.Debugf("[139] conflict: removing old: %s", file.GetName())
|
if err != nil {
|
||||||
// 删除前重命名旧文件,避免仍旧冲突
|
return err
|
||||||
err = d.Rename(ctx, file, stream.GetName()+random.String(4))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
err = d.Remove(ctx, file)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// 重命名新文件
|
|
||||||
for _, file := range files {
|
|
||||||
if file.GetName() == resp.Data.FileName {
|
|
||||||
log.Debugf("[139] conflict: renaming new: %s => %s", file.GetName(), stream.GetName())
|
|
||||||
err = d.Rename(ctx, file, stream.GetName())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
case MetaPersonal:
|
case MetaPersonal:
|
||||||
fallthrough
|
fallthrough
|
||||||
case MetaFamily:
|
case MetaFamily:
|
||||||
// 处理冲突
|
|
||||||
// 获取文件列表
|
|
||||||
files, err := d.List(ctx, dstDir, model.ListArgs{})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
// 删除旧文件
|
|
||||||
for _, file := range files {
|
|
||||||
if file.GetName() == stream.GetName() {
|
|
||||||
log.Debugf("[139] conflict: removing old: %s", file.GetName())
|
|
||||||
// 删除前重命名旧文件,避免仍旧冲突
|
|
||||||
err = d.Rename(ctx, file, stream.GetName()+random.String(4))
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
err = d.Remove(ctx, file)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
data := base.Json{
|
data := base.Json{
|
||||||
"manualRename": 2,
|
"manualRename": 2,
|
||||||
"operation": 0,
|
"operation": 0,
|
||||||
@@ -751,29 +465,30 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
- "account": d.getAccount(),
+ "account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
- data = d.newJson(base.Json{
- "fileCount": 1,
- "manualRename": 2,
- "operation": 0,
- "path": path.Join(dstDir.GetPath(), dstDir.GetID()),
- "seqNo": random.String(32), //序列号不能为空
- "totalSize": 0,
- "uploadContentList": []base.Json{{
- "contentName": stream.GetName(),
- "contentSize": 0,
- // "digest": "5a3231986ce7a6b46e408612d385bafa"
- }},
- })
- pathname = "/orchestration/familyCloud-rebuild/content/v1.0/getFileUploadURL"
+ // data = d.newJson(base.Json{
+ // "fileCount": 1,
+ // "manualRename": 2,
+ // "operation": 0,
+ // "path": "",
+ // "seqNo": "",
+ // "totalSize": 0,
+ // "uploadContentList": []base.Json{{
+ // "contentName": stream.GetName(),
+ // "contentSize": 0,
+ // // "digest": "5a3231986ce7a6b46e408612d385bafa"
+ // }},
+ // })
+ // pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
+ return errs.NotImplement
}
var resp UploadResp
- _, err = d.post(pathname, data, &resp)
+ _, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
@@ -781,7 +496,7 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
// Progress
p := driver.NewProgress(stream.GetSize(), up)

- var partSize = d.getPartSize(stream.GetSize())
+ var partSize = getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
@@ -9,9 +9,8 @@ type Addition struct {
//Account string `json:"account" required:"true"`
Authorization string `json:"authorization" type:"text" required:"true"`
driver.RootID
- Type string `json:"type" type:"select" options:"personal_new,family,group,personal" default:"personal_new"`
+ Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
CloudID string `json:"cloud_id"`
- CustomUploadPartSize int64 `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
}

var config = driver.Config{
@@ -7,7 +7,6 @@ import (
const (
MetaPersonal string = "personal"
MetaFamily string = "family"
- MetaGroup string = "group"
MetaPersonalNew string = "personal_new"
)

@@ -55,7 +54,6 @@ type Content struct {
//ContentDesc string `json:"contentDesc"`
//ContentType int `json:"contentType"`
//ContentOrigin int `json:"contentOrigin"`
- CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//CommentCount int `json:"commentCount"`
ThumbnailURL string `json:"thumbnailURL"`
@@ -198,37 +196,6 @@ type QueryContentListResp struct {
} `json:"data"`
}

- type QueryGroupContentListResp struct {
- BaseResp
- Data struct {
- Result struct {
- ResultCode string `json:"resultCode"`
- ResultDesc string `json:"resultDesc"`
- } `json:"result"`
- GetGroupContentResult struct {
- ParentCatalogID string `json:"parentCatalogID"` // 根目录是"0"
- CatalogList []struct {
- Catalog
- Path string `json:"path"`
- } `json:"catalogList"`
- ContentList []Content `json:"contentList"`
- NodeCount int `json:"nodeCount"` // 文件+文件夹数量
- CtlgCnt int `json:"ctlgCnt"` // 文件夹数量
- ContCnt int `json:"contCnt"` // 文件数量
- } `json:"getGroupContentResult"`
- } `json:"data"`
- }
-
- type ParallelHashCtx struct {
- PartOffset int64 `json:"partOffset"`
- }
-
- type PartInfo struct {
- PartNumber int64 `json:"partNumber"`
- PartSize int64 `json:"partSize"`
- ParallelHashCtx ParallelHashCtx `json:"parallelHashCtx"`
- }
-
type PersonalThumbnail struct {
Style string `json:"style"`
Url string `json:"url"`
@@ -261,7 +228,6 @@ type PersonalUploadResp struct {
BaseResp
Data struct {
FileId string `json:"fileId"`
- FileName string `json:"fileName"`
PartInfos []PersonalPartInfo `json:"partInfos"`
Exist bool `json:"exist"`
RapidUpload bool `json:"rapidUpload"`
@@ -269,15 +235,6 @@ type PersonalUploadResp struct {
}
}

- type PersonalUploadUrlResp struct {
- BaseResp
- Data struct {
- FileId string `json:"fileId"`
- UploadId string `json:"uploadId"`
- PartInfos []PersonalPartInfo `json:"partInfos"`
- }
- }
-
type RefreshTokenResp struct {
XMLName xml.Name `xml:"root"`
Return string `xml:"return"`
@@ -6,7 +6,6 @@ import (
"fmt"
"net/http"
"net/url"
- "path"
"sort"
"strconv"
"strings"
@@ -14,9 +13,9 @@ import (

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
+ "github.com/alist-org/alist/v3/internal/op"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
@@ -55,37 +54,14 @@ func getTime(t string) time.Time {
}

func (d *Yun139) refreshToken() error {
- if d.ref != nil {
- return d.ref.refreshToken()
- }
+ url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
+ var resp RefreshTokenResp
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
- return fmt.Errorf("authorization decode failed: %s", err)
+ return err
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
- if len(splits) < 3 {
- return fmt.Errorf("authorization is invalid, splits < 3")
- }
- strs := strings.Split(splits[2], "|")
- if len(strs) < 4 {
- return fmt.Errorf("authorization is invalid, strs < 4")
- }
- expiration, err := strconv.ParseInt(strs[3], 10, 64)
- if err != nil {
- return fmt.Errorf("authorization is invalid")
- }
- expiration -= time.Now().UnixMilli()
- if expiration > 1000*60*60*24*15 {
- // Authorization有效期大于15天无需刷新
- return nil
- }
- if expiration < 0 {
- return fmt.Errorf("authorization has expired")
- }
-
- url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
- var resp RefreshTokenResp
reqBody := "<root><token>" + splits[2] + "</token><account>" + splits[1] + "</account><clienttype>656</clienttype></root>"
_, err = base.RestyClient.R().
ForceContentType("application/xml").
@@ -123,22 +99,21 @@ func (d *Yun139) request(pathname string, method string, callback base.ReqCallba
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
"CMS-DEVICE": "default",
- "Authorization": "Basic " + d.getAuthorization(),
+ "Authorization": "Basic " + d.Authorization,
"mcloud-channel": "1000101",
"mcloud-client": "10701",
//"mcloud-route": "001",
"mcloud-sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
//"mcloud-skey":"",
- "mcloud-version": "7.14.0",
+ "mcloud-version": "6.6.0",
"Origin": "https://yun.139.com",
"Referer": "https://yun.139.com/w/",
- "x-DeviceInfo": "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
+ "x-DeviceInfo": "||9|6.6.0|chrome|95.0.4638.69|uwIy75obnsRPIwlJSd7D9GhUvFwG96ce||macos 10.15.2||zh-CN|||",
"x-huawei-channelSrc": "10000034",
"x-inner-ntwk": "2",
"x-m4c-caller": "PC",
"x-m4c-src": "10002",
"x-SvcType": svcType,
- "Inner-Hcy-Router-Https": "1",
})

var e BaseResp
@@ -176,7 +151,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
"catalogSortType": 0,
"contentSortType": 0,
"commonAccountInfo": base.Json{
- "account": d.getAccount(),
+ "account": d.Account,
"accountType": 1,
},
}
@@ -224,7 +199,7 @@ func (d *Yun139) newJson(data map[string]interface{}) base.Json {
"cloudID": d.CloudID,
"cloudType": 1,
"commonAccountInfo": base.Json{
- "account": d.getAccount(),
+ "account": d.Account,
"accountType": 1,
},
}
@@ -245,11 +220,10 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
"sortDirection": 1,
})
var resp QueryContentListResp
- _, err := d.post("/orchestration/familyCloud-rebuild/content/v1.2/queryContentList", data, &resp)
+ _, err := d.post("/orchestration/familyCloud/content/v1.0/queryContentList", data, &resp)
if err != nil {
return nil, err
}
- path := resp.Data.Path
for _, catalog := range resp.Data.CloudCatalogList {
f := model.Object{
ID: catalog.CatalogID,
@@ -258,7 +232,6 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
Ctime: getTime(catalog.CreateTime),
- Path: path, // 文件夹上一级的Path
}
files = append(files, &f)
}
@@ -270,14 +243,13 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
Ctime: getTime(content.CreateTime),
- Path: path, // 文件所在目录的Path
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
}
files = append(files, &f)
}
- if resp.Data.TotalCount == 0 {
+ if 100*pageNum > resp.Data.TotalCount {
break
}
pageNum++
@@ -285,67 +257,12 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
return files, nil
}

- func (d *Yun139) groupGetFiles(catalogID string) ([]model.Obj, error) {
- pageNum := 1
- files := make([]model.Obj, 0)
- for {
- data := d.newJson(base.Json{
- "groupID": d.CloudID,
- "catalogID": path.Base(catalogID),
- "contentSortType": 0,
- "sortDirection": 1,
- "startNumber": pageNum,
- "endNumber": pageNum + 99,
- "path": path.Join(d.RootFolderID, catalogID),
- })
-
- var resp QueryGroupContentListResp
- _, err := d.post("/orchestration/group-rebuild/content/v1.0/queryGroupContentList", data, &resp)
- if err != nil {
- return nil, err
- }
- path := resp.Data.GetGroupContentResult.ParentCatalogID
- for _, catalog := range resp.Data.GetGroupContentResult.CatalogList {
- f := model.Object{
- ID: catalog.CatalogID,
- Name: catalog.CatalogName,
- Size: 0,
- IsFolder: true,
- Modified: getTime(catalog.UpdateTime),
- Ctime: getTime(catalog.CreateTime),
- Path: catalog.Path, // 文件夹的真实Path, root:/开头
- }
- files = append(files, &f)
- }
- for _, content := range resp.Data.GetGroupContentResult.ContentList {
- f := model.ObjThumb{
- Object: model.Object{
- ID: content.ContentID,
- Name: content.ContentName,
- Size: content.ContentSize,
- Modified: getTime(content.UpdateTime),
- Ctime: getTime(content.CreateTime),
- Path: path, // 文件所在目录的Path
- },
- Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
- //Thumbnail: content.BigthumbnailURL,
- }
- files = append(files, &f)
- }
- if (pageNum + 99) > resp.Data.GetGroupContentResult.NodeCount {
- break
- }
- pageNum = pageNum + 100
- }
- return files, nil
- }
-
func (d *Yun139) getLink(contentId string) (string, error) {
data := base.Json{
"appName": "",
"contentID": contentId,
"commonAccountInfo": base.Json{
- "account": d.getAccount(),
+ "account": d.Account,
"accountType": 1,
},
}
@@ -356,32 +273,6 @@ func (d *Yun139) getLink(contentId string) (string, error) {
}
return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
}
- func (d *Yun139) familyGetLink(contentId string, path string) (string, error) {
- data := d.newJson(base.Json{
- "contentID": contentId,
- "path": path,
- })
- res, err := d.post("/orchestration/familyCloud-rebuild/content/v1.0/getFileDownLoadURL",
- data, nil)
- if err != nil {
- return "", err
- }
- return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
- }
-
- func (d *Yun139) groupGetLink(contentId string, path string) (string, error) {
- data := d.newJson(base.Json{
- "contentID": contentId,
- "groupID": d.CloudID,
- "path": path,
- })
- res, err := d.post("/orchestration/group-rebuild/groupManage/v1.0/getGroupFileDownLoadURL",
- data, nil)
- if err != nil {
- return "", err
- }
- return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
- }
-
func unicode(str string) string {
textQuoted := strconv.QuoteToASCII(str)
@@ -408,17 +299,17 @@ func (d *Yun139) personalRequest(pathname string, method string, callback base.R
}
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
- "Authorization": "Basic " + d.getAuthorization(),
+ "Authorization": "Basic " + d.Authorization,
"Caller": "web",
"Cms-Device": "default",
"Mcloud-Channel": "1000101",
"Mcloud-Client": "10701",
"Mcloud-Route": "001",
"Mcloud-Sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
- "Mcloud-Version": "7.14.0",
+ "Mcloud-Version": "7.13.0",
"Origin": "https://yun.139.com",
"Referer": "https://yun.139.com/w/",
- "x-DeviceInfo": "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
+ "x-DeviceInfo": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
"x-huawei-channelSrc": "10000034",
"x-inner-ntwk": "2",
"x-m4c-caller": "PC",
@@ -427,7 +318,7 @@ func (d *Yun139) personalRequest(pathname string, method string, callback base.R
"X-Yun-Api-Version": "v1",
"X-Yun-App-Channel": "10000034",
"X-Yun-Channel-Source": "10000034",
- "X-Yun-Client-Info": "||9|7.14.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
+ "X-Yun-Client-Info": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
"X-Yun-Module-Type": "100",
"X-Yun-Svc-Type": "1",
})
@@ -539,16 +430,3 @@ func (d *Yun139) personalGetLink(fileId string) (string, error) {
return jsoniter.Get(res, "data", "url").ToString(), nil
}
}

- func (d *Yun139) getAuthorization() string {
- if d.ref != nil {
- return d.ref.getAuthorization()
- }
- return d.Authorization
- }
- func (d *Yun139) getAccount() string {
- if d.ref != nil {
- return d.ref.getAccount()
- }
- return d.Account
- }
@@ -33,7 +33,6 @@ type Cloud189PC struct {
cleanFamilyTransferFile func()

storageConfig driver.Config
- ref *Cloud189PC
}

func (y *Cloud189PC) Config() driver.Config {
@@ -65,22 +64,20 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
y.uploadThread, y.UploadThread = 3, "3"
}

- if y.ref == nil {
- // 初始化请求客户端
- if y.client == nil {
- y.client = base.NewRestyClient().SetHeaders(map[string]string{
- "Accept": "application/json;charset=UTF-8",
- "Referer": WEB_URL,
- })
- }
+ // 初始化请求客户端
+ if y.client == nil {
+ y.client = base.NewRestyClient().SetHeaders(map[string]string{
+ "Accept": "application/json;charset=UTF-8",
+ "Referer": WEB_URL,
+ })
+ }

// 避免重复登陆
identity := utils.GetMD5EncodeStr(y.Username + y.Password)
if !y.isLogin() || y.identity != identity {
y.identity = identity
if err = y.login(); err != nil {
return
- }
}
}

@@ -106,17 +103,7 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
return
}

- func (d *Cloud189PC) InitReference(storage driver.Driver) error {
- refStorage, ok := storage.(*Cloud189PC)
- if ok {
- d.ref = refStorage
- return nil
- }
- return errs.NotSupport
- }
-
func (y *Cloud189PC) Drop(ctx context.Context) error {
- y.ref = nil
return nil
}

@@ -57,11 +57,11 @@ const (

func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool) map[string]string {
dateOfGmt := getHttpDateStr()
- sessionKey := y.getTokenInfo().SessionKey
- sessionSecret := y.getTokenInfo().SessionSecret
+ sessionKey := y.tokenInfo.SessionKey
+ sessionSecret := y.tokenInfo.SessionSecret
if isFamily {
- sessionKey = y.getTokenInfo().FamilySessionKey
- sessionSecret = y.getTokenInfo().FamilySessionSecret
+ sessionKey = y.tokenInfo.FamilySessionKey
+ sessionSecret = y.tokenInfo.FamilySessionSecret
}

header := map[string]string{
@@ -74,9 +74,9 @@ func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool)
}

func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
- sessionSecret := y.getTokenInfo().SessionSecret
+ sessionSecret := y.tokenInfo.SessionSecret
if isFamily {
- sessionSecret = y.getTokenInfo().FamilySessionSecret
+ sessionSecret = y.tokenInfo.FamilySessionSecret
}
if params != nil {
return AesECBEncrypt(params.Encode(), sessionSecret[:16])
@@ -85,7 +85,7 @@ func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
}

func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}, isFamily ...bool) ([]byte, error) {
- req := y.getClient().R().SetQueryParams(clientSuffix())
+ req := y.client.R().SetQueryParams(clientSuffix())

// 设置params
paramsData := y.EncryptParams(params, isBool(isFamily...))
@@ -403,9 +403,6 @@ func (y *Cloud189PC) initLoginParam() error {

// 刷新会话
func (y *Cloud189PC) refreshSession() (err error) {
- if y.ref != nil {
- return y.ref.refreshSession()
- }
var erron RespErr
var userSessionResp UserSessionResp
_, err = y.client.R().
@@ -623,7 +620,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
}

// 尝试恢复进度
- uploadProgress, ok := base.GetUploadProgress[*UploadProgress](y, y.getTokenInfo().SessionKey, fileMd5Hex)
+ uploadProgress, ok := base.GetUploadProgress[*UploadProgress](y, y.tokenInfo.SessionKey, fileMd5Hex)
if !ok {
//step.2 预上传
params := Params{
@@ -690,7 +687,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
if err = threadG.Wait(); err != nil {
if errors.Is(err, context.Canceled) {
uploadProgress.UploadParts = utils.SliceFilter(uploadProgress.UploadParts, func(s string) bool { return s != "" })
- base.SaveUploadProgress(y, uploadProgress, y.getTokenInfo().SessionKey, fileMd5Hex)
+ base.SaveUploadProgress(y, uploadProgress, y.tokenInfo.SessionKey, fileMd5Hex)
}
return nil, err
}
@@ -1011,7 +1008,7 @@ func (y *Cloud189PC) getFamilyID() (string, error) {
return "", fmt.Errorf("cannot get automatically,please input family_id")
}
for _, info := range infos {
- if strings.Contains(y.getTokenInfo().LoginName, info.RemarkName) {
+ if strings.Contains(y.tokenInfo.LoginName, info.RemarkName) {
return fmt.Sprint(info.FamilyID), nil
}
}
@@ -1145,17 +1142,3 @@ func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration)
time.Sleep(t)
}
}

- func (y *Cloud189PC) getTokenInfo() *AppSessionResp {
- if y.ref != nil {
- return y.ref.getTokenInfo()
- }
- return y.tokenInfo
- }
-
- func (y *Cloud189PC) getClient() *resty.Client {
- if y.ref != nil {
- return y.ref.getClient()
- }
- return y.client
- }
@@ -110,16 +110,6 @@ func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
for _, dst := range dsts {
link, err := d.link(ctx, dst, sub, args)
if err == nil {
- if !args.Redirect && len(link.URL) > 0 {
- // 正常情况下 多并发 仅支持返回URL的驱动
- // alias套娃alias 可以让crypt、mega等驱动(不返回URL的) 支持并发
- if d.DownloadConcurrency > 0 {
- link.Concurrency = d.DownloadConcurrency
- }
- if d.DownloadPartSize > 0 {
- link.PartSize = d.DownloadPartSize * utils.KB
- }
- }
return link, nil
}
}
@@ -9,10 +9,8 @@ type Addition struct {
// Usually one of two
// driver.RootPath
// define other
Paths string `json:"paths" required:"true" type:"text"`
ProtectSameName bool `json:"protect_same_name" default:"true" required:"false" help:"Protects same-name files from Delete or Rename"`
- DownloadConcurrency int `json:"download_concurrency" default:"0" required:"false" type:"number" help:"Need to enable proxy"`
- DownloadPartSize int `json:"download_part_size" default:"0" type:"number" required:"false" help:"Need to enable proxy. Unit: KB"`
}

var config = driver.Config{
@@ -9,7 +9,6 @@ import (
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/internal/sign"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
@@ -95,15 +94,10 @@ func (d *Alias) list(ctx context.Context, dst, sub string, args *fs.ListArgs) ([

func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs) (*model.Link, error) {
reqPath := stdpath.Join(dst, sub)
- // 参考 crypt 驱动
- storage, reqActualPath, err := op.GetStorageAndActualPath(reqPath)
+ storage, err := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
if err != nil {
return nil, err
}
- if _, ok := storage.(*Alias); !ok && !args.Redirect {
- link, _, err := op.Link(ctx, storage, reqActualPath, args)
- return link, err
- }
_, err = fs.Get(ctx, reqPath, &fs.GetArgs{NoLog: true})
if err != nil {
return nil, err
@@ -120,7 +114,7 @@ func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs)
}
return link, nil
}
- link, _, err := op.Link(ctx, storage, reqActualPath, args)
+ link, _, err := fs.Link(ctx, reqPath, args)
return link, err
}

@@ -189,15 +189,6 @@ func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
req.Header.Set("Authorization", d.Token)
req.Header.Set("File-Path", path.Join(dstDir.GetPath(), stream.GetName()))
req.Header.Set("Password", d.MetaPassword)
- if md5 := stream.GetHash().GetHash(utils.MD5); len(md5) > 0 {
- req.Header.Set("X-File-Md5", md5)
- }
- if sha1 := stream.GetHash().GetHash(utils.SHA1); len(sha1) > 0 {
- req.Header.Set("X-File-Sha1", sha1)
- }
- if sha256 := stream.GetHash().GetHash(utils.SHA256); len(sha256) > 0 {
- req.Header.Set("X-File-Sha256", sha256)
- }

req.ContentLength = stream.GetSize()
// client := base.NewHttpClient()
@@ -19,12 +19,12 @@ import (
type AliyundriveOpen struct {
model.Storage
Addition
+ base string

DriveId string

limitList func(ctx context.Context, data base.Json) (*Files, error)
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
- ref *AliyundriveOpen
}

func (d *AliyundriveOpen) Config() driver.Config {
@@ -58,17 +58,7 @@ func (d *AliyundriveOpen) Init(ctx context.Context) error {
return nil
}

- func (d *AliyundriveOpen) InitReference(storage driver.Driver) error {
- refStorage, ok := storage.(*AliyundriveOpen)
- if ok {
- d.ref = refStorage
- return nil
- }
- return errs.NotSupport
- }
-
func (d *AliyundriveOpen) Drop(ctx context.Context) error {
- d.ref = nil
return nil
}

@@ -6,7 +6,7 @@ import (
)

type Addition struct {
- DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"resource"`
+ DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"default"`
driver.RootID
RefreshToken string `json:"refresh_token" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
@@ -32,10 +32,11 @@ var config = driver.Config{
DefaultRoot: "root",
NoOverwriteUpload: true,
}
- var API_URL = "https://openapi.alipan.com"

func init() {
op.RegisterDriver(func() driver.Driver {
- return &AliyundriveOpen{}
+ return &AliyundriveOpen{
+ base: "https://openapi.alipan.com",
+ }
})
}
@@ -126,7 +126,7 @@ func getProofRange(input string, size int64) (*ProofRange, error) {
}

func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error) {
- proofRange, err := getProofRange(d.getAccessToken(), stream.GetSize())
+ proofRange, err := getProofRange(d.AccessToken, stream.GetSize())
if err != nil {
return "", err
}
@@ -19,7 +19,7 @@ import (
// do others that not defined in Driver interface

func (d *AliyundriveOpen) _refreshToken() (string, string, error) {
- url := API_URL + "/oauth/access_token"
+ url := d.base + "/oauth/access_token"
if d.OauthTokenURL != "" && d.ClientID == "" {
url = d.OauthTokenURL
}
@@ -74,9 +74,6 @@ func getSub(token string) (string, error) {
}

func (d *AliyundriveOpen) refreshToken() error {
- if d.ref != nil {
- return d.ref.refreshToken()
- }
refresh, access, err := d._refreshToken()
for i := 0; i < 3; i++ {
if err == nil {
@@ -103,7 +100,7 @@ func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback,
func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
req := base.RestyClient.R()
// TODO check whether access_token is expired
- req.SetHeader("Authorization", "Bearer "+d.getAccessToken())
+ req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if method == http.MethodPost {
req.SetHeader("Content-Type", "application/json")
}
@@ -112,7 +109,7 @@ func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base
}
var e ErrResp
req.SetError(&e)
- res, err := req.Execute(method, API_URL+uri)
+ res, err := req.Execute(method, d.base+uri)
if err != nil {
if res != nil {
log.Errorf("[aliyundrive_open] request error: %s", res.String())
@@ -121,7 +118,7 @@ func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base
}
isRetry := len(retry) > 0 && retry[0]
if e.Code != "" {
- if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.getAccessToken() == "") {
+ if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
err = d.refreshToken()
if err != nil {
return nil, err, nil
@@ -179,10 +176,3 @@ func getNowTime() (time.Time, string) {
nowTimeStr := nowTime.Format("2006-01-02T15:04:05.000Z")
return nowTime, nowTimeStr
}

- func (d *AliyundriveOpen) getAccessToken() string {
- if d.ref != nil {
- return d.ref.getAccessToken()
- }
- return d.AccessToken
- }
@@ -22,10 +22,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
_ "github.com/alist-org/alist/v3/drivers/crypt"
_ "github.com/alist-org/alist/v3/drivers/dropbox"
- _ "github.com/alist-org/alist/v3/drivers/febbox"
_ "github.com/alist-org/alist/v3/drivers/ftp"
- _ "github.com/alist-org/alist/v3/drivers/github"
- _ "github.com/alist-org/alist/v3/drivers/github_releases"
_ "github.com/alist-org/alist/v3/drivers/google_drive"
_ "github.com/alist-org/alist/v3/drivers/google_photo"
_ "github.com/alist-org/alist/v3/drivers/halalcloud"
@@ -37,7 +34,6 @@ import (
_ "github.com/alist-org/alist/v3/drivers/local"
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
_ "github.com/alist-org/alist/v3/drivers/mega"
- _ "github.com/alist-org/alist/v3/drivers/misskey"
_ "github.com/alist-org/alist/v3/drivers/mopan"
_ "github.com/alist-org/alist/v3/drivers/netease_music"
_ "github.com/alist-org/alist/v3/drivers/onedrive"
@@ -6,7 +6,6 @@ import (
"time"

"github.com/alist-org/alist/v3/internal/model"
- "github.com/alist-org/alist/v3/pkg/utils"
)

type TokenErrResp struct {
@@ -56,11 +55,11 @@ func fileToObj(f File) *model.ObjThumb {
if f.ServerFilename == "" {
f.ServerFilename = path.Base(f.Path)
}
- if f.ServerCtime == 0 {
- f.ServerCtime = f.Ctime
+ if f.LocalCtime == 0 {
+ f.LocalCtime = f.Ctime
}
- if f.ServerMtime == 0 {
- f.ServerMtime = f.Mtime
+ if f.LocalMtime == 0 {
+ f.LocalMtime = f.Mtime
}
return &model.ObjThumb{
Object: model.Object{
@@ -68,12 +67,12 @@ func fileToObj(f File) *model.ObjThumb {
Path: f.Path,
Name: f.ServerFilename,
Size: f.Size,
- Modified: time.Unix(f.ServerMtime, 0),
- Ctime: time.Unix(f.ServerCtime, 0),
+ Modified: time.Unix(f.LocalMtime, 0),
+ Ctime: time.Unix(f.LocalCtime, 0),
IsFolder: f.Isdir == 1,

// 直接获取的MD5是错误的
- HashInfo: utils.NewHashInfo(utils.MD5, DecryptMd5(f.Md5)),
+ // HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
}
@@ -1,14 +1,11 @@
package baidu_netdisk

import (
- "encoding/hex"
"errors"
"fmt"
"net/http"
"strconv"
- "strings"
"time"
- "unicode"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/errs"
@@ -156,6 +153,8 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
u = res.Header().Get("location")
//}

+ updateObjMd5(file, "pan.baidu.com", u)

return &model.Link{
URL: u,
Header: http.Header{
@@ -179,6 +178,8 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
return nil, err
}

+ updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)

return &model.Link{
URL: resp.Info[0].Dlink,
Header: http.Header{
@@ -228,6 +229,19 @@ func joinTime(form map[string]string, ctime, mtime int64) {
form["local_ctime"] = strconv.FormatInt(ctime, 10)
}

+ func updateObjMd5(obj model.Obj, userAgent, u string) {
+ object := model.GetRawObject(obj)
+ if object != nil {
+ req, _ := http.NewRequest(http.MethodHead, u, nil)
+ req.Header.Add("User-Agent", userAgent)
+ resp, _ := base.HttpClient.Do(req)
+ if resp != nil {
+ contentMd5 := resp.Header.Get("Content-Md5")
+ object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
+ }
+ }
+ }

const (
DefaultSliceSize int64 = 4 * utils.MB
VipSliceSize = 16 * utils.MB
@@ -253,40 +267,3 @@ func (d *BaiduNetdisk) getSliceSize() int64 {
// r = strings.ReplaceAll(r, "+", "%20")
// return r
// }

- func DecryptMd5(encryptMd5 string) string {
- if _, err := hex.DecodeString(encryptMd5); err == nil {
- return encryptMd5
- }
-
- var out strings.Builder
- out.Grow(len(encryptMd5))
- for i, n := 0, int64(0); i < len(encryptMd5); i++ {
- if i == 9 {
- n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
- } else {
- n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
- }
- out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
- }
-
- encryptMd5 = out.String()
- return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
- }
-
- func EncryptMd5(originalMd5 string) string {
- reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
-
- var out strings.Builder
- out.Grow(len(reversed))
- for i, n := 0, int64(0); i < len(reversed); i++ {
- n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
- n ^= int64(15 & i)
- if i == 9 {
- out.WriteRune(rune(n) + 'g')
- } else {
- out.WriteString(strconv.FormatInt(n, 16))
- }
- }
- return out.String()
- }
@@ -27,10 +27,9 @@ type BaiduPhoto struct {
model.Storage
Addition

- // AccessToken string
+ AccessToken string
Uk int64
- bdstoken string
- root model.Obj
+ root model.Obj

uploadThread int
}
@@ -49,9 +48,9 @@ func (d *BaiduPhoto) Init(ctx context.Context) error {
d.uploadThread, d.UploadThread = 3, "3"
}

- // if err := d.refreshToken(); err != nil {
- // return err
- // }
+ if err := d.refreshToken(); err != nil {
+ return err
+ }

// root
if d.AlbumID != "" {
@@ -74,10 +73,6 @@ func (d *BaiduPhoto) Init(ctx context.Context) error {
if err != nil {
return err
}
- d.bdstoken, err = d.getBDStoken()
- if err != nil {
- return err
- }
d.Uk, err = strconv.ParseInt(info.YouaID, 10, 64)
return err
}
@@ -87,7 +82,7 @@ func (d *BaiduPhoto) GetRoot(ctx context.Context) (model.Obj, error) {
}

func (d *BaiduPhoto) Drop(ctx context.Context) error {
- // d.AccessToken = ""
+ d.AccessToken = ""
d.Uk = 0
d.root = nil
return nil
@@ -145,13 +140,14 @@ func (d *BaiduPhoto) Link(ctx context.Context, file model.Obj, args model.LinkAr
// 处理共享相册
if d.Uk != file.Uk {
// 有概率无法获取到链接
- // return d.linkAlbum(ctx, file, args)
+ return d.linkAlbum(ctx, file, args)

- f, err := d.CopyAlbumFile(ctx, file)
- if err != nil {
- return nil, err
- }
- return d.linkFile(ctx, f, args)
+ // 接口被限制,只能使用cookie
+ // f, err := d.CopyAlbumFile(ctx, file)
+ // if err != nil {
+ // return nil, err
+ // }
+ // return d.linkFile(ctx, f, args)
}
return d.linkFile(ctx, &file.File, args)
}
@@ -296,12 +292,11 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
}

// 尝试获取之前的进度
- precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, strconv.FormatInt(d.Uk, 10), contentMd5)
+ precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, d.AccessToken, contentMd5)
if !ok {
_, err = d.Post(FILE_API_URL_V1+"/precreate", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(params)
- r.SetQueryParam("bdstoken", d.bdstoken)
}, &precreateResp)
if err != nil {
return nil, err
@@ -330,8 +325,8 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
"path": params["path"],
"partseq": fmt.Sprint(partseq),
"uploadid": precreateResp.UploadID,
- "app_id": "16051585",
}

_, err = d.Post("https://c3.pcs.baidu.com/rest/2.0/pcs/superfile2", func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(uploadParams)
@@ -348,7 +343,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
if err = threadG.Wait(); err != nil {
if errors.Is(err, context.Canceled) {
precreateResp.BlockList = utils.SliceFilter(precreateResp.BlockList, func(s int) bool { return s >= 0 })
- base.SaveUploadProgress(d, strconv.FormatInt(d.Uk, 10), contentMd5)
+ base.SaveUploadProgress(d, precreateResp, d.AccessToken, contentMd5)
}
return nil, err
}
@@ -358,7 +353,6 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
_, err = d.Post(FILE_API_URL_V1+"/create", func(r *resty.Request) {
r.SetContext(ctx)
r.SetFormData(params)
- r.SetQueryParam("bdstoken", d.bdstoken)
}, &precreateResp)
if err != nil {
return nil, err
@@ -6,14 +6,13 @@ import (
)

type Addition struct {
- // RefreshToken string `json:"refresh_token" required:"true"`
- Cookie string `json:"cookie" required:"true"`
- ShowType string `json:"show_type" type:"select" options:"root,root_only_album,root_only_file" default:"root"`
- AlbumID string `json:"album_id"`
+ RefreshToken string `json:"refresh_token" required:"true"`
+ ShowType string `json:"show_type" type:"select" options:"root,root_only_album,root_only_file" default:"root"`
+ AlbumID string `json:"album_id"`
//AlbumPassword string `json:"album_password"`
DeleteOrigin bool `json:"delete_origin"`
- // ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
- // ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
+ ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
+ ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
}

@@ -72,7 +72,7 @@ func (c *File) Thumb() string {
}

func (c *File) GetHash() utils.HashInfo {
- return utils.NewHashInfo(utils.MD5, DecryptMd5(c.Md5))
+ return utils.NewHashInfo(utils.MD5, c.Md5)
}

/*相册部分*/
@ -2,15 +2,13 @@ package baiduphoto
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"encoding/hex"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
|
"github.com/alist-org/alist/v3/internal/errs"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
)
|
)
|
||||||
@ -25,8 +23,7 @@ const (
|
|||||||
|
|
||||||
func (d *BaiduPhoto) Request(client *resty.Client, furl string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
func (d *BaiduPhoto) Request(client *resty.Client, furl string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
|
||||||
req := client.R().
|
req := client.R().
|
||||||
// SetQueryParam("access_token", d.AccessToken)
|
SetQueryParam("access_token", d.AccessToken)
|
||||||
SetHeader("Cookie", d.Cookie)
|
|
||||||
if callback != nil {
|
if callback != nil {
|
||||||
callback(req)
|
callback(req)
|
||||||
}
|
}
|
||||||
@ -48,10 +45,10 @@ func (d *BaiduPhoto) Request(client *resty.Client, furl string, method string, c
|
|||||||
return nil, fmt.Errorf("no shared albums found")
|
return nil, fmt.Errorf("no shared albums found")
|
||||||
case 50100:
|
case 50100:
|
||||||
return nil, fmt.Errorf("illegal title, only supports 50 characters")
|
return nil, fmt.Errorf("illegal title, only supports 50 characters")
|
||||||
// case -6:
|
case -6:
|
||||||
// if err = d.refreshToken(); err != nil {
|
if err = d.refreshToken(); err != nil {
|
||||||
// return nil, err
|
return nil, err
|
||||||
// }
|
}
|
||||||
default:
|
default:
|
||||||
return nil, fmt.Errorf("errno: %d, refer to https://photo.baidu.com/union/doc", erron)
|
return nil, fmt.Errorf("errno: %d, refer to https://photo.baidu.com/union/doc", erron)
|
||||||
}
|
}
|
||||||
@@ -66,29 +63,29 @@ func (d *BaiduPhoto) Request(client *resty.Client, furl string, method string, c
 //	return res.Body(), nil
 //}
 
-// func (d *BaiduPhoto) refreshToken() error {
-// 	u := "https://openapi.baidu.com/oauth/2.0/token"
-// 	var resp base.TokenResp
-// 	var e TokenErrResp
-// 	_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetQueryParams(map[string]string{
-// 		"grant_type":    "refresh_token",
-// 		"refresh_token": d.RefreshToken,
-// 		"client_id":     d.ClientID,
-// 		"client_secret": d.ClientSecret,
-// 	}).Get(u)
-// 	if err != nil {
-// 		return err
-// 	}
-// 	if e.ErrorMsg != "" {
-// 		return &e
-// 	}
-// 	if resp.RefreshToken == "" {
-// 		return errs.EmptyToken
-// 	}
-// 	d.AccessToken, d.RefreshToken = resp.AccessToken, resp.RefreshToken
-// 	op.MustSaveDriverStorage(d)
-// 	return nil
-// }
+func (d *BaiduPhoto) refreshToken() error {
+	u := "https://openapi.baidu.com/oauth/2.0/token"
+	var resp base.TokenResp
+	var e TokenErrResp
+	_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetQueryParams(map[string]string{
+		"grant_type":    "refresh_token",
+		"refresh_token": d.RefreshToken,
+		"client_id":     d.ClientID,
+		"client_secret": d.ClientSecret,
+	}).Get(u)
+	if err != nil {
+		return err
+	}
+	if e.ErrorMsg != "" {
+		return &e
+	}
+	if resp.RefreshToken == "" {
+		return errs.EmptyToken
+	}
+	d.AccessToken, d.RefreshToken = resp.AccessToken, resp.RefreshToken
+	op.MustSaveDriverStorage(d)
+	return nil
+}
 
 func (d *BaiduPhoto) Get(furl string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
 	return d.Request(base.RestyClient, furl, http.MethodGet, callback, resp)
@@ -362,6 +359,10 @@ func (d *BaiduPhoto) linkAlbum(ctx context.Context, file *AlbumFile, args model.
 
 	location := resp.Header().Get("Location")
 
+	if err != nil {
+		return nil, err
+	}
+
 	link := &model.Link{
 		URL: location,
 		Header: http.Header{
@@ -383,36 +384,36 @@ func (d *BaiduPhoto) linkFile(ctx context.Context, file *File, args model.LinkAr
 		headers["X-Forwarded-For"] = args.IP
 	}
 
-	var downloadUrl struct {
-		Dlink string `json:"dlink"`
-	}
-	_, err := d.Get(FILE_API_URL_V2+"/download", func(r *resty.Request) {
-		r.SetContext(ctx)
-		r.SetHeaders(headers)
-		r.SetQueryParams(map[string]string{
-			"fsid": fmt.Sprint(file.Fsid),
-		})
-	}, &downloadUrl)
-
-	// resp, err := d.Request(base.NoRedirectClient, FILE_API_URL_V1+"/download", http.MethodHead, func(r *resty.Request) {
-	// 	r.SetContext(ctx)
-	// 	r.SetHeaders(headers)
-	// 	r.SetQueryParams(map[string]string{
-	// 		"fsid": fmt.Sprint(file.Fsid),
-	// 	})
-	// }, nil)
+	// var downloadUrl struct {
+	// 	Dlink string `json:"dlink"`
+	// }
+	// _, err := d.Get(FILE_API_URL_V1+"/download", func(r *resty.Request) {
+	// 	r.SetContext(ctx)
+	// 	r.SetHeaders(headers)
+	// 	r.SetQueryParams(map[string]string{
+	// 		"fsid": fmt.Sprint(file.Fsid),
+	// 	})
+	// }, &downloadUrl)
+
+	resp, err := d.Request(base.NoRedirectClient, FILE_API_URL_V1+"/download", http.MethodHead, func(r *resty.Request) {
+		r.SetContext(ctx)
+		r.SetHeaders(headers)
+		r.SetQueryParams(map[string]string{
+			"fsid": fmt.Sprint(file.Fsid),
+		})
+	}, nil)
 
 	if err != nil {
 		return nil, err
 	}
 
-	// if resp.StatusCode() != 302 {
-	// 	return nil, fmt.Errorf("not found 302 redirect")
-	// }
-
-	// location := resp.Header().Get("Location")
+	if resp.StatusCode() != 302 {
+		return nil, fmt.Errorf("not found 302 redirect")
+	}
+
+	location := resp.Header().Get("Location")
 	link := &model.Link{
-		URL: downloadUrl.Dlink,
+		URL: location,
 		Header: http.Header{
 			"User-Agent": []string{headers["User-Agent"]},
 			"Referer":    []string{"https://photo.baidu.com/"},
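The added side of the hunk above stops parsing a `dlink` field and instead issues a HEAD request through a client that does not follow redirects, taking the direct URL from the 302 `Location` header. A minimal standalone sketch of that pattern using only the standard library (the URL below is a placeholder, not the real Baidu endpoint):

```go
package main

import (
	"fmt"
	"net/http"
)

// resolveRedirect sends a HEAD request without following redirects and
// returns the Location header carried by the 302 response.
func resolveRedirect(rawURL string) (string, error) {
	client := &http.Client{
		// Hand back the redirect response itself instead of following it.
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	resp, err := client.Head(rawURL)
	if err != nil {
		return "", err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusFound {
		return "", fmt.Errorf("expected 302, got %d", resp.StatusCode)
	}
	return resp.Header.Get("Location"), nil
}

func main() {
	loc, err := resolveRedirect("https://example.com/download") // placeholder URL
	if err != nil {
		fmt.Println("resolve failed:", err)
		return
	}
	fmt.Println("direct link:", loc)
}
```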
@@ -475,55 +476,3 @@ func (d *BaiduPhoto) uInfo() (*UInfo, error) {
 	}
 	return &info, nil
 }
-
-func (d *BaiduPhoto) getBDStoken() (string, error) {
-	var info struct {
-		Result struct {
-			Bdstoken string `json:"bdstoken"`
-			Token    string `json:"token"`
-			Uk       int64  `json:"uk"`
-		} `json:"result"`
-	}
-	_, err := d.Get("https://pan.baidu.com/api/gettemplatevariable?fields=[%22bdstoken%22,%22token%22,%22uk%22]", nil, &info)
-	if err != nil {
-		return "", err
-	}
-	return info.Result.Bdstoken, nil
-}
-
-func DecryptMd5(encryptMd5 string) string {
-	if _, err := hex.DecodeString(encryptMd5); err == nil {
-		return encryptMd5
-	}
-
-	var out strings.Builder
-	out.Grow(len(encryptMd5))
-	for i, n := 0, int64(0); i < len(encryptMd5); i++ {
-		if i == 9 {
-			n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
-		} else {
-			n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
-		}
-		out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
-	}
-
-	encryptMd5 = out.String()
-	return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
-}
-
-func EncryptMd5(originalMd5 string) string {
-	reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
-
-	var out strings.Builder
-	out.Grow(len(reversed))
-	for i, n := 0, int64(0); i < len(reversed); i++ {
-		n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
-		n ^= int64(15 & i)
-		if i == 9 {
-			out.WriteRune(rune(n) + 'g')
-		} else {
-			out.WriteString(strconv.FormatInt(n, 16))
-		}
-	}
-	return out.String()
-}
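The removed `DecryptMd5`/`EncryptMd5` pair above is a reversible scramble: the 32-character digest is split into four 8-character blocks that are reordered, every nibble is XORed with its index masked to 4 bits, and position 9 is written as a letter from 'g' upward so the result is no longer valid hex. Assuming those two helpers stay available in the package, a quick round-trip check could look like this (the test name and sample digest are illustrative):

```go
package baiduphoto

import "testing"

// TestMd5ScrambleRoundTrip checks that scrambling and then descrambling a
// standard 32-character hex digest returns the original value.
func TestMd5ScrambleRoundTrip(t *testing.T) {
	md5sum := "9e107d9d372bb6826bd81d3542a419d6" // sample digest, not from the driver
	scrambled := EncryptMd5(md5sum)
	if scrambled == md5sum {
		t.Fatalf("expected a scrambled value, got the input back")
	}
	if got := DecryptMd5(scrambled); got != md5sum {
		t.Fatalf("round trip failed: got %s, want %s", got, md5sum)
	}
}
```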
@@ -67,9 +67,7 @@ func (d *ChaoXing) Init(ctx context.Context) error {
 }
 
 func (d *ChaoXing) Drop(ctx context.Context) error {
-	if d.cron != nil {
-		d.cron.Stop()
-	}
+	d.cron.Stop()
 	return nil
 }
 
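The nil check on the removed side of this hunk matters when `Init` never started the scheduler: calling `Stop` through a nil pointer panics, while the guarded version keeps `Drop` safe in any state. A minimal illustration of the pattern (the `Cron` type here is a stand-in, not the driver's actual dependency):

```go
package main

import "fmt"

// Cron is a stand-in for a background scheduler handle.
type Cron struct{ stopped bool }

func (c *Cron) Stop() { c.stopped = true }

type driver struct{ cron *Cron }

// Drop shuts the scheduler down only if it was ever started.
func (d *driver) Drop() {
	if d.cron != nil {
		d.cron.Stop()
	}
}

func main() {
	var d driver // cron was never initialised
	d.Drop()     // safe: the nil check skips Stop
	fmt.Println("dropped without panic")
}
```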
@@ -4,13 +4,11 @@ import (
 	"context"
 	"io"
 	"net/http"
-	"path"
 	"strconv"
 	"strings"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
-	"github.com/alist-org/alist/v3/internal/errs"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/go-resty/resty/v2"
@@ -92,7 +90,7 @@ func (d *Cloudreve) MakeDir(ctx context.Context, parentDir model.Obj, dirName st
 func (d *Cloudreve) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
 	body := base.Json{
 		"action":  "move",
-		"src_dir": path.Dir(srcObj.GetPath()),
+		"src_dir": srcObj.GetPath(),
 		"dst":     dstDir.GetPath(),
 		"src":     convertSrc(srcObj),
 	}
@@ -114,7 +112,7 @@ func (d *Cloudreve) Rename(ctx context.Context, srcObj model.Obj, newName string
 
 func (d *Cloudreve) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
 	body := base.Json{
-		"src_dir": path.Dir(srcObj.GetPath()),
+		"src_dir": srcObj.GetPath(),
 		"dst":     dstDir.GetPath(),
 		"src":     convertSrc(srcObj),
 	}
@@ -135,8 +133,6 @@ func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.File
 	if io.ReadCloser(stream) == http.NoBody {
 		return d.create(ctx, dstDir, stream)
 	}
-
-	// 获取存储策略
 	var r DirectoryResp
 	err := d.request(http.MethodGet, "/directory"+dstDir.GetPath(), nil, &r)
 	if err != nil {
@@ -149,8 +145,6 @@ func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.File
 		"policy_id":     r.Policy.Id,
 		"last_modified": stream.ModTime().Unix(),
 	}
-
-	// 获取上传会话信息
 	var u UploadInfo
 	err = d.request(http.MethodPut, "/file/upload", func(req *resty.Request) {
 		req.SetBody(uploadBody)
@@ -158,50 +152,36 @@ func (d *Cloudreve) Put(ctx context.Context, dstDir model.Obj, stream model.File
 	if err != nil {
 		return err
 	}
-	// 根据存储方式选择分片上传的方法
-	switch r.Policy.Type {
-	case "onedrive":
-		err = d.upOneDrive(ctx, stream, u, up)
-	case "remote": // 从机存储
-		err = d.upRemote(ctx, stream, u, up)
-	case "local": // 本机存储
-		var chunkSize = u.ChunkSize
-		var buf []byte
-		var chunk int
-		for {
-			var n int
-			buf = make([]byte, chunkSize)
-			n, err = io.ReadAtLeast(stream, buf, chunkSize)
-			if err != nil && err != io.ErrUnexpectedEOF {
-				if err == io.EOF {
-					return nil
-				}
-				return err
-			}
-			if n == 0 {
-				break
-			}
-			buf = buf[:n]
-			err = d.request(http.MethodPost, "/file/upload/"+u.SessionID+"/"+strconv.Itoa(chunk), func(req *resty.Request) {
-				req.SetHeader("Content-Type", "application/octet-stream")
-				req.SetHeader("Content-Length", strconv.Itoa(n))
-				req.SetBody(buf)
-			}, nil)
-			if err != nil {
-				break
-			}
-			chunk++
-		}
-	default:
-		err = errs.NotImplement
-	}
-	if err != nil {
-		// 删除失败的会话
-		err = d.request(http.MethodDelete, "/file/upload/"+u.SessionID, nil, nil)
-		return err
-	}
-	return nil
+	var chunkSize = u.ChunkSize
+	var buf []byte
+	var chunk int
+	for {
+		var n int
+		buf = make([]byte, chunkSize)
+		n, err = io.ReadAtLeast(stream, buf, chunkSize)
+		if err != nil && err != io.ErrUnexpectedEOF {
+			if err == io.EOF {
+				return nil
+			}
+			return err
+		}
+		if n == 0 {
+			break
+		}
+		buf = buf[:n]
+		err = d.request(http.MethodPost, "/file/upload/"+u.SessionID+"/"+strconv.Itoa(chunk), func(req *resty.Request) {
+			req.SetHeader("Content-Type", "application/octet-stream")
+			req.SetHeader("Content-Length", strconv.Itoa(n))
+			req.SetBody(buf)
+		}, nil)
+		if err != nil {
+			break
+		}
+		chunk++
+
+	}
+	return err
 }
 
 func (d *Cloudreve) create(ctx context.Context, dir model.Obj, file model.Obj) error {
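The chunk loop on the added side leans on `io.ReadAtLeast` semantics: a full chunk returns a nil error, the short final chunk returns `io.ErrUnexpectedEOF` with `n` set to the bytes actually read, and a read at exact end of stream returns `io.EOF` with `n == 0`. A small self-contained check of those outcomes (stream and chunk sizes are arbitrary):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
)

func main() {
	src := bytes.NewReader(make([]byte, 10)) // 10-byte stream
	chunk := make([]byte, 4)                 // 4-byte chunks

	for i := 0; ; i++ {
		n, err := io.ReadAtLeast(src, chunk, len(chunk))
		fmt.Printf("chunk %d: n=%d err=%v\n", i, n, err)
		if err == io.EOF {
			break // nothing left at all
		}
		if err == io.ErrUnexpectedEOF {
			break // short final chunk already consumed
		}
		if err != nil {
			break // real error
		}
	}
	// Prints:
	// chunk 0: n=4 err=<nil>
	// chunk 1: n=4 err=<nil>
	// chunk 2: n=2 err=unexpected EOF
}
```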
@@ -21,11 +21,9 @@ type Policy struct {
 }
 
 type UploadInfo struct {
 	SessionID string `json:"sessionID"`
 	ChunkSize int    `json:"chunkSize"`
 	Expires   int    `json:"expires"`
-	UploadURLs []string `json:"uploadURLs"`
-	Credential string   `json:"credential,omitempty"`
 }
 
 type DirectoryResp struct {
@@ -1,23 +1,16 @@
 package cloudreve
 
 import (
-	"bytes"
-	"context"
 	"encoding/base64"
 	"errors"
-	"fmt"
-	"io"
 	"net/http"
-	"strconv"
 	"strings"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/conf"
-	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/setting"
 	"github.com/alist-org/alist/v3/pkg/cookie"
-	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/go-resty/resty/v2"
 	json "github.com/json-iterator/go"
 	jsoniter "github.com/json-iterator/go"
@@ -179,95 +172,3 @@ func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
 		Thumbnail: resp.Header().Get("Location"),
 	}, nil
 }
-
-func (d *Cloudreve) upRemote(ctx context.Context, stream model.FileStreamer, u UploadInfo, up driver.UpdateProgress) error {
-	uploadUrl := u.UploadURLs[0]
-	credential := u.Credential
-	var finish int64 = 0
-	var chunk int = 0
-	DEFAULT := int64(u.ChunkSize)
-	for finish < stream.GetSize() {
-		if utils.IsCanceled(ctx) {
-			return ctx.Err()
-		}
-		utils.Log.Debugf("[Cloudreve-Remote] upload: %d", finish)
-		var byteSize = DEFAULT
-		left := stream.GetSize() - finish
-		if left < DEFAULT {
-			byteSize = left
-		}
-		byteData := make([]byte, byteSize)
-		n, err := io.ReadFull(stream, byteData)
-		utils.Log.Debug(err, n)
-		if err != nil {
-			return err
-		}
-		req, err := http.NewRequest("POST", uploadUrl+"?chunk="+strconv.Itoa(chunk), bytes.NewBuffer(byteData))
-		if err != nil {
-			return err
-		}
-		req = req.WithContext(ctx)
-		req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
-		req.Header.Set("Authorization", fmt.Sprint(credential))
-		finish += byteSize
-		res, err := base.HttpClient.Do(req)
-		if err != nil {
-			return err
-		}
-		res.Body.Close()
-		up(float64(finish) * 100 / float64(stream.GetSize()))
-		chunk++
-	}
-	return nil
-}
-
-func (d *Cloudreve) upOneDrive(ctx context.Context, stream model.FileStreamer, u UploadInfo, up driver.UpdateProgress) error {
-	uploadUrl := u.UploadURLs[0]
-	var finish int64 = 0
-	DEFAULT := int64(u.ChunkSize)
-	for finish < stream.GetSize() {
-		if utils.IsCanceled(ctx) {
-			return ctx.Err()
-		}
-		utils.Log.Debugf("[Cloudreve-OneDrive] upload: %d", finish)
-		var byteSize = DEFAULT
-		left := stream.GetSize() - finish
-		if left < DEFAULT {
-			byteSize = left
-		}
-		byteData := make([]byte, byteSize)
-		n, err := io.ReadFull(stream, byteData)
-		utils.Log.Debug(err, n)
-		if err != nil {
-			return err
-		}
-		req, err := http.NewRequest("PUT", uploadUrl, bytes.NewBuffer(byteData))
-		if err != nil {
-			return err
-		}
-		req = req.WithContext(ctx)
-		req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
-		req.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, stream.GetSize()))
-		finish += byteSize
-		res, err := base.HttpClient.Do(req)
-		if err != nil {
-			return err
-		}
-		// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
-		if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
-			data, _ := io.ReadAll(res.Body)
-			res.Body.Close()
-			return errors.New(string(data))
-		}
-		res.Body.Close()
-		up(float64(finish) * 100 / float64(stream.GetSize()))
-	}
-	// 上传成功发送回调请求
-	err := d.request(http.MethodPost, "/callback/onedrive/finish/"+u.SessionID, func(req *resty.Request) {
-		req.SetBody("{}")
-	}, nil)
-	if err != nil {
-		return err
-	}
-	return nil
-}
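The removed `upOneDrive` helper builds an inclusive `Content-Range` header for every chunk, `bytes start-end/total` with `end = offset + size - 1`. A short sketch of that arithmetic for an arbitrary file and chunk size:

```go
package main

import "fmt"

func main() {
	const total = int64(10 * 1024 * 1024) // 10 MiB file, arbitrary
	const chunk = int64(4 * 1024 * 1024)  // 4 MiB chunks, arbitrary

	for finish := int64(0); finish < total; {
		size := chunk
		if left := total - finish; left < chunk {
			size = left // the last chunk is shorter
		}
		// Inclusive end offset, as expected by resumable upload sessions.
		fmt.Printf("Content-Range: bytes %d-%d/%d\n", finish, finish+size-1, total)
		finish += size
	}
	// Prints ranges 0-4194303, 4194304-8388607, 8388608-10485759.
}
```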
@@ -13,7 +13,6 @@ import (
 	"github.com/alist-org/alist/v3/internal/fs"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/op"
-	"github.com/alist-org/alist/v3/internal/sign"
 	"github.com/alist-org/alist/v3/internal/stream"
 	"github.com/alist-org/alist/v3/pkg/http_range"
 	"github.com/alist-org/alist/v3/pkg/utils"
|
|||||||
// discarding hash as it's encrypted
|
// discarding hash as it's encrypted
|
||||||
}
|
}
|
||||||
if d.Thumbnail && thumb == "" {
|
if d.Thumbnail && thumb == "" {
|
||||||
thumbPath := stdpath.Join(args.ReqPath, ".thumbnails", name+".webp")
|
thumb = utils.EncodePath(common.GetApiUrl(nil)+stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
||||||
thumb = fmt.Sprintf("%s/d%s?sign=%s",
|
|
||||||
common.GetApiUrl(common.GetHttpReq(ctx)),
|
|
||||||
utils.EncodePath(thumbPath, true),
|
|
||||||
sign.Sign(thumbPath))
|
|
||||||
}
|
}
|
||||||
if !ok && !d.Thumbnail {
|
if !ok && !d.Thumbnail {
|
||||||
result = append(result, &objRes)
|
result = append(result, &objRes)
|
||||||
@ -275,6 +270,7 @@ func (d *Crypt) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
|||||||
rrc = converted
|
rrc = converted
|
||||||
}
|
}
|
||||||
if rrc != nil {
|
if rrc != nil {
|
||||||
|
//remoteRangeReader, err :=
|
||||||
remoteReader, err := rrc.RangeRead(ctx, http_range.Range{Start: underlyingOffset, Length: length})
|
remoteReader, err := rrc.RangeRead(ctx, http_range.Range{Start: underlyingOffset, Length: length})
|
||||||
remoteClosers.AddClosers(rrc.GetClosers())
|
remoteClosers.AddClosers(rrc.GetClosers())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -287,8 +283,10 @@ func (d *Crypt) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
// 可以直接返回,读取完也不会调用Close,直到连接断开Close
|
//remoteClosers.Add(remoteLink.MFile)
|
||||||
return remoteLink.MFile, nil
|
//keep reuse same MFile and close at last.
|
||||||
|
remoteClosers.Add(remoteLink.MFile)
|
||||||
|
return io.NopCloser(remoteLink.MFile), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, errs.NotSupport
|
return nil, errs.NotSupport
|
||||||
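The added lines keep reusing the same `MFile`: the handle is registered with the closers aggregate so it is released once at the end, and the caller gets an `io.NopCloser` wrapper whose `Close` is a no-op. A minimal illustration of why the wrapper matters (the file path is a placeholder):

```go
package main

import (
	"fmt"
	"io"
	"os"
)

func main() {
	f, err := os.Open("/etc/hostname") // placeholder path
	if err != nil {
		fmt.Println("open:", err)
		return
	}
	defer f.Close() // the one deliberate close of the shared handle

	var rc io.ReadCloser = io.NopCloser(f)
	rc.Close() // no-op: the underlying *os.File stays usable

	buf := make([]byte, 8)
	n, err := f.Read(buf)
	fmt.Printf("read %d bytes after Close on the wrapper, err=%v\n", n, err)
}
```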
|
@ -1,132 +0,0 @@
|
|||||||
package febbox
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
"golang.org/x/oauth2"
|
|
||||||
"golang.org/x/oauth2/clientcredentials"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
)
|
|
||||||
|
|
||||||
type FebBox struct {
|
|
||||||
model.Storage
|
|
||||||
Addition
|
|
||||||
accessToken string
|
|
||||||
oauth2Token oauth2.TokenSource
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Config() driver.Config {
|
|
||||||
return config
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) GetAddition() driver.Additional {
|
|
||||||
return &d.Addition
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Init(ctx context.Context) error {
|
|
||||||
// 初始化 oauth2Config
|
|
||||||
oauth2Config := &clientcredentials.Config{
|
|
||||||
ClientID: d.ClientID,
|
|
||||||
ClientSecret: d.ClientSecret,
|
|
||||||
AuthStyle: oauth2.AuthStyleInParams,
|
|
||||||
TokenURL: "https://api.febbox.com/oauth/token",
|
|
||||||
}
|
|
||||||
|
|
||||||
d.initializeOAuth2Token(ctx, oauth2Config, d.Addition.RefreshToken)
|
|
||||||
|
|
||||||
token, err := d.oauth2Token.Token()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.accessToken = token.AccessToken
|
|
||||||
d.Addition.RefreshToken = token.RefreshToken
|
|
||||||
op.MustSaveDriverStorage(d)
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Drop(ctx context.Context) error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
|
||||||
files, err := d.getFilesList(dir.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
|
||||||
return fileToObj(src), nil
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
|
||||||
var ip string
|
|
||||||
if d.Addition.UserIP != "" {
|
|
||||||
ip = d.Addition.UserIP
|
|
||||||
} else {
|
|
||||||
ip = args.IP
|
|
||||||
}
|
|
||||||
|
|
||||||
url, err := d.getDownloadLink(file.GetID(), ip)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &model.Link{
|
|
||||||
URL: url,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
|
||||||
err := d.makeDir(parentDir.GetID(), dirName)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
err := d.move(srcObj.GetID(), dstDir.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
|
||||||
err := d.rename(srcObj.GetID(), newName)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
err := d.copy(srcObj.GetID(), dstDir.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Remove(ctx context.Context, obj model.Obj) error {
|
|
||||||
err := d.remove(obj.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
|
||||||
return nil, errs.NotImplement
|
|
||||||
}
|
|
||||||
|
|
||||||
var _ driver.Driver = (*FebBox)(nil)
|
|
@ -1,36 +0,0 @@
|
|||||||
package febbox
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Addition struct {
|
|
||||||
driver.RootID
|
|
||||||
ClientID string `json:"client_id" required:"true" default:""`
|
|
||||||
ClientSecret string `json:"client_secret" required:"true" default:""`
|
|
||||||
RefreshToken string
|
|
||||||
SortRule string `json:"sort_rule" required:"true" type:"select" options:"size_asc,size_desc,name_asc,name_desc,update_asc,update_desc,ext_asc,ext_desc" default:"name_asc"`
|
|
||||||
PageSize int64 `json:"page_size" required:"true" type:"number" default:"100" help:"list api per page size of FebBox driver"`
|
|
||||||
UserIP string `json:"user_ip" default:"" help:"user ip address for download link which can speed up the download"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var config = driver.Config{
|
|
||||||
Name: "FebBox",
|
|
||||||
LocalSort: false,
|
|
||||||
OnlyLocal: false,
|
|
||||||
OnlyProxy: false,
|
|
||||||
NoCache: false,
|
|
||||||
NoUpload: true,
|
|
||||||
NeedMs: false,
|
|
||||||
DefaultRoot: "0",
|
|
||||||
CheckStatus: false,
|
|
||||||
Alert: "",
|
|
||||||
NoOverwriteUpload: false,
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
op.RegisterDriver(func() driver.Driver {
|
|
||||||
return &FebBox{}
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,88 +0,0 @@
|
|||||||
package febbox
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"golang.org/x/oauth2"
|
|
||||||
"golang.org/x/oauth2/clientcredentials"
|
|
||||||
)
|
|
||||||
|
|
||||||
type customTokenSource struct {
|
|
||||||
config *clientcredentials.Config
|
|
||||||
ctx context.Context
|
|
||||||
refreshToken string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *customTokenSource) Token() (*oauth2.Token, error) {
|
|
||||||
v := url.Values{}
|
|
||||||
if c.refreshToken != "" {
|
|
||||||
v.Set("grant_type", "refresh_token")
|
|
||||||
v.Set("refresh_token", c.refreshToken)
|
|
||||||
} else {
|
|
||||||
v.Set("grant_type", "client_credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
v.Set("client_id", c.config.ClientID)
|
|
||||||
v.Set("client_secret", c.config.ClientSecret)
|
|
||||||
|
|
||||||
req, err := http.NewRequest("POST", c.config.TokenURL, strings.NewReader(v.Encode()))
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
|
||||||
|
|
||||||
resp, err := http.DefaultClient.Do(req.WithContext(c.ctx))
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
defer resp.Body.Close()
|
|
||||||
|
|
||||||
if resp.StatusCode != http.StatusOK {
|
|
||||||
return nil, errors.New("oauth2: cannot fetch token")
|
|
||||||
}
|
|
||||||
|
|
||||||
var tokenResp struct {
|
|
||||||
Code int `json:"code"`
|
|
||||||
Msg string `json:"msg"`
|
|
||||||
Data struct {
|
|
||||||
AccessToken string `json:"access_token"`
|
|
||||||
ExpiresIn int64 `json:"expires_in"`
|
|
||||||
TokenType string `json:"token_type"`
|
|
||||||
Scope string `json:"scope"`
|
|
||||||
RefreshToken string `json:"refresh_token"`
|
|
||||||
} `json:"data"`
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if tokenResp.Code != 1 {
|
|
||||||
return nil, errors.New("oauth2: server response error")
|
|
||||||
}
|
|
||||||
|
|
||||||
c.refreshToken = tokenResp.Data.RefreshToken
|
|
||||||
|
|
||||||
token := &oauth2.Token{
|
|
||||||
AccessToken: tokenResp.Data.AccessToken,
|
|
||||||
TokenType: tokenResp.Data.TokenType,
|
|
||||||
RefreshToken: tokenResp.Data.RefreshToken,
|
|
||||||
Expiry: time.Now().Add(time.Duration(tokenResp.Data.ExpiresIn) * time.Second),
|
|
||||||
}
|
|
||||||
|
|
||||||
return token, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) initializeOAuth2Token(ctx context.Context, oauth2Config *clientcredentials.Config, refreshToken string) {
|
|
||||||
d.oauth2Token = oauth2.ReuseTokenSource(nil, &customTokenSource{
|
|
||||||
config: oauth2Config,
|
|
||||||
ctx: ctx,
|
|
||||||
refreshToken: refreshToken,
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,123 +0,0 @@
|
|||||||
package febbox
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
|
|
||||||
"strconv"
|
|
||||||
"time"
|
|
||||||
)
|
|
||||||
|
|
||||||
type ErrResp struct {
|
|
||||||
ErrorCode int64 `json:"code"`
|
|
||||||
ErrorMsg string `json:"msg"`
|
|
||||||
ServerRunTime float64 `json:"server_runtime"`
|
|
||||||
ServerName string `json:"server_name"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *ErrResp) IsError() bool {
|
|
||||||
return e.ErrorCode != 0 || e.ErrorMsg != "" || e.ServerRunTime != 0 || e.ServerName != ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e *ErrResp) Error() string {
|
|
||||||
return fmt.Sprintf("ErrorCode: %d ,Error: %s ,ServerRunTime: %f ,ServerName: %s", e.ErrorCode, e.ErrorMsg, e.ServerRunTime, e.ServerName)
|
|
||||||
}
|
|
||||||
|
|
||||||
type FileListResp struct {
|
|
||||||
Code int `json:"code"`
|
|
||||||
Msg string `json:"msg"`
|
|
||||||
Data struct {
|
|
||||||
FileList []File `json:"file_list"`
|
|
||||||
ShowType string `json:"show_type"`
|
|
||||||
} `json:"data"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Rules struct {
|
|
||||||
AllowCopy int64 `json:"allow_copy"`
|
|
||||||
AllowDelete int64 `json:"allow_delete"`
|
|
||||||
AllowDownload int64 `json:"allow_download"`
|
|
||||||
AllowComment int64 `json:"allow_comment"`
|
|
||||||
HideLocation int64 `json:"hide_location"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type File struct {
|
|
||||||
Fid int64 `json:"fid"`
|
|
||||||
UID int64 `json:"uid"`
|
|
||||||
FileSize int64 `json:"file_size"`
|
|
||||||
Path string `json:"path"`
|
|
||||||
FileName string `json:"file_name"`
|
|
||||||
Ext string `json:"ext"`
|
|
||||||
AddTime int64 `json:"add_time"`
|
|
||||||
FileCreateTime int64 `json:"file_create_time"`
|
|
||||||
FileUpdateTime int64 `json:"file_update_time"`
|
|
||||||
ParentID int64 `json:"parent_id"`
|
|
||||||
UpdateTime int64 `json:"update_time"`
|
|
||||||
LastOpenTime int64 `json:"last_open_time"`
|
|
||||||
IsDir int64 `json:"is_dir"`
|
|
||||||
Epub int64 `json:"epub"`
|
|
||||||
IsMusicList int64 `json:"is_music_list"`
|
|
||||||
OssFid int64 `json:"oss_fid"`
|
|
||||||
Faststart int64 `json:"faststart"`
|
|
||||||
HasVideoQuality int64 `json:"has_video_quality"`
|
|
||||||
TotalDownload int64 `json:"total_download"`
|
|
||||||
Status int64 `json:"status"`
|
|
||||||
Remark string `json:"remark"`
|
|
||||||
OldHash string `json:"old_hash"`
|
|
||||||
Hash string `json:"hash"`
|
|
||||||
HashType string `json:"hash_type"`
|
|
||||||
FromUID int64 `json:"from_uid"`
|
|
||||||
FidOrg int64 `json:"fid_org"`
|
|
||||||
ShareID int64 `json:"share_id"`
|
|
||||||
InvitePermission int64 `json:"invite_permission"`
|
|
||||||
ThumbSmall string `json:"thumb_small"`
|
|
||||||
ThumbSmallWidth int64 `json:"thumb_small_width"`
|
|
||||||
ThumbSmallHeight int64 `json:"thumb_small_height"`
|
|
||||||
Thumb string `json:"thumb"`
|
|
||||||
ThumbWidth int64 `json:"thumb_width"`
|
|
||||||
ThumbHeight int64 `json:"thumb_height"`
|
|
||||||
ThumbBig string `json:"thumb_big"`
|
|
||||||
ThumbBigWidth int64 `json:"thumb_big_width"`
|
|
||||||
ThumbBigHeight int64 `json:"thumb_big_height"`
|
|
||||||
IsCustomThumb int64 `json:"is_custom_thumb"`
|
|
||||||
Photos int64 `json:"photos"`
|
|
||||||
IsAlbum int64 `json:"is_album"`
|
|
||||||
ReadOnly int64 `json:"read_only"`
|
|
||||||
Rules Rules `json:"rules"`
|
|
||||||
IsShared int64 `json:"is_shared"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func fileToObj(f File) *model.ObjThumb {
|
|
||||||
return &model.ObjThumb{
|
|
||||||
Object: model.Object{
|
|
||||||
ID: strconv.FormatInt(f.Fid, 10),
|
|
||||||
Name: f.FileName,
|
|
||||||
Size: f.FileSize,
|
|
||||||
Ctime: time.Unix(f.FileCreateTime, 0),
|
|
||||||
Modified: time.Unix(f.FileUpdateTime, 0),
|
|
||||||
IsFolder: f.IsDir == 1,
|
|
||||||
HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
|
|
||||||
},
|
|
||||||
Thumbnail: model.Thumbnail{
|
|
||||||
Thumbnail: f.Thumb,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type FileDownloadResp struct {
|
|
||||||
Code int `json:"code"`
|
|
||||||
Msg string `json:"msg"`
|
|
||||||
Data []struct {
|
|
||||||
Error int `json:"error"`
|
|
||||||
DownloadURL string `json:"download_url"`
|
|
||||||
Hash string `json:"hash"`
|
|
||||||
HashType string `json:"hash_type"`
|
|
||||||
Fid int `json:"fid"`
|
|
||||||
FileName string `json:"file_name"`
|
|
||||||
ParentID int `json:"parent_id"`
|
|
||||||
FileSize int `json:"file_size"`
|
|
||||||
Ext string `json:"ext"`
|
|
||||||
Thumb string `json:"thumb"`
|
|
||||||
VipLink int `json:"vip_link"`
|
|
||||||
} `json:"data"`
|
|
||||||
}
|
|
@ -1,224 +0,0 @@
|
|||||||
package febbox
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"errors"
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
|
||||||
"github.com/go-resty/resty/v2"
|
|
||||||
"net/http"
|
|
||||||
"strconv"
|
|
||||||
)
|
|
||||||
|
|
||||||
func (d *FebBox) refreshTokenByOAuth2() error {
|
|
||||||
token, err := d.oauth2Token.Token()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.Status = "work"
|
|
||||||
d.accessToken = token.AccessToken
|
|
||||||
d.Addition.RefreshToken = token.RefreshToken
|
|
||||||
op.MustSaveDriverStorage(d)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
|
||||||
req := base.RestyClient.R()
|
|
||||||
// 使用oauth2 获取 access_token
|
|
||||||
token, err := d.oauth2Token.Token()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)
|
|
||||||
|
|
||||||
if callback != nil {
|
|
||||||
callback(req)
|
|
||||||
}
|
|
||||||
if resp != nil {
|
|
||||||
req.SetResult(resp)
|
|
||||||
}
|
|
||||||
var e ErrResp
|
|
||||||
req.SetError(&e)
|
|
||||||
res, err := req.Execute(method, url)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
switch e.ErrorCode {
|
|
||||||
case 0:
|
|
||||||
return res.Body(), nil
|
|
||||||
case 1:
|
|
||||||
return res.Body(), nil
|
|
||||||
case -10001:
|
|
||||||
if e.ServerName != "" {
|
|
||||||
// access_token 过期
|
|
||||||
if err = d.refreshTokenByOAuth2(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return d.request(url, method, callback, resp)
|
|
||||||
} else {
|
|
||||||
return nil, errors.New(e.Error())
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
return nil, errors.New(e.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) getFilesList(id string) ([]File, error) {
|
|
||||||
if d.PageSize <= 0 {
|
|
||||||
d.PageSize = 100
|
|
||||||
}
|
|
||||||
res, err := d.listWithLimit(id, d.PageSize)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return *res, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) listWithLimit(dirID string, pageLimit int64) (*[]File, error) {
|
|
||||||
var files []File
|
|
||||||
page := int64(1)
|
|
||||||
for {
|
|
||||||
result, err := d.getFiles(dirID, page, pageLimit)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
files = append(files, *result...)
|
|
||||||
if int64(len(*result)) < pageLimit {
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
page++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return &files, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) getFiles(dirID string, page, pageLimit int64) (*[]File, error) {
|
|
||||||
var fileList FileListResp
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "file_list",
|
|
||||||
"parent_id": dirID,
|
|
||||||
"page": strconv.FormatInt(page, 10),
|
|
||||||
"pagelimit": strconv.FormatInt(pageLimit, 10),
|
|
||||||
"order": d.Addition.SortRule,
|
|
||||||
}
|
|
||||||
|
|
||||||
res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, &fileList)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = json.Unmarshal(res, &fileList); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return &fileList.Data.FileList, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) getDownloadLink(id string, ip string) (string, error) {
|
|
||||||
var fileDownloadResp FileDownloadResp
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "file_get_download_url",
|
|
||||||
"fids[]": id,
|
|
||||||
"ip": ip,
|
|
||||||
}
|
|
||||||
|
|
||||||
res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, &fileDownloadResp)
|
|
||||||
if err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
if err = json.Unmarshal(res, &fileDownloadResp); err != nil {
|
|
||||||
return "", err
|
|
||||||
}
|
|
||||||
|
|
||||||
return fileDownloadResp.Data[0].DownloadURL, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) makeDir(id string, name string) error {
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "create_dir",
|
|
||||||
"parent_id": id,
|
|
||||||
"name": name,
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) move(id string, id2 string) error {
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "file_move",
|
|
||||||
"fids[]": id,
|
|
||||||
"to": id2,
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) rename(id string, name string) error {
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "file_rename",
|
|
||||||
"fid": id,
|
|
||||||
"name": name,
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) copy(id string, id2 string) error {
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "file_copy",
|
|
||||||
"fids[]": id,
|
|
||||||
"to": id2,
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *FebBox) remove(id string) error {
|
|
||||||
queryParams := map[string]string{
|
|
||||||
"module": "file_delete",
|
|
||||||
"fids[]": id,
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetMultipartFormData(queryParams)
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
@ -1,929 +0,0 @@
|
|||||||
package github
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"encoding/base64"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
stdpath "path"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
"text/template"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
"github.com/go-resty/resty/v2"
|
|
||||||
log "github.com/sirupsen/logrus"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Github struct {
|
|
||||||
model.Storage
|
|
||||||
Addition
|
|
||||||
client *resty.Client
|
|
||||||
mkdirMsgTmpl *template.Template
|
|
||||||
deleteMsgTmpl *template.Template
|
|
||||||
putMsgTmpl *template.Template
|
|
||||||
renameMsgTmpl *template.Template
|
|
||||||
copyMsgTmpl *template.Template
|
|
||||||
moveMsgTmpl *template.Template
|
|
||||||
isOnBranch bool
|
|
||||||
commitMutex sync.Mutex
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) Config() driver.Config {
|
|
||||||
return config
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) GetAddition() driver.Additional {
|
|
||||||
return &d.Addition
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) Init(ctx context.Context) error {
|
|
||||||
d.RootFolderPath = utils.FixAndCleanPath(d.RootFolderPath)
|
|
||||||
if d.CommitterName != "" && d.CommitterEmail == "" {
|
|
||||||
return errors.New("committer email is required")
|
|
||||||
}
|
|
||||||
if d.CommitterName == "" && d.CommitterEmail != "" {
|
|
||||||
return errors.New("committer name is required")
|
|
||||||
}
|
|
||||||
if d.AuthorName != "" && d.AuthorEmail == "" {
|
|
||||||
return errors.New("author email is required")
|
|
||||||
}
|
|
||||||
if d.AuthorName == "" && d.AuthorEmail != "" {
|
|
||||||
return errors.New("author name is required")
|
|
||||||
}
|
|
||||||
var err error
|
|
||||||
d.mkdirMsgTmpl, err = template.New("mkdirCommitMsgTemplate").Parse(d.MkdirCommitMsg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.deleteMsgTmpl, err = template.New("deleteCommitMsgTemplate").Parse(d.DeleteCommitMsg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.putMsgTmpl, err = template.New("putCommitMsgTemplate").Parse(d.PutCommitMsg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.renameMsgTmpl, err = template.New("renameCommitMsgTemplate").Parse(d.RenameCommitMsg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.copyMsgTmpl, err = template.New("copyCommitMsgTemplate").Parse(d.CopyCommitMsg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.moveMsgTmpl, err = template.New("moveCommitMsgTemplate").Parse(d.MoveCommitMsg)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.client = base.NewRestyClient().
|
|
||||||
SetHeader("Accept", "application/vnd.github.object+json").
|
|
||||||
SetHeader("Authorization", "Bearer "+d.Token).
|
|
||||||
SetHeader("X-GitHub-Api-Version", "2022-11-28").
|
|
||||||
SetLogger(log.StandardLogger()).
|
|
||||||
SetDebug(false)
|
|
||||||
if d.Ref == "" {
|
|
||||||
repo, err := d.getRepo()
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
d.Ref = repo.DefaultBranch
|
|
||||||
d.isOnBranch = true
|
|
||||||
} else {
|
|
||||||
_, err = d.getBranchHead()
|
|
||||||
d.isOnBranch = err == nil
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) Drop(ctx context.Context) error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
|
||||||
obj, err := d.get(dir.GetPath())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if obj.Entries == nil {
|
|
||||||
return nil, errs.NotFolder
|
|
||||||
}
|
|
||||||
if len(obj.Entries) >= 1000 {
|
|
||||||
tree, err := d.getTree(obj.Sha)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if tree.Truncated {
|
|
||||||
return nil, fmt.Errorf("tree %s is truncated", dir.GetPath())
|
|
||||||
}
|
|
||||||
ret := make([]model.Obj, 0, len(tree.Trees))
|
|
||||||
for _, t := range tree.Trees {
|
|
||||||
if t.Path != ".gitkeep" {
|
|
||||||
ret = append(ret, t.toModelObj())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ret, nil
|
|
||||||
} else {
|
|
||||||
ret := make([]model.Obj, 0, len(obj.Entries))
|
|
||||||
for _, entry := range obj.Entries {
|
|
||||||
if entry.Name != ".gitkeep" {
|
|
||||||
ret = append(ret, entry.toModelObj())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ret, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
|
||||||
obj, err := d.get(file.GetPath())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if obj.Type == "submodule" {
|
|
||||||
return nil, errors.New("cannot download a submodule")
|
|
||||||
}
|
|
||||||
return &model.Link{
|
|
||||||
URL: obj.DownloadURL,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
|
||||||
if !d.isOnBranch {
|
|
||||||
return errors.New("cannot write to non-branch reference")
|
|
||||||
}
|
|
||||||
d.commitMutex.Lock()
|
|
||||||
defer d.commitMutex.Unlock()
|
|
||||||
parent, err := d.get(parentDir.GetPath())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if parent.Entries == nil {
|
|
||||||
return errs.NotFolder
|
|
||||||
}
|
|
||||||
// if parent folder contains .gitkeep only, mark it and delete .gitkeep later
|
|
||||||
gitKeepSha := ""
|
|
||||||
if len(parent.Entries) == 1 && parent.Entries[0].Name == ".gitkeep" {
|
|
||||||
gitKeepSha = parent.Entries[0].Sha
|
|
||||||
}
|
|
||||||
|
|
||||||
commitMessage, err := getMessage(d.mkdirMsgTmpl, &MessageTemplateVars{
|
|
||||||
UserName: getUsername(ctx),
|
|
||||||
ObjName: dirName,
|
|
||||||
ObjPath: stdpath.Join(parentDir.GetPath(), dirName),
|
|
||||||
ParentName: parentDir.GetName(),
|
|
||||||
ParentPath: parentDir.GetPath(),
|
|
||||||
}, "mkdir")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if err = d.createGitKeep(stdpath.Join(parentDir.GetPath(), dirName), commitMessage); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if gitKeepSha != "" {
|
|
||||||
err = d.delete(stdpath.Join(parentDir.GetPath(), ".gitkeep"), gitKeepSha, commitMessage)
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Github) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|
||||||
if !d.isOnBranch {
|
|
||||||
return errors.New("cannot write to non-branch reference")
|
|
||||||
}
|
|
||||||
if strings.HasPrefix(dstDir.GetPath(), srcObj.GetPath()) {
|
|
||||||
return errors.New("cannot move parent dir to child")
|
|
||||||
}
|
|
||||||
d.commitMutex.Lock()
|
|
||||||
defer d.commitMutex.Unlock()
|
|
||||||
|
|
||||||
var rootSha string
|
|
||||||
if strings.HasPrefix(dstDir.GetPath(), stdpath.Dir(srcObj.GetPath())) { // /aa/1 -> /aa/bb/
|
|
||||||
dstOldSha, dstNewSha, ancestorOldSha, srcParentTree, err := d.copyWithoutRenewTree(srcObj, dstDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
srcParentPath := stdpath.Dir(srcObj.GetPath())
|
|
||||||
dstRest := dstDir.GetPath()[len(srcParentPath):]
|
|
||||||
if dstRest[0] == '/' {
|
|
||||||
dstRest = dstRest[1:]
|
|
||||||
}
|
|
||||||
dstNextName, _, _ := strings.Cut(dstRest, "/")
|
|
||||||
dstNextPath := stdpath.Join(srcParentPath, dstNextName)
|
|
||||||
dstNextTreeSha, err := d.renewParentTrees(dstDir.GetPath(), dstOldSha, dstNewSha, dstNextPath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
var delSrc, dstNextTree *TreeObjReq = nil, nil
|
|
||||||
for _, t := range srcParentTree.Trees {
|
|
||||||
if t.Path == dstNextName {
|
|
||||||
dstNextTree = &t.TreeObjReq
|
|
||||||
dstNextTree.Sha = dstNextTreeSha
|
|
||||||
}
|
|
||||||
if t.Path == srcObj.GetName() {
|
|
||||||
delSrc = &t.TreeObjReq
|
|
||||||
delSrc.Sha = nil
|
|
||||||
}
|
|
||||||
if delSrc != nil && dstNextTree != nil {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if delSrc == nil || dstNextTree == nil {
|
|
||||||
return errs.ObjectNotFound
|
|
||||||
}
|
|
||||||
ancestorNewSha, err := d.newTree(ancestorOldSha, []interface{}{*delSrc, *dstNextTree})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
rootSha, err = d.renewParentTrees(srcParentPath, ancestorOldSha, ancestorNewSha, "/")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
} else if strings.HasPrefix(srcObj.GetPath(), dstDir.GetPath()) { // /aa/bb/1 -> /aa/
|
|
||||||
srcParentPath := stdpath.Dir(srcObj.GetPath())
|
|
||||||
srcParentTree, srcParentOldSha, err := d.getTreeDirectly(srcParentPath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
var src *TreeObjReq = nil
|
|
||||||
for _, t := range srcParentTree.Trees {
|
|
||||||
if t.Path == srcObj.GetName() {
|
|
||||||
if t.Type == "commit" {
|
|
||||||
return errors.New("cannot move a submodule")
|
|
||||||
}
|
|
||||||
src = &t.TreeObjReq
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if src == nil {
|
|
||||||
return errs.ObjectNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
delSrc := *src
|
|
||||||
delSrc.Sha = nil
|
|
||||||
delSrcTree := make([]interface{}, 0, 2)
|
|
||||||
delSrcTree = append(delSrcTree, delSrc)
|
|
||||||
if len(srcParentTree.Trees) == 1 {
|
|
||||||
delSrcTree = append(delSrcTree, map[string]string{
|
|
||||||
"path": ".gitkeep",
|
|
||||||
"mode": "100644",
|
|
||||||
"type": "blob",
|
|
||||||
"content": "",
|
|
||||||
})
|
|
||||||
}
|
|
||||||
srcParentNewSha, err := d.newTree(srcParentOldSha, delSrcTree)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
srcRest := srcObj.GetPath()[len(dstDir.GetPath()):]
|
|
||||||
if srcRest[0] == '/' {
|
|
||||||
srcRest = srcRest[1:]
|
|
||||||
}
|
|
||||||
srcNextName, _, ok := strings.Cut(srcRest, "/")
|
|
||||||
if !ok { // /aa/1 -> /aa/
|
|
||||||
return errors.New("cannot move in place")
|
|
||||||
}
|
|
||||||
srcNextPath := stdpath.Join(dstDir.GetPath(), srcNextName)
|
|
||||||
srcNextTreeSha, err := d.renewParentTrees(srcParentPath, srcParentOldSha, srcParentNewSha, srcNextPath)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
ancestorTree, ancestorOldSha, err := d.getTreeDirectly(dstDir.GetPath())
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
var srcNextTree *TreeObjReq = nil
|
|
||||||
for _, t := range ancestorTree.Trees {
|
|
||||||
if t.Path == srcNextName {
|
|
||||||
srcNextTree = &t.TreeObjReq
|
|
||||||
srcNextTree.Sha = srcNextTreeSha
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if srcNextTree == nil {
|
|
||||||
return errs.ObjectNotFound
|
|
||||||
}
|
|
||||||
ancestorNewSha, err := d.newTree(ancestorOldSha, []interface{}{*srcNextTree, *src})
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
rootSha, err = d.renewParentTrees(dstDir.GetPath(), ancestorOldSha, ancestorNewSha, "/")
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
} else { // /aa/1 -> /bb/
|
|
||||||
// do copy
|
|
||||||
dstOldSha, dstNewSha, srcParentOldSha, srcParentTree, err := d.copyWithoutRenewTree(srcObj, dstDir)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// delete src object and create new tree
|
|
||||||
var srcNewTree *TreeObjReq = nil
|
|
||||||
for _, t := range srcParentTree.Trees {
|
|
||||||
if t.Path == srcObj.GetName() {
|
|
||||||
srcNewTree = &t.TreeObjReq
|
|
||||||
srcNewTree.Sha = nil
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if srcNewTree == nil {
|
|
||||||
return errs.ObjectNotFound
|
|
||||||
}
|
|
||||||
delSrcTree := make([]interface{}, 0, 2)
|
|
||||||
delSrcTree = append(delSrcTree, *srcNewTree)
|
|
||||||
if len(srcParentTree.Trees) == 1 {
|
|
		delSrcTree = append(delSrcTree, map[string]string{
			"path":    ".gitkeep",
			"mode":    "100644",
			"type":    "blob",
			"content": "",
		})
	}
	srcParentNewSha, err := d.newTree(srcParentOldSha, delSrcTree)
	if err != nil {
		return err
	}

	// renew but the common ancestor of srcPath and dstPath
	ancestor, srcChildName, dstChildName, _, _ := getPathCommonAncestor(srcObj.GetPath(), dstDir.GetPath())
	dstNextTreeSha, err := d.renewParentTrees(dstDir.GetPath(), dstOldSha, dstNewSha, stdpath.Join(ancestor, dstChildName))
	if err != nil {
		return err
	}
	srcNextTreeSha, err := d.renewParentTrees(stdpath.Dir(srcObj.GetPath()), srcParentOldSha, srcParentNewSha, stdpath.Join(ancestor, srcChildName))
	if err != nil {
		return err
	}

	// renew the tree of the last common ancestor
	ancestorTree, ancestorOldSha, err := d.getTreeDirectly(ancestor)
	if err != nil {
		return err
	}
	newTree := make([]interface{}, 2)
	srcBind := false
	dstBind := false
	for _, t := range ancestorTree.Trees {
		if t.Path == srcChildName {
			t.Sha = srcNextTreeSha
			newTree[0] = t.TreeObjReq
			srcBind = true
		}
		if t.Path == dstChildName {
			t.Sha = dstNextTreeSha
			newTree[1] = t.TreeObjReq
			dstBind = true
		}
		if srcBind && dstBind {
			break
		}
	}
	if !srcBind || !dstBind {
		return errs.ObjectNotFound
	}
	ancestorNewSha, err := d.newTree(ancestorOldSha, newTree)
	if err != nil {
		return err
	}
	// renew until root
	rootSha, err = d.renewParentTrees(ancestor, ancestorOldSha, ancestorNewSha, "/")
	if err != nil {
		return err
	}
}

// commit
message, err := getMessage(d.moveMsgTmpl, &MessageTemplateVars{
	UserName:   getUsername(ctx),
	ObjName:    srcObj.GetName(),
	ObjPath:    srcObj.GetPath(),
	ParentName: stdpath.Base(stdpath.Dir(srcObj.GetPath())),
	ParentPath: stdpath.Dir(srcObj.GetPath()),
	TargetName: stdpath.Base(dstDir.GetPath()),
	TargetPath: dstDir.GetPath(),
}, "move")
if err != nil {
	return err
}
return d.commit(message, rootSha)
}

func (d *Github) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
	if !d.isOnBranch {
		return errors.New("cannot write to non-branch reference")
	}
	d.commitMutex.Lock()
	defer d.commitMutex.Unlock()
	parentDir := stdpath.Dir(srcObj.GetPath())
	tree, _, err := d.getTreeDirectly(parentDir)
	if err != nil {
		return err
	}
	newTree := make([]interface{}, 2)
	operated := false
	for _, t := range tree.Trees {
		if t.Path == srcObj.GetName() {
			if t.Type == "commit" {
				return errors.New("cannot rename a submodule")
			}
			delCopy := t.TreeObjReq
			delCopy.Sha = nil
			newTree[0] = delCopy
			t.Path = newName
			newTree[1] = t.TreeObjReq
			operated = true
			break
		}
	}
	if !operated {
		return errs.ObjectNotFound
	}
	newSha, err := d.newTree(tree.Sha, newTree)
	if err != nil {
		return err
	}
	rootSha, err := d.renewParentTrees(parentDir, tree.Sha, newSha, "/")
	if err != nil {
		return err
	}
	message, err := getMessage(d.renameMsgTmpl, &MessageTemplateVars{
		UserName:   getUsername(ctx),
		ObjName:    srcObj.GetName(),
		ObjPath:    srcObj.GetPath(),
		ParentName: stdpath.Base(parentDir),
		ParentPath: parentDir,
		TargetName: newName,
		TargetPath: stdpath.Join(parentDir, newName),
	}, "rename")
	if err != nil {
		return err
	}
	return d.commit(message, rootSha)
}

func (d *Github) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	if !d.isOnBranch {
		return errors.New("cannot write to non-branch reference")
	}
	if strings.HasPrefix(dstDir.GetPath(), srcObj.GetPath()) {
		return errors.New("cannot copy parent dir to child")
	}
	d.commitMutex.Lock()
	defer d.commitMutex.Unlock()

	dstSha, newSha, _, _, err := d.copyWithoutRenewTree(srcObj, dstDir)
	if err != nil {
		return err
	}
	rootSha, err := d.renewParentTrees(dstDir.GetPath(), dstSha, newSha, "/")
	if err != nil {
		return err
	}
	message, err := getMessage(d.copyMsgTmpl, &MessageTemplateVars{
		UserName:   getUsername(ctx),
		ObjName:    srcObj.GetName(),
		ObjPath:    srcObj.GetPath(),
		ParentName: stdpath.Base(stdpath.Dir(srcObj.GetPath())),
		ParentPath: stdpath.Dir(srcObj.GetPath()),
		TargetName: stdpath.Base(dstDir.GetPath()),
		TargetPath: dstDir.GetPath(),
	}, "copy")
	if err != nil {
		return err
	}
	return d.commit(message, rootSha)
}

func (d *Github) Remove(ctx context.Context, obj model.Obj) error {
	if !d.isOnBranch {
		return errors.New("cannot write to non-branch reference")
	}
	d.commitMutex.Lock()
	defer d.commitMutex.Unlock()
	parentDir := stdpath.Dir(obj.GetPath())
	tree, treeSha, err := d.getTreeDirectly(parentDir)
	if err != nil {
		return err
	}
	var del *TreeObjReq = nil
	for _, t := range tree.Trees {
		if t.Path == obj.GetName() {
			if t.Type == "commit" {
				return errors.New("cannot remove a submodule")
			}
			del = &t.TreeObjReq
			del.Sha = nil
			break
		}
	}
	if del == nil {
		return errs.ObjectNotFound
	}
	newTree := make([]interface{}, 0, 2)
	newTree = append(newTree, *del)
	if len(tree.Trees) == 1 { // completely emptying the repository will get a 404
		newTree = append(newTree, map[string]string{
			"path":    ".gitkeep",
			"mode":    "100644",
			"type":    "blob",
			"content": "",
		})
	}
	newSha, err := d.newTree(treeSha, newTree)
	if err != nil {
		return err
	}
	rootSha, err := d.renewParentTrees(parentDir, treeSha, newSha, "/")
	if err != nil {
		return err
	}
	commitMessage, err := getMessage(d.deleteMsgTmpl, &MessageTemplateVars{
		UserName:   getUsername(ctx),
		ObjName:    obj.GetName(),
		ObjPath:    obj.GetPath(),
		ParentName: stdpath.Base(parentDir),
		ParentPath: parentDir,
	}, "remove")
	if err != nil {
		return err
	}
	return d.commit(commitMessage, rootSha)
}

func (d *Github) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	if !d.isOnBranch {
		return errors.New("cannot write to non-branch reference")
	}
	blob, err := d.putBlob(ctx, stream, up)
	if err != nil {
		return err
	}
	d.commitMutex.Lock()
	defer d.commitMutex.Unlock()
	parent, err := d.get(dstDir.GetPath())
	if err != nil {
		return err
	}
	if parent.Entries == nil {
		return errs.NotFolder
	}
	newTree := make([]interface{}, 0, 2)
	newTree = append(newTree, TreeObjReq{
		Path: stream.GetName(),
		Mode: "100644",
		Type: "blob",
		Sha:  blob,
	})
	if len(parent.Entries) == 1 && parent.Entries[0].Name == ".gitkeep" {
		newTree = append(newTree, TreeObjReq{
			Path: ".gitkeep",
			Mode: "100644",
			Type: "blob",
			Sha:  nil,
		})
	}
	newSha, err := d.newTree(parent.Sha, newTree)
	if err != nil {
		return err
	}
	rootSha, err := d.renewParentTrees(dstDir.GetPath(), parent.Sha, newSha, "/")
	if err != nil {
		return err
	}

	commitMessage, err := getMessage(d.putMsgTmpl, &MessageTemplateVars{
		UserName:   getUsername(ctx),
		ObjName:    stream.GetName(),
		ObjPath:    stdpath.Join(dstDir.GetPath(), stream.GetName()),
		ParentName: dstDir.GetName(),
		ParentPath: dstDir.GetPath(),
	}, "upload")
	if err != nil {
		return err
	}
	return d.commit(commitMessage, rootSha)
}

var _ driver.Driver = (*Github)(nil)

func (d *Github) getContentApiUrl(path string) string {
	path = utils.FixAndCleanPath(path)
	return fmt.Sprintf("https://api.github.com/repos/%s/%s/contents%s", d.Owner, d.Repo, path)
}

func (d *Github) get(path string) (*Object, error) {
	res, err := d.client.R().SetQueryParam("ref", d.Ref).Get(d.getContentApiUrl(path))
	if err != nil {
		return nil, err
	}
	if res.StatusCode() != 200 {
		return nil, toErr(res)
	}
	var resp Object
	err = utils.Json.Unmarshal(res.Body(), &resp)
	return &resp, err
}

func (d *Github) createGitKeep(path, message string) error {
	body := map[string]interface{}{
		"message": message,
		"content": "",
		"branch":  d.Ref,
	}
	d.addCommitterAndAuthor(&body)

	res, err := d.client.R().SetBody(body).Put(d.getContentApiUrl(stdpath.Join(path, ".gitkeep")))
	if err != nil {
		return err
	}
	if res.StatusCode() != 200 && res.StatusCode() != 201 {
		return toErr(res)
	}
	return nil
}

func (d *Github) putBlob(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress) (string, error) {
	beforeContent := "{\"encoding\":\"base64\",\"content\":\""
	afterContent := "\"}"
	length := int64(len(beforeContent)) + calculateBase64Length(stream.GetSize()) + int64(len(afterContent))
	beforeContentReader := strings.NewReader(beforeContent)
	contentReader, contentWriter := io.Pipe()
	go func() {
		encoder := base64.NewEncoder(base64.StdEncoding, contentWriter)
		if _, err := utils.CopyWithBuffer(encoder, stream); err != nil {
			_ = contentWriter.CloseWithError(err)
			return
		}
		_ = encoder.Close()
		_ = contentWriter.Close()
	}()
	afterContentReader := strings.NewReader(afterContent)
	req, err := http.NewRequestWithContext(ctx, http.MethodPost,
		fmt.Sprintf("https://api.github.com/repos/%s/%s/git/blobs", d.Owner, d.Repo),
		&ReaderWithProgress{
			Reader:   io.MultiReader(beforeContentReader, contentReader, afterContentReader),
			Length:   length,
			Progress: up,
		})
	if err != nil {
		return "", err
	}
	req.Header.Set("Accept", "application/vnd.github+json")
	req.Header.Set("Authorization", "Bearer "+d.Token)
	req.Header.Set("X-GitHub-Api-Version", "2022-11-28")
	req.ContentLength = length

	res, err := base.HttpClient.Do(req)
	if err != nil {
		return "", err
	}
	resBody, err := io.ReadAll(res.Body)
	if err != nil {
		return "", err
	}
	if res.StatusCode != 201 {
		var errMsg ErrResp
		if err = utils.Json.Unmarshal(resBody, &errMsg); err != nil {
			return "", errors.New(res.Status)
		} else {
			return "", fmt.Errorf("%s: %s", res.Status, errMsg.Message)
		}
	}
	var resp PutBlobResp
	if err = utils.Json.Unmarshal(resBody, &resp); err != nil {
		return "", err
	}
	return resp.Sha, nil
}

func (d *Github) delete(path, sha, message string) error {
	body := map[string]interface{}{
		"message": message,
		"sha":     sha,
		"branch":  d.Ref,
	}
	d.addCommitterAndAuthor(&body)
	res, err := d.client.R().SetBody(body).Delete(d.getContentApiUrl(path))
	if err != nil {
		return err
	}
	if res.StatusCode() != 200 {
		return toErr(res)
	}
	return nil
}

func (d *Github) renewParentTrees(path, prevSha, curSha, until string) (string, error) {
	for path != until {
		path = stdpath.Dir(path)
		tree, sha, err := d.getTreeDirectly(path)
		if err != nil {
			return "", err
		}
		var newTree *TreeObjReq = nil
		for _, t := range tree.Trees {
			if t.Sha == prevSha {
				newTree = &t.TreeObjReq
				newTree.Sha = curSha
				break
			}
		}
		if newTree == nil {
			return "", errs.ObjectNotFound
		}
		curSha, err = d.newTree(sha, []interface{}{*newTree})
		if err != nil {
			return "", err
		}
		prevSha = sha
	}
	return curSha, nil
}

func (d *Github) getTree(sha string) (*TreeResp, error) {
	res, err := d.client.R().Get(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/trees/%s", d.Owner, d.Repo, sha))
	if err != nil {
		return nil, err
	}
	if res.StatusCode() != 200 {
		return nil, toErr(res)
	}
	var resp TreeResp
	if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
		return nil, err
	}
	return &resp, nil
}

func (d *Github) getTreeDirectly(path string) (*TreeResp, string, error) {
	p, err := d.get(path)
	if err != nil {
		return nil, "", err
	}
	if p.Entries == nil {
		return nil, "", fmt.Errorf("%s is not a folder", path)
	}
	tree, err := d.getTree(p.Sha)
	if err != nil {
		return nil, "", err
	}
	if tree.Truncated {
		return nil, "", fmt.Errorf("tree %s is truncated", path)
	}
	return tree, p.Sha, nil
}

func (d *Github) newTree(baseSha string, tree []interface{}) (string, error) {
	res, err := d.client.R().
		SetBody(&TreeReq{
			BaseTree: baseSha,
			Trees:    tree,
		}).
		Post(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/trees", d.Owner, d.Repo))
	if err != nil {
		return "", err
	}
	if res.StatusCode() != 201 {
		return "", toErr(res)
	}
	var resp TreeResp
	if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
		return "", err
	}
	return resp.Sha, nil
}

func (d *Github) commit(message, treeSha string) error {
	oldCommit, err := d.getBranchHead()
	body := map[string]interface{}{
		"message": message,
		"tree":    treeSha,
		"parents": []string{oldCommit},
	}
	d.addCommitterAndAuthor(&body)
	res, err := d.client.R().SetBody(body).Post(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/commits", d.Owner, d.Repo))
	if err != nil {
		return err
	}
	if res.StatusCode() != 201 {
		return toErr(res)
	}
	var resp CommitResp
	if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
		return err
	}

	// update branch head
	res, err = d.client.R().
		SetBody(&UpdateRefReq{
			Sha:   resp.Sha,
			Force: false,
		}).
		Patch(fmt.Sprintf("https://api.github.com/repos/%s/%s/git/refs/heads/%s", d.Owner, d.Repo, d.Ref))
	if err != nil {
		return err
	}
	if res.StatusCode() != 200 {
		return toErr(res)
	}
	return nil
}

func (d *Github) getBranchHead() (string, error) {
	res, err := d.client.R().Get(fmt.Sprintf("https://api.github.com/repos/%s/%s/branches/%s", d.Owner, d.Repo, d.Ref))
	if err != nil {
		return "", err
	}
	if res.StatusCode() != 200 {
		return "", toErr(res)
	}
	var resp BranchResp
	if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
		return "", err
	}
	return resp.Commit.Sha, nil
}

func (d *Github) copyWithoutRenewTree(srcObj, dstDir model.Obj) (dstSha, newSha, srcParentSha string, srcParentTree *TreeResp, err error) {
	dst, err := d.get(dstDir.GetPath())
	if err != nil {
		return "", "", "", nil, err
	}
	if dst.Entries == nil {
		return "", "", "", nil, errs.NotFolder
	}
	dstSha = dst.Sha
	srcParentPath := stdpath.Dir(srcObj.GetPath())
	srcParentTree, srcParentSha, err = d.getTreeDirectly(srcParentPath)
	if err != nil {
		return "", "", "", nil, err
	}
	var src *TreeObjReq = nil
	for _, t := range srcParentTree.Trees {
		if t.Path == srcObj.GetName() {
			if t.Type == "commit" {
				return "", "", "", nil, errors.New("cannot copy a submodule")
			}
			src = &t.TreeObjReq
			break
		}
	}
	if src == nil {
		return "", "", "", nil, errs.ObjectNotFound
	}

	newTree := make([]interface{}, 0, 2)
	newTree = append(newTree, *src)
	if len(dst.Entries) == 1 && dst.Entries[0].Name == ".gitkeep" {
		newTree = append(newTree, TreeObjReq{
			Path: ".gitkeep",
			Mode: "100644",
			Type: "blob",
			Sha:  nil,
		})
	}
	newSha, err = d.newTree(dstSha, newTree)
	if err != nil {
		return "", "", "", nil, err
	}
	return dstSha, newSha, srcParentSha, srcParentTree, nil
}

func (d *Github) getRepo() (*RepoResp, error) {
	res, err := d.client.R().Get(fmt.Sprintf("https://api.github.com/repos/%s/%s", d.Owner, d.Repo))
	if err != nil {
		return nil, err
	}
	if res.StatusCode() != 200 {
		return nil, toErr(res)
	}
	var resp RepoResp
	if err = utils.Json.Unmarshal(res.Body(), &resp); err != nil {
		return nil, err
	}
	return &resp, nil
}

func (d *Github) addCommitterAndAuthor(m *map[string]interface{}) {
	if d.CommitterName != "" {
		committer := map[string]string{
			"name":  d.CommitterName,
			"email": d.CommitterEmail,
		}
		(*m)["committer"] = committer
	}
	if d.AuthorName != "" {
		author := map[string]string{
			"name":  d.AuthorName,
			"email": d.AuthorEmail,
		}
		(*m)["author"] = author
	}
}
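Aside (not part of the diff): putBlob above streams the blob as the JSON body {"encoding":"base64","content":"<base64 data>"} and precomputes the exact request length from the raw size instead of buffering the whole upload. A minimal stdlib-only sketch of that length bookkeeping, using a stand-in byte slice in place of the driver's FileStreamer:

	package main

	import (
		"bytes"
		"encoding/base64"
		"fmt"
		"io"
		"strings"
	)

	func main() {
		raw := bytes.Repeat([]byte{0xAB}, 1000) // stand-in for the upload stream
		before, after := `{"encoding":"base64","content":"`, `"}`

		// Same formula as calculateBase64Length in the driver: 4*((n+2)/3).
		want := int64(len(before)) + 4*((int64(len(raw))+2)/3) + int64(len(after))

		pr, pw := io.Pipe()
		go func() {
			enc := base64.NewEncoder(base64.StdEncoding, pw)
			_, _ = enc.Write(raw)
			_ = enc.Close()
			_ = pw.Close()
		}()
		body := io.MultiReader(strings.NewReader(before), pr, strings.NewReader(after))
		n, _ := io.Copy(io.Discard, body)
		fmt.Println(n == want, n, want) // true 1370 1370
	}

The counts match because padded base64 always emits 4*((n+2)/3) bytes for n input bytes, which is why the driver can set Content-Length before the encoder has produced a single byte.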
@ -1,36 +0,0 @@
package github

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	driver.RootPath
	Token           string `json:"token" type:"string" required:"true"`
	Owner           string `json:"owner" type:"string" required:"true"`
	Repo            string `json:"repo" type:"string" required:"true"`
	Ref             string `json:"ref" type:"string" help:"A branch, a tag or a commit SHA, main branch by default."`
	CommitterName   string `json:"committer_name" type:"string"`
	CommitterEmail  string `json:"committer_email" type:"string"`
	AuthorName      string `json:"author_name" type:"string"`
	AuthorEmail     string `json:"author_email" type:"string"`
	MkdirCommitMsg  string `json:"mkdir_commit_message" type:"text" default:"{{.UserName}} mkdir {{.ObjPath}}"`
	DeleteCommitMsg string `json:"delete_commit_message" type:"text" default:"{{.UserName}} remove {{.ObjPath}}"`
	PutCommitMsg    string `json:"put_commit_message" type:"text" default:"{{.UserName}} upload {{.ObjPath}}"`
	RenameCommitMsg string `json:"rename_commit_message" type:"text" default:"{{.UserName}} rename {{.ObjPath}} to {{.TargetName}}"`
	CopyCommitMsg   string `json:"copy_commit_message" type:"text" default:"{{.UserName}} copy {{.ObjPath}} to {{.TargetPath}}"`
	MoveCommitMsg   string `json:"move_commit_message" type:"text" default:"{{.UserName}} move {{.ObjPath}} to {{.TargetPath}}"`
}

var config = driver.Config{
	Name:        "GitHub API",
	LocalSort:   true,
	DefaultRoot: "/",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Github{}
	})
}
@ -1,102 +0,0 @@
package github

import (
	"github.com/alist-org/alist/v3/internal/model"
	"time"
)

type Links struct {
	Git  string `json:"git"`
	Html string `json:"html"`
	Self string `json:"self"`
}

type Object struct {
	Type            string   `json:"type"`
	Encoding        string   `json:"encoding" required:"false"`
	Size            int64    `json:"size"`
	Name            string   `json:"name"`
	Path            string   `json:"path"`
	Content         string   `json:"Content" required:"false"`
	Sha             string   `json:"sha"`
	URL             string   `json:"url"`
	GitURL          string   `json:"git_url"`
	HtmlURL         string   `json:"html_url"`
	DownloadURL     string   `json:"download_url"`
	Entries         []Object `json:"entries" required:"false"`
	Links           Links    `json:"_links"`
	SubmoduleGitURL string   `json:"submodule_git_url" required:"false"`
	Target          string   `json:"target" required:"false"`
}

func (o *Object) toModelObj() *model.Object {
	return &model.Object{
		Name:     o.Name,
		Size:     o.Size,
		Modified: time.Unix(0, 0),
		IsFolder: o.Type == "dir",
	}
}

type PutBlobResp struct {
	URL string `json:"url"`
	Sha string `json:"sha"`
}

type ErrResp struct {
	Message          string `json:"message"`
	DocumentationURL string `json:"documentation_url"`
	Status           string `json:"status"`
}

type TreeObjReq struct {
	Path string      `json:"path"`
	Mode string      `json:"mode"`
	Type string      `json:"type"`
	Sha  interface{} `json:"sha"`
}

type TreeObjResp struct {
	TreeObjReq
	Size int64  `json:"size" required:"false"`
	URL  string `json:"url"`
}

func (o *TreeObjResp) toModelObj() *model.Object {
	return &model.Object{
		Name:     o.Path,
		Size:     o.Size,
		Modified: time.Unix(0, 0),
		IsFolder: o.Type == "tree",
	}
}

type TreeResp struct {
	Sha       string        `json:"sha"`
	URL       string        `json:"url"`
	Trees     []TreeObjResp `json:"tree"`
	Truncated bool          `json:"truncated"`
}

type TreeReq struct {
	BaseTree string        `json:"base_tree"`
	Trees    []interface{} `json:"tree"`
}

type CommitResp struct {
	Sha string `json:"sha"`
}

type BranchResp struct {
	Name   string     `json:"name"`
	Commit CommitResp `json:"commit"`
}

type UpdateRefReq struct {
	Sha   string `json:"sha"`
	Force bool   `json:"force"`
}

type RepoResp struct {
	DefaultBranch string `json:"default_branch"`
}
@ -1,115 +0,0 @@
package github

import (
	"context"
	"errors"
	"fmt"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	"io"
	"math"
	"strings"
	"text/template"
)

type ReaderWithProgress struct {
	Reader   io.Reader
	Length   int64
	Progress func(percentage float64)
	offset   int64
}

func (r *ReaderWithProgress) Read(p []byte) (int, error) {
	n, err := r.Reader.Read(p)
	r.offset += int64(n)
	r.Progress(math.Min(100.0, float64(r.offset)/float64(r.Length)*100.0))
	return n, err
}

type MessageTemplateVars struct {
	UserName   string
	ObjName    string
	ObjPath    string
	ParentName string
	ParentPath string
	TargetName string
	TargetPath string
}

func getMessage(tmpl *template.Template, vars *MessageTemplateVars, defaultOpStr string) (string, error) {
	sb := strings.Builder{}
	if err := tmpl.Execute(&sb, vars); err != nil {
		return fmt.Sprintf("%s %s %s", vars.UserName, defaultOpStr, vars.ObjPath), err
	}
	return sb.String(), nil
}

func calculateBase64Length(inputLength int64) int64 {
	return 4 * ((inputLength + 2) / 3)
}

func toErr(res *resty.Response) error {
	var errMsg ErrResp
	if err := utils.Json.Unmarshal(res.Body(), &errMsg); err != nil {
		return errors.New(res.Status())
	} else {
		return fmt.Errorf("%s: %s", res.Status(), errMsg.Message)
	}
}

// Example input:
// a = /aaa/bbb/ccc
// b = /aaa/b11/ddd/ccc
//
// Output:
// ancestor = /aaa
// aChildName = bbb
// bChildName = b11
// aRest = bbb/ccc
// bRest = b11/ddd/ccc
func getPathCommonAncestor(a, b string) (ancestor, aChildName, bChildName, aRest, bRest string) {
	a = utils.FixAndCleanPath(a)
	b = utils.FixAndCleanPath(b)
	idx := 1
	for idx < len(a) && idx < len(b) {
		if a[idx] != b[idx] {
			break
		}
		idx++
	}
	aNextIdx := idx
	for aNextIdx < len(a) {
		if a[aNextIdx] == '/' {
			break
		}
		aNextIdx++
	}
	bNextIdx := idx
	for bNextIdx < len(b) {
		if b[bNextIdx] == '/' {
			break
		}
		bNextIdx++
	}
	for idx > 0 {
		if a[idx] == '/' {
			break
		}
		idx--
	}
	ancestor = utils.FixAndCleanPath(a[:idx])
	aChildName = a[idx+1 : aNextIdx]
	bChildName = b[idx+1 : bNextIdx]
	aRest = a[idx+1:]
	bRest = b[idx+1:]
	return ancestor, aChildName, bChildName, aRest, bRest
}

func getUsername(ctx context.Context) string {
	user, ok := ctx.Value("user").(*model.User)
	if !ok {
		return "<system>"
	}
	return user.Username
}
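Aside (not part of the diff): calculateBase64Length above follows the standard padding rule, so its result can be cross-checked against encoding/base64 directly. A small hedged sketch:

	package main

	import (
		"encoding/base64"
		"fmt"
	)

	// same arithmetic as calculateBase64Length above
	func calcBase64Len(n int64) int64 { return 4 * ((n + 2) / 3) }

	func main() {
		for _, n := range []int64{0, 1, 2, 3, 1000, 1 << 20} {
			got := calcBase64Len(n)
			std := int64(base64.StdEncoding.EncodedLen(int(n)))
			fmt.Println(n, got, got == std) // the two always agree
		}
	}

base64.StdEncoding.EncodedLen uses the same arithmetic, which is what makes the precomputed Content-Length in putBlob safe.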
@ -1,153 +0,0 @@
package github_releases

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"strings"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
)

type GithubReleases struct {
	model.Storage
	Addition

	releases []Release
}

func (d *GithubReleases) Config() driver.Config {
	return config
}

func (d *GithubReleases) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *GithubReleases) Init(ctx context.Context) error {
	SetHeader(d.Addition.Token)
	repos, err := ParseRepos(d.Addition.RepoStructure, d.Addition.ShowAllVersion)
	if err != nil {
		return err
	}
	d.releases = repos
	return nil
}

func (d *GithubReleases) Drop(ctx context.Context) error {
	ClearCache()
	return nil
}

func (d *GithubReleases) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	files := make([]File, 0)
	path := fmt.Sprintf("/%s", strings.Trim(dir.GetPath(), "/"))

	for _, repo := range d.releases {
		if repo.Path == path { // same as the repository's mount path
			resp, err := GetRepoReleaseInfo(repo.RepoName, repo.ID, path, d.Storage.CacheExpiration)
			if err != nil {
				return nil, err
			}
			files = append(files, resp.Files...)

			if d.Addition.ShowReadme {
				resp, err := GetGithubOtherFile(repo.RepoName, path, d.Storage.CacheExpiration)
				if err != nil {
					return nil, err
				}
				files = append(files, *resp...)
			}

		} else if strings.HasPrefix(repo.Path, path) { // the repository path is a subdirectory of this directory
			nextDir := GetNextDir(repo.Path, path)
			if nextDir == "" {
				continue
			}
			if d.Addition.ShowAllVersion {
				files = append(files, File{
					FileName: nextDir,
					Size:     0,
					CreateAt: time.Time{},
					UpdateAt: time.Time{},
					Url:      "",
					Type:     "dir",
					Path:     fmt.Sprintf("%s/%s", path, nextDir),
				})
				continue
			}

			repo, _ := GetRepoReleaseInfo(repo.RepoName, repo.Version, path, d.Storage.CacheExpiration)

			hasSameDir := false
			for index, file := range files {
				if file.FileName == nextDir {
					hasSameDir = true
					files[index].Size += repo.Size
					files[index].UpdateAt = func(a time.Time, b time.Time) time.Time {
						if a.After(b) {
							return a
						}
						return b
					}(files[index].UpdateAt, repo.UpdateAt)
					break
				}
			}

			if !hasSameDir {
				files = append(files, File{
					FileName: nextDir,
					Size:     repo.Size,
					CreateAt: repo.CreateAt,
					UpdateAt: repo.UpdateAt,
					Url:      repo.Url,
					Type:     "dir",
					Path:     fmt.Sprintf("%s/%s", path, nextDir),
				})
			}
		}
	}

	return utils.SliceConvert(files, func(src File) (model.Obj, error) {
		return src, nil
	})
}

func (d *GithubReleases) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	link := model.Link{
		URL:    file.GetID(),
		Header: http.Header{},
	}
	return &link, nil
}

func (d *GithubReleases) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	return nil, errs.NotImplement
}

func (d *GithubReleases) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	return nil, errs.NotImplement
}

func (d *GithubReleases) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	return nil, errs.NotImplement
}

func (d *GithubReleases) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	return nil, errs.NotImplement
}

func (d *GithubReleases) Remove(ctx context.Context, obj model.Obj) error {
	return errs.NotImplement
}

func (d *GithubReleases) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	return nil, errs.NotImplement
}

var _ driver.Driver = (*GithubReleases)(nil)
@ -1,34 +0,0 @@
package github_releases

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	driver.RootID
	RepoStructure  string `json:"repo_structure" type:"text" required:"true" default:"/path/to/alist-gh:alistGo/alist\n/path/to2/alist-web-gh:AlistGo/alist-web" help:"structure:[path:]org/repo"`
	ShowReadme     bool   `json:"show_readme" type:"bool" default:"true" help:"show README、LICENSE file"`
	Token          string `json:"token" type:"string" required:"false" help:"GitHub token, if you want to access private repositories or increase the rate limit"`
	ShowAllVersion bool   `json:"show_all_version" type:"bool" default:"false" help:"show all versions"`
}

var config = driver.Config{
	Name:              "GitHub Releases",
	LocalSort:         false,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          false,
	NeedMs:            false,
	DefaultRoot:       "",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: false,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &GithubReleases{}
	})
}
@ -1,68 +0,0 @@
package github_releases

import (
	"time"

	"github.com/alist-org/alist/v3/pkg/utils"
)

type File struct {
	FileName string    `json:"name"`
	Size     int64     `json:"size"`
	CreateAt time.Time `json:"time"`
	UpdateAt time.Time `json:"chtime"`
	Url      string    `json:"url"`
	Type     string    `json:"type"`
	Path     string    `json:"path"`
}

func (f File) GetHash() utils.HashInfo {
	return utils.HashInfo{}
}

func (f File) GetPath() string {
	return f.Path
}

func (f File) GetSize() int64 {
	return f.Size
}

func (f File) GetName() string {
	return f.FileName
}

func (f File) ModTime() time.Time {
	return f.UpdateAt
}

func (f File) CreateTime() time.Time {
	return f.CreateAt
}

func (f File) IsDir() bool {
	return f.Type == "dir"
}

func (f File) GetID() string {
	return f.Url
}

func (f File) Thumb() string {
	return ""
}

type ReleasesData struct {
	Files    []File    `json:"files"`
	Size     int64     `json:"size"`
	UpdateAt time.Time `json:"chtime"`
	CreateAt time.Time `json:"time"`
	Url      string    `json:"url"`
}

type Release struct {
	Path     string // mount path
	RepoName string // repository name
	Version  string // version number (tag)
	ID       string // release ID
}
@ -1,217 +0,0 @@
package github_releases

import (
	"fmt"
	"regexp"
	"strings"
	"sync"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/go-resty/resty/v2"
	jsoniter "github.com/json-iterator/go"
	log "github.com/sirupsen/logrus"
)

var (
	cache   = make(map[string]*resty.Response)
	created = make(map[string]time.Time)
	mu      sync.Mutex
	req     *resty.Request
)

// Parse the repository list
func ParseRepos(text string, allVersion bool) ([]Release, error) {
	lines := strings.Split(text, "\n")
	var repos []Release
	for _, line := range lines {
		line = strings.TrimSpace(line)
		if line == "" {
			continue
		}
		parts := strings.Split(line, ":")
		path, repo := "", ""
		if len(parts) == 1 {
			path = "/"
			repo = parts[0]
		} else if len(parts) == 2 {
			path = fmt.Sprintf("/%s", strings.Trim(parts[0], "/"))
			repo = parts[1]
		} else {
			return nil, fmt.Errorf("invalid format: %s", line)
		}

		if allVersion {
			releases, _ := GetAllVersion(repo, path)
			repos = append(repos, *releases...)
		} else {
			repos = append(repos, Release{
				Path:     path,
				RepoName: repo,
				Version:  "latest",
				ID:       "latest",
			})
		}

	}
	return repos, nil
}

// Get the next directory level
func GetNextDir(wholePath string, basePath string) string {
	if !strings.HasSuffix(basePath, "/") {
		basePath += "/"
	}
	if !strings.HasPrefix(wholePath, basePath) {
		return ""
	}
	remainingPath := strings.TrimLeft(strings.TrimPrefix(wholePath, basePath), "/")
	if remainingPath != "" {
		parts := strings.Split(remainingPath, "/")
		return parts[0]
	}
	return ""
}

// Send a GET request
func GetRequest(url string, cacheExpiration int) (*resty.Response, error) {
	mu.Lock()
	if res, ok := cache[url]; ok && time.Now().Before(created[url].Add(time.Duration(cacheExpiration)*time.Minute)) {
		mu.Unlock()
		return res, nil
	}
	mu.Unlock()

	res, err := req.Get(url)
	if err != nil {
		return nil, err
	}
	if res.StatusCode() != 200 {
		log.Warn("failed to get request: ", res.StatusCode(), res.String())
	}

	mu.Lock()
	cache[url] = res
	created[url] = time.Now()
	mu.Unlock()

	return res, nil
}

// Get README, LICENSE and other repository files
func GetGithubOtherFile(repo string, basePath string, cacheExpiration int) (*[]File, error) {
	url := fmt.Sprintf("https://api.github.com/repos/%s/contents/", strings.Trim(repo, "/"))
	res, _ := GetRequest(url, cacheExpiration)
	body := jsoniter.Get(res.Body())
	var files []File
	for i := 0; i < body.Size(); i++ {
		filename := body.Get(i, "name").ToString()

		re := regexp.MustCompile(`(?i)^(.*\.md|LICENSE)$`)

		if !re.MatchString(filename) {
			continue
		}

		files = append(files, File{
			FileName: filename,
			Size:     body.Get(i, "size").ToInt64(),
			CreateAt: time.Time{},
			UpdateAt: time.Now(),
			Url:      body.Get(i, "download_url").ToString(),
			Type:     body.Get(i, "type").ToString(),
			Path:     fmt.Sprintf("%s/%s", basePath, filename),
		})
	}
	return &files, nil
}

// Get detailed GitHub Release information
func GetRepoReleaseInfo(repo string, version string, basePath string, cacheExpiration int) (*ReleasesData, error) {
	url := fmt.Sprintf("https://api.github.com/repos/%s/releases/%s", strings.Trim(repo, "/"), version)
	res, _ := GetRequest(url, cacheExpiration)
	body := res.Body()

	if jsoniter.Get(res.Body(), "status").ToInt64() != 0 {
		return &ReleasesData{}, fmt.Errorf("%s", res.String())
	}

	assets := jsoniter.Get(res.Body(), "assets")
	var files []File

	for i := 0; i < assets.Size(); i++ {
		filename := assets.Get(i, "name").ToString()

		files = append(files, File{
			FileName: filename,
			Size:     assets.Get(i, "size").ToInt64(),
			Url:      assets.Get(i, "browser_download_url").ToString(),
			Type:     assets.Get(i, "content_type").ToString(),
			Path:     fmt.Sprintf("%s/%s", basePath, filename),

			CreateAt: func() time.Time {
				t, _ := time.Parse(time.RFC3339, assets.Get(i, "created_at").ToString())
				return t
			}(),
			UpdateAt: func() time.Time {
				t, _ := time.Parse(time.RFC3339, assets.Get(i, "updated_at").ToString())
				return t
			}(),
		})
	}

	return &ReleasesData{
		Files: files,
		Url:   jsoniter.Get(body, "html_url").ToString(),

		Size: func() int64 {
			size := int64(0)
			for _, file := range files {
				size += file.Size
			}
			return size
		}(),
		UpdateAt: func() time.Time {
			t, _ := time.Parse(time.RFC3339, jsoniter.Get(body, "published_at").ToString())
			return t
		}(),
		CreateAt: func() time.Time {
			t, _ := time.Parse(time.RFC3339, jsoniter.Get(body, "created_at").ToString())
			return t
		}(),
	}, nil
}

// Get all version tags
func GetAllVersion(repo string, path string) (*[]Release, error) {
	url := fmt.Sprintf("https://api.github.com/repos/%s/releases", strings.Trim(repo, "/"))
	res, _ := GetRequest(url, 0)
	body := jsoniter.Get(res.Body())
	releases := make([]Release, 0)
	for i := 0; i < body.Size(); i++ {
		version := body.Get(i, "tag_name").ToString()
		releases = append(releases, Release{
			Path:     fmt.Sprintf("%s/%s", path, version),
			Version:  version,
			RepoName: repo,
			ID:       body.Get(i, "id").ToString(),
		})
	}
	return &releases, nil
}

func ClearCache() {
	mu.Lock()
	cache = make(map[string]*resty.Response)
	created = make(map[string]time.Time)
	mu.Unlock()
}

func SetHeader(token string) {
	req = base.RestyClient.R()
	if token != "" {
		req.SetHeader("Authorization", fmt.Sprintf("Bearer %s", token))
	}
	req.SetHeader("Accept", "application/vnd.github+json")
	req.SetHeader("X-GitHub-Api-Version", "2022-11-28")
}
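Aside (not part of the diff): ParseRepos above accepts one [path:]org/repo entry per line, as in the repo_structure default shown in the deleted meta.go. A standalone sketch of that line format, reusing the same split-on-":" rule with hypothetical sample input:

	package main

	import (
		"fmt"
		"strings"
	)

	func main() {
		input := "/path/to/alist-gh:alistGo/alist\nAlistGo/alist-web" // second line has no mount path
		for _, line := range strings.Split(input, "\n") {
			line = strings.TrimSpace(line)
			if line == "" {
				continue
			}
			parts := strings.Split(line, ":")
			switch len(parts) {
			case 1: // bare org/repo mounts at the root
				fmt.Printf("path=/ repo=%s\n", parts[0])
			case 2: // explicit mount path before the colon
				fmt.Printf("path=/%s repo=%s\n", strings.Trim(parts[0], "/"), parts[1])
			default:
				fmt.Printf("invalid format: %s\n", line)
			}
		}
	}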
@ -58,9 +58,33 @@ func (d *GooglePhoto) Link(ctx context.Context, file model.Obj, args model.LinkA
			URL: f.BaseURL + "=d",
		}, nil
	} else if strings.Contains(f.MimeType, "video/") {
		return &model.Link{
			URL: f.BaseURL + "=dv",
		}, nil
		var width, height int
		fmt.Sscanf(f.MediaMetadata.Width, "%d", &width)
		fmt.Sscanf(f.MediaMetadata.Height, "%d", &height)

		switch {
		// 1080P
		case width == 1920 && height == 1080:
			return &model.Link{
				URL: f.BaseURL + "=m37",
			}, nil
		// 720P
		case width == 1280 && height == 720:
			return &model.Link{
				URL: f.BaseURL + "=m22",
			}, nil
		// 360P
		case width == 640 && height == 360:
			return &model.Link{
				URL: f.BaseURL + "=m18",
			}, nil
		default:
			return &model.Link{
				URL: f.BaseURL + "=dv",
			}, nil
		}

	}
	return &model.Link{}, nil
}
@ -4,17 +4,12 @@ import (
	"context"
	"crypto/sha1"
	"fmt"
	"io"
	"net/url"
	"path"
	"strconv"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/alist-org/alist/v3/pkg/http_range"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
@ -24,6 +19,11 @@ import (
	pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
	"github.com/rclone/rclone/lib/readers"
	"github.com/zzzhr1990/go-common-entity/userfile"
	"io"
	"net/url"
	"path"
	"strconv"
	"time"
)

type HalalCloud struct {
@ -251,6 +251,7 @@ func (d *HalalCloud) getLink(ctx context.Context, file model.Obj, args model.Lin

	size := result.FileSize
	chunks := getChunkSizes(result.Sizes)
	var finalClosers utils.Closers
	resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
		length := httpRange.Length
		if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
@ -268,6 +269,7 @@ func (d *HalalCloud) getLink(ctx context.Context, file model.Obj, args model.Lin
			sha:     result.Sha1,
			shaTemp: sha1.New(),
		}
		finalClosers.Add(oo)

		return readers.NewLimitedReadCloser(oo, length), nil
	}
@ -279,7 +281,7 @@ func (d *HalalCloud) getLink(ctx context.Context, file model.Obj, args model.Lin
		duration = time.Until(time.Now().Add(time.Hour))
	}

	resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader}
	resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: finalClosers}
	return &model.Link{
		RangeReadCloser: resultRangeReadCloser,
		Expiration:      &duration,
@ -66,13 +66,12 @@ func (d *ILanZou) Drop(ctx context.Context) error {
}

func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	offset := 1
	var res []ListItem
	for {
		var resp ListResp
		_, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
			params := []string{
				"offset=" + strconv.Itoa(offset),
				"offset=1",
				"limit=60",
				"folderId=" + dir.GetID(),
				"type=0",
@ -84,9 +83,7 @@ func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs)
			return nil, err
		}
		res = append(res, resp.List...)
		if resp.Offset < resp.TotalPage {
		if resp.TotalPage <= resp.Offset {
			offset++
		} else {
			break
		}
	}
@ -289,7 +286,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
		req.SetBody(base.Json{
			"fileId":   "",
			"fileName": stream.GetName(),
			"fileSize": stream.GetSize()/1024 + 1,
			"fileSize": stream.GetSize() / 1024,
			"folderId": dstDir.GetID(),
			"md5":      etag,
			"type":     1,
@ -69,10 +69,9 @@ func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, pr

	req := base.RestyClient.R()
	req.SetHeaders(map[string]string{
		"Origin":          d.conf.site,
		"Referer":         d.conf.site + "/",
		"User-Agent":      "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
		"Accept-Encoding": "gzip, deflate, br, zstd",
	})

	if callback != nil {
|
|||||||
func findJSVarFunc(key, data string) string {
|
func findJSVarFunc(key, data string) string {
|
||||||
var values []string
|
var values []string
|
||||||
if key != "sasign" {
|
if key != "sasign" {
|
||||||
values = regexp.MustCompile(`var ` + key + `\s*=\s*['"]?(.+?)['"]?;`).FindStringSubmatch(data)
|
values = regexp.MustCompile(`var ` + key + ` = '(.+?)';`).FindStringSubmatch(data)
|
||||||
} else {
|
} else {
|
||||||
matches := regexp.MustCompile(`var `+key+`\s*=\s*['"]?(.+?)['"]?;`).FindAllStringSubmatch(data, -1)
|
matches := regexp.MustCompile(`var `+key+` = '(.+?)';`).FindAllStringSubmatch(data, -1)
|
||||||
if len(matches) == 3 {
|
if len(matches) == 3 {
|
||||||
values = matches[1]
|
values = matches[1]
|
||||||
} else {
|
} else {
|
||||||
|
@ -264,9 +264,6 @@ var findSubFolderReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/
|
|||||||
// 获取下载页面链接
|
// 获取下载页面链接
|
||||||
var findDownPageParamReg = regexp.MustCompile(`<iframe.*?src="(.+?)"`)
|
var findDownPageParamReg = regexp.MustCompile(`<iframe.*?src="(.+?)"`)
|
||||||
|
|
||||||
// 获取文件ID
|
|
||||||
var findFileIDReg = regexp.MustCompile(`'/ajaxm\.php\?file=(\d+)'`)
|
|
||||||
|
|
||||||
// 获取分享链接主界面
|
// 获取分享链接主界面
|
||||||
func (d *LanZou) getShareUrlHtml(shareID string) (string, error) {
|
func (d *LanZou) getShareUrlHtml(shareID string) (string, error) {
|
||||||
var vs string
|
var vs string
|
||||||
@ -359,16 +356,8 @@ func (d *LanZou) getFilesByShareUrl(shareID, pwd string, sharePageData string) (
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
param["p"] = pwd
|
param["p"] = pwd
|
||||||
|
|
||||||
fileIDs := findFileIDReg.FindStringSubmatch(sharePageData)
|
|
||||||
var fileID string
|
|
||||||
if len(fileIDs) > 1 {
|
|
||||||
fileID = fileIDs[1]
|
|
||||||
} else {
|
|
||||||
return nil, fmt.Errorf("not find file id")
|
|
||||||
}
|
|
||||||
var resp FileShareInfoAndUrlResp[string]
|
var resp FileShareInfoAndUrlResp[string]
|
||||||
_, err = d.post(d.ShareUrl+"/ajaxm.php?file="+fileID, func(req *resty.Request) { req.SetFormData(param) }, &resp)
|
_, err = d.post(d.ShareUrl+"/ajaxm.php", func(req *resty.Request) { req.SetFormData(param) }, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -392,15 +381,8 @@ func (d *LanZou) getFilesByShareUrl(shareID, pwd string, sharePageData string) (
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
fileIDs := findFileIDReg.FindStringSubmatch(nextPageData)
|
|
||||||
var fileID string
|
|
||||||
if len(fileIDs) > 1 {
|
|
||||||
fileID = fileIDs[1]
|
|
||||||
} else {
|
|
||||||
return nil, fmt.Errorf("not find file id")
|
|
||||||
}
|
|
||||||
var resp FileShareInfoAndUrlResp[int]
|
var resp FileShareInfoAndUrlResp[int]
|
||||||
_, err = d.post(d.ShareUrl+"/ajaxm.php?file="+fileID, func(req *resty.Request) { req.SetFormData(param) }, &resp)
|
_, err = d.post(d.ShareUrl+"/ajaxm.php", func(req *resty.Request) { req.SetFormData(param) }, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
@ -3,7 +3,6 @@ package LenovoNasShare
import (
	"context"
	"net/http"
	"time"

	"github.com/go-resty/resty/v2"

@ -16,8 +15,7 @@ import (
type LenovoNasShare struct {
	model.Storage
	Addition
	stoken   string
	expireAt int64
}

func (d *LenovoNasShare) Config() driver.Config {
@ -29,9 +27,20 @@ func (d *LenovoNasShare) GetAddition() driver.Additional {
}

func (d *LenovoNasShare) Init(ctx context.Context) error {
	if err := d.getStoken(); err != nil {
	if d.Host == "" {
		d.Host = "https://siot-share.lenovo.com.cn"
	}
	query := map[string]string{
		"code":     d.ShareId,
		"password": d.SharePwd,
	}
	resp, err := d.request(d.Host+"/oneproxy/api/share/v1/access", http.MethodGet, func(req *resty.Request) {
		req.SetQueryParams(query)
	}, nil)
	if err != nil {
		return err
	}
	d.stoken = utils.Json.Get(resp, "data", "stoken").ToString()
	return nil
}

@ -40,7 +49,6 @@ func (d *LenovoNasShare) Drop(ctx context.Context) error {
}

func (d *LenovoNasShare) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	d.checkStoken() // check whether the stoken has expired
	files := make([]File, 0)

	var resp Files
@ -63,33 +71,7 @@ func (d *LenovoNasShare) List(ctx context.Context, dir model.Obj, args model.Lis
	})
}

func (d *LenovoNasShare) checkStoken() { // check whether the stoken has expired
	if d.expireAt < time.Now().Unix() {
		d.getStoken()
	}
}

func (d *LenovoNasShare) getStoken() error { // fetch the stoken
	if d.Host == "" {
		d.Host = "https://siot-share.lenovo.com.cn"
	}
	query := map[string]string{
		"code":     d.ShareId,
		"password": d.SharePwd,
	}
	resp, err := d.request(d.Host+"/oneproxy/api/share/v1/access", http.MethodGet, func(req *resty.Request) {
		req.SetQueryParams(query)
	}, nil)
	if err != nil {
		return err
	}
	d.stoken = utils.Json.Get(resp, "data", "stoken").ToString()
	d.expireAt = utils.Json.Get(resp, "data", "expires_in").ToInt64() + time.Now().Unix() - 60
	return nil
}

func (d *LenovoNasShare) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	d.checkStoken() // check whether the stoken has expired
	query := map[string]string{
		"code":   d.ShareId,
		"stoken": d.stoken,
@ -22,7 +22,6 @@ import (
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/alist-org/alist/v3/server/common"
	"github.com/alist-org/times"
	cp "github.com/otiai10/copy"
	log "github.com/sirupsen/logrus"
	_ "golang.org/x/image/webp"
)
@ -77,29 +76,7 @@ func (d *Local) Init(ctx context.Context) error {
	if d.thumbConcurrency == 0 {
		d.thumbTokenBucket = NewNopTokenBucket()
	} else {
		d.thumbTokenBucket = NewStaticTokenBucketWithMigration(d.thumbTokenBucket, d.thumbConcurrency)
		d.thumbTokenBucket = NewStaticTokenBucket(d.thumbConcurrency)
	}
	// Check the VideoThumbPos value
	if d.VideoThumbPos == "" {
		d.VideoThumbPos = "20%"
	}
	if strings.HasSuffix(d.VideoThumbPos, "%") {
		percentage := strings.TrimSuffix(d.VideoThumbPos, "%")
		val, err := strconv.ParseFloat(percentage, 64)
		if err != nil {
			return fmt.Errorf("invalid video_thumb_pos value: %s, err: %s", d.VideoThumbPos, err)
		}
		if val < 0 || val > 100 {
			return fmt.Errorf("invalid video_thumb_pos value: %s, the precentage must be a number between 0 and 100", d.VideoThumbPos)
		}
	} else {
		val, err := strconv.ParseFloat(d.VideoThumbPos, 64)
		if err != nil {
			return fmt.Errorf("invalid video_thumb_pos value: %s, err: %s", d.VideoThumbPos, err)
		}
		if val < 0 {
			return fmt.Errorf("invalid video_thumb_pos value: %s, the time must be a positive number", d.VideoThumbPos)
		}
	}
	}
	return nil
}
@ -123,17 +100,17 @@ func (d *Local) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
		if !d.ShowHidden && strings.HasPrefix(f.Name(), ".") {
			continue
		}
		file := d.FileInfoToObj(ctx, f, args.ReqPath, fullPath)
		file := d.FileInfoToObj(f, args.ReqPath, fullPath)
		files = append(files, file)
	}
	return files, nil
}
func (d *Local) FileInfoToObj(ctx context.Context, f fs.FileInfo, reqPath string, fullPath string) model.Obj {
func (d *Local) FileInfoToObj(f fs.FileInfo, reqPath string, fullPath string) model.Obj {
	thumb := ""
	if d.Thumbnail {
		typeName := utils.GetFileType(f.Name())
		if typeName == conf.IMAGE || typeName == conf.VIDEO {
			thumb = common.GetApiUrl(common.GetHttpReq(ctx)) + stdpath.Join("/d", reqPath, f.Name())
			thumb = common.GetApiUrl(nil) + stdpath.Join("/d", reqPath, f.Name())
			thumb = utils.EncodePath(thumb, true)
			thumb += "?type=thumb&sign=" + sign.Sign(stdpath.Join(reqPath, f.Name()))
		}
@ -171,7 +148,7 @@ func (d *Local) GetMeta(ctx context.Context, path string) (model.Obj, error) {
	if err != nil {
		return nil, err
	}
	file := d.FileInfoToObj(ctx, f, path, path)
	file := d.FileInfoToObj(f, path, path)
	//h := "123123"
	//if s, ok := f.(model.SetHash); ok && file.GetHash() == ("","") {
	// s.SetHash(h,"SHA1")
@ -264,22 +241,11 @@ func (d *Local) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
	if utils.IsSubPath(srcPath, dstPath) {
		return fmt.Errorf("the destination folder is a subfolder of the source folder")
	}
	if err := os.Rename(srcPath, dstPath); err != nil && strings.Contains(err.Error(), "invalid cross-device link") {
	err := os.Rename(srcPath, dstPath)
		// Handle cross-device file move in local driver
	if err != nil {
		if err = d.Copy(ctx, srcObj, dstDir); err != nil {
			return err
		} else {
			// Directly remove file without check recycle bin if successfully copied
|
|
||||||
if srcObj.IsDir() {
|
|
||||||
err = os.RemoveAll(srcObj.GetPath())
|
|
||||||
} else {
|
|
||||||
err = os.Remove(srcObj.GetPath())
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Local) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
func (d *Local) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
@ -292,18 +258,22 @@ func (d *Local) Rename(ctx context.Context, srcObj model.Obj, newName string) er
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Local) Copy(_ context.Context, srcObj, dstDir model.Obj) error {
|
func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
srcPath := srcObj.GetPath()
|
srcPath := srcObj.GetPath()
|
||||||
dstPath := filepath.Join(dstDir.GetPath(), srcObj.GetName())
|
dstPath := filepath.Join(dstDir.GetPath(), srcObj.GetName())
|
||||||
if utils.IsSubPath(srcPath, dstPath) {
|
if utils.IsSubPath(srcPath, dstPath) {
|
||||||
return fmt.Errorf("the destination folder is a subfolder of the source folder")
|
return fmt.Errorf("the destination folder is a subfolder of the source folder")
|
||||||
}
|
}
|
||||||
// Copy using otiai10/copy to perform more secure & efficient copy
|
var err error
|
||||||
return cp.Copy(srcPath, dstPath, cp.Options{
|
if srcObj.IsDir() {
|
||||||
Sync: true, // Sync file to disk after copy, may have performance penalty in filesystem such as ZFS
|
err = utils.CopyDir(srcPath, dstPath)
|
||||||
PreserveTimes: true,
|
} else {
|
||||||
PreserveOwner: true,
|
err = utils.CopyFile(srcPath, dstPath)
|
||||||
})
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
|
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
|
@ -10,7 +10,6 @@ type Addition struct {
|
|||||||
Thumbnail bool `json:"thumbnail" required:"true" help:"enable thumbnail"`
|
Thumbnail bool `json:"thumbnail" required:"true" help:"enable thumbnail"`
|
||||||
ThumbCacheFolder string `json:"thumb_cache_folder"`
|
ThumbCacheFolder string `json:"thumb_cache_folder"`
|
||||||
ThumbConcurrency string `json:"thumb_concurrency" default:"16" required:"false" help:"Number of concurrent thumbnail generation goroutines. This controls how many thumbnails can be generated in parallel."`
|
ThumbConcurrency string `json:"thumb_concurrency" default:"16" required:"false" help:"Number of concurrent thumbnail generation goroutines. This controls how many thumbnails can be generated in parallel."`
|
||||||
VideoThumbPos string `json:"video_thumb_pos" default:"20%" required:"false" help:"The position of the video thumbnail. If the value is a number (integer ot floating point), it represents the time in seconds. If the value ends with '%', it represents the percentage of the video duration."`
|
|
||||||
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
||||||
MkdirPerm string `json:"mkdir_perm" default:"777"`
|
MkdirPerm string `json:"mkdir_perm" default:"777"`
|
||||||
RecycleBinPath string `json:"recycle_bin_path" default:"delete permanently" help:"path to recycle bin, delete permanently if empty or keep 'delete permanently'"`
|
RecycleBinPath string `json:"recycle_bin_path" default:"delete permanently" help:"path to recycle bin, delete permanently if empty or keep 'delete permanently'"`
|
||||||
|
@ -23,38 +23,6 @@ func NewStaticTokenBucket(size int) StaticTokenBucket {
|
|||||||
return StaticTokenBucket{bucket: bucket}
|
return StaticTokenBucket{bucket: bucket}
|
||||||
}
|
}
|
||||||
|
|
||||||
func NewStaticTokenBucketWithMigration(oldBucket TokenBucket, size int) StaticTokenBucket {
|
|
||||||
if oldBucket != nil {
|
|
||||||
oldStaticBucket, ok := oldBucket.(StaticTokenBucket)
|
|
||||||
if ok {
|
|
||||||
oldSize := cap(oldStaticBucket.bucket)
|
|
||||||
migrateSize := oldSize
|
|
||||||
if size < migrateSize {
|
|
||||||
migrateSize = size
|
|
||||||
}
|
|
||||||
|
|
||||||
bucket := make(chan struct{}, size)
|
|
||||||
for range size - migrateSize {
|
|
||||||
bucket <- struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
if migrateSize != 0 {
|
|
||||||
go func() {
|
|
||||||
for range migrateSize {
|
|
||||||
<-oldStaticBucket.bucket
|
|
||||||
bucket <- struct{}{}
|
|
||||||
}
|
|
||||||
close(oldStaticBucket.bucket)
|
|
||||||
}()
|
|
||||||
}
|
|
||||||
return StaticTokenBucket{bucket: bucket}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return NewStaticTokenBucket(size)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Take channel maybe closed when local driver is modified.
|
|
||||||
// don't call Put method after the channel is closed.
|
|
||||||
func (b StaticTokenBucket) Take() <-chan struct{} {
|
func (b StaticTokenBucket) Take() <-chan struct{} {
|
||||||
return b.bucket
|
return b.bucket
|
||||||
}
|
}
|
||||||
@ -67,10 +35,8 @@ func (b StaticTokenBucket) Do(ctx context.Context, f func() error) error {
|
|||||||
select {
|
select {
|
||||||
case <-ctx.Done():
|
case <-ctx.Done():
|
||||||
return ctx.Err()
|
return ctx.Err()
|
||||||
case _, ok := <-b.Take():
|
case <-b.bucket:
|
||||||
if ok {
|
defer b.Put()
|
||||||
defer b.Put()
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
return f()
|
return f()
|
||||||
}
|
}
|
||||||
|
@ -2,13 +2,11 @@ package local
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/fs"
|
"io/fs"
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"sort"
|
"sort"
|
||||||
"strconv"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/conf"
|
"github.com/alist-org/alist/v3/internal/conf"
|
||||||
@ -36,58 +34,10 @@ func isSymlinkDir(f fs.FileInfo, path string) bool {
|
|||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get the snapshot of the video
|
func GetSnapshot(videoPath string, frameNum int) (imgData *bytes.Buffer, err error) {
|
||||||
func (d *Local) GetSnapshot(videoPath string) (imgData *bytes.Buffer, err error) {
|
|
||||||
// Run ffprobe to get the video duration
|
|
||||||
jsonOutput, err := ffmpeg.Probe(videoPath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// get format.duration from the json string
|
|
||||||
type probeFormat struct {
|
|
||||||
Duration string `json:"duration"`
|
|
||||||
}
|
|
||||||
type probeData struct {
|
|
||||||
Format probeFormat `json:"format"`
|
|
||||||
}
|
|
||||||
var probe probeData
|
|
||||||
err = json.Unmarshal([]byte(jsonOutput), &probe)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
totalDuration, err := strconv.ParseFloat(probe.Format.Duration, 64)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
var ss string
|
|
||||||
if strings.HasSuffix(d.VideoThumbPos, "%") {
|
|
||||||
percentage, err := strconv.ParseFloat(strings.TrimSuffix(d.VideoThumbPos, "%"), 64)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
ss = fmt.Sprintf("%f", totalDuration*percentage/100)
|
|
||||||
} else {
|
|
||||||
val, err := strconv.ParseFloat(d.VideoThumbPos, 64)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// If the value is greater than the total duration, use the total duration
|
|
||||||
if val > totalDuration {
|
|
||||||
ss = fmt.Sprintf("%f", totalDuration)
|
|
||||||
} else {
|
|
||||||
ss = d.VideoThumbPos
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run ffmpeg to get the snapshot
|
|
||||||
srcBuf := bytes.NewBuffer(nil)
|
srcBuf := bytes.NewBuffer(nil)
|
||||||
// If the remaining time from the seek point to the end of the video is less
|
stream := ffmpeg.Input(videoPath).
|
||||||
// than the duration of a single frame, ffmpeg cannot extract any frames
|
Filter("select", ffmpeg.Args{fmt.Sprintf("gte(n,%d)", frameNum)}).
|
||||||
// within the specified range and will exit with an error.
|
|
||||||
// The "noaccurate_seek" option prevents this error and would also speed up
|
|
||||||
// the seek process.
|
|
||||||
stream := ffmpeg.Input(videoPath, ffmpeg.KwArgs{"ss": ss, "noaccurate_seek": ""}).
|
|
||||||
Output("pipe:", ffmpeg.KwArgs{"vframes": 1, "format": "image2", "vcodec": "mjpeg"}).
|
Output("pipe:", ffmpeg.KwArgs{"vframes": 1, "format": "image2", "vcodec": "mjpeg"}).
|
||||||
GlobalArgs("-loglevel", "error").Silent(true).
|
GlobalArgs("-loglevel", "error").Silent(true).
|
||||||
WithOutput(srcBuf, os.Stdout)
|
WithOutput(srcBuf, os.Stdout)
|
||||||
@ -127,7 +77,7 @@ func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
|
|||||||
}
|
}
|
||||||
var srcBuf *bytes.Buffer
|
var srcBuf *bytes.Buffer
|
||||||
if utils.GetFileType(file.GetName()) == conf.VIDEO {
|
if utils.GetFileType(file.GetName()) == conf.VIDEO {
|
||||||
videoBuf, err := d.GetSnapshot(fullPath)
|
videoBuf, err := GetSnapshot(fullPath, 10)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
|
@ -84,6 +84,7 @@ func (d *Mega) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*
|
|||||||
//}
|
//}
|
||||||
|
|
||||||
size := file.GetSize()
|
size := file.GetSize()
|
||||||
|
var finalClosers utils.Closers
|
||||||
resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
|
resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
|
||||||
length := httpRange.Length
|
length := httpRange.Length
|
||||||
if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
|
if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
|
||||||
@ -102,10 +103,11 @@ func (d *Mega) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*
|
|||||||
d: down,
|
d: down,
|
||||||
skip: httpRange.Start,
|
skip: httpRange.Start,
|
||||||
}
|
}
|
||||||
|
finalClosers.Add(oo)
|
||||||
|
|
||||||
return readers.NewLimitedReadCloser(oo, length), nil
|
return readers.NewLimitedReadCloser(oo, length), nil
|
||||||
}
|
}
|
||||||
resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader}
|
resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: finalClosers}
|
||||||
resultLink := &model.Link{
|
resultLink := &model.Link{
|
||||||
RangeReadCloser: resultRangeReadCloser,
|
RangeReadCloser: resultRangeReadCloser,
|
||||||
}
|
}
|
||||||
|
@ -1,74 +0,0 @@
|
|||||||
package misskey
|
|
||||||
|
|
||||||
import (
|
|
||||||
"context"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Misskey struct {
|
|
||||||
model.Storage
|
|
||||||
Addition
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Config() driver.Config {
|
|
||||||
return config
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) GetAddition() driver.Additional {
|
|
||||||
return &d.Addition
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Init(ctx context.Context) error {
|
|
||||||
d.Endpoint = strings.TrimSuffix(d.Endpoint, "/")
|
|
||||||
if d.Endpoint == "" || d.AccessToken == "" {
|
|
||||||
return errs.EmptyToken
|
|
||||||
} else {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Drop(ctx context.Context) error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
|
||||||
return d.list(dir)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
|
||||||
return d.link(file)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
|
||||||
return d.makeDir(parentDir, dirName)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
return d.move(srcObj, dstDir)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
|
||||||
return d.rename(srcObj, newName)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
return d.copy(srcObj, dstDir)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Remove(ctx context.Context, obj model.Obj) error {
|
|
||||||
return d.remove(obj)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
|
||||||
return d.put(dstDir, stream, up)
|
|
||||||
}
|
|
||||||
|
|
||||||
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
|
||||||
// return nil, errs.NotSupport
|
|
||||||
//}
|
|
||||||
|
|
||||||
var _ driver.Driver = (*Misskey)(nil)
|
|
@ -1,35 +0,0 @@
|
|||||||
package misskey
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Addition struct {
|
|
||||||
// Usually one of two
|
|
||||||
driver.RootPath
|
|
||||||
// define other
|
|
||||||
// Field string `json:"field" type:"select" required:"true" options:"a,b,c" default:"a"`
|
|
||||||
Endpoint string `json:"endpoint" required:"true" default:"https://misskey.io"`
|
|
||||||
AccessToken string `json:"access_token" required:"true"`
|
|
||||||
}
|
|
||||||
|
|
||||||
var config = driver.Config{
|
|
||||||
Name: "Misskey",
|
|
||||||
LocalSort: false,
|
|
||||||
OnlyLocal: false,
|
|
||||||
OnlyProxy: false,
|
|
||||||
NoCache: false,
|
|
||||||
NoUpload: false,
|
|
||||||
NeedMs: false,
|
|
||||||
DefaultRoot: "/",
|
|
||||||
CheckStatus: false,
|
|
||||||
Alert: "",
|
|
||||||
NoOverwriteUpload: false,
|
|
||||||
}
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
op.RegisterDriver(func() driver.Driver {
|
|
||||||
return &Misskey{}
|
|
||||||
})
|
|
||||||
}
|
|
@ -1,35 +0,0 @@
|
|||||||
package misskey
|
|
||||||
|
|
||||||
type Resp struct {
|
|
||||||
Code int
|
|
||||||
Raw []byte
|
|
||||||
}
|
|
||||||
|
|
||||||
type Properties struct {
|
|
||||||
Width int `json:"width"`
|
|
||||||
Height int `json:"height"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type MFile struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
CreatedAt string `json:"createdAt"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
Type string `json:"type"`
|
|
||||||
MD5 string `json:"md5"`
|
|
||||||
Size int64 `json:"size"`
|
|
||||||
IsSensitive bool `json:"isSensitive"`
|
|
||||||
Blurhash string `json:"blurhash"`
|
|
||||||
Properties Properties `json:"properties"`
|
|
||||||
URL string `json:"url"`
|
|
||||||
ThumbnailURL string `json:"thumbnailUrl"`
|
|
||||||
Comment *string `json:"comment"`
|
|
||||||
FolderID *string `json:"folderId"`
|
|
||||||
Folder MFolder `json:"folder"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type MFolder struct {
|
|
||||||
ID string `json:"id"`
|
|
||||||
CreatedAt string `json:"createdAt"`
|
|
||||||
Name string `json:"name"`
|
|
||||||
ParentID *string `json:"parentId"`
|
|
||||||
}
|
|
@ -1,256 +0,0 @@
|
|||||||
package misskey
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"errors"
|
|
||||||
"io"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/go-resty/resty/v2"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
|
||||||
"github.com/alist-org/alist/v3/internal/driver"
|
|
||||||
"github.com/alist-org/alist/v3/internal/errs"
|
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Base layer methods
|
|
||||||
|
|
||||||
func (d *Misskey) request(path, method string, callback base.ReqCallback, resp interface{}) error {
|
|
||||||
url := d.Endpoint + "/api/drive" + path
|
|
||||||
req := base.RestyClient.R()
|
|
||||||
|
|
||||||
req.SetAuthToken(d.AccessToken).SetHeader("Content-Type", "application/json")
|
|
||||||
|
|
||||||
if callback != nil {
|
|
||||||
callback(req)
|
|
||||||
} else {
|
|
||||||
req.SetBody("{}")
|
|
||||||
}
|
|
||||||
|
|
||||||
req.SetResult(resp)
|
|
||||||
|
|
||||||
// 启用调试模式
|
|
||||||
req.EnableTrace()
|
|
||||||
|
|
||||||
response, err := req.Execute(method, url)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
if !response.IsSuccess() {
|
|
||||||
return errors.New(response.String())
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) getThumb(ctx context.Context, obj model.Obj) (io.Reader, error) {
|
|
||||||
// TODO return the thumb of obj, optional
|
|
||||||
return nil, errs.NotImplement
|
|
||||||
}
|
|
||||||
|
|
||||||
func setBody(body interface{}) base.ReqCallback {
|
|
||||||
return func(req *resty.Request) {
|
|
||||||
req.SetBody(body)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func handleFolderId(dir model.Obj) interface{} {
|
|
||||||
if dir.GetID() == "" {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return dir.GetID()
|
|
||||||
}
|
|
||||||
|
|
||||||
// API layer methods
|
|
||||||
|
|
||||||
func (d *Misskey) getFiles(dir model.Obj) ([]model.Obj, error) {
|
|
||||||
var files []MFile
|
|
||||||
var body map[string]string
|
|
||||||
if dir.GetPath() != "/" {
|
|
||||||
body = map[string]string{"folderId": dir.GetID()}
|
|
||||||
} else {
|
|
||||||
body = map[string]string{}
|
|
||||||
}
|
|
||||||
err := d.request("/files", "POST", setBody(body), &files)
|
|
||||||
if err != nil {
|
|
||||||
return []model.Obj{}, err
|
|
||||||
}
|
|
||||||
return utils.SliceConvert(files, func(src MFile) (model.Obj, error) {
|
|
||||||
return mFile2Object(src), nil
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) getFolders(dir model.Obj) ([]model.Obj, error) {
|
|
||||||
var folders []MFolder
|
|
||||||
var body map[string]string
|
|
||||||
if dir.GetPath() != "/" {
|
|
||||||
body = map[string]string{"folderId": dir.GetID()}
|
|
||||||
} else {
|
|
||||||
body = map[string]string{}
|
|
||||||
}
|
|
||||||
err := d.request("/folders", "POST", setBody(body), &folders)
|
|
||||||
if err != nil {
|
|
||||||
return []model.Obj{}, err
|
|
||||||
}
|
|
||||||
return utils.SliceConvert(folders, func(src MFolder) (model.Obj, error) {
|
|
||||||
return mFolder2Object(src), nil
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) list(dir model.Obj) ([]model.Obj, error) {
|
|
||||||
files, _ := d.getFiles(dir)
|
|
||||||
folders, _ := d.getFolders(dir)
|
|
||||||
return append(files, folders...), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) link(file model.Obj) (*model.Link, error) {
|
|
||||||
var mFile MFile
|
|
||||||
err := d.request("/files/show", "POST", setBody(map[string]string{"fileId": file.GetID()}), &mFile)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return &model.Link{
|
|
||||||
URL: mFile.URL,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) makeDir(parentDir model.Obj, dirName string) (model.Obj, error) {
|
|
||||||
var folder MFolder
|
|
||||||
err := d.request("/folders/create", "POST", setBody(map[string]interface{}{"parentId": handleFolderId(parentDir), "name": dirName}), &folder)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return mFolder2Object(folder), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) move(srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
if srcObj.IsDir() {
|
|
||||||
var folder MFolder
|
|
||||||
err := d.request("/folders/update", "POST", setBody(map[string]interface{}{"folderId": srcObj.GetID(), "parentId": handleFolderId(dstDir)}), &folder)
|
|
||||||
return mFolder2Object(folder), err
|
|
||||||
} else {
|
|
||||||
var file MFile
|
|
||||||
err := d.request("/files/update", "POST", setBody(map[string]interface{}{"fileId": srcObj.GetID(), "folderId": handleFolderId(dstDir)}), &file)
|
|
||||||
return mFile2Object(file), err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) rename(srcObj model.Obj, newName string) (model.Obj, error) {
|
|
||||||
if srcObj.IsDir() {
|
|
||||||
var folder MFolder
|
|
||||||
err := d.request("/folders/update", "POST", setBody(map[string]string{"folderId": srcObj.GetID(), "name": newName}), &folder)
|
|
||||||
return mFolder2Object(folder), err
|
|
||||||
} else {
|
|
||||||
var file MFile
|
|
||||||
err := d.request("/files/update", "POST", setBody(map[string]string{"fileId": srcObj.GetID(), "name": newName}), &file)
|
|
||||||
return mFile2Object(file), err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) copy(srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
if srcObj.IsDir() {
|
|
||||||
folder, err := d.makeDir(dstDir, srcObj.GetName())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
list, err := d.list(srcObj)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
for _, obj := range list {
|
|
||||||
_, err := d.copy(obj, folder)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return folder, nil
|
|
||||||
} else {
|
|
||||||
var file MFile
|
|
||||||
url, err := d.link(srcObj)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
err = d.request("/files/upload-from-url", "POST", setBody(map[string]interface{}{"url": url.URL, "folderId": handleFolderId(dstDir)}), &file)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return mFile2Object(file), nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) remove(obj model.Obj) error {
|
|
||||||
if obj.IsDir() {
|
|
||||||
err := d.request("/folders/delete", "POST", setBody(map[string]string{"folderId": obj.GetID()}), nil)
|
|
||||||
return err
|
|
||||||
} else {
|
|
||||||
err := d.request("/files/delete", "POST", setBody(map[string]string{"fileId": obj.GetID()}), nil)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Misskey) put(dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
|
||||||
var file MFile
|
|
||||||
|
|
||||||
fileContent, err := io.ReadAll(stream)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
req := base.RestyClient.R().
|
|
||||||
SetFileReader("file", stream.GetName(), io.NopCloser(bytes.NewReader(fileContent))).
|
|
||||||
SetFormData(map[string]string{
|
|
||||||
"folderId": handleFolderId(dstDir).(string),
|
|
||||||
"name": stream.GetName(),
|
|
||||||
"comment": "",
|
|
||||||
"isSensitive": "false",
|
|
||||||
"force": "false",
|
|
||||||
}).
|
|
||||||
SetResult(&file).SetAuthToken(d.AccessToken)
|
|
||||||
|
|
||||||
resp, err := req.Post(d.Endpoint + "/api/drive/files/create")
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if !resp.IsSuccess() {
|
|
||||||
return nil, errors.New(resp.String())
|
|
||||||
}
|
|
||||||
|
|
||||||
return mFile2Object(file), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func mFile2Object(file MFile) *model.ObjThumbURL {
|
|
||||||
ctime, err := time.Parse(time.RFC3339, file.CreatedAt)
|
|
||||||
if err != nil {
|
|
||||||
ctime = time.Time{}
|
|
||||||
}
|
|
||||||
return &model.ObjThumbURL{
|
|
||||||
Object: model.Object{
|
|
||||||
ID: file.ID,
|
|
||||||
Name: file.Name,
|
|
||||||
Ctime: ctime,
|
|
||||||
IsFolder: false,
|
|
||||||
Size: file.Size,
|
|
||||||
},
|
|
||||||
Thumbnail: model.Thumbnail{
|
|
||||||
Thumbnail: file.ThumbnailURL,
|
|
||||||
},
|
|
||||||
Url: model.Url{
|
|
||||||
Url: file.URL,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func mFolder2Object(folder MFolder) *model.Object {
|
|
||||||
ctime, err := time.Parse(time.RFC3339, folder.CreatedAt)
|
|
||||||
if err != nil {
|
|
||||||
ctime = time.Time{}
|
|
||||||
}
|
|
||||||
return &model.Object{
|
|
||||||
ID: folder.ID,
|
|
||||||
Name: folder.Name,
|
|
||||||
Ctime: ctime,
|
|
||||||
IsFolder: true,
|
|
||||||
}
|
|
||||||
}
|
|
@ -64,6 +64,7 @@ func (lrc *LyricObj) getLyricLink() *model.Link {
|
|||||||
sr := io.NewSectionReader(reader, httpRange.Start, httpRange.Length)
|
sr := io.NewSectionReader(reader, httpRange.Start, httpRange.Length)
|
||||||
return io.NopCloser(sr), nil
|
return io.NopCloser(sr), nil
|
||||||
},
|
},
|
||||||
|
Closers: utils.EmptyClosers(),
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -47,7 +47,7 @@ func (u *uploader) init(stream model.FileStreamer) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
h := md5.New()
|
h := md5.New()
|
||||||
utils.CopyWithBuffer(h, stream)
|
io.Copy(h, stream)
|
||||||
u.md5 = hex.EncodeToString(h.Sum(nil))
|
u.md5 = hex.EncodeToString(h.Sum(nil))
|
||||||
_, err := u.file.Seek(0, io.SeekStart)
|
_, err := u.file.Seek(0, io.SeekStart)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -127,7 +127,7 @@ func (d *Onedrive) Request(url string, method string, callback base.ReqCallback,
|
|||||||
|
|
||||||
func (d *Onedrive) getFiles(path string) ([]File, error) {
|
func (d *Onedrive) getFiles(path string) ([]File, error) {
|
||||||
var res []File
|
var res []File
|
||||||
nextLink := d.GetMetaUrl(false, path) + "/children?$top=1000&$expand=thumbnails($select=medium)&$select=id,name,size,fileSystemInfo,content.downloadUrl,file,parentReference"
|
nextLink := d.GetMetaUrl(false, path) + "/children?$top=5000&$expand=thumbnails($select=medium)&$select=id,name,size,fileSystemInfo,content.downloadUrl,file,parentReference"
|
||||||
for nextLink != "" {
|
for nextLink != "" {
|
||||||
var files Files
|
var files Files
|
||||||
_, err := d.Request(nextLink, http.MethodGet, nil, &files)
|
_, err := d.Request(nextLink, http.MethodGet, nil, &files)
|
||||||
|
@ -118,7 +118,7 @@ func (d *OnedriveAPP) Request(url string, method string, callback base.ReqCallba
|
|||||||
|
|
||||||
func (d *OnedriveAPP) getFiles(path string) ([]File, error) {
|
func (d *OnedriveAPP) getFiles(path string) ([]File, error) {
|
||||||
var res []File
|
var res []File
|
||||||
nextLink := d.GetMetaUrl(false, path) + "/children?$top=1000&$expand=thumbnails($select=medium)&$select=id,name,size,lastModifiedDateTime,content.downloadUrl,file,parentReference"
|
nextLink := d.GetMetaUrl(false, path) + "/children?$top=5000&$expand=thumbnails($select=medium)&$select=id,name,size,lastModifiedDateTime,content.downloadUrl,file,parentReference"
|
||||||
for nextLink != "" {
|
for nextLink != "" {
|
||||||
var files Files
|
var files Files
|
||||||
_, err := d.Request(nextLink, http.MethodGet, nil, &files)
|
_, err := d.Request(nextLink, http.MethodGet, nil, &files)
|
||||||
|
@ -12,7 +12,9 @@ import (
|
|||||||
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
|
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
|
"golang.org/x/oauth2"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
"regexp"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
@ -23,6 +25,7 @@ type PikPak struct {
|
|||||||
*Common
|
*Common
|
||||||
RefreshToken string
|
RefreshToken string
|
||||||
AccessToken string
|
AccessToken string
|
||||||
|
oauth2Token oauth2.TokenSource
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) Config() driver.Config {
|
func (d *PikPak) Config() driver.Config {
|
||||||
@ -46,6 +49,7 @@ func (d *PikPak) Init(ctx context.Context) (err error) {
|
|||||||
d.Common.CaptchaToken = token
|
d.Common.CaptchaToken = token
|
||||||
op.MustSaveDriverStorage(d)
|
op.MustSaveDriverStorage(d)
|
||||||
},
|
},
|
||||||
|
LowLatencyAddr: "",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -82,20 +86,45 @@ func (d *PikPak) Init(ctx context.Context) (err error) {
|
|||||||
d.Addition.DeviceID = d.Common.DeviceID
|
d.Addition.DeviceID = d.Common.DeviceID
|
||||||
op.MustSaveDriverStorage(d)
|
op.MustSaveDriverStorage(d)
|
||||||
}
|
}
|
||||||
|
// 初始化 oauth2Config
|
||||||
|
oauth2Config := &oauth2.Config{
|
||||||
|
ClientID: d.ClientID,
|
||||||
|
ClientSecret: d.ClientSecret,
|
||||||
|
Endpoint: oauth2.Endpoint{
|
||||||
|
AuthURL: "https://user.mypikpak.com/v1/auth/signin",
|
||||||
|
TokenURL: "https://user.mypikpak.com/v1/auth/token",
|
||||||
|
AuthStyle: oauth2.AuthStyleInParams,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
// 如果已经有RefreshToken,直接获取AccessToken
|
// 如果已经有RefreshToken,直接获取AccessToken
|
||||||
if d.Addition.RefreshToken != "" {
|
if d.Addition.RefreshToken != "" {
|
||||||
if err = d.refreshToken(d.Addition.RefreshToken); err != nil {
|
if d.RefreshTokenMethod == "oauth2" {
|
||||||
return err
|
// 使用 oauth2 刷新令牌
|
||||||
|
// 初始化 oauth2Token
|
||||||
|
d.initializeOAuth2Token(ctx, oauth2Config, d.Addition.RefreshToken)
|
||||||
|
if err := d.refreshTokenByOAuth2(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err := d.refreshToken(d.Addition.RefreshToken); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
// 如果没有填写RefreshToken,尝试登录 获取 refreshToken
|
// 如果没有填写RefreshToken,尝试登录 获取 refreshToken
|
||||||
if err = d.login(); err != nil {
|
if err := d.login(); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
if d.RefreshTokenMethod == "oauth2" {
|
||||||
|
d.initializeOAuth2Token(ctx, oauth2Config, d.RefreshToken)
|
||||||
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// 获取CaptchaToken
|
// 获取CaptchaToken
|
||||||
err = d.RefreshCaptchaTokenAtLogin(GetAction(http.MethodGet, "https://api-drive.mypikpak.net/drive/v1/files"), d.Common.GetUserID())
|
err = d.RefreshCaptchaTokenAtLogin(GetAction(http.MethodGet, "https://api-drive.mypikpak.com/drive/v1/files"), d.Common.GetUserID())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -109,6 +138,14 @@ func (d *PikPak) Init(ctx context.Context) (err error) {
|
|||||||
d.Addition.RefreshToken = d.RefreshToken
|
d.Addition.RefreshToken = d.RefreshToken
|
||||||
op.MustSaveDriverStorage(d)
|
op.MustSaveDriverStorage(d)
|
||||||
|
|
||||||
|
if d.UseLowLatencyAddress && d.Addition.CustomLowLatencyAddress != "" {
|
||||||
|
d.Common.LowLatencyAddr = d.Addition.CustomLowLatencyAddress
|
||||||
|
} else if d.UseLowLatencyAddress {
|
||||||
|
d.Common.LowLatencyAddr = findLowestLatencyAddress(DlAddr)
|
||||||
|
d.Addition.CustomLowLatencyAddress = d.Common.LowLatencyAddr
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
}
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -137,7 +174,7 @@ func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
if !d.DisableMediaLink {
|
if !d.DisableMediaLink {
|
||||||
queryParams["usage"] = "CACHE"
|
queryParams["usage"] = "CACHE"
|
||||||
}
|
}
|
||||||
_, err := d.request(fmt.Sprintf("https://api-drive.mypikpak.net/drive/v1/files/%s", file.GetID()),
|
_, err := d.request(fmt.Sprintf("https://api-drive.mypikpak.com/drive/v1/files/%s", file.GetID()),
|
||||||
http.MethodGet, func(req *resty.Request) {
|
http.MethodGet, func(req *resty.Request) {
|
||||||
req.SetQueryParams(queryParams)
|
req.SetQueryParams(queryParams)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
@ -151,13 +188,19 @@ func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
url = resp.Medias[0].Link.Url
|
url = resp.Medias[0].Link.Url
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if d.UseLowLatencyAddress && d.Common.LowLatencyAddr != "" {
|
||||||
|
// 替换为加速链接
|
||||||
|
re := regexp.MustCompile(`https://[^/]+/download/`)
|
||||||
|
url = re.ReplaceAllString(url, "https://"+d.Common.LowLatencyAddr+"/download/")
|
||||||
|
}
|
||||||
|
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
URL: url,
|
URL: url,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
func (d *PikPak) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"kind": "drive#folder",
|
"kind": "drive#folder",
|
||||||
"parent_id": parentDir.GetID(),
|
"parent_id": parentDir.GetID(),
|
||||||
@ -168,7 +211,7 @@ func (d *PikPak) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *PikPak) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files:batchMove", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchMove", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"ids": []string{srcObj.GetID()},
|
"ids": []string{srcObj.GetID()},
|
||||||
"to": base.Json{
|
"to": base.Json{
|
||||||
@ -180,7 +223,7 @@ func (d *PikPak) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
func (d *PikPak) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files/"+srcObj.GetID(), http.MethodPatch, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files/"+srcObj.GetID(), http.MethodPatch, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"name": newName,
|
"name": newName,
|
||||||
})
|
})
|
||||||
@ -189,7 +232,7 @@ func (d *PikPak) Rename(ctx context.Context, srcObj model.Obj, newName string) e
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
func (d *PikPak) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files:batchCopy", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchCopy", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"ids": []string{srcObj.GetID()},
|
"ids": []string{srcObj.GetID()},
|
||||||
"to": base.Json{
|
"to": base.Json{
|
||||||
@ -201,7 +244,7 @@ func (d *PikPak) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
|
func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files:batchTrash", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchTrash", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"ids": []string{obj.GetID()},
|
"ids": []string{obj.GetID()},
|
||||||
})
|
})
|
||||||
@ -225,7 +268,7 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
|||||||
}
|
}
|
||||||
|
|
||||||
var resp UploadTaskData
|
var resp UploadTaskData
|
||||||
res, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(base.Json{
|
req.SetBody(base.Json{
|
||||||
"kind": "drive#file",
|
"kind": "drive#file",
|
||||||
"name": stream.GetName(),
|
"name": stream.GetName(),
|
||||||
@ -249,9 +292,9 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
|||||||
|
|
||||||
params := resp.Resumable.Params
|
params := resp.Resumable.Params
|
||||||
//endpoint := strings.Join(strings.Split(params.Endpoint, ".")[1:], ".")
|
//endpoint := strings.Join(strings.Split(params.Endpoint, ".")[1:], ".")
|
||||||
// web 端上传 返回的endpoint 为 `mypikpak.net` | android 端上传 返回的endpoint 为 `vip-lixian-07.mypikpak.net`·
|
// web 端上传 返回的endpoint 为 `mypikpak.com` | android 端上传 返回的endpoint 为 `vip-lixian-07.mypikpak.com`·
|
||||||
if d.Addition.Platform == "android" {
|
if d.Addition.Platform == "android" {
|
||||||
params.Endpoint = "mypikpak.net"
|
params.Endpoint = "mypikpak.com"
|
||||||
}
|
}
|
||||||
|
|
||||||
if stream.GetSize() <= 10*utils.MB { // 文件大小 小于10MB,改用普通模式上传
|
if stream.GetSize() <= 10*utils.MB { // 文件大小 小于10MB,改用普通模式上传
|
||||||
@ -275,7 +318,7 @@ func (d *PikPak) OfflineDownload(ctx context.Context, fileUrl string, parentDir
|
|||||||
}
|
}
|
||||||
|
|
||||||
var resp OfflineDownloadResp
|
var resp OfflineDownloadResp
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetBody(requestBody)
|
req.SetBody(requestBody)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
|
|
||||||
@ -293,7 +336,7 @@ PHASE_TYPE_RUNNING, PHASE_TYPE_ERROR, PHASE_TYPE_COMPLETE, PHASE_TYPE_PENDING
|
|||||||
*/
|
*/
|
||||||
func (d *PikPak) OfflineList(ctx context.Context, nextPageToken string, phase []string) ([]OfflineTask, error) {
|
func (d *PikPak) OfflineList(ctx context.Context, nextPageToken string, phase []string) ([]OfflineTask, error) {
|
||||||
res := make([]OfflineTask, 0)
|
res := make([]OfflineTask, 0)
|
||||||
url := "https://api-drive.mypikpak.net/drive/v1/tasks"
|
url := "https://api-drive.mypikpak.com/drive/v1/tasks"
|
||||||
|
|
||||||
if len(phase) == 0 {
|
if len(phase) == 0 {
|
||||||
phase = []string{"PHASE_TYPE_RUNNING", "PHASE_TYPE_ERROR", "PHASE_TYPE_COMPLETE", "PHASE_TYPE_PENDING"}
|
phase = []string{"PHASE_TYPE_RUNNING", "PHASE_TYPE_ERROR", "PHASE_TYPE_COMPLETE", "PHASE_TYPE_PENDING"}
|
||||||
@ -334,7 +377,7 @@ func (d *PikPak) OfflineList(ctx context.Context, nextPageToken string, phase []
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) DeleteOfflineTasks(ctx context.Context, taskIDs []string, deleteFiles bool) error {
|
func (d *PikPak) DeleteOfflineTasks(ctx context.Context, taskIDs []string, deleteFiles bool) error {
|
||||||
url := "https://api-drive.mypikpak.net/drive/v1/tasks"
|
url := "https://api-drive.mypikpak.com/drive/v1/tasks"
|
||||||
params := map[string]string{
|
params := map[string]string{
|
||||||
"task_ids": strings.Join(taskIDs, ","),
|
"task_ids": strings.Join(taskIDs, ","),
|
||||||
"delete_files": strconv.FormatBool(deleteFiles),
|
"delete_files": strconv.FormatBool(deleteFiles),
|
||||||
|
@ -7,13 +7,16 @@ import (
|
|||||||
|
|
||||||
type Addition struct {
|
type Addition struct {
|
||||||
driver.RootID
|
driver.RootID
|
||||||
Username string `json:"username" required:"true"`
|
Username string `json:"username" required:"true"`
|
||||||
Password string `json:"password" required:"true"`
|
Password string `json:"password" required:"true"`
|
||||||
Platform string `json:"platform" required:"true" default:"web" type:"select" options:"android,web,pc"`
|
Platform string `json:"platform" required:"true" type:"select" options:"android,web,pc"`
|
||||||
RefreshToken string `json:"refresh_token" required:"true" default:""`
|
RefreshToken string `json:"refresh_token" required:"true" default:""`
|
||||||
CaptchaToken string `json:"captcha_token" default:""`
|
RefreshTokenMethod string `json:"refresh_token_method" required:"true" type:"select" options:"oauth2,http"`
|
||||||
DeviceID string `json:"device_id" required:"false" default:""`
|
CaptchaToken string `json:"captcha_token" default:""`
|
||||||
DisableMediaLink bool `json:"disable_media_link" default:"true"`
|
DeviceID string `json:"device_id" required:"false" default:""`
|
||||||
|
DisableMediaLink bool `json:"disable_media_link" default:"true"`
|
||||||
|
UseLowLatencyAddress bool `json:"use_low_latency_address" default:"false"`
|
||||||
|
CustomLowLatencyAddress string `json:"custom_low_latency_address" default:""`
|
||||||
}
|
}
|
||||||
|
|
||||||
var config = driver.Config{
|
var config = driver.Config{
|
||||||
|
@ -2,6 +2,7 @@ package pikpak
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"bytes"
|
"bytes"
|
||||||
|
"context"
|
||||||
"crypto/md5"
|
"crypto/md5"
|
||||||
"crypto/sha1"
|
"crypto/sha1"
|
||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
@ -13,6 +14,7 @@ import (
|
|||||||
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
"github.com/aliyun/aliyun-oss-go-sdk/oss"
|
||||||
jsoniter "github.com/json-iterator/go"
|
jsoniter "github.com/json-iterator/go"
|
||||||
"github.com/pkg/errors"
|
"github.com/pkg/errors"
|
||||||
|
"golang.org/x/oauth2"
|
||||||
"io"
|
"io"
|
||||||
"net/http"
|
"net/http"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
@ -25,35 +27,35 @@ import (
|
|||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// do others that not defined in Driver interface
|
||||||
|
|
||||||
var AndroidAlgorithms = []string{
|
var AndroidAlgorithms = []string{
|
||||||
"7xOq4Z8s",
|
"aDhgaSE3MsjROCmpmsWqP1sJdFJ",
|
||||||
"QE9/9+IQco",
|
"+oaVkqdd8MJuKT+uMr2AYKcd9tdWge3XPEPR2hcePUknd",
|
||||||
"WdX5J9CPLZp",
|
"u/sd2GgT2fTytRcKzGicHodhvIltMntA3xKw2SRv7S48OdnaQIS5mn",
|
||||||
"NmQ5qFAXqH3w984cYhMeC5TJR8j",
|
"2WZiae2QuqTOxBKaaqCNHCW3olu2UImelkDzBn",
|
||||||
"cc44M+l7GDhav",
|
"/vJ3upic39lgmrkX855Qx",
|
||||||
"KxGjo/wHB+Yx8Lf7kMP+/m9I+",
|
"yNc9ruCVMV7pGV7XvFeuLMOcy1",
|
||||||
"wla81BUVSmDkctHDpUT",
|
"4FPq8mT3JQ1jzcVxMVfwFftLQm33M7i",
|
||||||
"c6wMr1sm1WxiR3i8LDAm3W",
|
"xozoy5e3Ea",
|
||||||
"hRLrEQCFNYi0PFPV",
|
|
||||||
"o1J41zIraDtJPNuhBu7Ifb/q3",
|
|
||||||
"U",
|
|
||||||
"RrbZvV0CTu3gaZJ56PVKki4IeP",
|
|
||||||
"NNuRbLckJqUp1Do0YlrKCUP",
|
|
||||||
"UUwnBbipMTvInA0U0E9",
|
|
||||||
"VzGc",
|
|
||||||
}
|
}
|
||||||
|
|
||||||
var WebAlgorithms = []string{
|
var WebAlgorithms = []string{
|
||||||
"fyZ4+p77W1U4zcWBUwefAIFhFxvADWtT1wzolCxhg9q7etmGUjXr",
|
"C9qPpZLN8ucRTaTiUMWYS9cQvWOE",
|
||||||
"uSUX02HYJ1IkyLdhINEFcCf7l2",
|
"+r6CQVxjzJV6LCV",
|
||||||
"iWt97bqD/qvjIaPXB2Ja5rsBWtQtBZZmaHH2rMR41",
|
"F",
|
||||||
"3binT1s/5a1pu3fGsN",
|
"pFJRC",
|
||||||
"8YCCU+AIr7pg+yd7CkQEY16lDMwi8Rh4WNp5",
|
"9WXYIDGrwTCz2OiVlgZa90qpECPD6olt",
|
||||||
"DYS3StqnAEKdGddRP8CJrxUSFh",
|
"/750aCr4lm/Sly/c",
|
||||||
"crquW+4",
|
"RB+DT/gZCrbV",
|
||||||
"ryKqvW9B9hly+JAymXCIfag5Z",
|
"",
|
||||||
"Hr08T/NDTX1oSJfHk90c",
|
"CyLsf7hdkIRxRm215hl",
|
||||||
"i",
|
"7xHvLi2tOYP0Y92b",
|
||||||
|
"ZGTXXxu8E/MIWaEDB+Sm/",
|
||||||
|
"1UI3",
|
||||||
|
"E7fP5Pfijd+7K+t6Tg/NhuLq0eEUVChpJSkrKxpO",
|
||||||
|
"ihtqpG6FMt65+Xk+tWUH2",
|
||||||
|
"NhXXU9rg4XXdzo7u5o",
|
||||||
}
|
}
|
||||||
|
|
||||||
var PCAlgorithms = []string{
|
var PCAlgorithms = []string{
|
||||||
@ -78,13 +80,13 @@ const (
|
|||||||
const (
|
const (
|
||||||
AndroidClientID = "YNxT9w7GMdWvEOKa"
|
AndroidClientID = "YNxT9w7GMdWvEOKa"
|
||||||
AndroidClientSecret = "dbw2OtmVEeuUvIptb1Coyg"
|
AndroidClientSecret = "dbw2OtmVEeuUvIptb1Coyg"
|
||||||
AndroidClientVersion = "1.49.3"
|
AndroidClientVersion = "1.48.3"
|
||||||
AndroidPackageName = "com.pikcloud.pikpak"
|
AndroidPackageName = "com.pikcloud.pikpak"
|
||||||
AndroidSdkVersion = "2.0.4.204101"
|
AndroidSdkVersion = "2.0.4.204101"
|
||||||
WebClientID = "YUMx5nI8ZU8Ap8pm"
|
WebClientID = "YUMx5nI8ZU8Ap8pm"
|
||||||
WebClientSecret = "dbw2OtmVEeuUvIptb1Coyg"
|
WebClientSecret = "dbw2OtmVEeuUvIptb1Coyg"
|
||||||
WebClientVersion = "undefined"
|
WebClientVersion = "2.0.0"
|
||||||
WebPackageName = "drive.mypikpak.com"
|
WebPackageName = "mypikpak.com"
|
||||||
WebSdkVersion = "8.0.3"
|
WebSdkVersion = "8.0.3"
|
||||||
PCClientID = "YvtoWO6GNHiuCl7x"
|
PCClientID = "YvtoWO6GNHiuCl7x"
|
||||||
PCClientSecret = "1NIH5R1IEe2pAxZE3hv3uA"
|
PCClientSecret = "1NIH5R1IEe2pAxZE3hv3uA"
|
||||||
@ -93,13 +95,51 @@ const (
|
|||||||
PCSdkVersion = "8.0.3"
|
PCSdkVersion = "8.0.3"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
var DlAddr = []string{
|
||||||
|
"dl-a10b-0621.mypikpak.com",
|
||||||
|
"dl-a10b-0622.mypikpak.com",
|
||||||
|
"dl-a10b-0623.mypikpak.com",
|
||||||
|
"dl-a10b-0624.mypikpak.com",
|
||||||
|
"dl-a10b-0625.mypikpak.com",
|
||||||
|
"dl-a10b-0858.mypikpak.com",
|
||||||
|
"dl-a10b-0859.mypikpak.com",
|
||||||
|
"dl-a10b-0860.mypikpak.com",
|
||||||
|
"dl-a10b-0861.mypikpak.com",
|
||||||
|
"dl-a10b-0862.mypikpak.com",
|
||||||
|
"dl-a10b-0863.mypikpak.com",
|
||||||
|
"dl-a10b-0864.mypikpak.com",
|
||||||
|
"dl-a10b-0865.mypikpak.com",
|
||||||
|
"dl-a10b-0866.mypikpak.com",
|
||||||
|
"dl-a10b-0867.mypikpak.com",
|
||||||
|
"dl-a10b-0868.mypikpak.com",
|
||||||
|
"dl-a10b-0869.mypikpak.com",
|
||||||
|
"dl-a10b-0870.mypikpak.com",
|
||||||
|
"dl-a10b-0871.mypikpak.com",
|
||||||
|
"dl-a10b-0872.mypikpak.com",
|
||||||
|
"dl-a10b-0873.mypikpak.com",
|
||||||
|
"dl-a10b-0874.mypikpak.com",
|
||||||
|
"dl-a10b-0875.mypikpak.com",
|
||||||
|
"dl-a10b-0876.mypikpak.com",
|
||||||
|
"dl-a10b-0877.mypikpak.com",
|
||||||
|
"dl-a10b-0878.mypikpak.com",
|
||||||
|
"dl-a10b-0879.mypikpak.com",
|
||||||
|
"dl-a10b-0880.mypikpak.com",
|
||||||
|
"dl-a10b-0881.mypikpak.com",
|
||||||
|
"dl-a10b-0882.mypikpak.com",
|
||||||
|
"dl-a10b-0883.mypikpak.com",
|
||||||
|
"dl-a10b-0884.mypikpak.com",
|
||||||
|
"dl-a10b-0885.mypikpak.com",
|
||||||
|
"dl-a10b-0886.mypikpak.com",
|
||||||
|
"dl-a10b-0887.mypikpak.com",
|
||||||
|
}
|
||||||
|
|
||||||
func (d *PikPak) login() error {
|
func (d *PikPak) login() error {
|
||||||
// 检查用户名和密码是否为空
|
// 检查用户名和密码是否为空
|
||||||
if d.Addition.Username == "" || d.Addition.Password == "" {
|
if d.Addition.Username == "" || d.Addition.Password == "" {
|
||||||
return errors.New("username or password is empty")
|
return errors.New("username or password is empty")
|
||||||
}
|
}
|
||||||
|
|
||||||
url := "https://user.mypikpak.net/v1/auth/signin"
|
url := "https://user.mypikpak.com/v1/auth/signin"
|
||||||
// 使用 用户填写的 CaptchaToken —————— (验证后的captcha_token)
|
// 使用 用户填写的 CaptchaToken —————— (验证后的captcha_token)
|
||||||
if d.GetCaptchaToken() == "" {
|
if d.GetCaptchaToken() == "" {
|
||||||
if err := d.RefreshCaptchaTokenInLogin(GetAction(http.MethodPost, url), d.Username); err != nil {
|
if err := d.RefreshCaptchaTokenInLogin(GetAction(http.MethodPost, url), d.Username); err != nil {
|
||||||
@ -129,7 +169,7 @@ func (d *PikPak) login() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *PikPak) refreshToken(refreshToken string) error {
|
func (d *PikPak) refreshToken(refreshToken string) error {
|
||||||
url := "https://user.mypikpak.net/v1/auth/token"
|
url := "https://user.mypikpak.com/v1/auth/token"
|
||||||
var e ErrResp
|
var e ErrResp
|
||||||
res, err := base.RestyClient.SetRetryCount(1).R().SetError(&e).
|
res, err := base.RestyClient.SetRetryCount(1).R().SetError(&e).
|
||||||
SetHeader("user-agent", "").SetBody(base.Json{
|
SetHeader("user-agent", "").SetBody(base.Json{
|
||||||
@ -167,6 +207,30 @@ func (d *PikPak) refreshToken(refreshToken string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *PikPak) initializeOAuth2Token(ctx context.Context, oauth2Config *oauth2.Config, refreshToken string) {
|
||||||
|
d.oauth2Token = oauth2.ReuseTokenSource(nil, utils.TokenSource(func() (*oauth2.Token, error) {
|
||||||
|
return oauth2Config.TokenSource(ctx, &oauth2.Token{
|
||||||
|
RefreshToken: refreshToken,
|
||||||
|
}).Token()
|
||||||
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *PikPak) refreshTokenByOAuth2() error {
|
||||||
|
token, err := d.oauth2Token.Token()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
d.Status = "work"
|
||||||
|
d.RefreshToken = token.RefreshToken
|
||||||
|
d.AccessToken = token.AccessToken
|
||||||
|
// 获取用户ID
|
||||||
|
userID := token.Extra("sub").(string)
|
||||||
|
d.Common.SetUserID(userID)
|
||||||
|
d.Addition.RefreshToken = d.RefreshToken
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func (d *PikPak) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
func (d *PikPak) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
req := base.RestyClient.R()
|
req := base.RestyClient.R()
|
||||||
req.SetHeaders(map[string]string{
|
req.SetHeaders(map[string]string{
|
||||||
@ -175,7 +239,14 @@ func (d *PikPak) request(url string, method string, callback base.ReqCallback, r
|
|||||||
"X-Device-ID": d.GetDeviceID(),
|
"X-Device-ID": d.GetDeviceID(),
|
||||||
"X-Captcha-Token": d.GetCaptchaToken(),
|
"X-Captcha-Token": d.GetCaptchaToken(),
|
||||||
})
|
})
|
||||||
if d.AccessToken != "" {
|
if d.RefreshTokenMethod == "oauth2" && d.oauth2Token != nil {
|
||||||
|
// 使用oauth2 获取 access_token
|
||||||
|
token, err := d.oauth2Token.Token()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)
|
||||||
|
} else if d.AccessToken != "" {
|
||||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -197,9 +268,16 @@ func (d *PikPak) request(url string, method string, callback base.ReqCallback, r
|
|||||||
return res.Body(), nil
|
return res.Body(), nil
|
||||||
case 4122, 4121, 16:
|
case 4122, 4121, 16:
|
||||||
// access_token 过期
|
// access_token 过期
|
||||||
if err1 := d.refreshToken(d.RefreshToken); err1 != nil {
|
if d.RefreshTokenMethod == "oauth2" {
|
||||||
return nil, err1
|
if err1 := d.refreshTokenByOAuth2(); err1 != nil {
|
||||||
|
return nil, err1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if err1 := d.refreshToken(d.RefreshToken); err1 != nil {
|
||||||
|
return nil, err1
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return d.request(url, method, callback, resp)
|
return d.request(url, method, callback, resp)
|
||||||
case 9: // 验证码token过期
|
case 9: // 验证码token过期
|
||||||
if err = d.RefreshCaptchaTokenAtLogin(GetAction(method, url), d.GetUserID()); err != nil {
|
if err = d.RefreshCaptchaTokenAtLogin(GetAction(method, url), d.GetUserID()); err != nil {
|
||||||
@ -229,7 +307,7 @@ func (d *PikPak) getFiles(id string) ([]File, error) {
|
|||||||
"page_token": pageToken,
|
"page_token": pageToken,
|
||||||
}
|
}
|
||||||
var resp Files
|
var resp Files
|
||||||
_, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodGet, func(req *resty.Request) {
|
_, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodGet, func(req *resty.Request) {
|
||||||
req.SetQueryParams(query)
|
req.SetQueryParams(query)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -260,6 +338,7 @@ type Common struct {
|
|||||||
UserAgent string
|
UserAgent string
|
||||||
// 验证码token刷新成功回调
|
// 验证码token刷新成功回调
|
||||||
RefreshCTokenCk func(token string)
|
RefreshCTokenCk func(token string)
|
||||||
|
LowLatencyAddr string
|
||||||
}
|
}
|
||||||
|
|
||||||
func generateDeviceSign(deviceID, packageName string) string {
|
func generateDeviceSign(deviceID, packageName string) string {
|
||||||
@ -394,7 +473,7 @@ func (d *PikPak) refreshCaptchaToken(action string, metas map[string]string) err
|
|||||||
}
|
}
|
||||||
var e ErrResp
|
var e ErrResp
|
||||||
var resp CaptchaTokenResponse
|
var resp CaptchaTokenResponse
|
||||||
_, err := d.request("https://user.mypikpak.net/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
|
_, err := d.request("https://user.mypikpak.com/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
|
||||||
req.SetError(&e).SetBody(param).SetQueryParam("client_id", d.ClientID)
|
req.SetError(&e).SetBody(param).SetQueryParam("client_id", d.ClientID)
|
||||||
}, &resp)
|
}, &resp)
|
||||||
|
|
||||||
@@ -650,3 +729,46 @@ func OssOption(params *S3Params) []oss.Option {
 		}
 	}
 	return options
 }
+
+type AddressLatency struct {
+	Address string
+	Latency time.Duration
+}
+
+func checkLatency(address string, wg *sync.WaitGroup, ch chan<- AddressLatency) {
+	defer wg.Done()
+	start := time.Now()
+	resp, err := http.Get("https://" + address + "/generate_204")
+	if err != nil {
+		ch <- AddressLatency{Address: address, Latency: time.Hour} // Set high latency on error
+		return
+	}
+	defer resp.Body.Close()
+	latency := time.Since(start)
+	ch <- AddressLatency{Address: address, Latency: latency}
+}
+
+func findLowestLatencyAddress(addresses []string) string {
+	var wg sync.WaitGroup
+	ch := make(chan AddressLatency, len(addresses))
+
+	for _, address := range addresses {
+		wg.Add(1)
+		go checkLatency(address, &wg, ch)
+	}
+
+	wg.Wait()
+	close(ch)
+
+	var lowestLatencyAddress string
+	lowestLatency := time.Hour
+
+	for result := range ch {
+		if result.Latency < lowestLatency {
+			lowestLatency = result.Latency
+			lowestLatencyAddress = result.Address
+		}
+	}
+
+	return lowestLatencyAddress
+}
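Note that the checkLatency helper added above probes each host with the default HTTP client, which has no timeout, so a single unreachable mirror can stall driver initialization. The following standalone sketch shows the same fan-out-and-pick-the-minimum idea with an explicit per-probe timeout; hostnames and the timeout value are made up, not taken from this diff.

package main

import (
	"fmt"
	"net/http"
	"sync"
	"time"
)

type probeResult struct {
	Host    string
	Latency time.Duration
}

// fastestHost probes every host concurrently and returns the one that answered
// https://<host>/generate_204 fastest; unreachable hosts get a one-hour sentinel.
func fastestHost(hosts []string, timeout time.Duration) string {
	client := &http.Client{Timeout: timeout} // bound each probe instead of relying on the default client
	ch := make(chan probeResult, len(hosts))
	var wg sync.WaitGroup

	for _, h := range hosts {
		wg.Add(1)
		go func(host string) {
			defer wg.Done()
			start := time.Now()
			resp, err := client.Get("https://" + host + "/generate_204")
			if err != nil {
				ch <- probeResult{Host: host, Latency: time.Hour} // sentinel, same idea as the diff
				return
			}
			resp.Body.Close()
			ch <- probeResult{Host: host, Latency: time.Since(start)}
		}(h)
	}
	wg.Wait()
	close(ch)

	best, bestLatency := "", time.Hour
	for r := range ch {
		if r.Latency < bestLatency {
			best, bestLatency = r.Host, r.Latency
		}
	}
	return best
}

func main() {
	// Hypothetical hosts; in the driver the candidates come from DlAddr.
	fmt.Println(fastestHost([]string{"example.com", "example.org"}, 3*time.Second))
}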
@@ -4,6 +4,7 @@ import (
 	"context"
 	"github.com/alist-org/alist/v3/internal/op"
 	"net/http"
+	"regexp"
 	"time"
 
 	"github.com/alist-org/alist/v3/internal/driver"
@@ -36,6 +37,7 @@ func (d *PikPakShare) Init(ctx context.Context) error {
 			d.Common.CaptchaToken = token
 			op.MustSaveDriverStorage(d)
 		},
+		LowLatencyAddr: "",
 	}
 }
 
@@ -69,8 +71,16 @@ func (d *PikPakShare) Init(ctx context.Context) error {
 		d.UserAgent = "MainWindow Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) PikPak/2.5.6.4831 Chrome/100.0.4896.160 Electron/18.3.15 Safari/537.36"
 	}
 
+	if d.UseLowLatencyAddress && d.Addition.CustomLowLatencyAddress != "" {
+		d.Common.LowLatencyAddr = d.Addition.CustomLowLatencyAddress
+	} else if d.UseLowLatencyAddress {
+		d.Common.LowLatencyAddr = findLowestLatencyAddress(DlAddr)
+		d.Addition.CustomLowLatencyAddress = d.Common.LowLatencyAddr
+		op.MustSaveDriverStorage(d)
+	}
+
 	// fetch the CaptchaToken
-	err := d.RefreshCaptchaToken(GetAction(http.MethodGet, "https://api-drive.mypikpak.net/drive/v1/share:batch_file_info"), "")
+	err := d.RefreshCaptchaToken(GetAction(http.MethodGet, "https://api-drive.mypikpak.com/drive/v1/share:batch_file_info"), "")
 	if err != nil {
 		return err
 	}
@@ -103,7 +113,7 @@ func (d *PikPakShare) Link(ctx context.Context, file model.Obj, args model.LinkA
 		"file_id":         file.GetID(),
 		"pass_code_token": d.PassCodeToken,
 	}
-	_, err := d.request("https://api-drive.mypikpak.net/drive/v1/share/file_info", http.MethodGet, func(req *resty.Request) {
+	_, err := d.request("https://api-drive.mypikpak.com/drive/v1/share/file_info", http.MethodGet, func(req *resty.Request) {
 		req.SetQueryParams(query)
 	}, &resp)
 	if err != nil {
@@ -121,6 +131,12 @@ func (d *PikPakShare) Link(ctx context.Context, file model.Obj, args model.LinkA
 
 	}
 
+	if d.UseLowLatencyAddress && d.Common.LowLatencyAddr != "" {
+		// swap in the low-latency (accelerated) download host
+		re := regexp.MustCompile(`https://[^/]+/download/`)
+		downloadUrl = re.ReplaceAllString(downloadUrl, "https://"+d.Common.LowLatencyAddr+"/download/")
+	}
+
 	return &model.Link{
 		URL: downloadUrl,
 	}, nil
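The host swap added above is a plain regexp replacement on the scheme-plus-host prefix of the download URL, leaving the path and query untouched. A standalone example of the same rewrite, using made-up hostnames:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	downloadUrl := "https://dl-example-0001.mypikpak.com/download/some/file.bin?sig=abc" // made-up URL
	lowLatencyAddr := "dl-example-0002.mypikpak.com"                                     // made-up host

	// Same pattern as the diff: match "https://<any host>/download/" and replace the host.
	re := regexp.MustCompile(`https://[^/]+/download/`)
	rewritten := re.ReplaceAllString(downloadUrl, "https://"+lowLatencyAddr+"/download/")

	fmt.Println(rewritten)
	// Output: https://dl-example-0002.mypikpak.com/download/some/file.bin?sig=abc
}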
@@ -7,11 +7,13 @@ import (
 
 type Addition struct {
 	driver.RootID
 	ShareId                 string `json:"share_id" required:"true"`
 	SharePwd                string `json:"share_pwd"`
-	Platform                string `json:"platform" default:"web" required:"true" type:"select" options:"android,web,pc"`
+	Platform                string `json:"platform" required:"true" type:"select" options:"android,web,pc"`
 	DeviceID                string `json:"device_id" required:"false" default:""`
 	UseTransCodingAddress   bool   `json:"use_transcoding_address" required:"true" default:"false"`
+	UseLowLatencyAddress    bool   `json:"use_low_latency_address" default:"false"`
+	CustomLowLatencyAddress string `json:"custom_low_latency_address" default:""`
 }
 
 var config = driver.Config{
@@ -10,6 +10,7 @@ import (
 	"net/http"
 	"regexp"
 	"strings"
+	"sync"
 	"time"
 
 	"github.com/alist-org/alist/v3/drivers/base"
@@ -17,34 +18,32 @@ import (
 )
 
 var AndroidAlgorithms = []string{
-	"7xOq4Z8s",
-	"QE9/9+IQco",
-	"WdX5J9CPLZp",
-	"NmQ5qFAXqH3w984cYhMeC5TJR8j",
-	"cc44M+l7GDhav",
-	"KxGjo/wHB+Yx8Lf7kMP+/m9I+",
-	"wla81BUVSmDkctHDpUT",
-	"c6wMr1sm1WxiR3i8LDAm3W",
-	"hRLrEQCFNYi0PFPV",
-	"o1J41zIraDtJPNuhBu7Ifb/q3",
-	"U",
-	"RrbZvV0CTu3gaZJ56PVKki4IeP",
-	"NNuRbLckJqUp1Do0YlrKCUP",
-	"UUwnBbipMTvInA0U0E9",
-	"VzGc",
+	"aDhgaSE3MsjROCmpmsWqP1sJdFJ",
+	"+oaVkqdd8MJuKT+uMr2AYKcd9tdWge3XPEPR2hcePUknd",
+	"u/sd2GgT2fTytRcKzGicHodhvIltMntA3xKw2SRv7S48OdnaQIS5mn",
+	"2WZiae2QuqTOxBKaaqCNHCW3olu2UImelkDzBn",
+	"/vJ3upic39lgmrkX855Qx",
+	"yNc9ruCVMV7pGV7XvFeuLMOcy1",
+	"4FPq8mT3JQ1jzcVxMVfwFftLQm33M7i",
+	"xozoy5e3Ea",
 }
 
 var WebAlgorithms = []string{
-	"fyZ4+p77W1U4zcWBUwefAIFhFxvADWtT1wzolCxhg9q7etmGUjXr",
-	"uSUX02HYJ1IkyLdhINEFcCf7l2",
-	"iWt97bqD/qvjIaPXB2Ja5rsBWtQtBZZmaHH2rMR41",
-	"3binT1s/5a1pu3fGsN",
-	"8YCCU+AIr7pg+yd7CkQEY16lDMwi8Rh4WNp5",
-	"DYS3StqnAEKdGddRP8CJrxUSFh",
-	"crquW+4",
-	"ryKqvW9B9hly+JAymXCIfag5Z",
-	"Hr08T/NDTX1oSJfHk90c",
-	"i",
+	"C9qPpZLN8ucRTaTiUMWYS9cQvWOE",
+	"+r6CQVxjzJV6LCV",
+	"F",
+	"pFJRC",
+	"9WXYIDGrwTCz2OiVlgZa90qpECPD6olt",
+	"/750aCr4lm/Sly/c",
+	"RB+DT/gZCrbV",
+	"",
+	"CyLsf7hdkIRxRm215hl",
+	"7xHvLi2tOYP0Y92b",
+	"ZGTXXxu8E/MIWaEDB+Sm/",
+	"1UI3",
+	"E7fP5Pfijd+7K+t6Tg/NhuLq0eEUVChpJSkrKxpO",
+	"ihtqpG6FMt65+Xk+tWUH2",
+	"NhXXU9rg4XXdzo7u5o",
 }
 
 var PCAlgorithms = []string{
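These salt lists track the PikPak client version rather than any documented API, which is why the whole block changes at once. How the driver consumes them is not part of this diff; purely as a rough illustration, salts like these are commonly folded into a request signature by iterated MD5 over the client and device fields. Everything in the sketch below (seed layout, field order, the "1." prefix) is an assumption.

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

func md5Hex(s string) string {
	sum := md5.Sum([]byte(s))
	return hex.EncodeToString(sum[:])
}

func main() {
	// All inputs are placeholders; the real seed format is not shown in this diff.
	salts := []string{"saltA", "saltB", "saltC"}
	seed := "clientID" + "clientVersion" + "packageName" + "deviceID" + "1700000000000"

	// Fold each salt into the running digest.
	sign := seed
	for _, salt := range salts {
		sign = md5Hex(sign + salt)
	}
	fmt.Println("1." + sign) // the "1." version prefix is also an assumption
}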
@@ -63,13 +62,13 @@ var PCAlgorithms = []string{
 const (
 	AndroidClientID      = "YNxT9w7GMdWvEOKa"
 	AndroidClientSecret  = "dbw2OtmVEeuUvIptb1Coyg"
-	AndroidClientVersion = "1.49.3"
+	AndroidClientVersion = "1.48.3"
 	AndroidPackageName   = "com.pikcloud.pikpak"
 	AndroidSdkVersion    = "2.0.4.204101"
 	WebClientID          = "YUMx5nI8ZU8Ap8pm"
 	WebClientSecret      = "dbw2OtmVEeuUvIptb1Coyg"
-	WebClientVersion     = "undefined"
-	WebPackageName       = "drive.mypikpak.com"
+	WebClientVersion     = "2.0.0"
+	WebPackageName       = "mypikpak.com"
 	WebSdkVersion        = "8.0.3"
 	PCClientID           = "YvtoWO6GNHiuCl7x"
 	PCClientSecret       = "1NIH5R1IEe2pAxZE3hv3uA"
@@ -78,6 +77,44 @@ const (
 	PCSdkVersion = "8.0.3"
 )
 
+var DlAddr = []string{
+	"dl-a10b-0621.mypikpak.com",
+	"dl-a10b-0622.mypikpak.com",
+	"dl-a10b-0623.mypikpak.com",
+	"dl-a10b-0624.mypikpak.com",
+	"dl-a10b-0625.mypikpak.com",
+	"dl-a10b-0858.mypikpak.com",
+	"dl-a10b-0859.mypikpak.com",
+	"dl-a10b-0860.mypikpak.com",
+	"dl-a10b-0861.mypikpak.com",
+	"dl-a10b-0862.mypikpak.com",
+	"dl-a10b-0863.mypikpak.com",
+	"dl-a10b-0864.mypikpak.com",
+	"dl-a10b-0865.mypikpak.com",
+	"dl-a10b-0866.mypikpak.com",
+	"dl-a10b-0867.mypikpak.com",
+	"dl-a10b-0868.mypikpak.com",
+	"dl-a10b-0869.mypikpak.com",
+	"dl-a10b-0870.mypikpak.com",
+	"dl-a10b-0871.mypikpak.com",
+	"dl-a10b-0872.mypikpak.com",
+	"dl-a10b-0873.mypikpak.com",
+	"dl-a10b-0874.mypikpak.com",
+	"dl-a10b-0875.mypikpak.com",
+	"dl-a10b-0876.mypikpak.com",
+	"dl-a10b-0877.mypikpak.com",
+	"dl-a10b-0878.mypikpak.com",
+	"dl-a10b-0879.mypikpak.com",
+	"dl-a10b-0880.mypikpak.com",
+	"dl-a10b-0881.mypikpak.com",
+	"dl-a10b-0882.mypikpak.com",
+	"dl-a10b-0883.mypikpak.com",
+	"dl-a10b-0884.mypikpak.com",
+	"dl-a10b-0885.mypikpak.com",
+	"dl-a10b-0886.mypikpak.com",
+	"dl-a10b-0887.mypikpak.com",
+}
+
 func (d *PikPakShare) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
 	req := base.RestyClient.R()
 	req.SetHeaders(map[string]string{
@@ -122,7 +159,7 @@ func (d *PikPakShare) getSharePassToken() error {
 		"limit":           "100",
 	}
 	var resp ShareResp
-	_, err := d.request("https://api-drive.mypikpak.net/drive/v1/share", http.MethodGet, func(req *resty.Request) {
+	_, err := d.request("https://api-drive.mypikpak.com/drive/v1/share", http.MethodGet, func(req *resty.Request) {
 		req.SetQueryParams(query)
 	}, &resp)
 	if err != nil {
@@ -150,7 +187,7 @@ func (d *PikPakShare) getFiles(id string) ([]File, error) {
 		"pass_code_token": d.PassCodeToken,
 	}
 	var resp ShareResp
-	_, err := d.request("https://api-drive.mypikpak.net/drive/v1/share/detail", http.MethodGet, func(req *resty.Request) {
+	_, err := d.request("https://api-drive.mypikpak.com/drive/v1/share/detail", http.MethodGet, func(req *resty.Request) {
 		req.SetQueryParams(query)
 	}, &resp)
 	if err != nil {
@@ -190,6 +227,7 @@ type Common struct {
 	UserAgent string
 	// callback invoked after the captcha token is refreshed successfully
 	RefreshCTokenCk func(token string)
+	LowLatencyAddr  string
 }
 
 func (c *Common) SetUserAgent(userAgent string) {
@@ -307,7 +345,7 @@ func (d *PikPakShare) refreshCaptchaToken(action string, metas map[string]string
 	}
 	var e ErrResp
 	var resp CaptchaTokenResponse
-	_, err := d.request("https://user.mypikpak.net/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
+	_, err := d.request("https://user.mypikpak.com/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
 		req.SetError(&e).SetBody(param)
 	}, &resp)
 
@@ -329,3 +367,46 @@ func (d *PikPakShare) refreshCaptchaToken(action string, metas map[string]string
 	d.Common.SetCaptchaToken(resp.CaptchaToken)
 	return nil
 }
+
+type AddressLatency struct {
+	Address string
+	Latency time.Duration
+}
+
+func checkLatency(address string, wg *sync.WaitGroup, ch chan<- AddressLatency) {
+	defer wg.Done()
+	start := time.Now()
+	resp, err := http.Get("https://" + address + "/generate_204")
+	if err != nil {
+		ch <- AddressLatency{Address: address, Latency: time.Hour} // Set high latency on error
+		return
+	}
+	defer resp.Body.Close()
+	latency := time.Since(start)
+	ch <- AddressLatency{Address: address, Latency: latency}
+}
+
+func findLowestLatencyAddress(addresses []string) string {
+	var wg sync.WaitGroup
+	ch := make(chan AddressLatency, len(addresses))
+
+	for _, address := range addresses {
+		wg.Add(1)
+		go checkLatency(address, &wg, ch)
+	}
+
+	wg.Wait()
+	close(ch)
+
+	var lowestLatencyAddress string
+	lowestLatency := time.Hour
+
+	for result := range ch {
+		if result.Latency < lowestLatency {
+			lowestLatency = result.Latency
+			lowestLatencyAddress = result.Address
+		}
+	}
+
+	return lowestLatencyAddress
+}
@@ -300,7 +300,9 @@ func (d *Quqi) linkFromCDN(id string) (*model.Link, error) {
 		bufferReader := bufio.NewReader(decryptReader)
 		bufferReader.Discard(int(decryptedOffset))
 
-		return io.NopCloser(bufferReader), nil
+		return utils.NewReadCloser(bufferReader, func() error {
+			return nil
+		}), nil
 	}
 
 	return &model.Link{
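The Quqi change above swaps io.NopCloser for utils.NewReadCloser so that a real close callback can run when the returned body is closed. The alist helper itself is not shown in this diff; the underlying pattern is just a small io.ReadCloser wrapper, sketched below with hypothetical names.

package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// readCloser pairs any io.Reader with an explicit close callback,
// which io.NopCloser cannot do.
type readCloser struct {
	io.Reader
	closeFn func() error
}

func (r readCloser) Close() error { return r.closeFn() }

func newReadCloser(r io.Reader, closeFn func() error) io.ReadCloser {
	return readCloser{Reader: r, closeFn: closeFn}
}

func main() {
	br := bufio.NewReader(strings.NewReader("decrypted stream"))
	rc := newReadCloser(br, func() error {
		fmt.Println("cleanup runs here (e.g. closing the upstream response)")
		return nil
	})
	data, _ := io.ReadAll(rc)
	fmt.Println(string(data))
	rc.Close()
}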
@@ -99,12 +99,8 @@ func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*mo
 	var link model.Link
 	var err error
 	if d.CustomHost != "" {
-		if d.EnableCustomHostPresign {
-			link.URL, err = req.Presign(time.Hour * time.Duration(d.SignURLExpire))
-		} else {
-			err = req.Build()
-			link.URL = req.HTTPRequest.URL.String()
-		}
+		err = req.Build()
+		link.URL = req.HTTPRequest.URL.String()
 		if d.RemoveBucket {
 			link.URL = strings.Replace(link.URL, "/"+d.Bucket, "", 1)
 		}
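For context on the S3 hunk above: with aws-sdk-go v1, the two paths differ in whether the request is presigned (Presign embeds a signature and expiry in the URL) or merely built (the raw endpoint URL, which is enough when a custom public host needs no signature). A minimal sketch of both, with placeholder bucket, key, and region, and credentials resolved from the environment:

package main

import (
	"fmt"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	sess := session.Must(session.NewSession(&aws.Config{Region: aws.String("us-east-1")}))
	svc := s3.New(sess)
	input := &s3.GetObjectInput{
		Bucket: aws.String("my-bucket"),      // placeholder
		Key:    aws.String("path/to/object"), // placeholder
	}

	// Presigned URL with an expiry (what the removed EnableCustomHostPresign branch produced).
	signReq, _ := svc.GetObjectRequest(input)
	signedURL, err := signReq.Presign(4 * time.Hour)
	if err != nil {
		fmt.Println("presign failed:", err)
		return
	}
	fmt.Println(signedURL)

	// Unsigned URL: build the request and read back its raw URL,
	// which is what the remaining code path keeps before host rewriting.
	rawReq, _ := svc.GetObjectRequest(input)
	if err := rawReq.Build(); err != nil {
		fmt.Println("build failed:", err)
		return
	}
	fmt.Println(rawReq.HTTPRequest.URL.String())
}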
@@ -14,7 +14,6 @@ type Addition struct {
 	SecretAccessKey string `json:"secret_access_key" required:"true"`
 	SessionToken    string `json:"session_token"`
 	CustomHost      string `json:"custom_host"`
-	EnableCustomHostPresign bool `json:"enable_custom_host_presign"`
 	SignURLExpire   int    `json:"sign_url_expire" type:"number" default:"4"`
 	Placeholder     string `json:"placeholder"`
 	ForcePathStyle  bool   `json:"force_path_style"`
@@ -4,7 +4,6 @@ import (
 	"context"
 	"errors"
 	"net/http"
-	"net/url"
 	"path"
 	"strings"
 
@@ -199,7 +198,7 @@ func (d *S3) copyFile(ctx context.Context, src string, dst string) error {
 	dstKey := getKey(dst, false)
 	input := &s3.CopyObjectInput{
 		Bucket:     &d.Bucket,
-		CopySource: aws.String(url.PathEscape("/" + d.Bucket + "/" + srcKey)),
+		CopySource: aws.String("/" + d.Bucket + "/" + srcKey),
 		Key:        &dstKey,
 	}
 	_, err := d.client.CopyObject(input)
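One side of this hunk wraps CopySource in url.PathEscape; the other passes the raw key. CopySource travels as the x-amz-copy-source header, so keys containing spaces or non-ASCII characters generally need percent-encoding to form a valid, unambiguous header value. A tiny illustration with a made-up bucket and key:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	bucket, srcKey := "my-bucket", "docs/report 2024.txt" // placeholders

	// PathEscape encodes the space (and the slashes as %2F); S3 accepts a
	// percent-encoded copy source, while a raw space in a header is fragile.
	fmt.Println(url.PathEscape("/" + bucket + "/" + srcKey))
}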
@@ -10,6 +10,7 @@ import (
 	"math"
 	stdpath "path"
 	"strconv"
+	"strings"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/pkg/utils"
@@ -22,9 +23,7 @@ import (
 type Terabox struct {
 	model.Storage
 	Addition
 	JsToken           string
-	url_domain_prefix string
-	base_url          string
 }
 
 func (d *Terabox) Config() driver.Config {
@@ -37,8 +36,6 @@ func (d *Terabox) GetAddition() driver.Additional {
 
 func (d *Terabox) Init(ctx context.Context) error {
 	var resp CheckLoginResp
-	d.base_url = "https://www.terabox.com"
-	d.url_domain_prefix = "jp"
 	_, err := d.get("/api/check/login", nil, &resp)
 	if err != nil {
 		return err
@@ -74,16 +71,7 @@ func (d *Terabox) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
 }
 
 func (d *Terabox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
-	params := map[string]string{
-		"a": "commit",
-	}
-	data := map[string]string{
-		"path":       stdpath.Join(parentDir.GetPath(), dirName),
-		"isdir":      "1",
-		"block_list": "[]",
-	}
-	res, err := d.post_form("/api/create", params, data, nil)
-	log.Debugln(string(res))
+	_, err := d.create(stdpath.Join(parentDir.GetPath(), dirName), 0, 1, "", "")
 	return err
 }
 
@@ -129,61 +117,63 @@ func (d *Terabox) Remove(ctx context.Context, obj model.Obj) error {
 }
 
 func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-	resp, err := base.RestyClient.R().
-		SetContext(ctx).
-		Get("https://" + d.url_domain_prefix + "-data.terabox.com/rest/2.0/pcs/file?method=locateupload")
+	tempFile, err := stream.CacheFullInTempFile()
 	if err != nil {
 		return err
 	}
-	var locateupload_resp LocateUploadResp
-	err = utils.Json.Unmarshal(resp.Body(), &locateupload_resp)
-	if err != nil {
-		log.Debugln(resp)
-		return err
+	var Default int64 = 4 * 1024 * 1024
+	defaultByteData := make([]byte, Default)
+	count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
+	// cal md5
+	h1 := md5.New()
+	h2 := md5.New()
+	block_list := make([]string, 0)
+	left := stream.GetSize()
+	for i := 0; i < count; i++ {
+		byteSize := Default
+		var byteData []byte
+		if left < Default {
+			byteSize = left
+			byteData = make([]byte, byteSize)
+		} else {
+			byteData = defaultByteData
+		}
+		left -= byteSize
+		_, err = io.ReadFull(tempFile, byteData)
+		if err != nil {
+			return err
+		}
+		h1.Write(byteData)
+		h2.Write(byteData)
+		block_list = append(block_list, fmt.Sprintf("\"%s\"", hex.EncodeToString(h2.Sum(nil))))
+		h2.Reset()
+	}
+
+	_, err = tempFile.Seek(0, io.SeekStart)
+	if err != nil {
+		return err
 	}
-	log.Debugln(locateupload_resp)
 
-	// precreate file
 	rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
 	path := encodeURIComponent(rawPath)
-	var precreateBlockListStr string
-	if stream.GetSize() > initialChunkSize {
-		precreateBlockListStr = `["5910a591dd8fc18c32a8f3df4fdc1761","a5fc157d78e6ad1c7e114b056c92821e"]`
-	} else {
-		precreateBlockListStr = `["5910a591dd8fc18c32a8f3df4fdc1761"]`
-	}
-
-	data := map[string]string{
-		"path":                  rawPath,
-		"autoinit":              "1",
-		"target_path":           dstDir.GetPath(),
-		"block_list":            precreateBlockListStr,
-		"local_mtime":           strconv.FormatInt(stream.ModTime().Unix(), 10),
-		"file_limit_switch_v34": "true",
-	}
+	block_list_str := fmt.Sprintf("[%s]", strings.Join(block_list, ","))
+	data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&block_list=%s",
+		path, stream.GetSize(),
+		block_list_str)
+	params := map[string]string{}
 	var precreateResp PrecreateResp
-	log.Debugln(data)
-	res, err := d.post_form("/api/precreate", nil, data, &precreateResp)
+	_, err = d.post("/api/precreate", params, data, &precreateResp)
 	if err != nil {
 		return err
 	}
 	log.Debugf("%+v", precreateResp)
 	if precreateResp.Errno != 0 {
-		log.Debugln(string(res))
 		return fmt.Errorf("[terabox] failed to precreate file, errno: %d", precreateResp.Errno)
 	}
 	if precreateResp.ReturnType == 2 {
 		return nil
 	}
-
-	// upload chunks
-	tempFile, err := stream.CacheFullInTempFile()
-	if err != nil {
-		return err
-	}
-
-	params := map[string]string{
+	params = map[string]string{
 		"method":   "upload",
 		"path":     path,
 		"uploadid": precreateResp.Uploadid,
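The rewritten Put flow above hashes the cached temp file in fixed 4 MiB slices, collecting one MD5 per slice for block_list and a whole-file MD5 alongside. A compact standalone sketch of that hashing loop over any io.Reader, using io.ReadFull the same way; names here are illustrative, not the driver's.

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

const sliceSize = 4 * 1024 * 1024 // 4 MiB, matching Default in the diff

// blockList returns the per-slice MD5 hex digests and the MD5 of the whole stream.
func blockList(r io.Reader) (blocks []string, full string, err error) {
	whole := md5.New()
	buf := make([]byte, sliceSize)
	for {
		n, readErr := io.ReadFull(r, buf)
		if n > 0 {
			chunk := buf[:n]
			whole.Write(chunk)            // whole-file digest
			sum := md5.Sum(chunk)         // per-slice digest
			blocks = append(blocks, hex.EncodeToString(sum[:]))
		}
		if readErr == io.EOF || readErr == io.ErrUnexpectedEOF {
			break // ErrUnexpectedEOF marks the short final slice
		}
		if readErr != nil {
			return nil, "", readErr
		}
	}
	return blocks, hex.EncodeToString(whole.Sum(nil)), nil
}

func main() {
	blocks, full, err := blockList(strings.NewReader("example payload"))
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(blocks, full)
}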
@@ -192,38 +182,25 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
 		"channel":    "dubox",
 		"clienttype": "0",
 	}
-	streamSize := stream.GetSize()
-	chunkSize := calculateChunkSize(streamSize)
-	chunkByteData := make([]byte, chunkSize)
-	count := int(math.Ceil(float64(streamSize) / float64(chunkSize)))
-	left := streamSize
-	uploadBlockList := make([]string, 0, count)
-	h := md5.New()
-	for partseq := 0; partseq < count; partseq++ {
+	left = stream.GetSize()
+	for i, partseq := range precreateResp.BlockList {
 		if utils.IsCanceled(ctx) {
 			return ctx.Err()
 		}
-		byteSize := chunkSize
+		byteSize := Default
 		var byteData []byte
-		if left >= chunkSize {
-			byteData = chunkByteData
-		} else {
+		if left < Default {
 			byteSize = left
 			byteData = make([]byte, byteSize)
+		} else {
+			byteData = defaultByteData
 		}
 		left -= byteSize
 		_, err = io.ReadFull(tempFile, byteData)
 		if err != nil {
 			return err
 		}
-		// calculate md5
-		h.Write(byteData)
-		uploadBlockList = append(uploadBlockList, hex.EncodeToString(h.Sum(nil)))
-		h.Reset()
-
-		u := "https://" + locateupload_resp.Host + "/rest/2.0/pcs/superfile2"
+		u := "https://c-jp.terabox.com/rest/2.0/pcs/superfile2"
 		params["partseq"] = strconv.Itoa(partseq)
 		res, err := base.RestyClient.R().
 			SetContext(ctx).
@@ -235,39 +212,12 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
 			return err
 		}
 		log.Debugln(res.String())
-		if count > 0 {
-			up(float64(partseq) * 100 / float64(count))
+		if len(precreateResp.BlockList) > 0 {
+			up(float64(i) * 100 / float64(len(precreateResp.BlockList)))
 		}
 	}
-
-	// create file
-	params = map[string]string{
-		"isdir": "0",
-		"rtype": "1",
-	}
-
-	uploadBlockListStr, err := utils.Json.MarshalToString(uploadBlockList)
-	if err != nil {
-		return err
-	}
-	data = map[string]string{
-		"path":        rawPath,
-		"size":        strconv.FormatInt(stream.GetSize(), 10),
-		"uploadid":    precreateResp.Uploadid,
-		"target_path": dstDir.GetPath(),
-		"block_list":  uploadBlockListStr,
-		"local_mtime": strconv.FormatInt(stream.ModTime().Unix(), 10),
-	}
-	var createResp CreateResp
-	res, err = d.post_form("/api/create", params, data, &createResp)
-	log.Debugln(string(res))
-	if err != nil {
-		return err
-	}
-	if createResp.Errno != 0 {
-		return fmt.Errorf("[terabox] failed to create file, errno: %d", createResp.Errno)
-	}
-	return nil
+	_, err = d.create(rawPath, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
+	return err
 }
 
 var _ driver.Driver = (*Terabox)(nil)
Some files were not shown because too many files have changed in this diff.