Compare commits

25 commits:

- a4ad98ee3e
- 1c01dc6839
- c3c5843dce
- 6c38c5972d
- 0a46979c51
- 67c93eed2b
- f58de9923a
- 2671c876f1
- e707fa38f1
- b803b0070e
- 64ceb5afb6
- 10c7ebb1c0
- d0cda62703
- ce0b99a510
- 34a148c83d
- 4955d8cec8
- 216e3909f3
- a701432b8b
- a2dc45a80b
- 48ac23c8de
- 2830575490
- e8538bd215
- c3e43ff605
- 5f19d73fcc
- bdf4b52885
8  .github/workflows/beta_release.yml (vendored)

@@ -8,6 +8,9 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
   cancel-in-progress: true
 
+permissions:
+  contents: write
+
 jobs:
   changelog:
     strategy:
@@ -54,7 +57,7 @@ jobs:
     strategy:
       matrix:
         include:
-          - target: '!(*musl*|*windows-arm64*|*android*)' # xgo
+          - target: '!(*musl*|*windows-arm64*|*android*|*freebsd*)' # xgo
            hash: "md5"
          - target: 'linux-!(arm*)-musl*' #musl-not-arm
            hash: "md5-linux-musl"
@@ -64,6 +67,9 @@ jobs:
            hash: "md5-windows-arm64"
          - target: 'android-*' #android
            hash: "md5-android"
+          - target: 'freebsd-*' #freebsd
+            hash: "md5-freebsd"
+
     name: Beta Release
     runs-on: ubuntu-latest
     steps:
6  .github/workflows/build_docker.yml (vendored)

@@ -53,7 +53,7 @@ jobs:
         uses: actions/cache@v4
         with:
           path: build/musl-libs
-          key: docker-musl-libs
+          key: docker-musl-libs-v2
 
       - name: Download Musl Library
         if: steps.cache-musl.outputs.cache-hit != 'true'
@@ -84,7 +84,7 @@ jobs:
           push: ${{ github.event_name == 'push' }}
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
 
       - name: Build and push with ffmpeg
         id: docker_build_ffmpeg
@@ -96,7 +96,7 @@ jobs:
           tags: ${{ steps.meta-ffmpeg.outputs.tags }}
           labels: ${{ steps.meta-ffmpeg.outputs.labels }}
           build-args: INSTALL_FFMPEG=true
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
 
   build_docker_with_aria2:
     needs: build_docker
6  .github/workflows/release_docker.yml (vendored)

@@ -22,7 +22,7 @@ jobs:
         uses: actions/cache@v4
         with:
           path: build/musl-libs
-          key: docker-musl-libs
+          key: docker-musl-libs-v2
 
       - name: Download Musl Library
         if: steps.cache-musl.outputs.cache-hit != 'true'
@@ -58,7 +58,7 @@ jobs:
           push: true
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
 
       - name: Docker meta with ffmpeg
         id: meta-ffmpeg
@@ -79,7 +79,7 @@ jobs:
           tags: ${{ steps.meta-ffmpeg.outputs.tags }}
           labels: ${{ steps.meta-ffmpeg.outputs.labels }}
           build-args: INSTALL_FFMPEG=true
-          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
+          platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64
 
   release_docker_with_aria2:
     needs: release_docker
34  .github/workflows/release_freebsd.yml (vendored, new file)

@@ -0,0 +1,34 @@
+name: release_freebsd
+
+on:
+  release:
+    types: [ published ]
+
+jobs:
+  release_freebsd:
+    strategy:
+      matrix:
+        platform: [ ubuntu-latest ]
+        go-version: [ '1.21' ]
+    name: Release
+    runs-on: ${{ matrix.platform }}
+    steps:
+
+      - name: Setup Go
+        uses: actions/setup-go@v5
+        with:
+          go-version: ${{ matrix.go-version }}
+
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Build
+        run: |
+          bash build.sh release freebsd
+
+      - name: Upload assets
+        uses: softprops/action-gh-release@v2
+        with:
+          files: build/compress/*
37  build.sh

@@ -93,7 +93,7 @@ BuildDocker() {
 PrepareBuildDockerMusl() {
   mkdir -p build/musl-libs
   BASE="https://musl.cc/"
-  FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
+  FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross riscv64-linux-musl-cross powerpc64le-linux-musl-cross)
   for i in "${FILES[@]}"; do
     url="${BASE}${i}.tgz"
     lib_tgz="build/${i}.tgz"
@@ -112,8 +112,8 @@ BuildDockerMultiplatform() {
   docker_lflags="--extldflags '-static -fpic' $ldflags"
   export CGO_ENABLED=1
 
-  OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
-  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
+  OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x linux-riscv64 linux-ppc64le)
+  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc riscv64-linux-musl-gcc powerpc64le-linux-musl-gcc)
   for i in "${!OS_ARCHES[@]}"; do
     os_arch=${OS_ARCHES[$i]}
     cgo_cc=${CGO_ARGS[$i]}
@@ -233,6 +233,29 @@ BuildReleaseAndroid() {
   done
 }
 
+BuildReleaseFreeBSD() {
+  rm -rf .git/
+  mkdir -p "build/freebsd"
+  OS_ARCHES=(amd64 arm64 i386)
+  GO_ARCHES=(amd64 arm64 386)
+  CGO_ARGS=(x86_64-unknown-freebsd14.1 aarch64-unknown-freebsd14.1 i386-unknown-freebsd14.1)
+  for i in "${!OS_ARCHES[@]}"; do
+    os_arch=${OS_ARCHES[$i]}
+    cgo_cc="clang --target=${CGO_ARGS[$i]} --sysroot=/opt/freebsd/${os_arch}"
+    echo building for freebsd-${os_arch}
+    sudo mkdir -p "/opt/freebsd/${os_arch}"
+    wget -q https://download.freebsd.org/releases/${os_arch}/14.1-RELEASE/base.txz
+    sudo tar -xf ./base.txz -C /opt/freebsd/${os_arch}
+    rm base.txz
+    export GOOS=freebsd
+    export GOARCH=${GO_ARCHES[$i]}
+    export CC=${cgo_cc}
+    export CGO_ENABLED=1
+    export CGO_LDFLAGS="-fuse-ld=lld"
+    go build -o ./build/$appName-freebsd-$os_arch -ldflags="$ldflags" -tags=jsoniter .
+  done
+}
+
 MakeRelease() {
   cd build
   mkdir compress
@@ -251,6 +274,11 @@ MakeRelease() {
     tar -czvf compress/"$i".tar.gz alist
     rm -f alist
   done
+  for i in $(find . -type f -name "$appName-freebsd-*"); do
+    cp "$i" alist
+    tar -czvf compress/"$i".tar.gz alist
+    rm -f alist
+  done
   for i in $(find . -type f -name "$appName-windows-*"); do
     cp "$i" alist.exe
     zip compress/$(echo $i | sed 's/\.[^.]*$//').zip alist.exe
@@ -288,6 +316,9 @@ elif [ "$1" = "release" ]; then
   elif [ "$2" = "android" ]; then
     BuildReleaseAndroid
     MakeRelease "md5-android.txt"
+  elif [ "$2" = "freebsd" ]; then
+    BuildReleaseFreeBSD
+    MakeRelease "md5-freebsd.txt"
   elif [ "$2" = "web" ]; then
     echo "web only"
   else
43  drivers/115/appver.go (new file)

@@ -0,0 +1,43 @@
+package _115
+
+import (
+    driver115 "github.com/SheltonZhu/115driver/pkg/driver"
+    "github.com/alist-org/alist/v3/drivers/base"
+    log "github.com/sirupsen/logrus"
+)
+
+var (
+    md5Salt = "Qclm8MGWUv59TnrR0XPg"
+    appVer  = "27.0.5.7"
+)
+
+func (d *Pan115) getAppVersion() ([]driver115.AppVersion, error) {
+    result := driver115.VersionResp{}
+    resp, err := base.RestyClient.R().Get(driver115.ApiGetVersion)
+
+    err = driver115.CheckErr(err, &result, resp)
+    if err != nil {
+        return nil, err
+    }
+
+    return result.Data.GetAppVersions(), nil
+}
+
+func (d *Pan115) getAppVer() string {
+    // todo add some cache?
+    vers, err := d.getAppVersion()
+    if err != nil {
+        log.Warnf("[115] get app version failed: %v", err)
+        return appVer
+    }
+    for _, ver := range vers {
+        if ver.AppName == "win" {
+            return ver.Version
+        }
+    }
+    return appVer
+}
+
+func (d *Pan115) initAppVer() {
+    appVer = d.getAppVer()
+}
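
The new app-version lookup is wired into the driver lazily: the drivers/115/driver.go change below calls initAppVer through a sync.Once field, so the remote lookup runs at most once per storage instance (addressing the "todo add some cache?" note above). A minimal standalone sketch of that once-guarded pattern; the client, fetchVersion, and initVersion names below are illustrative stand-ins, not alist APIs:

```go
package main

import (
    "fmt"
    "sync"
)

// client stands in for the Pan115 driver; only the once-guarded
// initialization pattern from the diff is reproduced here.
type client struct {
    verOnce sync.Once
    appVer  string // baked-in default, refreshed on first Init
}

// fetchVersion is a placeholder for the remote version lookup.
func fetchVersion() (string, error) { return "27.0.5.7", nil }

func (c *client) initVersion() {
    if v, err := fetchVersion(); err == nil {
        c.appVer = v
    }
}

func (c *client) Init() {
    // Do runs initVersion only on the first call, however often Init runs.
    c.verOnce.Do(c.initVersion)
}

func main() {
    c := &client{appVer: "27.0.5.7"}
    c.Init()
    c.Init() // second call does not repeat the lookup
    fmt.Println(c.appVer)
}
```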
drivers/115/driver.go

@@ -3,6 +3,7 @@ package _115
 import (
     "context"
     "strings"
+    "sync"
 
     driver115 "github.com/SheltonZhu/115driver/pkg/driver"
     "github.com/alist-org/alist/v3/internal/driver"
@@ -16,8 +17,9 @@ import (
 type Pan115 struct {
     model.Storage
     Addition
     client  *driver115.Pan115Client
     limiter *rate.Limiter
+    appVerOnce sync.Once
 }
 
 func (d *Pan115) Config() driver.Config {
@@ -29,6 +31,7 @@ func (d *Pan115) GetAddition() driver.Additional {
 }
 
 func (d *Pan115) Init(ctx context.Context) error {
+    d.appVerOnce.Do(d.initAppVer)
     if d.LimitRate > 0 {
         d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
     }
@@ -76,28 +79,60 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
     return link, nil
 }
 
-func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
     if err := d.WaitLimit(ctx); err != nil {
-        return err
+        return nil, err
     }
-    if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
-        return err
-    }
-    return nil
+
+    result := driver115.MkdirResp{}
+    form := map[string]string{
+        "pid":   parentDir.GetID(),
+        "cname": dirName,
+    }
+    req := d.client.NewRequest().
+        SetFormData(form).
+        SetResult(&result).
+        ForceContentType("application/json;charset=UTF-8")
+
+    resp, err := req.Post(driver115.ApiDirAdd)
+
+    err = driver115.CheckErr(err, &result, resp)
+    if err != nil {
+        return nil, err
+    }
+    f, err := d.getNewFile(result.FileID)
+    if err != nil {
+        return nil, nil
+    }
+    return f, nil
 }
 
-func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
     if err := d.WaitLimit(ctx); err != nil {
-        return err
+        return nil, err
     }
-    return d.client.Move(dstDir.GetID(), srcObj.GetID())
+    if err := d.client.Move(dstDir.GetID(), srcObj.GetID()); err != nil {
+        return nil, err
+    }
+    f, err := d.getNewFile(srcObj.GetID())
+    if err != nil {
+        return nil, nil
+    }
+    return f, nil
 }
 
-func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
     if err := d.WaitLimit(ctx); err != nil {
-        return err
+        return nil, err
     }
-    return d.client.Rename(srcObj.GetID(), newName)
+    if err := d.client.Rename(srcObj.GetID(), newName); err != nil {
+        return nil, err
+    }
+    f, err := d.getNewFile((srcObj.GetID()))
+    if err != nil {
+        return nil, nil
+    }
+    return f, nil
 }
 
 func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
@@ -114,9 +149,9 @@ func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
     return d.client.Delete(obj.GetID())
 }
 
-func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
     if err := d.WaitLimit(ctx); err != nil {
-        return err
+        return nil, err
     }
 
     var (
@@ -125,10 +160,10 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     )
 
     if ok, err := d.client.UploadAvailable(); err != nil || !ok {
-        return err
+        return nil, err
     }
     if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
-        return driver115.ErrUploadTooLarge
+        return nil, driver115.ErrUploadTooLarge
     }
     //if digest, err = d.client.GetDigestResult(stream); err != nil {
     //    return err
@@ -141,22 +176,22 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     }
     reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
     if err != nil {
-        return err
+        return nil, err
     }
     preHash, err := utils.HashReader(utils.SHA1, reader)
     if err != nil {
-        return err
+        return nil, err
     }
     preHash = strings.ToUpper(preHash)
     fullHash := stream.GetHash().GetHash(utils.SHA1)
     if len(fullHash) <= 0 {
         tmpF, err := stream.CacheFullInTempFile()
         if err != nil {
-            return err
+            return nil, err
         }
         fullHash, err = utils.HashFile(utils.SHA1, tmpF)
         if err != nil {
-            return err
+            return nil, err
         }
     }
     fullHash = strings.ToUpper(fullHash)
@@ -165,20 +200,36 @@ func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     // note that 115 add timeout for rapid-upload,
     // and "sig invalid" err is thrown even when the hash is correct after timeout.
     if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
-        return err
+        return nil, err
     }
     if matched, err := fastInfo.Ok(); err != nil {
-        return err
+        return nil, err
     } else if matched {
-        return nil
+        f, err := d.getNewFileByPickCode(fastInfo.PickCode)
+        if err != nil {
+            return nil, nil
+        }
+        return f, nil
     }
 
+    var uploadResult *UploadResult
     // rapid upload failed, fall back to a real upload
-    if stream.GetSize() <= utils.KB { // files smaller than 1KB use the normal upload mode
-        return d.client.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID)
+    if stream.GetSize() <= 10*utils.MB { // files smaller than 10MB use the normal upload mode
+        if uploadResult, err = d.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID); err != nil {
+            return nil, err
+        }
+    } else {
+        // multipart upload
+        if uploadResult, err = d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID); err != nil {
+            return nil, err
+        }
     }
-    // multipart upload
-    return d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID)
+
+    file, err := d.getNewFile(uploadResult.Data.FileID)
+    if err != nil {
+        return nil, nil
+    }
+    return file, nil
 }
 
 func (d *Pan115) OfflineList(ctx context.Context) ([]*driver115.OfflineTask, error) {
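
The driver methods above switch from returning a bare error to returning the affected entry as (model.Obj, error), so callers can pick up the created, moved, renamed, or uploaded object without an extra listing request. A self-contained toy sketch of that shape; the Obj interface, file type, and mkdir function are illustrative stand-ins, not alist APIs:

```go
package main

import (
    "errors"
    "fmt"
)

// Obj stands in for alist's model.Obj; only what the example needs.
type Obj interface {
    GetID() string
}

type file struct{ id string }

func (f file) GetID() string { return f.id }

// mkdir mirrors the new driver shape: it returns the created object
// alongside the error instead of the error alone.
func mkdir(parentID, name string) (Obj, error) {
    if name == "" {
        return nil, errors.New("empty name")
    }
    return file{id: parentID + "/" + name}, nil
}

func main() {
    obj, err := mkdir("root", "docs")
    if err != nil {
        panic(err)
    }
    fmt.Println("created:", obj.GetID()) // created: root/docs
}
```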
drivers/115/meta.go

@@ -9,8 +9,8 @@ type Addition struct {
     Cookie       string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
     QRCodeToken  string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
     QRCodeSource string  `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
-    PageSize     int64   `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
-    LimitRate    float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
+    PageSize     int64   `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
+    LimitRate    float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate ([limit]r/1s)"`
     driver.RootID
 }
 
drivers/115/types.go

@@ -1,10 +1,11 @@
 package _115
 
 import (
+    "time"
+
     "github.com/SheltonZhu/115driver/pkg/driver"
     "github.com/alist-org/alist/v3/internal/model"
     "github.com/alist-org/alist/v3/pkg/utils"
-    "time"
 )
 
 var _ model.Obj = (*FileObj)(nil)
@@ -20,3 +21,18 @@ func (f *FileObj) CreateTime() time.Time {
 func (f *FileObj) GetHash() utils.HashInfo {
     return utils.NewHashInfo(utils.SHA1, f.Sha1)
 }
+
+type UploadResult struct {
+    driver.BasicResp
+    Data struct {
+        PickCode string `json:"pick_code"`
+        FileSize int    `json:"file_size"`
+        FileID   string `json:"file_id"`
+        ThumbURL string `json:"thumb_url"`
+        Sha1     string `json:"sha1"`
+        Aid      int    `json:"aid"`
+        FileName string `json:"file_name"`
+        Cid      string `json:"cid"`
+        IsVideo  int    `json:"is_video"`
+    } `json:"data"`
+}
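
UploadResult mirrors the JSON body that the OSS upload callback returns; both upload paths in util.go unmarshal the callback bytes into it to learn the new file's ID. A hedged sketch of that decoding step, using a trimmed struct (driver.BasicResp omitted) and a made-up payload purely for illustration:

```go
package main

import (
    "encoding/json"
    "fmt"
)

// Trimmed copy of the UploadResult data fields from the diff, kept
// self-contained for the example.
type uploadData struct {
    PickCode string `json:"pick_code"`
    FileSize int    `json:"file_size"`
    FileID   string `json:"file_id"`
    Sha1     string `json:"sha1"`
    FileName string `json:"file_name"`
    Cid      string `json:"cid"`
}

func main() {
    // Placeholder callback body, invented for illustration only.
    body := []byte(`{"data":{"pick_code":"abc123","file_size":1024,"file_id":"f1","sha1":"DA39A3EE","file_name":"demo.txt","cid":"0"}}`)

    var resp struct {
        Data uploadData `json:"data"`
    }
    if err := json.Unmarshal(body, &resp); err != nil {
        panic(err)
    }
    fmt.Println(resp.Data.FileID, resp.Data.FileName)
}
```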
drivers/115/util.go

@@ -2,13 +2,14 @@ package _115
 import (
     "bytes"
+    "crypto/md5"
     "crypto/tls"
+    "encoding/hex"
     "encoding/json"
     "fmt"
     "io"
     "net/http"
     "net/url"
-    "path/filepath"
     "strconv"
     "strings"
     "sync"
@@ -26,12 +27,12 @@ import (
     "github.com/pkg/errors"
 )
 
-var UserAgent = driver115.UA115Browser
+//var UserAgent = driver115.UA115Browser
 
 func (d *Pan115) login() error {
     var err error
     opts := []driver115.Option{
-        driver115.UA(UserAgent),
+        driver115.UA(d.getUA()),
         func(c *driver115.Pan115Client) {
             c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
         },
@@ -73,25 +74,39 @@ func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
     return res, nil
 }
 
-const (
-    appVer = "27.0.3.7"
-)
-
-func (c *Pan115) getAppVer() string {
-    // todo add some cache?
-    vers, err := c.client.GetAppVersion()
+func (d *Pan115) getNewFile(fileId string) (*FileObj, error) {
+    file, err := d.client.GetFile(fileId)
     if err != nil {
-        return appVer
+        return nil, err
     }
-    for _, ver := range vers {
-        if ver.AppName == "win" {
-            return ver.Version
-        }
-    }
-    return appVer
+    return &FileObj{*file}, nil
+}
+
+func (d *Pan115) getNewFileByPickCode(pickCode string) (*FileObj, error) {
+    result := driver115.GetFileInfoResponse{}
+    req := d.client.NewRequest().
+        SetQueryParam("pick_code", pickCode).
+        ForceContentType("application/json;charset=UTF-8").
+        SetResult(&result)
+    resp, err := req.Get(driver115.ApiFileInfo)
+    if err := driver115.CheckErr(err, &result, resp); err != nil {
+        return nil, err
+    }
+    if len(result.Files) == 0 {
+        return nil, errors.New("not get file info")
+    }
+    fileInfo := result.Files[0]
+
+    f := &FileObj{}
+    f.From(fileInfo)
+    return f, nil
+}
+
+func (d *Pan115) getUA() string {
+    return fmt.Sprintf("Mozilla/5.0 115Browser/%s", appVer)
 }
 
-func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
+func (d *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
     key := crypto.GenerateKey()
     result := driver115.DownloadResp{}
     params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
@@ -105,10 +120,10 @@ func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
     reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
     req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
     req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
-    req.Header.Set("Cookie", c.Cookie)
+    req.Header.Set("Cookie", d.Cookie)
     req.Header.Set("User-Agent", ua)
 
-    resp, err := c.client.Client.GetClient().Do(req)
+    resp, err := d.client.Client.GetClient().Do(req)
     if err != nil {
         return nil, err
     }
@@ -146,6 +161,13 @@ func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, e
     return nil, driver115.ErrUnexpected
 }
 
+func (c *Pan115) GenerateToken(fileID, preID, timeStamp, fileSize, signKey, signVal string) string {
+    userID := strconv.FormatInt(c.client.UserID, 10)
+    userIDMd5 := md5.Sum([]byte(userID))
+    tokenMd5 := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal + userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
+    return hex.EncodeToString(tokenMd5[:])
+}
+
 func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
     var (
         ecdhCipher *cipher.EcdhCipher
@@ -165,7 +187,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri
     userID := strconv.FormatInt(d.client.UserID, 10)
     form := url.Values{}
     form.Set("appid", "0")
-    form.Set("appversion", d.getAppVer())
+    form.Set("appversion", appVer)
     form.Set("userid", userID)
     form.Set("filename", fileName)
     form.Set("filesize", fileSizeStr)
@@ -186,7 +208,7 @@ func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID stri
     }
 
     form.Set("t", t.String())
-    form.Set("token", d.client.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
+    form.Set("token", d.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
     if signKey != "" && signVal != "" {
         form.Set("sign_key", signKey)
         form.Set("sign_val", signVal)
@@ -250,8 +272,38 @@ func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result stri
     return
 }
 
+// UploadByOSS use aliyun sdk to upload
+func (c *Pan115) UploadByOSS(params *driver115.UploadOSSParams, r io.Reader, dirID string) (*UploadResult, error) {
+    ossToken, err := c.client.GetOSSToken()
+    if err != nil {
+        return nil, err
+    }
+    ossClient, err := oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret)
+    if err != nil {
+        return nil, err
+    }
+    bucket, err := ossClient.Bucket(params.Bucket)
+    if err != nil {
+        return nil, err
+    }
+
+    var bodyBytes []byte
+    if err = bucket.PutObject(params.Object, r, append(
+        driver115.OssOption(params, ossToken),
+        oss.CallbackResult(&bodyBytes),
+    )...); err != nil {
+        return nil, err
+    }
+
+    var uploadResult UploadResult
+    if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
+        return nil, err
+    }
+    return &uploadResult, uploadResult.Err(string(bodyBytes))
+}
+
 // UploadByMultipart upload by mutipart blocks
-func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) error {
+func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) (*UploadResult, error) {
     var (
         chunks []oss.FileChunk
         parts  []oss.UploadPart
@@ -259,12 +311,13 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
         ossClient *oss.Client
         bucket    *oss.Bucket
         ossToken  *driver115.UploadOSSTokenResp
+        bodyBytes []byte
         err       error
     )
 
     tmpF, err := stream.CacheFullInTempFile()
     if err != nil {
-        return err
+        return nil, err
     }
 
     options := driver115.DefalutUploadMultipartOptions()
@@ -273,17 +326,19 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
             f(options)
         }
     }
+    // with Sequential enabled, OSS requires the parts to be uploaded in order
+    options.ThreadsNum = 1
 
     if ossToken, err = d.client.GetOSSToken(); err != nil {
-        return err
+        return nil, err
     }
 
-    if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret); err != nil {
-        return err
+    if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret, oss.EnableMD5(true), oss.EnableCRC(true)); err != nil {
+        return nil, err
     }
 
     if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
-        return err
+        return nil, err
     }
 
     // the ossToken expires after an hour, so re-fetch it every 50 minutes
@@ -293,14 +348,15 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
     timeout := time.NewTimer(options.Timeout)
 
     if chunks, err = SplitFile(fileSize); err != nil {
-        return err
+        return nil, err
     }
 
     if imur, err = bucket.InitiateMultipartUpload(params.Object,
         oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
         oss.UserAgentHeader(driver115.OSSUserAgent),
+        oss.EnableSha1(), oss.Sequential(),
     ); err != nil {
-        return err
+        return nil, err
     }
 
     wg := sync.WaitGroup{}
@@ -342,8 +398,7 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
                     continue
                 }
 
-                b := bytes.NewBuffer(buf)
-                if part, err = bucket.UploadPart(imur, b, chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
+                if part, err = bucket.UploadPart(imur, bytes.NewBuffer(buf), chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
                     break
                 }
             }
@@ -367,25 +422,31 @@ LOOP:
         case <-ticker.C:
             // time to re-fetch the ossToken
             if ossToken, err = d.client.GetOSSToken(); err != nil {
-                return err
+                return nil, err
             }
         case <-quit:
             break LOOP
         case <-errCh:
-            return err
+            return nil, err
         case <-timeout.C:
-            return fmt.Errorf("time out")
+            return nil, fmt.Errorf("time out")
         }
     }
 
-    // the EOF error comes from the xml Unmarshal; the response is actually JSON, so the upload in fact succeeded
-    if _, err = bucket.CompleteMultipartUpload(imur, parts, driver115.OssOption(params, ossToken)...); err != nil && !errors.Is(err, io.EOF) {
-        // when the file name contains & or <, parsing the response xml fails even though the upload succeeded
-        if filename := filepath.Base(stream.GetName()); !strings.ContainsAny(filename, "&<") {
-            return err
-        }
+    // for some reason OSS does not compute the sha1 for multipart uploads, which makes the 115 server-side check fail
+    // params.Callback.Callback = strings.ReplaceAll(params.Callback.Callback, "${sha1}", params.SHA1)
+    if _, err := bucket.CompleteMultipartUpload(imur, parts, append(
+        driver115.OssOption(params, ossToken),
+        oss.CallbackResult(&bodyBytes),
+    )...); err != nil {
+        return nil, err
     }
-    return d.checkUploadStatus(dirID, params.SHA1)
+
+    var uploadResult UploadResult
+    if err = json.Unmarshal(bodyBytes, &uploadResult); err != nil {
+        return nil, err
+    }
+    return &uploadResult, uploadResult.Err(string(bodyBytes))
 }
 
 func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
@@ -394,27 +455,6 @@ func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
     }
 }
 
-func (d *Pan115) checkUploadStatus(dirID, sha1 string) error {
-    // verify that the upload succeeded
-    req := d.client.NewRequest().ForceContentType("application/json;charset=UTF-8")
-    opts := []driver115.GetFileOptions{
-        driver115.WithOrder(driver115.FileOrderByTime),
-        driver115.WithShowDirEnable(false),
-        driver115.WithAsc(false),
-        driver115.WithLimit(500),
-    }
-    fResp, err := driver115.GetFiles(req, dirID, opts...)
-    if err != nil {
-        return err
-    }
-    for _, fileInfo := range fResp.Files {
-        if fileInfo.Sha1 == sha1 {
-            return nil
-        }
-    }
-    return driver115.ErrUploadFailed
-}
-
 func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
     for i := int64(1); i < 10; i++ {
         if fileSize < i*utils.GB { // files smaller than i GB are split into i*1000 chunks
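
The new GenerateToken helper moves the upload-token signature into the driver: the token is the hex MD5 of md5Salt, file ID, file size, sign key, sign value, user ID, timestamp, the hex MD5 of the user ID, and appVer, concatenated in that order. A standalone restatement of that formula; the argument values passed in main are placeholders only:

```go
package main

import (
    "crypto/md5"
    "encoding/hex"
    "fmt"
)

// generateToken reproduces the concatenation order used by the diff's
// GenerateToken, with every input passed explicitly for clarity.
func generateToken(md5Salt, fileID, fileSize, signKey, signVal, userID, timeStamp, appVer string) string {
    userIDMd5 := md5.Sum([]byte(userID))
    tokenMd5 := md5.Sum([]byte(md5Salt + fileID + fileSize + signKey + signVal +
        userID + timeStamp + hex.EncodeToString(userIDMd5[:]) + appVer))
    return hex.EncodeToString(tokenMd5[:])
}

func main() {
    // Placeholder inputs; only the salt and app version come from the diff.
    fmt.Println(generateToken("Qclm8MGWUv59TnrR0XPg", "FILEID", "1024", "", "", "123456", "1700000000", "27.0.5.7"))
}
```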
drivers/115_share/meta.go

@@ -9,7 +9,7 @@ type Addition struct {
     Cookie       string  `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
     QRCodeToken  string  `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
     QRCodeSource string  `json:"qrcode_source" type:"select" options:"web,android,ios,tv,alipaymini,wechatmini,qandroid" default:"linux" help:"select the QR code device, default linux"`
-    PageSize     int64   `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
+    PageSize     int64   `json:"page_size" type:"number" default:"1000" help:"list api per page size of 115 driver"`
     LimitRate    float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
     ShareCode    string  `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
     ReceiveCode  string  `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
drivers/123/driver.go

@@ -82,6 +82,7 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
         "type": f.Type,
     }
     resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
+
         req.SetBody(data).SetHeaders(headers)
     }, nil)
     if err != nil {
drivers/123/util.go

@@ -26,8 +26,9 @@ const (
     Api      = "https://www.123pan.com/api"
     AApi     = "https://www.123pan.com/a/api"
     BApi     = "https://www.123pan.com/b/api"
+    LoginApi = "https://login.123pan.com/api"
     MainApi  = BApi
-    SignIn   = MainApi + "/user/sign_in"
+    SignIn   = LoginApi + "/user/sign_in"
     Logout   = MainApi + "/user/logout"
     UserInfo = MainApi + "/user/info"
     FileList = MainApi + "/file/list/new"
drivers/aliyundrive_open/meta.go

@@ -6,7 +6,7 @@ import (
 )
 
 type Addition struct {
-    DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"default"`
+    DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"resource"`
     driver.RootID
     RefreshToken string `json:"refresh_token" required:"true"`
     OrderBy      string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
drivers/all.go

@@ -22,6 +22,7 @@ import (
     _ "github.com/alist-org/alist/v3/drivers/cloudreve"
     _ "github.com/alist-org/alist/v3/drivers/crypt"
     _ "github.com/alist-org/alist/v3/drivers/dropbox"
+    _ "github.com/alist-org/alist/v3/drivers/febbox"
     _ "github.com/alist-org/alist/v3/drivers/ftp"
     _ "github.com/alist-org/alist/v3/drivers/google_drive"
     _ "github.com/alist-org/alist/v3/drivers/google_photo"
drivers/baidu_netdisk/types.go

@@ -6,6 +6,7 @@ import (
     "time"
 
     "github.com/alist-org/alist/v3/internal/model"
+    "github.com/alist-org/alist/v3/pkg/utils"
 )
 
 type TokenErrResp struct {
@@ -72,7 +73,7 @@ func fileToObj(f File) *model.ObjThumb {
             IsFolder: f.Isdir == 1,
 
             // the MD5 returned directly by the API is wrong
-            // HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
+            HashInfo: utils.NewHashInfo(utils.MD5, DecryptMd5(f.Md5)),
         },
         Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
     }
drivers/baidu_netdisk/util.go

@@ -1,11 +1,14 @@
 package baidu_netdisk
 
 import (
+    "encoding/hex"
     "errors"
     "fmt"
     "net/http"
     "strconv"
+    "strings"
     "time"
+    "unicode"
 
     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/errs"
@@ -153,8 +156,6 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
     u = res.Header().Get("location")
     //}
 
-    updateObjMd5(file, "pan.baidu.com", u)
-
     return &model.Link{
         URL: u,
         Header: http.Header{
@@ -178,8 +179,6 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
         return nil, err
     }
 
-    updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)
-
     return &model.Link{
         URL: resp.Info[0].Dlink,
         Header: http.Header{
@@ -229,19 +228,6 @@ func joinTime(form map[string]string, ctime, mtime int64) {
     form["local_ctime"] = strconv.FormatInt(ctime, 10)
 }
 
-func updateObjMd5(obj model.Obj, userAgent, u string) {
-    object := model.GetRawObject(obj)
-    if object != nil {
-        req, _ := http.NewRequest(http.MethodHead, u, nil)
-        req.Header.Add("User-Agent", userAgent)
-        resp, _ := base.HttpClient.Do(req)
-        if resp != nil {
-            contentMd5 := resp.Header.Get("Content-Md5")
-            object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
-        }
-    }
-}
-
 const (
     DefaultSliceSize int64 = 4 * utils.MB
     VipSliceSize           = 16 * utils.MB
@@ -267,3 +253,40 @@ func (d *BaiduNetdisk) getSliceSize() int64 {
 //    r = strings.ReplaceAll(r, "+", "%20")
 //    return r
 // }
+
+func DecryptMd5(encryptMd5 string) string {
+    if _, err := hex.DecodeString(encryptMd5); err == nil {
+        return encryptMd5
+    }
+
+    var out strings.Builder
+    out.Grow(len(encryptMd5))
+    for i, n := 0, int64(0); i < len(encryptMd5); i++ {
+        if i == 9 {
+            n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
+        } else {
+            n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
+        }
+        out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
+    }
+
+    encryptMd5 = out.String()
+    return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
+}
+
+func EncryptMd5(originalMd5 string) string {
+    reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
+
+    var out strings.Builder
+    out.Grow(len(reversed))
+    for i, n := 0, int64(0); i < len(reversed); i++ {
+        n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
+        n ^= int64(15 & i)
+        if i == 9 {
+            out.WriteRune(rune(n) + 'g')
+        } else {
+            out.WriteString(strconv.FormatInt(n, 16))
+        }
+    }
+    return out.String()
+}
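
DecryptMd5 undoes a per-character XOR keyed by the character index plus a block swap that Baidu applies to the reported MD5, and EncryptMd5 is its inverse. A quick self-contained round-trip check, with the two helpers copied from the diff above and a well-known MD5 (that of "hello") as the sample input:

```go
package main

import (
    "encoding/hex"
    "fmt"
    "strconv"
    "strings"
    "unicode"
)

// DecryptMd5 and EncryptMd5 are copied from the diff so the round trip can be
// checked in isolation.
func DecryptMd5(encryptMd5 string) string {
    if _, err := hex.DecodeString(encryptMd5); err == nil {
        return encryptMd5 // already a plain hex MD5
    }
    var out strings.Builder
    out.Grow(len(encryptMd5))
    for i, n := 0, int64(0); i < len(encryptMd5); i++ {
        if i == 9 {
            n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
        } else {
            n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
        }
        out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
    }
    encryptMd5 = out.String()
    return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
}

func EncryptMd5(originalMd5 string) string {
    reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
    var out strings.Builder
    out.Grow(len(reversed))
    for i, n := 0, int64(0); i < len(reversed); i++ {
        n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
        n ^= int64(15 & i)
        if i == 9 {
            out.WriteRune(rune(n) + 'g') // position 9 becomes a letter, marking the value as encrypted
        } else {
            out.WriteString(strconv.FormatInt(n, 16))
        }
    }
    return out.String()
}

func main() {
    plain := "5d41402abc4b2a76b9719d911017c592" // md5("hello"), used as sample data
    enc := EncryptMd5(plain)
    fmt.Println(enc, DecryptMd5(enc) == plain) // prints the encoded form and true
}
```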
drivers/baidu_photo/types.go

@@ -72,7 +72,7 @@ func (c *File) Thumb() string {
 }
 
 func (c *File) GetHash() utils.HashInfo {
-    return utils.NewHashInfo(utils.MD5, c.Md5)
+    return utils.NewHashInfo(utils.MD5, DecryptMd5(c.Md5))
 }
 
 /* album section */
drivers/baidu_photo/utils.go

@@ -2,8 +2,12 @@ package baiduphoto
 
 import (
     "context"
+    "encoding/hex"
     "fmt"
     "net/http"
+    "strconv"
+    "strings"
+    "unicode"
 
     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/errs"
@@ -476,3 +480,40 @@ func (d *BaiduPhoto) uInfo() (*UInfo, error) {
     }
     return &info, nil
 }
+
+func DecryptMd5(encryptMd5 string) string {
+    if _, err := hex.DecodeString(encryptMd5); err == nil {
+        return encryptMd5
+    }
+
+    var out strings.Builder
+    out.Grow(len(encryptMd5))
+    for i, n := 0, int64(0); i < len(encryptMd5); i++ {
+        if i == 9 {
+            n = int64(unicode.ToLower(rune(encryptMd5[i])) - 'g')
+        } else {
+            n, _ = strconv.ParseInt(encryptMd5[i:i+1], 16, 64)
+        }
+        out.WriteString(strconv.FormatInt(n^int64(15&i), 16))
+    }
+
+    encryptMd5 = out.String()
+    return encryptMd5[8:16] + encryptMd5[:8] + encryptMd5[24:32] + encryptMd5[16:24]
+}
+
+func EncryptMd5(originalMd5 string) string {
+    reversed := originalMd5[8:16] + originalMd5[:8] + originalMd5[24:32] + originalMd5[16:24]
+
+    var out strings.Builder
+    out.Grow(len(reversed))
+    for i, n := 0, int64(0); i < len(reversed); i++ {
+        n, _ = strconv.ParseInt(reversed[i:i+1], 16, 64)
+        n ^= int64(15 & i)
+        if i == 9 {
+            out.WriteRune(rune(n) + 'g')
+        } else {
+            out.WriteString(strconv.FormatInt(n, 16))
+        }
+    }
+    return out.String()
+}
drivers/chaoxing/driver.go

@@ -67,7 +67,9 @@ func (d *ChaoXing) Init(ctx context.Context) error {
 }
 
 func (d *ChaoXing) Drop(ctx context.Context) error {
-    d.cron.Stop()
+    if d.cron != nil {
+        d.cron.Stop()
+    }
     return nil
 }
 
drivers/cloudreve/driver.go

@@ -4,6 +4,7 @@ import (
     "context"
     "io"
     "net/http"
+    "path"
     "strconv"
     "strings"
 
@@ -90,7 +91,7 @@ func (d *Cloudreve) MakeDir(ctx context.Context, parentDir model.Obj, dirName st
 func (d *Cloudreve) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
     body := base.Json{
         "action":  "move",
-        "src_dir": srcObj.GetPath(),
+        "src_dir": path.Dir(srcObj.GetPath()),
         "dst":     dstDir.GetPath(),
         "src":     convertSrc(srcObj),
     }
@@ -112,7 +113,7 @@ func (d *Cloudreve) Rename(ctx context.Context, srcObj model.Obj, newName string
 
 func (d *Cloudreve) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
     body := base.Json{
-        "src_dir": srcObj.GetPath(),
+        "src_dir": path.Dir(srcObj.GetPath()),
         "dst":     dstDir.GetPath(),
         "src":     convertSrc(srcObj),
     }
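
The Cloudreve fix sends the parent directory of the source object as src_dir rather than the object's own path; path.Dir from the standard library yields exactly that parent. A tiny illustration with placeholder paths:

```go
package main

import (
    "fmt"
    "path"
)

func main() {
    srcPath := "/docs/report.pdf"         // placeholder object path
    fmt.Println(path.Dir(srcPath))        // "/docs": the directory the move originates from
    fmt.Println(path.Dir("/docs"))        // "/": parent of a top-level folder
}
```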
132  drivers/febbox/driver.go (new file)

@@ -0,0 +1,132 @@
+package febbox
+
+import (
+    "context"
+    "github.com/alist-org/alist/v3/internal/op"
+    "github.com/alist-org/alist/v3/pkg/utils"
+    "golang.org/x/oauth2"
+    "golang.org/x/oauth2/clientcredentials"
+
+    "github.com/alist-org/alist/v3/internal/driver"
+    "github.com/alist-org/alist/v3/internal/errs"
+    "github.com/alist-org/alist/v3/internal/model"
+)
+
+type FebBox struct {
+    model.Storage
+    Addition
+    accessToken string
+    oauth2Token oauth2.TokenSource
+}
+
+func (d *FebBox) Config() driver.Config {
+    return config
+}
+
+func (d *FebBox) GetAddition() driver.Additional {
+    return &d.Addition
+}
+
+func (d *FebBox) Init(ctx context.Context) error {
+    // initialize the oauth2 config
+    oauth2Config := &clientcredentials.Config{
+        ClientID:     d.ClientID,
+        ClientSecret: d.ClientSecret,
+        AuthStyle:    oauth2.AuthStyleInParams,
+        TokenURL:     "https://api.febbox.com/oauth/token",
+    }
+
+    d.initializeOAuth2Token(ctx, oauth2Config, d.Addition.RefreshToken)
+
+    token, err := d.oauth2Token.Token()
+    if err != nil {
+        return err
+    }
+    d.accessToken = token.AccessToken
+    d.Addition.RefreshToken = token.RefreshToken
+    op.MustSaveDriverStorage(d)
+
+    return nil
+}
+
+func (d *FebBox) Drop(ctx context.Context) error {
+    return nil
+}
+
+func (d *FebBox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+    files, err := d.getFilesList(dir.GetID())
+    if err != nil {
+        return nil, err
+    }
+    return utils.SliceConvert(files, func(src File) (model.Obj, error) {
+        return fileToObj(src), nil
+    })
+}
+
+func (d *FebBox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+    var ip string
+    if d.Addition.UserIP != "" {
+        ip = d.Addition.UserIP
+    } else {
+        ip = args.IP
+    }
+
+    url, err := d.getDownloadLink(file.GetID(), ip)
+    if err != nil {
+        return nil, err
+    }
+    return &model.Link{
+        URL: url,
+    }, nil
+}
+
+func (d *FebBox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
+    err := d.makeDir(parentDir.GetID(), dirName)
+    if err != nil {
+        return nil, err
+    }
+
+    return nil, nil
+}
+
+func (d *FebBox) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
+    err := d.move(srcObj.GetID(), dstDir.GetID())
+    if err != nil {
+        return nil, err
+    }
+
+    return nil, nil
+}
+
+func (d *FebBox) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
+    err := d.rename(srcObj.GetID(), newName)
+    if err != nil {
+        return nil, err
+    }
+
+    return nil, nil
+}
+
+func (d *FebBox) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
+    err := d.copy(srcObj.GetID(), dstDir.GetID())
+    if err != nil {
+        return nil, err
+    }
+
+    return nil, nil
+}
+
+func (d *FebBox) Remove(ctx context.Context, obj model.Obj) error {
+    err := d.remove(obj.GetID())
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
+
+func (d *FebBox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+    return nil, errs.NotImplement
+}
+
+var _ driver.Driver = (*FebBox)(nil)
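
Init wraps the driver's custom token source in oauth2.ReuseTokenSource, so the FebBox access token is fetched once and then reused until it expires instead of on every call. A minimal sketch of that caching behaviour with a counting stand-in source (countingSource is illustrative, not part of the driver):

```go
package main

import (
    "fmt"
    "time"

    "golang.org/x/oauth2"
)

// countingSource stands in for the driver's customTokenSource: it issues a
// token valid for an hour and counts how often it is actually consulted.
type countingSource struct{ calls int }

func (s *countingSource) Token() (*oauth2.Token, error) {
    s.calls++
    return &oauth2.Token{
        AccessToken: fmt.Sprintf("token-%d", s.calls),
        Expiry:      time.Now().Add(time.Hour),
    }, nil
}

func main() {
    src := &countingSource{}
    // Same wrapping as in Init: ReuseTokenSource caches the token and only
    // calls the underlying source again once the cached token has expired.
    ts := oauth2.ReuseTokenSource(nil, src)

    for i := 0; i < 3; i++ {
        tok, _ := ts.Token()
        fmt.Println(tok.AccessToken) // token-1 each time
    }
    fmt.Println("underlying calls:", src.calls) // 1
}
```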
36
drivers/febbox/meta.go
Normal file
36
drivers/febbox/meta.go
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
package febbox
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Addition struct {
|
||||||
|
driver.RootID
|
||||||
|
ClientID string `json:"client_id" required:"true" default:""`
|
||||||
|
ClientSecret string `json:"client_secret" required:"true" default:""`
|
||||||
|
RefreshToken string
|
||||||
|
SortRule string `json:"sort_rule" required:"true" type:"select" options:"size_asc,size_desc,name_asc,name_desc,update_asc,update_desc,ext_asc,ext_desc" default:"name_asc"`
|
||||||
|
PageSize int64 `json:"page_size" required:"true" type:"number" default:"100" help:"list api per page size of FebBox driver"`
|
||||||
|
UserIP string `json:"user_ip" default:"" help:"user ip address for download link which can speed up the download"`
|
||||||
|
}
|
||||||
|
|
||||||
|
var config = driver.Config{
|
||||||
|
Name: "FebBox",
|
||||||
|
LocalSort: false,
|
||||||
|
OnlyLocal: false,
|
||||||
|
OnlyProxy: false,
|
||||||
|
NoCache: false,
|
||||||
|
NoUpload: true,
|
||||||
|
NeedMs: false,
|
||||||
|
DefaultRoot: "0",
|
||||||
|
CheckStatus: false,
|
||||||
|
Alert: "",
|
||||||
|
NoOverwriteUpload: false,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &FebBox{}
|
||||||
|
})
|
||||||
|
}
88  drivers/febbox/oauth2.go  Normal file
@@ -0,0 +1,88 @@
package febbox

import (
    "context"
    "encoding/json"
    "errors"
    "net/http"
    "net/url"
    "strings"
    "time"

    "golang.org/x/oauth2"
    "golang.org/x/oauth2/clientcredentials"
)

type customTokenSource struct {
    config       *clientcredentials.Config
    ctx          context.Context
    refreshToken string
}

func (c *customTokenSource) Token() (*oauth2.Token, error) {
    v := url.Values{}
    if c.refreshToken != "" {
        v.Set("grant_type", "refresh_token")
        v.Set("refresh_token", c.refreshToken)
    } else {
        v.Set("grant_type", "client_credentials")
    }

    v.Set("client_id", c.config.ClientID)
    v.Set("client_secret", c.config.ClientSecret)

    req, err := http.NewRequest("POST", c.config.TokenURL, strings.NewReader(v.Encode()))
    if err != nil {
        return nil, err
    }
    req.Header.Set("Content-Type", "application/x-www-form-urlencoded")

    resp, err := http.DefaultClient.Do(req.WithContext(c.ctx))
    if err != nil {
        return nil, err
    }
    defer resp.Body.Close()

    if resp.StatusCode != http.StatusOK {
        return nil, errors.New("oauth2: cannot fetch token")
    }

    var tokenResp struct {
        Code int    `json:"code"`
        Msg  string `json:"msg"`
        Data struct {
            AccessToken  string `json:"access_token"`
            ExpiresIn    int64  `json:"expires_in"`
            TokenType    string `json:"token_type"`
            Scope        string `json:"scope"`
            RefreshToken string `json:"refresh_token"`
        } `json:"data"`
    }

    if err := json.NewDecoder(resp.Body).Decode(&tokenResp); err != nil {
        return nil, err
    }

    if tokenResp.Code != 1 {
        return nil, errors.New("oauth2: server response error")
    }

    c.refreshToken = tokenResp.Data.RefreshToken

    token := &oauth2.Token{
        AccessToken:  tokenResp.Data.AccessToken,
        TokenType:    tokenResp.Data.TokenType,
        RefreshToken: tokenResp.Data.RefreshToken,
        Expiry:       time.Now().Add(time.Duration(tokenResp.Data.ExpiresIn) * time.Second),
    }

    return token, nil
}

func (d *FebBox) initializeOAuth2Token(ctx context.Context, oauth2Config *clientcredentials.Config, refreshToken string) {
    d.oauth2Token = oauth2.ReuseTokenSource(nil, &customTokenSource{
        config:       oauth2Config,
        ctx:          ctx,
        refreshToken: refreshToken,
    })
}
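The customTokenSource above adapts FebBox's non-standard token envelope (code/msg/data) to the standard oauth2.TokenSource interface, and initializeOAuth2Token wraps it in oauth2.ReuseTokenSource so the token is only refreshed when it expires. A minimal sketch of how the driver's Init (defined in driver.go above, not fully shown here) might wire this up follows; the TokenURL value and the helper name are assumptions for illustration, not taken from this diff.

package febbox

import (
    "context"

    "golang.org/x/oauth2/clientcredentials"
)

// initOAuthSketch is a hypothetical helper showing how the pieces above compose.
// The TokenURL is an assumed endpoint, used here only to make the sketch concrete.
func (d *FebBox) initOAuthSketch(ctx context.Context) error {
    conf := &clientcredentials.Config{
        ClientID:     d.Addition.ClientID,
        ClientSecret: d.Addition.ClientSecret,
        TokenURL:     "https://api.febbox.com/oauth/token", // assumption, not confirmed by this diff
    }
    // ReuseTokenSource caches the token; customTokenSource.Token is only called
    // again once the cached token expires, preferring refresh_token when set.
    d.initializeOAuth2Token(ctx, conf, d.Addition.RefreshToken)
    _, err := d.oauth2Token.Token()
    return err
}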
123  drivers/febbox/types.go  Normal file
@@ -0,0 +1,123 @@
package febbox

import (
    "fmt"
    "strconv"
    "time"

    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)

type ErrResp struct {
    ErrorCode     int64   `json:"code"`
    ErrorMsg      string  `json:"msg"`
    ServerRunTime float64 `json:"server_runtime"`
    ServerName    string  `json:"server_name"`
}

func (e *ErrResp) IsError() bool {
    return e.ErrorCode != 0 || e.ErrorMsg != "" || e.ServerRunTime != 0 || e.ServerName != ""
}

func (e *ErrResp) Error() string {
    return fmt.Sprintf("ErrorCode: %d ,Error: %s ,ServerRunTime: %f ,ServerName: %s", e.ErrorCode, e.ErrorMsg, e.ServerRunTime, e.ServerName)
}

type FileListResp struct {
    Code int    `json:"code"`
    Msg  string `json:"msg"`
    Data struct {
        FileList []File `json:"file_list"`
        ShowType string `json:"show_type"`
    } `json:"data"`
}

type Rules struct {
    AllowCopy     int64 `json:"allow_copy"`
    AllowDelete   int64 `json:"allow_delete"`
    AllowDownload int64 `json:"allow_download"`
    AllowComment  int64 `json:"allow_comment"`
    HideLocation  int64 `json:"hide_location"`
}

type File struct {
    Fid              int64  `json:"fid"`
    UID              int64  `json:"uid"`
    FileSize         int64  `json:"file_size"`
    Path             string `json:"path"`
    FileName         string `json:"file_name"`
    Ext              string `json:"ext"`
    AddTime          int64  `json:"add_time"`
    FileCreateTime   int64  `json:"file_create_time"`
    FileUpdateTime   int64  `json:"file_update_time"`
    ParentID         int64  `json:"parent_id"`
    UpdateTime       int64  `json:"update_time"`
    LastOpenTime     int64  `json:"last_open_time"`
    IsDir            int64  `json:"is_dir"`
    Epub             int64  `json:"epub"`
    IsMusicList      int64  `json:"is_music_list"`
    OssFid           int64  `json:"oss_fid"`
    Faststart        int64  `json:"faststart"`
    HasVideoQuality  int64  `json:"has_video_quality"`
    TotalDownload    int64  `json:"total_download"`
    Status           int64  `json:"status"`
    Remark           string `json:"remark"`
    OldHash          string `json:"old_hash"`
    Hash             string `json:"hash"`
    HashType         string `json:"hash_type"`
    FromUID          int64  `json:"from_uid"`
    FidOrg           int64  `json:"fid_org"`
    ShareID          int64  `json:"share_id"`
    InvitePermission int64  `json:"invite_permission"`
    ThumbSmall       string `json:"thumb_small"`
    ThumbSmallWidth  int64  `json:"thumb_small_width"`
    ThumbSmallHeight int64  `json:"thumb_small_height"`
    Thumb            string `json:"thumb"`
    ThumbWidth       int64  `json:"thumb_width"`
    ThumbHeight      int64  `json:"thumb_height"`
    ThumbBig         string `json:"thumb_big"`
    ThumbBigWidth    int64  `json:"thumb_big_width"`
    ThumbBigHeight   int64  `json:"thumb_big_height"`
    IsCustomThumb    int64  `json:"is_custom_thumb"`
    Photos           int64  `json:"photos"`
    IsAlbum          int64  `json:"is_album"`
    ReadOnly         int64  `json:"read_only"`
    Rules            Rules  `json:"rules"`
    IsShared         int64  `json:"is_shared"`
}

func fileToObj(f File) *model.ObjThumb {
    return &model.ObjThumb{
        Object: model.Object{
            ID:       strconv.FormatInt(f.Fid, 10),
            Name:     f.FileName,
            Size:     f.FileSize,
            Ctime:    time.Unix(f.FileCreateTime, 0),
            Modified: time.Unix(f.FileUpdateTime, 0),
            IsFolder: f.IsDir == 1,
            HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
        },
        Thumbnail: model.Thumbnail{
            Thumbnail: f.Thumb,
        },
    }
}

type FileDownloadResp struct {
    Code int    `json:"code"`
    Msg  string `json:"msg"`
    Data []struct {
        Error       int    `json:"error"`
        DownloadURL string `json:"download_url"`
        Hash        string `json:"hash"`
        HashType    string `json:"hash_type"`
        Fid         int    `json:"fid"`
        FileName    string `json:"file_name"`
        ParentID    int    `json:"parent_id"`
        FileSize    int    `json:"file_size"`
        Ext         string `json:"ext"`
        Thumb       string `json:"thumb"`
        VipLink     int    `json:"vip_link"`
    } `json:"data"`
}
224  drivers/febbox/util.go  Normal file
@@ -0,0 +1,224 @@
package febbox

import (
    "encoding/json"
    "errors"
    "net/http"
    "strconv"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/go-resty/resty/v2"
)

func (d *FebBox) refreshTokenByOAuth2() error {
    token, err := d.oauth2Token.Token()
    if err != nil {
        return err
    }
    d.Status = "work"
    d.accessToken = token.AccessToken
    d.Addition.RefreshToken = token.RefreshToken
    op.MustSaveDriverStorage(d)
    return nil
}

func (d *FebBox) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    req := base.RestyClient.R()
    // use oauth2 to obtain the access_token
    token, err := d.oauth2Token.Token()
    if err != nil {
        return nil, err
    }
    req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)

    if callback != nil {
        callback(req)
    }
    if resp != nil {
        req.SetResult(resp)
    }
    var e ErrResp
    req.SetError(&e)
    res, err := req.Execute(method, url)
    if err != nil {
        return nil, err
    }

    switch e.ErrorCode {
    case 0:
        return res.Body(), nil
    case 1:
        return res.Body(), nil
    case -10001:
        if e.ServerName != "" {
            // access_token has expired
            if err = d.refreshTokenByOAuth2(); err != nil {
                return nil, err
            }
            return d.request(url, method, callback, resp)
        } else {
            return nil, errors.New(e.Error())
        }
    default:
        return nil, errors.New(e.Error())
    }
}

func (d *FebBox) getFilesList(id string) ([]File, error) {
    if d.PageSize <= 0 {
        d.PageSize = 100
    }
    res, err := d.listWithLimit(id, d.PageSize)
    if err != nil {
        return nil, err
    }
    return *res, nil
}

func (d *FebBox) listWithLimit(dirID string, pageLimit int64) (*[]File, error) {
    var files []File
    page := int64(1)
    for {
        result, err := d.getFiles(dirID, page, pageLimit)
        if err != nil {
            return nil, err
        }
        files = append(files, *result...)
        if int64(len(*result)) < pageLimit {
            break
        } else {
            page++
        }
    }
    return &files, nil
}

func (d *FebBox) getFiles(dirID string, page, pageLimit int64) (*[]File, error) {
    var fileList FileListResp
    queryParams := map[string]string{
        "module":    "file_list",
        "parent_id": dirID,
        "page":      strconv.FormatInt(page, 10),
        "pagelimit": strconv.FormatInt(pageLimit, 10),
        "order":     d.Addition.SortRule,
    }

    res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, &fileList)
    if err != nil {
        return nil, err
    }

    if err = json.Unmarshal(res, &fileList); err != nil {
        return nil, err
    }

    return &fileList.Data.FileList, nil
}

func (d *FebBox) getDownloadLink(id string, ip string) (string, error) {
    var fileDownloadResp FileDownloadResp
    queryParams := map[string]string{
        "module": "file_get_download_url",
        "fids[]": id,
        "ip":     ip,
    }

    res, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, &fileDownloadResp)
    if err != nil {
        return "", err
    }

    if err = json.Unmarshal(res, &fileDownloadResp); err != nil {
        return "", err
    }

    return fileDownloadResp.Data[0].DownloadURL, nil
}

func (d *FebBox) makeDir(id string, name string) error {
    queryParams := map[string]string{
        "module":    "create_dir",
        "parent_id": id,
        "name":      name,
    }

    _, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *FebBox) move(id string, id2 string) error {
    queryParams := map[string]string{
        "module": "file_move",
        "fids[]": id,
        "to":     id2,
    }

    _, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *FebBox) rename(id string, name string) error {
    queryParams := map[string]string{
        "module": "file_rename",
        "fid":    id,
        "name":   name,
    }

    _, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *FebBox) copy(id string, id2 string) error {
    queryParams := map[string]string{
        "module": "file_copy",
        "fids[]": id,
        "to":     id2,
    }

    _, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, nil)
    if err != nil {
        return err
    }

    return nil
}

func (d *FebBox) remove(id string) error {
    queryParams := map[string]string{
        "module": "file_delete",
        "fids[]": id,
    }

    _, err := d.request("https://api.febbox.com/oauth", http.MethodPost, func(req *resty.Request) {
        req.SetMultipartFormData(queryParams)
    }, nil)
    if err != nil {
        return err
    }

    return nil
}
@@ -66,12 +66,13 @@ func (d *ILanZou) Drop(ctx context.Context) error {
 }
 
 func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+    offset := 1
     var res []ListItem
     for {
         var resp ListResp
         _, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
             params := []string{
-                "offset=1",
+                "offset=" + strconv.Itoa(offset),
                 "limit=60",
                 "folderId=" + dir.GetID(),
                 "type=0",
@@ -83,7 +84,9 @@ func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs)
             return nil, err
         }
         res = append(res, resp.List...)
-        if resp.TotalPage <= resp.Offset {
+        if resp.Offset < resp.TotalPage {
+            offset++
+        } else {
             break
         }
     }
@@ -286,7 +289,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
         req.SetBody(base.Json{
             "fileId":   "",
             "fileName": stream.GetName(),
-            "fileSize": stream.GetSize() / 1024,
+            "fileSize": stream.GetSize()/1024 + 1,
             "folderId": dstDir.GetID(),
             "md5":      etag,
             "type":     1,
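The ILanZou hunks above replace a fixed `offset=1` request with a loop that advances the offset until the server reports the last page. A generic, self-contained sketch of that pagination pattern is shown below; the `page` type and `fetchPage` callback are hypothetical stand-ins for the driver's ListResp and HTTP call.

package main

import "fmt"

// page mirrors the fields the loop relies on: the server reports which page it
// returned and how many pages exist in total.
type page struct {
    Offset    int
    TotalPage int
    Items     []string
}

// fetchAll keeps requesting pages, advancing the offset while the returned
// offset is still below the reported total page count.
func fetchAll(fetchPage func(offset int) (page, error)) ([]string, error) {
    var all []string
    offset := 1
    for {
        p, err := fetchPage(offset)
        if err != nil {
            return nil, err
        }
        all = append(all, p.Items...)
        if p.Offset < p.TotalPage {
            offset++
        } else {
            break
        }
    }
    return all, nil
}

func main() {
    data := [][]string{{"a", "b"}, {"c"}}
    res, _ := fetchAll(func(offset int) (page, error) {
        return page{Offset: offset, TotalPage: len(data), Items: data[offset-1]}, nil
    })
    fmt.Println(res) // [a b c]
}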
@@ -22,6 +22,7 @@ import (
     "github.com/alist-org/alist/v3/pkg/utils"
     "github.com/alist-org/alist/v3/server/common"
     "github.com/alist-org/times"
+    cp "github.com/otiai10/copy"
     log "github.com/sirupsen/logrus"
     _ "golang.org/x/image/webp"
 )
@@ -76,7 +77,7 @@ func (d *Local) Init(ctx context.Context) error {
     if d.thumbConcurrency == 0 {
         d.thumbTokenBucket = NewNopTokenBucket()
     } else {
-        d.thumbTokenBucket = NewStaticTokenBucket(d.thumbConcurrency)
+        d.thumbTokenBucket = NewStaticTokenBucketWithMigration(d.thumbTokenBucket, d.thumbConcurrency)
     }
     return nil
 }
@@ -241,11 +242,22 @@ func (d *Local) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
     if utils.IsSubPath(srcPath, dstPath) {
         return fmt.Errorf("the destination folder is a subfolder of the source folder")
     }
-    err := os.Rename(srcPath, dstPath)
-    if err != nil {
+    if err := os.Rename(srcPath, dstPath); err != nil && strings.Contains(err.Error(), "invalid cross-device link") {
+        // Handle cross-device file move in local driver
+        if err = d.Copy(ctx, srcObj, dstDir); err != nil {
+            return err
+        } else {
+            // Directly remove file without check recycle bin if successfully copied
+            if srcObj.IsDir() {
+                err = os.RemoveAll(srcObj.GetPath())
+            } else {
+                err = os.Remove(srcObj.GetPath())
+            }
+            return err
+        }
+    } else {
         return err
     }
-    return nil
 }
 
 func (d *Local) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
@@ -258,22 +270,18 @@ func (d *Local) Rename(ctx context.Context, srcObj model.Obj, newName string) er
     return nil
 }
 
-func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+func (d *Local) Copy(_ context.Context, srcObj, dstDir model.Obj) error {
     srcPath := srcObj.GetPath()
     dstPath := filepath.Join(dstDir.GetPath(), srcObj.GetName())
     if utils.IsSubPath(srcPath, dstPath) {
         return fmt.Errorf("the destination folder is a subfolder of the source folder")
     }
-    var err error
-    if srcObj.IsDir() {
-        err = utils.CopyDir(srcPath, dstPath)
-    } else {
-        err = utils.CopyFile(srcPath, dstPath)
-    }
-    if err != nil {
-        return err
-    }
-    return nil
+    // Copy using otiai10/copy to perform more secure & efficient copy
+    return cp.Copy(srcPath, dstPath, cp.Options{
+        Sync:          true, // Sync file to disk after copy, may have performance penalty in filesystem such as ZFS
+        PreserveTimes: true,
+        NumOfWorkers:  0, // Serialized copy without using goroutine
+    })
 }
 
 func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
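The Local driver change above makes Move fall back to copy-then-delete when os.Rename fails across filesystems. A self-contained sketch of the same pattern is below, using os.Rename plus otiai10/copy as in the hunk; the error-string check mirrors the hunk, though matching syscall.EXDEV would be an alternative, and the function and CLI wrapper are illustrative only.

package main

import (
    "fmt"
    "os"
    "path/filepath"
    "strings"

    cp "github.com/otiai10/copy"
)

// moveAny renames src into dstDir; when the rename fails because the paths live
// on different devices, it copies the tree (preserving timestamps) and then
// removes the source.
func moveAny(src, dstDir string) error {
    dst := filepath.Join(dstDir, filepath.Base(src))
    err := os.Rename(src, dst)
    if err == nil || !strings.Contains(err.Error(), "invalid cross-device link") {
        return err
    }
    // Cross-device: copy first, then delete the original.
    if err := cp.Copy(src, dst, cp.Options{Sync: true, PreserveTimes: true}); err != nil {
        return err
    }
    return os.RemoveAll(src)
}

func main() {
    if len(os.Args) == 3 {
        if err := moveAny(os.Args[1], os.Args[2]); err != nil {
            fmt.Println("move failed:", err)
        }
    }
}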
@@ -23,6 +23,38 @@ func NewStaticTokenBucket(size int) StaticTokenBucket {
     return StaticTokenBucket{bucket: bucket}
 }
 
+func NewStaticTokenBucketWithMigration(oldBucket TokenBucket, size int) StaticTokenBucket {
+    if oldBucket != nil {
+        oldStaticBucket, ok := oldBucket.(StaticTokenBucket)
+        if ok {
+            oldSize := cap(oldStaticBucket.bucket)
+            migrateSize := oldSize
+            if size < migrateSize {
+                migrateSize = size
+            }
+
+            bucket := make(chan struct{}, size)
+            for range size - migrateSize {
+                bucket <- struct{}{}
+            }
+
+            if migrateSize != 0 {
+                go func() {
+                    for range migrateSize {
+                        <-oldStaticBucket.bucket
+                        bucket <- struct{}{}
+                    }
+                    close(oldStaticBucket.bucket)
+                }()
+            }
+            return StaticTokenBucket{bucket: bucket}
+        }
+    }
+    return NewStaticTokenBucket(size)
+}
+
+// Take channel maybe closed when local driver is modified.
+// don't call Put method after the channel is closed.
 func (b StaticTokenBucket) Take() <-chan struct{} {
     return b.bucket
 }
@@ -35,8 +67,10 @@ func (b StaticTokenBucket) Do(ctx context.Context, f func() error) error {
     select {
     case <-ctx.Done():
         return ctx.Err()
-    case <-b.bucket:
-        defer b.Put()
+    case _, ok := <-b.Take():
+        if ok {
+            defer b.Put()
+        }
     }
     return f()
 }
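NewStaticTokenBucketWithMigration lets the Local driver resize its thumbnail concurrency limit at runtime: free slots in the new bucket are created immediately, while slots still held by in-flight work are drained from the old channel in a goroutine before the old channel is closed. A small usage sketch of this bucket API is given below; it lives in the same package as the code above, and the job function is a hypothetical stand-in for thumbnail generation.

package local

import (
    "context"
    "fmt"
)

// throttleSketch runs each job through the bucket so at most `limit` jobs hold
// a token at any time; Do blocks until a token is free or ctx is cancelled.
func throttleSketch(ctx context.Context, paths []string, limit int, job func(string) error) error {
    bucket := NewStaticTokenBucket(limit)
    for _, p := range paths {
        if err := bucket.Do(ctx, func() error { return job(p) }); err != nil {
            return err
        }
    }
    return nil
}

// resizeSketch shows the migration path used in Init on re-configuration:
// tokens currently held by running work are handed over before the old
// channel is closed, so in-flight jobs are never double-counted.
func resizeSketch(old TokenBucket, newSize int) StaticTokenBucket {
    b := NewStaticTokenBucketWithMigration(old, newSize)
    fmt.Println("token bucket resized to", newSize)
    return b
}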
@@ -91,8 +91,8 @@ func (d *PikPak) Init(ctx context.Context) (err error) {
         ClientID:     d.ClientID,
         ClientSecret: d.ClientSecret,
         Endpoint: oauth2.Endpoint{
-            AuthURL:   "https://user.mypikpak.com/v1/auth/signin",
-            TokenURL:  "https://user.mypikpak.com/v1/auth/token",
+            AuthURL:   "https://user.mypikpak.net/v1/auth/signin",
+            TokenURL:  "https://user.mypikpak.net/v1/auth/token",
             AuthStyle: oauth2.AuthStyleInParams,
         },
     }
@@ -124,7 +124,7 @@ func (d *PikPak) Init(ctx context.Context) (err error) {
     }
 
     // get the CaptchaToken
-    err = d.RefreshCaptchaTokenAtLogin(GetAction(http.MethodGet, "https://api-drive.mypikpak.com/drive/v1/files"), d.Common.GetUserID())
+    err = d.RefreshCaptchaTokenAtLogin(GetAction(http.MethodGet, "https://api-drive.mypikpak.net/drive/v1/files"), d.Common.GetUserID())
     if err != nil {
         return err
     }
@@ -174,7 +174,7 @@ func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
     if !d.DisableMediaLink {
         queryParams["usage"] = "CACHE"
     }
-    _, err := d.request(fmt.Sprintf("https://api-drive.mypikpak.com/drive/v1/files/%s", file.GetID()),
+    _, err := d.request(fmt.Sprintf("https://api-drive.mypikpak.net/drive/v1/files/%s", file.GetID()),
         http.MethodGet, func(req *resty.Request) {
             req.SetQueryParams(queryParams)
         }, &resp)
@@ -200,7 +200,7 @@ func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
 }
 
 func (d *PikPak) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodPost, func(req *resty.Request) {
         req.SetBody(base.Json{
             "kind":      "drive#folder",
             "parent_id": parentDir.GetID(),
@@ -211,7 +211,7 @@ func (d *PikPak) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
 }
 
 func (d *PikPak) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchMove", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files:batchMove", http.MethodPost, func(req *resty.Request) {
         req.SetBody(base.Json{
             "ids": []string{srcObj.GetID()},
             "to": base.Json{
@@ -223,7 +223,7 @@ func (d *PikPak) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
 }
 
 func (d *PikPak) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files/"+srcObj.GetID(), http.MethodPatch, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files/"+srcObj.GetID(), http.MethodPatch, func(req *resty.Request) {
         req.SetBody(base.Json{
             "name": newName,
         })
@@ -232,7 +232,7 @@ func (d *PikPak) Rename(ctx context.Context, srcObj model.Obj, newName string) e
 }
 
 func (d *PikPak) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchCopy", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files:batchCopy", http.MethodPost, func(req *resty.Request) {
         req.SetBody(base.Json{
             "ids": []string{srcObj.GetID()},
             "to": base.Json{
@@ -244,7 +244,7 @@ func (d *PikPak) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
 }
 
 func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files:batchTrash", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files:batchTrash", http.MethodPost, func(req *resty.Request) {
         req.SetBody(base.Json{
             "ids": []string{obj.GetID()},
         })
@@ -268,7 +268,7 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     }
 
     var resp UploadTaskData
-    res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
+    res, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodPost, func(req *resty.Request) {
         req.SetBody(base.Json{
             "kind": "drive#file",
             "name": stream.GetName(),
@@ -292,9 +292,9 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
 
     params := resp.Resumable.Params
     //endpoint := strings.Join(strings.Split(params.Endpoint, ".")[1:], ".")
-    // web uploads return the endpoint `mypikpak.com`; android uploads return `vip-lixian-07.mypikpak.com`
+    // web uploads return the endpoint `mypikpak.net`; android uploads return `vip-lixian-07.mypikpak.net`
     if d.Addition.Platform == "android" {
-        params.Endpoint = "mypikpak.com"
+        params.Endpoint = "mypikpak.net"
     }
 
     if stream.GetSize() <= 10*utils.MB { // file smaller than 10MB, fall back to normal upload mode
@@ -318,7 +318,7 @@ func (d *PikPak) OfflineDownload(ctx context.Context, fileUrl string, parentDir
     }
 
     var resp OfflineDownloadResp
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodPost, func(req *resty.Request) {
         req.SetBody(requestBody)
     }, &resp)
 
@@ -336,7 +336,7 @@ PHASE_TYPE_RUNNING, PHASE_TYPE_ERROR, PHASE_TYPE_COMPLETE, PHASE_TYPE_PENDING
 */
 func (d *PikPak) OfflineList(ctx context.Context, nextPageToken string, phase []string) ([]OfflineTask, error) {
     res := make([]OfflineTask, 0)
-    url := "https://api-drive.mypikpak.com/drive/v1/tasks"
+    url := "https://api-drive.mypikpak.net/drive/v1/tasks"
 
     if len(phase) == 0 {
         phase = []string{"PHASE_TYPE_RUNNING", "PHASE_TYPE_ERROR", "PHASE_TYPE_COMPLETE", "PHASE_TYPE_PENDING"}
@@ -377,7 +377,7 @@ func (d *PikPak) OfflineList(ctx context.Context, nextPageToken string, phase []
 }
 
 func (d *PikPak) DeleteOfflineTasks(ctx context.Context, taskIDs []string, deleteFiles bool) error {
-    url := "https://api-drive.mypikpak.com/drive/v1/tasks"
+    url := "https://api-drive.mypikpak.net/drive/v1/tasks"
     params := map[string]string{
         "task_ids":     strings.Join(taskIDs, ","),
         "delete_files": strconv.FormatBool(deleteFiles),
@@ -86,51 +86,51 @@ const (
     WebClientID      = "YUMx5nI8ZU8Ap8pm"
     WebClientSecret  = "dbw2OtmVEeuUvIptb1Coyg"
     WebClientVersion = "2.0.0"
-    WebPackageName   = "mypikpak.com"
+    WebPackageName   = "mypikpak.net"
     WebSdkVersion    = "8.0.3"
     PCClientID       = "YvtoWO6GNHiuCl7x"
     PCClientSecret   = "1NIH5R1IEe2pAxZE3hv3uA"
     PCClientVersion  = "undefined" // 2.5.6.4831
-    PCPackageName    = "mypikpak.com"
+    PCPackageName    = "mypikpak.net"
     PCSdkVersion     = "8.0.3"
 )
 
 var DlAddr = []string{
-    "dl-a10b-0621.mypikpak.com",
-    "dl-a10b-0622.mypikpak.com",
-    "dl-a10b-0623.mypikpak.com",
-    "dl-a10b-0624.mypikpak.com",
-    "dl-a10b-0625.mypikpak.com",
-    "dl-a10b-0858.mypikpak.com",
-    "dl-a10b-0859.mypikpak.com",
-    "dl-a10b-0860.mypikpak.com",
-    "dl-a10b-0861.mypikpak.com",
-    "dl-a10b-0862.mypikpak.com",
-    "dl-a10b-0863.mypikpak.com",
-    "dl-a10b-0864.mypikpak.com",
-    "dl-a10b-0865.mypikpak.com",
-    "dl-a10b-0866.mypikpak.com",
-    "dl-a10b-0867.mypikpak.com",
-    "dl-a10b-0868.mypikpak.com",
-    "dl-a10b-0869.mypikpak.com",
-    "dl-a10b-0870.mypikpak.com",
-    "dl-a10b-0871.mypikpak.com",
-    "dl-a10b-0872.mypikpak.com",
-    "dl-a10b-0873.mypikpak.com",
-    "dl-a10b-0874.mypikpak.com",
-    "dl-a10b-0875.mypikpak.com",
-    "dl-a10b-0876.mypikpak.com",
-    "dl-a10b-0877.mypikpak.com",
-    "dl-a10b-0878.mypikpak.com",
-    "dl-a10b-0879.mypikpak.com",
-    "dl-a10b-0880.mypikpak.com",
-    "dl-a10b-0881.mypikpak.com",
-    "dl-a10b-0882.mypikpak.com",
-    "dl-a10b-0883.mypikpak.com",
-    "dl-a10b-0884.mypikpak.com",
-    "dl-a10b-0885.mypikpak.com",
-    "dl-a10b-0886.mypikpak.com",
-    "dl-a10b-0887.mypikpak.com",
+    "dl-a10b-0621.mypikpak.net",
+    "dl-a10b-0622.mypikpak.net",
+    "dl-a10b-0623.mypikpak.net",
+    "dl-a10b-0624.mypikpak.net",
+    "dl-a10b-0625.mypikpak.net",
+    "dl-a10b-0858.mypikpak.net",
+    "dl-a10b-0859.mypikpak.net",
+    "dl-a10b-0860.mypikpak.net",
+    "dl-a10b-0861.mypikpak.net",
+    "dl-a10b-0862.mypikpak.net",
+    "dl-a10b-0863.mypikpak.net",
+    "dl-a10b-0864.mypikpak.net",
+    "dl-a10b-0865.mypikpak.net",
+    "dl-a10b-0866.mypikpak.net",
+    "dl-a10b-0867.mypikpak.net",
+    "dl-a10b-0868.mypikpak.net",
+    "dl-a10b-0869.mypikpak.net",
+    "dl-a10b-0870.mypikpak.net",
+    "dl-a10b-0871.mypikpak.net",
+    "dl-a10b-0872.mypikpak.net",
+    "dl-a10b-0873.mypikpak.net",
+    "dl-a10b-0874.mypikpak.net",
+    "dl-a10b-0875.mypikpak.net",
+    "dl-a10b-0876.mypikpak.net",
+    "dl-a10b-0877.mypikpak.net",
+    "dl-a10b-0878.mypikpak.net",
+    "dl-a10b-0879.mypikpak.net",
+    "dl-a10b-0880.mypikpak.net",
+    "dl-a10b-0881.mypikpak.net",
+    "dl-a10b-0882.mypikpak.net",
+    "dl-a10b-0883.mypikpak.net",
+    "dl-a10b-0884.mypikpak.net",
+    "dl-a10b-0885.mypikpak.net",
+    "dl-a10b-0886.mypikpak.net",
+    "dl-a10b-0887.mypikpak.net",
 }
 
 func (d *PikPak) login() error {
@@ -139,7 +139,7 @@ func (d *PikPak) login() error {
         return errors.New("username or password is empty")
     }
 
-    url := "https://user.mypikpak.com/v1/auth/signin"
+    url := "https://user.mypikpak.net/v1/auth/signin"
     // use the CaptchaToken filled in by the user (the verified captcha_token)
     if d.GetCaptchaToken() == "" {
         if err := d.RefreshCaptchaTokenInLogin(GetAction(http.MethodPost, url), d.Username); err != nil {
@@ -169,7 +169,7 @@ func (d *PikPak) login() error {
 }
 
 func (d *PikPak) refreshToken(refreshToken string) error {
-    url := "https://user.mypikpak.com/v1/auth/token"
+    url := "https://user.mypikpak.net/v1/auth/token"
     var e ErrResp
     res, err := base.RestyClient.SetRetryCount(1).R().SetError(&e).
         SetHeader("user-agent", "").SetBody(base.Json{
@@ -307,7 +307,7 @@ func (d *PikPak) getFiles(id string) ([]File, error) {
             "page_token": pageToken,
         }
         var resp Files
-        _, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodGet, func(req *resty.Request) {
+        _, err := d.request("https://api-drive.mypikpak.net/drive/v1/files", http.MethodGet, func(req *resty.Request) {
            req.SetQueryParams(query)
         }, &resp)
         if err != nil {
@@ -473,7 +473,7 @@ func (d *PikPak) refreshCaptchaToken(action string, metas map[string]string) err
     }
     var e ErrResp
     var resp CaptchaTokenResponse
-    _, err := d.request("https://user.mypikpak.com/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://user.mypikpak.net/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
         req.SetError(&e).SetBody(param).SetQueryParam("client_id", d.ClientID)
     }, &resp)
@@ -80,7 +80,7 @@ func (d *PikPakShare) Init(ctx context.Context) error {
     }
 
     // get the CaptchaToken
-    err := d.RefreshCaptchaToken(GetAction(http.MethodGet, "https://api-drive.mypikpak.com/drive/v1/share:batch_file_info"), "")
+    err := d.RefreshCaptchaToken(GetAction(http.MethodGet, "https://api-drive.mypikpak.net/drive/v1/share:batch_file_info"), "")
     if err != nil {
         return err
     }
@@ -113,7 +113,7 @@ func (d *PikPakShare) Link(ctx context.Context, file model.Obj, args model.LinkA
         "file_id":         file.GetID(),
         "pass_code_token": d.PassCodeToken,
     }
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/share/file_info", http.MethodGet, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/share/file_info", http.MethodGet, func(req *resty.Request) {
         req.SetQueryParams(query)
     }, &resp)
     if err != nil {
@@ -68,51 +68,51 @@ const (
     WebClientID      = "YUMx5nI8ZU8Ap8pm"
     WebClientSecret  = "dbw2OtmVEeuUvIptb1Coyg"
     WebClientVersion = "2.0.0"
-    WebPackageName   = "mypikpak.com"
+    WebPackageName   = "mypikpak.net"
     WebSdkVersion    = "8.0.3"
     PCClientID       = "YvtoWO6GNHiuCl7x"
     PCClientSecret   = "1NIH5R1IEe2pAxZE3hv3uA"
     PCClientVersion  = "undefined" // 2.5.6.4831
-    PCPackageName    = "mypikpak.com"
+    PCPackageName    = "mypikpak.net"
     PCSdkVersion     = "8.0.3"
 )
 
 var DlAddr = []string{
-    "dl-a10b-0621.mypikpak.com",
-    "dl-a10b-0622.mypikpak.com",
-    "dl-a10b-0623.mypikpak.com",
-    "dl-a10b-0624.mypikpak.com",
-    "dl-a10b-0625.mypikpak.com",
-    "dl-a10b-0858.mypikpak.com",
-    "dl-a10b-0859.mypikpak.com",
-    "dl-a10b-0860.mypikpak.com",
-    "dl-a10b-0861.mypikpak.com",
-    "dl-a10b-0862.mypikpak.com",
-    "dl-a10b-0863.mypikpak.com",
-    "dl-a10b-0864.mypikpak.com",
-    "dl-a10b-0865.mypikpak.com",
-    "dl-a10b-0866.mypikpak.com",
-    "dl-a10b-0867.mypikpak.com",
-    "dl-a10b-0868.mypikpak.com",
-    "dl-a10b-0869.mypikpak.com",
-    "dl-a10b-0870.mypikpak.com",
-    "dl-a10b-0871.mypikpak.com",
-    "dl-a10b-0872.mypikpak.com",
-    "dl-a10b-0873.mypikpak.com",
-    "dl-a10b-0874.mypikpak.com",
-    "dl-a10b-0875.mypikpak.com",
-    "dl-a10b-0876.mypikpak.com",
-    "dl-a10b-0877.mypikpak.com",
-    "dl-a10b-0878.mypikpak.com",
-    "dl-a10b-0879.mypikpak.com",
-    "dl-a10b-0880.mypikpak.com",
-    "dl-a10b-0881.mypikpak.com",
-    "dl-a10b-0882.mypikpak.com",
-    "dl-a10b-0883.mypikpak.com",
-    "dl-a10b-0884.mypikpak.com",
-    "dl-a10b-0885.mypikpak.com",
-    "dl-a10b-0886.mypikpak.com",
-    "dl-a10b-0887.mypikpak.com",
+    "dl-a10b-0621.mypikpak.net",
+    "dl-a10b-0622.mypikpak.net",
+    "dl-a10b-0623.mypikpak.net",
+    "dl-a10b-0624.mypikpak.net",
+    "dl-a10b-0625.mypikpak.net",
+    "dl-a10b-0858.mypikpak.net",
+    "dl-a10b-0859.mypikpak.net",
+    "dl-a10b-0860.mypikpak.net",
+    "dl-a10b-0861.mypikpak.net",
+    "dl-a10b-0862.mypikpak.net",
+    "dl-a10b-0863.mypikpak.net",
+    "dl-a10b-0864.mypikpak.net",
+    "dl-a10b-0865.mypikpak.net",
+    "dl-a10b-0866.mypikpak.net",
+    "dl-a10b-0867.mypikpak.net",
+    "dl-a10b-0868.mypikpak.net",
+    "dl-a10b-0869.mypikpak.net",
+    "dl-a10b-0870.mypikpak.net",
+    "dl-a10b-0871.mypikpak.net",
+    "dl-a10b-0872.mypikpak.net",
+    "dl-a10b-0873.mypikpak.net",
+    "dl-a10b-0874.mypikpak.net",
+    "dl-a10b-0875.mypikpak.net",
+    "dl-a10b-0876.mypikpak.net",
+    "dl-a10b-0877.mypikpak.net",
+    "dl-a10b-0878.mypikpak.net",
+    "dl-a10b-0879.mypikpak.net",
+    "dl-a10b-0880.mypikpak.net",
+    "dl-a10b-0881.mypikpak.net",
+    "dl-a10b-0882.mypikpak.net",
+    "dl-a10b-0883.mypikpak.net",
+    "dl-a10b-0884.mypikpak.net",
+    "dl-a10b-0885.mypikpak.net",
+    "dl-a10b-0886.mypikpak.net",
+    "dl-a10b-0887.mypikpak.net",
 }
 
 func (d *PikPakShare) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
@@ -159,7 +159,7 @@ func (d *PikPakShare) getSharePassToken() error {
         "limit": "100",
     }
     var resp ShareResp
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/share", http.MethodGet, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/share", http.MethodGet, func(req *resty.Request) {
         req.SetQueryParams(query)
     }, &resp)
     if err != nil {
@@ -187,7 +187,7 @@ func (d *PikPakShare) getFiles(id string) ([]File, error) {
         "pass_code_token": d.PassCodeToken,
     }
     var resp ShareResp
-    _, err := d.request("https://api-drive.mypikpak.com/drive/v1/share/detail", http.MethodGet, func(req *resty.Request) {
+    _, err := d.request("https://api-drive.mypikpak.net/drive/v1/share/detail", http.MethodGet, func(req *resty.Request) {
         req.SetQueryParams(query)
     }, &resp)
     if err != nil {
@@ -345,7 +345,7 @@ func (d *PikPakShare) refreshCaptchaToken(action string, metas map[string]string
     }
     var e ErrResp
     var resp CaptchaTokenResponse
-    _, err := d.request("https://user.mypikpak.com/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
+    _, err := d.request("https://user.mypikpak.net/v1/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
         req.SetError(&e).SetBody(param)
     }, &resp)
@ -10,7 +10,6 @@ import (
|
|||||||
"math"
|
"math"
|
||||||
stdpath "path"
|
stdpath "path"
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
@ -23,7 +22,9 @@ import (
|
|||||||
type Terabox struct {
|
type Terabox struct {
|
||||||
model.Storage
|
model.Storage
|
||||||
Addition
|
Addition
|
||||||
JsToken string
|
JsToken string
|
||||||
|
url_domain_prefix string
|
||||||
|
base_url string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *Terabox) Config() driver.Config {
|
func (d *Terabox) Config() driver.Config {
|
||||||
@ -36,6 +37,8 @@ func (d *Terabox) GetAddition() driver.Additional {
|
|||||||
|
|
||||||
func (d *Terabox) Init(ctx context.Context) error {
|
func (d *Terabox) Init(ctx context.Context) error {
|
||||||
var resp CheckLoginResp
|
var resp CheckLoginResp
|
||||||
|
d.base_url = "https://www.terabox.com"
|
||||||
|
d.url_domain_prefix = "jp"
|
||||||
_, err := d.get("/api/check/login", nil, &resp)
|
_, err := d.get("/api/check/login", nil, &resp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -71,7 +74,16 @@ func (d *Terabox) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Terabox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
func (d *Terabox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
_, err := d.create(stdpath.Join(parentDir.GetPath(), dirName), 0, 1, "", "")
|
params := map[string]string{
|
||||||
|
"a": "commit",
|
||||||
|
}
|
||||||
|
data := map[string]string{
|
||||||
|
"path": stdpath.Join(parentDir.GetPath(), dirName),
|
||||||
|
"isdir": "1",
|
||||||
|
"block_list": "[]",
|
||||||
|
}
|
||||||
|
res, err := d.post_form("/api/create", params, data, nil)
|
||||||
|
log.Debugln(string(res))
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -117,63 +129,61 @@ func (d *Terabox) Remove(ctx context.Context, obj model.Obj) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
tempFile, err := stream.CacheFullInTempFile()
|
resp, err := base.RestyClient.R().
|
||||||
|
SetContext(ctx).
|
||||||
|
Get("https://" + d.url_domain_prefix + "-data.terabox.com/rest/2.0/pcs/file?method=locateupload")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
var Default int64 = 4 * 1024 * 1024
|
var locateupload_resp LocateUploadResp
|
||||||
defaultByteData := make([]byte, Default)
|
err = utils.Json.Unmarshal(resp.Body(), &locateupload_resp)
|
||||||
count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
|
|
||||||
// cal md5
|
|
||||||
h1 := md5.New()
|
|
||||||
h2 := md5.New()
|
|
||||||
block_list := make([]string, 0)
|
|
||||||
left := stream.GetSize()
|
|
||||||
for i := 0; i < count; i++ {
|
|
||||||
byteSize := Default
|
|
||||||
var byteData []byte
|
|
||||||
if left < Default {
|
|
||||||
byteSize = left
|
|
||||||
byteData = make([]byte, byteSize)
|
|
||||||
} else {
|
|
||||||
byteData = defaultByteData
|
|
||||||
}
|
|
||||||
left -= byteSize
|
|
||||||
_, err = io.ReadFull(tempFile, byteData)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
h1.Write(byteData)
|
|
||||||
h2.Write(byteData)
|
|
||||||
block_list = append(block_list, fmt.Sprintf("\"%s\"", hex.EncodeToString(h2.Sum(nil))))
|
|
||||||
h2.Reset()
|
|
||||||
}
|
|
||||||
|
|
||||||
_, err = tempFile.Seek(0, io.SeekStart)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
log.Debugln(resp)
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
log.Debugln(locateupload_resp)
|
||||||
|
|
||||||
|
// precreate file
|
||||||
rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
|
rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
|
||||||
path := encodeURIComponent(rawPath)
|
path := encodeURIComponent(rawPath)
|
||||||
block_list_str := fmt.Sprintf("[%s]", strings.Join(block_list, ","))
|
|
||||||
data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&block_list=%s",
|
var precreateBlockListStr string
|
||||||
path, stream.GetSize(),
|
if stream.GetSize() > initialChunkSize {
|
||||||
block_list_str)
|
precreateBlockListStr = `["5910a591dd8fc18c32a8f3df4fdc1761","a5fc157d78e6ad1c7e114b056c92821e"]`
|
||||||
params := map[string]string{}
|
} else {
|
||||||
|
precreateBlockListStr = `["5910a591dd8fc18c32a8f3df4fdc1761"]`
|
||||||
|
}
|
||||||
|
|
||||||
|
data := map[string]string{
|
||||||
|
"path": rawPath,
|
||||||
|
"autoinit": "1",
|
||||||
|
"target_path": dstDir.GetPath(),
|
||||||
|
"block_list": precreateBlockListStr,
|
||||||
|
"local_mtime": strconv.FormatInt(stream.ModTime().Unix(), 10),
|
||||||
|
"file_limit_switch_v34": "true",
|
||||||
|
}
|
||||||
var precreateResp PrecreateResp
|
var precreateResp PrecreateResp
|
||||||
_, err = d.post("/api/precreate", params, data, &precreateResp)
|
log.Debugln(data)
|
||||||
|
res, err := d.post_form("/api/precreate", nil, data, &precreateResp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
log.Debugf("%+v", precreateResp)
|
log.Debugf("%+v", precreateResp)
|
||||||
if precreateResp.Errno != 0 {
|
if precreateResp.Errno != 0 {
|
||||||
|
log.Debugln(string(res))
|
||||||
return fmt.Errorf("[terabox] failed to precreate file, errno: %d", precreateResp.Errno)
|
return fmt.Errorf("[terabox] failed to precreate file, errno: %d", precreateResp.Errno)
|
||||||
}
|
}
|
||||||
if precreateResp.ReturnType == 2 {
|
if precreateResp.ReturnType == 2 {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
params = map[string]string{
|
|
||||||
|
// upload chunks
|
||||||
|
tempFile, err := stream.CacheFullInTempFile()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
params := map[string]string{
|
||||||
"method": "upload",
|
"method": "upload",
|
||||||
"path": path,
|
"path": path,
|
||||||
"uploadid": precreateResp.Uploadid,
|
"uploadid": precreateResp.Uploadid,
|
||||||
@ -182,25 +192,38 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
|||||||
"channel": "dubox",
|
"channel": "dubox",
|
||||||
"clienttype": "0",
|
"clienttype": "0",
|
||||||
}
|
}
|
||||||
left = stream.GetSize()
|
|
||||||
for i, partseq := range precreateResp.BlockList {
|
streamSize := stream.GetSize()
|
||||||
|
chunkSize := calculateChunkSize(streamSize)
|
||||||
|
chunkByteData := make([]byte, chunkSize)
|
||||||
|
count := int(math.Ceil(float64(streamSize) / float64(chunkSize)))
|
||||||
|
left := streamSize
|
||||||
|
uploadBlockList := make([]string, 0, count)
|
||||||
|
h := md5.New()
|
||||||
|
for partseq := 0; partseq < count; partseq++ {
|
||||||
if utils.IsCanceled(ctx) {
|
if utils.IsCanceled(ctx) {
|
||||||
return ctx.Err()
|
return ctx.Err()
|
||||||
}
|
}
|
||||||
byteSize := Default
|
byteSize := chunkSize
|
||||||
var byteData []byte
|
var byteData []byte
|
||||||
if left < Default {
|
if left >= chunkSize {
|
||||||
|
byteData = chunkByteData
|
||||||
|
} else {
|
||||||
byteSize = left
|
byteSize = left
|
||||||
byteData = make([]byte, byteSize)
|
byteData = make([]byte, byteSize)
|
||||||
} else {
|
|
||||||
byteData = defaultByteData
|
|
||||||
}
|
}
|
||||||
left -= byteSize
|
left -= byteSize
|
||||||
_, err = io.ReadFull(tempFile, byteData)
|
_, err = io.ReadFull(tempFile, byteData)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
u := "https://c-jp.terabox.com/rest/2.0/pcs/superfile2"
|
|
||||||
|
// calculate md5
|
||||||
|
h.Write(byteData)
|
||||||
|
uploadBlockList = append(uploadBlockList, hex.EncodeToString(h.Sum(nil)))
|
||||||
|
h.Reset()
|
||||||
|
|
||||||
|
u := "https://" + locateupload_resp.Host + "/rest/2.0/pcs/superfile2"
|
||||||
params["partseq"] = strconv.Itoa(partseq)
|
params["partseq"] = strconv.Itoa(partseq)
|
||||||
res, err := base.RestyClient.R().
|
res, err := base.RestyClient.R().
|
||||||
SetContext(ctx).
|
SetContext(ctx).
|
||||||
@ -212,12 +235,39 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
log.Debugln(res.String())
|
log.Debugln(res.String())
|
||||||
if len(precreateResp.BlockList) > 0 {
|
if count > 0 {
|
||||||
up(float64(i) * 100 / float64(len(precreateResp.BlockList)))
|
up(float64(partseq) * 100 / float64(count))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_, err = d.create(rawPath, stream.GetSize(), 0, precreateResp.Uploadid, block_list_str)
|
|
||||||
return err
|
// create file
|
||||||
|
params = map[string]string{
|
||||||
|
"isdir": "0",
|
||||||
|
"rtype": "1",
|
||||||
|
}
|
||||||
|
|
||||||
|
uploadBlockListStr, err := utils.Json.MarshalToString(uploadBlockList)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
data = map[string]string{
|
||||||
|
"path": rawPath,
|
||||||
|
"size": strconv.FormatInt(stream.GetSize(), 10),
|
||||||
|
"uploadid": precreateResp.Uploadid,
|
||||||
|
"target_path": dstDir.GetPath(),
|
||||||
|
"block_list": uploadBlockListStr,
|
||||||
|
"local_mtime": strconv.FormatInt(stream.ModTime().Unix(), 10),
|
||||||
|
}
|
||||||
|
var createResp CreateResp
|
||||||
|
res, err = d.post_form("/api/create", params, data, &createResp)
|
||||||
|
log.Debugln(string(res))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if createResp.Errno != 0 {
|
||||||
|
return fmt.Errorf("[terabox] failed to create file, errno: %d", createResp.Errno)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
var _ driver.Driver = (*Terabox)(nil)
|
var _ driver.Driver = (*Terabox)(nil)
|
||||||
|
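The rewritten Put above calls calculateChunkSize, which is not part of the hunks shown here; only the constants initialChunkSize (4MB) and initialSizeThreshold (4GB) appear in the util.go diff below. Purely as an assumption of what such a helper might do, here is a hedged sketch that doubles the chunk size as the stream size crosses successive thresholds, so very large uploads use fewer parts.

package terabox

// calculateChunkSizeSketch is a hypothetical reconstruction, not the actual
// implementation from this commit: grow the chunk size in powers of two while
// the stream size exceeds the current size threshold.
func calculateChunkSizeSketch(streamSize int64) int64 {
    chunkSize := initialChunkSize
    sizeThreshold := initialSizeThreshold
    for streamSize > sizeThreshold {
        chunkSize <<= 1
        sizeThreshold <<= 1
    }
    return chunkSize
}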
@@ -95,3 +95,11 @@ type PrecreateResp struct {
 type CheckLoginResp struct {
     Errno int `json:"errno"`
 }
+
+type LocateUploadResp struct {
+    Host string `json:"host"`
+}
+
+type CreateResp struct {
+    Errno int `json:"errno"`
+}
@ -14,6 +14,12 @@ import (
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/go-resty/resty/v2"
+	log "github.com/sirupsen/logrus"
 )

+const (
+	initialChunkSize     int64 = 4 << 20 // 4MB
+	initialSizeThreshold int64 = 4 << 30 // 4GB
+)
+
 func getStrBetween(raw, start, end string) string {
@ -28,11 +34,11 @@ func getStrBetween(raw, start, end string) string {
 }

 func (d *Terabox) resetJsToken() error {
-	u := "https://www.terabox.com/main"
+	u := d.base_url
 	res, err := base.RestyClient.R().SetHeaders(map[string]string{
 		"Cookie":           d.Cookie,
 		"Accept":           "application/json, text/plain, */*",
-		"Referer":          "https://www.terabox.com/",
+		"Referer":          d.base_url,
 		"User-Agent":       base.UserAgent,
 		"X-Requested-With": "XMLHttpRequest",
 	}).Get(u)
@ -48,12 +54,12 @@ func (d *Terabox) resetJsToken() error {
 	return nil
 }

-func (d *Terabox) request(furl string, method string, callback base.ReqCallback, resp interface{}, noRetry ...bool) ([]byte, error) {
+func (d *Terabox) request(rurl string, method string, callback base.ReqCallback, resp interface{}, noRetry ...bool) ([]byte, error) {
 	req := base.RestyClient.R()
 	req.SetHeaders(map[string]string{
 		"Cookie":           d.Cookie,
 		"Accept":           "application/json, text/plain, */*",
-		"Referer":          "https://www.terabox.com/",
+		"Referer":          d.base_url,
 		"User-Agent":       base.UserAgent,
 		"X-Requested-With": "XMLHttpRequest",
 	})
@ -70,7 +76,7 @@ func (d *Terabox) request(furl string, method string, callback base.ReqCallback,
 	if resp != nil {
 		req.SetResult(resp)
 	}
-	res, err := req.Execute(method, furl)
+	res, err := req.Execute(method, d.base_url+rurl)
 	if err != nil {
 		return nil, err
 	}
@ -82,14 +88,24 @@ func (d *Terabox) request(furl string, method string, callback base.ReqCallback,
 			return nil, err
 		}
 		if !utils.IsBool(noRetry...) {
-			return d.request(furl, method, callback, resp, true)
+			return d.request(rurl, method, callback, resp, true)
+		}
+	} else if errno == -6 {
+		header := res.Header()
+		log.Debugln(header)
+		urlDomainPrefix := header.Get("Url-Domain-Prefix")
+		if len(urlDomainPrefix) > 0 {
+			d.url_domain_prefix = urlDomainPrefix
+			d.base_url = "https://" + d.url_domain_prefix + ".terabox.com"
+			log.Debugln("Redirect base_url to", d.base_url)
+			return d.request(rurl, method, callback, resp, noRetry...)
 		}
 	}
 	return res.Body(), nil
 }

 func (d *Terabox) get(pathname string, params map[string]string, resp interface{}) ([]byte, error) {
-	return d.request("https://www.terabox.com"+pathname, http.MethodGet, func(req *resty.Request) {
+	return d.request(pathname, http.MethodGet, func(req *resty.Request) {
 		if params != nil {
 			req.SetQueryParams(params)
 		}
@ -97,7 +113,7 @@ func (d *Terabox) get(pathname string, params map[string]string, resp interface{
 }

 func (d *Terabox) post(pathname string, params map[string]string, data interface{}, resp interface{}) ([]byte, error) {
-	return d.request("https://www.terabox.com"+pathname, http.MethodPost, func(req *resty.Request) {
+	return d.request(pathname, http.MethodPost, func(req *resty.Request) {
 		if params != nil {
 			req.SetQueryParams(params)
 		}
@ -105,6 +121,15 @@ func (d *Terabox) post(pathname string, params map[string]string, data interface
 	}, resp)
 }

+func (d *Terabox) post_form(pathname string, params map[string]string, data map[string]string, resp interface{}) ([]byte, error) {
+	return d.request(pathname, http.MethodPost, func(req *resty.Request) {
+		if params != nil {
+			req.SetQueryParams(params)
+		}
+		req.SetFormData(data)
+	}, resp)
+}
+
 func (d *Terabox) getFiles(dir string) ([]File, error) {
 	page := 1
 	num := 100
@ -237,17 +262,24 @@ func (d *Terabox) manage(opera string, filelist interface{}) ([]byte, error) {
 	return d.post("/api/filemanager", params, data, nil)
 }

-func (d *Terabox) create(path string, size int64, isdir int, uploadid, block_list string) ([]byte, error) {
-	params := map[string]string{}
-	data := fmt.Sprintf("path=%s&size=%d&isdir=%d", encodeURIComponent(path), size, isdir)
-	if uploadid != "" {
-		data += fmt.Sprintf("&uploadid=%s&block_list=%s", uploadid, block_list)
-	}
-	return d.post("/api/create", params, data, nil)
-}
-
 func encodeURIComponent(str string) string {
 	r := url.QueryEscape(str)
 	r = strings.ReplaceAll(r, "+", "%20")
 	return r
 }
+
+func calculateChunkSize(streamSize int64) int64 {
+	chunkSize := initialChunkSize
+	sizeThreshold := initialSizeThreshold
+
+	if streamSize < chunkSize {
+		return streamSize
+	}
+
+	for streamSize > sizeThreshold {
+		chunkSize <<= 1
+		sizeThreshold <<= 1
+	}
+
+	return chunkSize
+}
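The chunk-size doubling above is easiest to see with a concrete number. The standalone sketch below (illustrative only, not part of the diff; it simply reuses the same constants and helper) walks a 10 GB upload through the loop: the threshold doubles 4 GB -> 8 GB -> 16 GB, so the chunk doubles 4 MB -> 8 MB -> 16 MB, and the loop stops once the threshold exceeds the stream size.

    package main

    import "fmt"

    const (
        initialChunkSize     int64 = 4 << 20 // 4MB
        initialSizeThreshold int64 = 4 << 30 // 4GB
    )

    // calculateChunkSize mirrors the helper added in the diff above.
    func calculateChunkSize(streamSize int64) int64 {
        chunkSize := initialChunkSize
        sizeThreshold := initialSizeThreshold
        if streamSize < chunkSize {
            return streamSize
        }
        for streamSize > sizeThreshold {
            chunkSize <<= 1
            sizeThreshold <<= 1
        }
        return chunkSize
    }

    func main() {
        // 10 GB stream: threshold grows 4GB->8GB->16GB, chunk grows 4MB->8MB->16MB.
        fmt.Println(calculateChunkSize(10<<30) >> 20) // prints 16 (MB per chunk)
    }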
@ -55,7 +55,9 @@ func (d *Vtencent) Init(ctx context.Context) error {
 }

 func (d *Vtencent) Drop(ctx context.Context) error {
-	d.cron.Stop()
+	if d.cron != nil {
+		d.cron.Stop()
+	}
 	return nil
 }
6	go.mod
@ -33,6 +33,7 @@ require (
 	github.com/golang-jwt/jwt/v4 v4.5.0
 	github.com/google/uuid v1.6.0
 	github.com/gorilla/websocket v1.5.3
+	github.com/hekmon/transmissionrpc/v3 v3.0.0
 	github.com/hirochachacha/go-smb2 v1.1.0
 	github.com/ipfs/go-ipfs-api v0.7.0
 	github.com/jlaffaye/ftp v0.2.0
@ -55,7 +56,7 @@ require (
 	github.com/u2takey/ffmpeg-go v0.5.0
 	github.com/upyun/go-sdk/v3 v3.0.4
 	github.com/winfsp/cgofuse v1.5.1-0.20230130140708-f87f5db493b5
-	github.com/xhofe/tache v0.1.2
+	github.com/xhofe/tache v0.1.3
 	github.com/xhofe/wopan-sdk-go v0.1.3
 	github.com/zzzhr1990/go-common-entity v0.0.0-20221216044934-fd1c571e3a22
 	golang.org/x/crypto v0.27.0
@ -82,6 +83,8 @@ require (
 	github.com/cloudwego/base64x v0.1.4 // indirect
 	github.com/cloudwego/iasm v0.2.0 // indirect
 	github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect
+	github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
+	github.com/hekmon/cunits/v2 v2.1.0 // indirect
 	github.com/ipfs/boxo v0.12.0 // indirect
 	github.com/jackc/puddle/v2 v2.2.1 // indirect
 )
@ -186,6 +189,7 @@ require (
 	github.com/multiformats/go-multihash v0.2.3 // indirect
 	github.com/multiformats/go-multistream v0.4.1 // indirect
 	github.com/multiformats/go-varint v0.0.7 // indirect
+	github.com/otiai10/copy v1.14.0
 	github.com/pelletier/go-toml/v2 v2.2.2 // indirect
 	github.com/pierrec/lz4/v4 v4.1.18 // indirect
 	github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
10	go.sum
@ -240,11 +240,17 @@ github.com/gorilla/websocket v1.5.3/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/ad
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
 github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
+github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ=
+github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48=
 github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
 github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
 github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
 github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
 github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
+github.com/hekmon/cunits/v2 v2.1.0 h1:k6wIjc4PlacNOHwKEMBgWV2/c8jyD4eRMs5mR1BBhI0=
+github.com/hekmon/cunits/v2 v2.1.0/go.mod h1:9r1TycXYXaTmEWlAIfFV8JT+Xo59U96yUJAYHxzii2M=
+github.com/hekmon/transmissionrpc/v3 v3.0.0 h1:0Fb11qE0IBh4V4GlOwHNYpqpjcYDp5GouolwrpmcUDQ=
+github.com/hekmon/transmissionrpc/v3 v3.0.0/go.mod h1:38SlNhFzinVUuY87wGj3acOmRxeYZAZfrj6Re7UgCDg=
 github.com/hirochachacha/go-smb2 v1.1.0 h1:b6hs9qKIql9eVXAiN0M2wSFY5xnhbHAQoCwRKbaRTZI=
 github.com/hirochachacha/go-smb2 v1.1.0/go.mod h1:8F1A4d5EZzrGu5R7PU163UcMRDJQl4FtcxjBfsY8TZE=
 github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
@ -385,6 +391,8 @@ github.com/ncw/swift/v2 v2.0.3 h1:8R9dmgFIWs+RiVlisCEfiQiik1hjuR0JnOkLxaP9ihg=
 github.com/ncw/swift/v2 v2.0.3/go.mod h1:cbAO76/ZwcFrFlHdXPjaqWZ9R7Hdar7HpjRXBfbjigk=
 github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831 h1:K3T3eu4h5aYIOzUtLjN08L4Qt4WGaJONMgcaD0ayBJQ=
 github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831/go.mod h1:lSHD4lC4zlMl+zcoysdJcd5KFzsWwOD8BJbyg1Ws9Ng=
+github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
+github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
 github.com/panjf2000/ants/v2 v2.4.2/go.mod h1:f6F0NZVFsGCp5A7QW/Zj/m92atWwOkY0OIhFxRNFr4A=
 github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
 github.com/pelletier/go-toml/v2 v2.2.2 h1:aYUidT7k73Pcl9nb2gScu7NSrKCSHIDE89b3+6Wq+LM=
@ -506,6 +514,8 @@ github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25 h1:eDfebW/yfq9DtG9RO3K
 github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25/go.mod h1:fH4oNm5F9NfI5dLi0oIMtsLNKQOirUDbEMCIBb/7SU0=
 github.com/xhofe/tache v0.1.2 h1:pHrXlrWcbTb4G7hVUDW7Rc+YTUnLJvnLBrdktVE1Fqg=
 github.com/xhofe/tache v0.1.2/go.mod h1:iKumPFvywf30FRpAHHCt64G0JHLMzT0K+wyGedHsmTQ=
+github.com/xhofe/tache v0.1.3 h1:MipxzlljYX29E1YI/SLC7hVomVF+51iP1OUzlsuq1wE=
+github.com/xhofe/tache v0.1.3/go.mod h1:iKumPFvywf30FRpAHHCt64G0JHLMzT0K+wyGedHsmTQ=
 github.com/xhofe/wopan-sdk-go v0.1.3 h1:J58X6v+n25ewBZjb05pKOr7AWGohb+Rdll4CThGh6+A=
 github.com/xhofe/wopan-sdk-go v0.1.3/go.mod h1:dcY9yA28fnaoZPnXZiVTFSkcd7GnIPTpTIIlfSI5z5Q=
 github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
@ -131,22 +131,22 @@ func DefaultConfig() *Config {
 		TlsInsecureSkipVerify: true,
 		Tasks: TasksConfig{
 			Download: TaskConfig{
 				Workers:  5,
 				MaxRetry: 1,
-				TaskPersistant: true,
+				// TaskPersistant: true,
 			},
 			Transfer: TaskConfig{
 				Workers:  5,
 				MaxRetry: 2,
-				TaskPersistant: true,
+				// TaskPersistant: true,
 			},
 			Upload: TaskConfig{
 				Workers: 5,
 			},
 			Copy: TaskConfig{
 				Workers:  5,
 				MaxRetry: 2,
-				TaskPersistant: true,
+				// TaskPersistant: true,
 			},
 		},
 		Cors: Cors{
@ -54,11 +54,15 @@ const (
 	Aria2Uri    = "aria2_uri"
 	Aria2Secret = "aria2_secret"

+	// transmission
+	TransmissionUri      = "transmission_uri"
+	TransmissionSeedtime = "transmission_seedtime"
+
 	// single
 	Token         = "token"
 	IndexProgress = "index_progress"

-	//SSO
+	// SSO
 	SSOClientId     = "sso_client_id"
 	SSOClientSecret = "sso_client_secret"
 	SSOLoginEnabled = "sso_login_enabled"
@ -73,7 +77,7 @@ const (
 	SSODefaultPermission = "sso_default_permission"
 	SSOCompatibilityMode = "sso_compatibility_mode"

-	//ldap
+	// ldap
 	LdapLoginEnabled = "ldap_login_enabled"
 	LdapServer       = "ldap_server"
 	LdapManagerDN    = "ldap_manager_dn"
@ -84,7 +88,7 @@ const (
 	LdapDefaultDir = "ldap_default_dir"
 	LdapLoginTips  = "ldap_login_tips"

-	//s3
+	// s3
 	S3Buckets         = "s3_buckets"
 	S3AccessKeyId     = "s3_access_key_id"
 	S3SecretAccessKey = "s3_secret_access_key"
@ -97,7 +101,7 @@ const (
 const (
 	UNKNOWN = iota
 	FOLDER
-	//OFFICE
+	// OFFICE
 	VIDEO
 	AUDIO
 	TEXT
@ -11,13 +11,14 @@ import (
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/op"
 	"github.com/alist-org/alist/v3/internal/stream"
+	"github.com/alist-org/alist/v3/internal/task"
 	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/pkg/errors"
 	"github.com/xhofe/tache"
 )

 type CopyTask struct {
-	tache.Base
+	task.TaskWithCreator
 	Status     string `json:"-"` //don't save status to save space
 	SrcObjPath string `json:"src_path"`
 	DstDirPath string `json:"dst_path"`
@ -53,7 +54,7 @@ var CopyTaskManager *tache.Manager[*CopyTask]

 // Copy if in the same storage, call move method
 // if not, add copy task
-func _copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (tache.TaskWithInfo, error) {
+func _copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (task.TaskInfoWithCreator, error) {
 	srcStorage, srcObjActualPath, err := op.GetStorageAndActualPath(srcObjPath)
 	if err != nil {
 		return nil, errors.WithMessage(err, "failed get src storage")
@ -92,7 +93,11 @@ func _copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool
 		}
 	}
 	// not in the same storage
+	taskCreator, _ := ctx.Value("user").(*model.User) // taskCreator is nil when convert failed
 	t := &CopyTask{
+		TaskWithCreator: task.TaskWithCreator{
+			Creator: taskCreator,
+		},
 		srcStorage: srcStorage,
 		dstStorage: dstStorage,
 		SrcObjPath: srcObjActualPath,
@ -123,6 +128,9 @@ func copyBetween2Storages(t *CopyTask, srcStorage, dstStorage driver.Driver, src
 		srcObjPath := stdpath.Join(srcObjPath, obj.GetName())
 		dstObjPath := stdpath.Join(dstDirPath, srcObj.GetName())
 		CopyTaskManager.Add(&CopyTask{
+			TaskWithCreator: task.TaskWithCreator{
+				Creator: t.Creator,
+			},
 			srcStorage: srcStorage,
 			dstStorage: dstStorage,
 			SrcObjPath: srcObjPath,
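A note on the `ctx.Value("user")` lookup introduced above (the same pattern reappears in the upload and offline-download paths later in this diff): the HTTP handlers pass the *gin.Context straight through as a context.Context, so the type assertion recovers whatever the auth middleware stored under the "user" key; if nothing (or a value of another type) is there, the assertion simply fails and Creator stays nil. A minimal sketch of that fallback behaviour, using a plain context instead of gin for illustration:

    package main

    import (
        "context"
        "fmt"
    )

    type User struct{ Username string }

    // creatorFrom mirrors the diff's pattern: nil when the value is missing or mistyped.
    func creatorFrom(ctx context.Context) *User {
        u, _ := ctx.Value("user").(*User)
        return u
    }

    func main() {
        // string keys are used here only to match the upstream convention; go vet
        // would normally suggest a dedicated key type.
        withUser := context.WithValue(context.Background(), "user", &User{Username: "admin"})
        fmt.Println(creatorFrom(withUser).Username)    // admin
        fmt.Println(creatorFrom(context.Background())) // <nil>
    }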
@ -5,8 +5,8 @@ import (
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/op"
+	"github.com/alist-org/alist/v3/internal/task"
 	log "github.com/sirupsen/logrus"
-	"github.com/xhofe/tache"
 )

 // the param named path of functions in this package is a mount path
@ -69,7 +69,7 @@ func Move(ctx context.Context, srcPath, dstDirPath string, lazyCache ...bool) er
 	return err
 }

-func Copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (tache.TaskWithInfo, error) {
+func Copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (task.TaskInfoWithCreator, error) {
 	res, err := _copy(ctx, srcObjPath, dstDirPath, lazyCache...)
 	if err != nil {
 		log.Errorf("failed copy %s to %s: %+v", srcObjPath, dstDirPath, err)
@ -101,8 +101,8 @@ func PutDirectly(ctx context.Context, dstDirPath string, file model.FileStreamer
 	return err
 }

-func PutAsTask(dstDirPath string, file model.FileStreamer) (tache.TaskWithInfo, error) {
-	t, err := putAsTask(dstDirPath, file)
+func PutAsTask(ctx context.Context, dstDirPath string, file model.FileStreamer) (task.TaskInfoWithCreator, error) {
+	t, err := putAsTask(ctx, dstDirPath, file)
 	if err != nil {
 		log.Errorf("failed put %s: %+v", dstDirPath, err)
 	}
@ -7,12 +7,13 @@ import (
 	"github.com/alist-org/alist/v3/internal/errs"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/op"
+	"github.com/alist-org/alist/v3/internal/task"
 	"github.com/pkg/errors"
 	"github.com/xhofe/tache"
 )

 type UploadTask struct {
-	tache.Base
+	task.TaskWithCreator
 	storage          driver.Driver
 	dstDirActualPath string
 	file             model.FileStreamer
@ -33,7 +34,7 @@ func (t *UploadTask) Run() error {
 var UploadTaskManager *tache.Manager[*UploadTask]

 // putAsTask add as a put task and return immediately
-func putAsTask(dstDirPath string, file model.FileStreamer) (tache.TaskWithInfo, error) {
+func putAsTask(ctx context.Context, dstDirPath string, file model.FileStreamer) (task.TaskInfoWithCreator, error) {
 	storage, dstDirActualPath, err := op.GetStorageAndActualPath(dstDirPath)
 	if err != nil {
 		return nil, errors.WithMessage(err, "failed get storage")
@ -49,7 +50,11 @@ func putAsTask(dstDirPath string, file model.FileStreamer) (tache.TaskWithInfo,
 		//file.SetReader(tempFile)
 		//file.SetTmpFile(tempFile)
 	}
+	taskCreator, _ := ctx.Value("user").(*model.User) // taskCreator is nil when convert failed
 	t := &UploadTask{
+		TaskWithCreator: task.TaskWithCreator{
+			Creator: taskCreator,
+		},
 		storage:          storage,
 		dstDirActualPath: dstDirActualPath,
 		file:             file,
@ -6,4 +6,5 @@ import (
 	_ "github.com/alist-org/alist/v3/internal/offline_download/http"
 	_ "github.com/alist-org/alist/v3/internal/offline_download/pikpak"
 	_ "github.com/alist-org/alist/v3/internal/offline_download/qbit"
+	_ "github.com/alist-org/alist/v3/internal/offline_download/transmission"
 )
@ -2,6 +2,8 @@ package tool

 import (
 	"context"
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/internal/task"
 	"path/filepath"

 	"github.com/alist-org/alist/v3/internal/conf"
@ -9,7 +11,6 @@ import (
 	"github.com/alist-org/alist/v3/internal/op"
 	"github.com/google/uuid"
 	"github.com/pkg/errors"
-	"github.com/xhofe/tache"
 )

 type DeletePolicy string
@ -28,7 +29,7 @@ type AddURLArgs struct {
 	DeletePolicy DeletePolicy
 }

-func AddURL(ctx context.Context, args *AddURLArgs) (tache.TaskWithInfo, error) {
+func AddURL(ctx context.Context, args *AddURLArgs) (task.TaskInfoWithCreator, error) {
 	// get tool
 	tool, err := Tools.Get(args.Tool)
 	if err != nil {
@ -77,8 +78,12 @@ func AddURL(ctx context.Context, args *AddURLArgs) (tache.TaskWithInfo, error) {
 		// 防止将下载好的文件删除 (keep already-downloaded files from being deleted)
 		deletePolicy = DeleteNever
 	}

+	taskCreator, _ := ctx.Value("user").(*model.User) // taskCreator is nil when convert failed
 	t := &DownloadTask{
+		TaskWithCreator: task.TaskWithCreator{
+			Creator: taskCreator,
+		},
 		Url:        args.URL,
 		DstDirPath: args.DstDirPath,
 		TempDir:    tempDir,
@ -7,13 +7,14 @@ import (
 	"github.com/alist-org/alist/v3/internal/conf"
 	"github.com/alist-org/alist/v3/internal/errs"
 	"github.com/alist-org/alist/v3/internal/setting"
+	"github.com/alist-org/alist/v3/internal/task"
 	"github.com/pkg/errors"
 	log "github.com/sirupsen/logrus"
 	"github.com/xhofe/tache"
 )

 type DownloadTask struct {
-	tache.Base
+	task.TaskWithCreator
 	Url        string `json:"url"`
 	DstDirPath string `json:"dst_dir_path"`
 	TempDir    string `json:"temp_dir"`
@ -101,6 +102,19 @@ outer:
 			}
 		}
 	}
+
+	if t.tool.Name() == "transmission" {
+		// hack for transmission
+		seedTime := setting.GetInt(conf.TransmissionSeedtime, 0)
+		if seedTime >= 0 {
+			t.Status = "offline download completed, waiting for seeding"
+			<-time.After(time.Minute * time.Duration(seedTime))
+			err := t.tool.Remove(t)
+			if err != nil {
+				log.Errorln(err.Error())
+			}
+		}
+	}
 	return nil
 }
@ -158,6 +172,9 @@ func (t *DownloadTask) Complete() error {
 	for i := range files {
 		file := files[i]
 		TransferTaskManager.Add(&TransferTask{
+			TaskWithCreator: task.TaskWithCreator{
+				Creator: t.Creator,
+			},
 			file:       file,
 			DstDirPath: t.DstDirPath,
 			TempDir:    t.TempDir,
@ -8,6 +8,7 @@ import (
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/op"
 	"github.com/alist-org/alist/v3/internal/stream"
+	"github.com/alist-org/alist/v3/internal/task"
 	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/pkg/errors"
 	log "github.com/sirupsen/logrus"
@ -15,7 +16,7 @@ import (
 )

 type TransferTask struct {
-	tache.Base
+	task.TaskWithCreator
 	FileDir    string `json:"file_dir"`
 	DstDirPath string `json:"dst_dir_path"`
 	TempDir    string `json:"temp_dir"`
176	internal/offline_download/transmission/client.go	Normal file
@ -0,0 +1,176 @@
+package transmission
+
+import (
+	"bytes"
+	"context"
+	"encoding/base64"
+	"fmt"
+	"io"
+	"net/http"
+	"net/url"
+	"strconv"
+
+	"github.com/alist-org/alist/v3/internal/conf"
+	"github.com/alist-org/alist/v3/internal/errs"
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/internal/offline_download/tool"
+	"github.com/alist-org/alist/v3/internal/setting"
+	"github.com/hekmon/transmissionrpc/v3"
+	"github.com/pkg/errors"
+	log "github.com/sirupsen/logrus"
+)
+
+type Transmission struct {
+	client *transmissionrpc.Client
+}
+
+func (t *Transmission) Run(task *tool.DownloadTask) error {
+	return errs.NotSupport
+}
+
+func (t *Transmission) Name() string {
+	return "transmission"
+}
+
+func (t *Transmission) Items() []model.SettingItem {
+	// transmission settings
+	return []model.SettingItem{
+		{Key: conf.TransmissionUri, Value: "http://localhost:9091/transmission/rpc", Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+		{Key: conf.TransmissionSeedtime, Value: "0", Type: conf.TypeNumber, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+	}
+}
+
+func (t *Transmission) Init() (string, error) {
+	t.client = nil
+	uri := setting.GetStr(conf.TransmissionUri)
+	endpoint, err := url.Parse(uri)
+	if err != nil {
+		return "", errors.Wrap(err, "failed to init transmission client")
+	}
+	c, err := transmissionrpc.New(endpoint, nil)
+	if err != nil {
+		return "", errors.Wrap(err, "failed to init transmission client")
+	}
+
+	ok, serverVersion, serverMinimumVersion, err := c.RPCVersion(context.Background())
+	if err != nil {
+		return "", errors.Wrapf(err, "failed get transmission version")
+	}
+
+	if !ok {
+		return "", fmt.Errorf("remote transmission RPC version (v%d) is incompatible with the transmission library (v%d): remote needs at least v%d",
+			serverVersion, transmissionrpc.RPCVersion, serverMinimumVersion)
+	}
+
+	t.client = c
+	log.Infof("remote transmission RPC version (v%d) is compatible with our transmissionrpc library (v%d)\n",
+		serverVersion, transmissionrpc.RPCVersion)
+	log.Infof("using transmission version: %d", serverVersion)
+	return fmt.Sprintf("transmission version: %d", serverVersion), nil
+}
+
+func (t *Transmission) IsReady() bool {
+	return t.client != nil
+}
+
+func (t *Transmission) AddURL(args *tool.AddUrlArgs) (string, error) {
+	endpoint, err := url.Parse(args.Url)
+	if err != nil {
+		return "", errors.Wrap(err, "failed to parse transmission uri")
+	}
+
+	rpcPayload := transmissionrpc.TorrentAddPayload{
+		DownloadDir: &args.TempDir,
+	}
+	// http url for .torrent file
+	if endpoint.Scheme == "http" || endpoint.Scheme == "https" {
+		resp, err := http.Get(args.Url)
+		if err != nil {
+			return "", errors.Wrap(err, "failed to get .torrent file")
+		}
+		defer resp.Body.Close()
+		buffer := new(bytes.Buffer)
+		encoder := base64.NewEncoder(base64.StdEncoding, buffer)
+		// Stream file to the encoder
+		if _, err = io.Copy(encoder, resp.Body); err != nil {
+			return "", errors.Wrap(err, "can't copy file content into the base64 encoder")
+		}
+		// Flush last bytes
+		if err = encoder.Close(); err != nil {
+			return "", errors.Wrap(err, "can't flush last bytes of the base64 encoder")
+		}
+		// Get the string form
+		b64 := buffer.String()
+		rpcPayload.MetaInfo = &b64
+	} else { // magnet uri
+		rpcPayload.Filename = &args.Url
+	}
+
+	torrent, err := t.client.TorrentAdd(context.TODO(), rpcPayload)
+	if err != nil {
+		return "", err
+	}
+
+	if torrent.ID == nil {
+		return "", fmt.Errorf("failed get torrent ID")
+	}
+	gid := strconv.FormatInt(*torrent.ID, 10)
+	return gid, nil
+}
+
+func (t *Transmission) Remove(task *tool.DownloadTask) error {
+	gid, err := strconv.ParseInt(task.GID, 10, 64)
+	if err != nil {
+		return err
+	}
+	err = t.client.TorrentRemove(context.TODO(), transmissionrpc.TorrentRemovePayload{
+		IDs:             []int64{gid},
+		DeleteLocalData: false,
+	})
+	return err
+}
+
+func (t *Transmission) Status(task *tool.DownloadTask) (*tool.Status, error) {
+	gid, err := strconv.ParseInt(task.GID, 10, 64)
+	if err != nil {
+		return nil, err
+	}
+	infos, err := t.client.TorrentGetAllFor(context.TODO(), []int64{gid})
+	if err != nil {
+		return nil, err
+	}
+
+	if len(infos) < 1 {
+		return nil, fmt.Errorf("failed get status, wrong gid: %s", task.GID)
+	}
+	info := infos[0]
+
+	s := &tool.Status{
+		Completed: *info.IsFinished,
+		Err:       err,
+	}
+	s.Progress = *info.PercentDone * 100
+
+	switch *info.Status {
+	case transmissionrpc.TorrentStatusCheckWait,
+		transmissionrpc.TorrentStatusDownloadWait,
+		transmissionrpc.TorrentStatusCheck,
+		transmissionrpc.TorrentStatusDownload,
+		transmissionrpc.TorrentStatusIsolated:
+		s.Status = "[transmission] " + info.Status.String()
+	case transmissionrpc.TorrentStatusSeedWait,
+		transmissionrpc.TorrentStatusSeed:
+		s.Completed = true
+	case transmissionrpc.TorrentStatusStopped:
+		s.Err = errors.Errorf("[transmission] failed to download %s, status: %s, error: %s", task.GID, info.Status.String(), *info.ErrorString)
+	default:
+		s.Err = errors.Errorf("[transmission] unknown status occurred downloading %s, err: %s", task.GID, *info.ErrorString)
+	}
+	return s, nil
+}
+
+var _ tool.Tool = (*Transmission)(nil)
+
+func init() {
+	tool.Tools.Add(&Transmission{})
+}
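The `init()` registration at the end of the new file is what the admin handlers later in this diff rely on: a tool is looked up by name from the shared registry and re-initialised whenever its settings change. A hedged sketch of that flow, reusing only the `tool.Tools.Get(...)` and `Init()` calls that appear in this diff (the wrapper function name is illustrative, not part of the codebase):

    // reinitTransmission shows how a registered offline-download tool is fetched
    // and (re)initialised after its settings are saved, as handles.SetTransmission does.
    func reinitTransmission() (string, error) {
        t, err := tool.Tools.Get("transmission") // registered by the init() above
        if err != nil {
            return "", err
        }
        // dials the RPC endpoint configured under conf.TransmissionUri and reports the server version
        return t.Init()
    }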
@ -101,7 +101,7 @@ func initStorage(ctx context.Context, storage model.Storage, storageDriver drive
 			log.Errorf("panic init storage: %s", errInfo)
 			driverStorage.SetStatus(errInfo)
 			MustSaveDriverStorage(storageDriver)
-			storagesMap.Delete(driverStorage.MountPath)
+			storagesMap.Store(driverStorage.MountPath, storageDriver)
 		}
 	}()
 	// Unmarshal Addition
26	internal/task/base.go	Normal file
@ -0,0 +1,26 @@
+package task
+
+import (
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/xhofe/tache"
+)
+
+type TaskWithCreator struct {
+	tache.Base
+	Creator *model.User
+}
+
+func (t *TaskWithCreator) SetCreator(creator *model.User) {
+	t.Creator = creator
+	t.Persist()
+}
+
+func (t *TaskWithCreator) GetCreator() *model.User {
+	return t.Creator
+}
+
+type TaskInfoWithCreator interface {
+	tache.TaskWithInfo
+	SetCreator(creator *model.User)
+	GetCreator() *model.User
+}
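For orientation, this is how the task types elsewhere in the diff consume the new base: they embed TaskWithCreator in place of tache.Base and fill Creator when the task is constructed. A reduced sketch (field set trimmed, and the newCopyTask helper is hypothetical, added only for illustration):

    // Mirrors how CopyTask/UploadTask/DownloadTask embed the new base type.
    type CopyTask struct {
        task.TaskWithCreator        // replaces tache.Base and carries the Creator
        SrcObjPath string `json:"src_path"`
        DstDirPath string `json:"dst_path"`
    }

    // newCopyTask is a hypothetical constructor showing the Creator wiring.
    func newCopyTask(creator *model.User, src, dst string) *CopyTask {
        return &CopyTask{
            TaskWithCreator: task.TaskWithCreator{Creator: creator},
            SrcObjPath:      src,
            DstDirPath:      dst,
        }
    }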
@ -2,7 +2,7 @@ package handles

 import (
 	"fmt"
-	"github.com/xhofe/tache"
+	"github.com/alist-org/alist/v3/internal/task"
 	"io"
 	stdpath "path"

@ -121,7 +121,7 @@ func FsCopy(c *gin.Context) {
 		common.ErrorResp(c, err, 403)
 		return
 	}
-	var addedTasks []tache.TaskWithInfo
+	var addedTasks []task.TaskInfoWithCreator
 	for i, name := range req.Names {
 		t, err := fs.Copy(c, stdpath.Join(srcDir, name), dstDir, len(req.Names) > i+1)
 		if t != nil {
@ -1,17 +1,16 @@
 package handles

 import (
-	"github.com/xhofe/tache"
+	"github.com/alist-org/alist/v3/internal/task"
 	"io"
 	"net/url"
 	stdpath "path"
 	"strconv"
 	"time"

-	"github.com/alist-org/alist/v3/internal/stream"
-
 	"github.com/alist-org/alist/v3/internal/fs"
 	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/internal/stream"
 	"github.com/alist-org/alist/v3/server/common"
 	"github.com/gin-gonic/gin"
 )
@ -58,9 +57,9 @@ func FsStream(c *gin.Context) {
 		Mimetype:     c.GetHeader("Content-Type"),
 		WebPutAsTask: asTask,
 	}
-	var t tache.TaskWithInfo
+	var t task.TaskInfoWithCreator
 	if asTask {
-		t, err = fs.PutAsTask(dir, s)
+		t, err = fs.PutAsTask(c, dir, s)
 	} else {
 		err = fs.PutDirectly(c, dir, s, true)
 	}
@ -123,12 +122,12 @@ func FsForm(c *gin.Context) {
 		Mimetype:     file.Header.Get("Content-Type"),
 		WebPutAsTask: asTask,
 	}
-	var t tache.TaskWithInfo
+	var t task.TaskInfoWithCreator
 	if asTask {
 		s.Reader = struct {
 			io.Reader
 		}{f}
-		t, err = fs.PutAsTask(dir, &s)
+		t, err = fs.PutAsTask(c, dir, &s)
 	} else {
 		ss, err := stream.NewSeekableStream(s, nil)
 		if err != nil {
@ -5,9 +5,9 @@ import (
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/offline_download/tool"
 	"github.com/alist-org/alist/v3/internal/op"
+	"github.com/alist-org/alist/v3/internal/task"
 	"github.com/alist-org/alist/v3/server/common"
 	"github.com/gin-gonic/gin"
-	"github.com/xhofe/tache"
 )

 type SetAria2Req struct {
@ -30,6 +30,10 @@ func SetAria2(c *gin.Context) {
 		return
 	}
 	_tool, err := tool.Tools.Get("aria2")
+	if err != nil {
+		common.ErrorResp(c, err, 500)
+		return
+	}
 	version, err := _tool.Init()
 	if err != nil {
 		common.ErrorResp(c, err, 500)
@ -74,6 +78,37 @@ func OfflineDownloadTools(c *gin.Context) {
 	common.SuccessResp(c, tools)
 }

+type SetTransmissionReq struct {
+	Uri      string `json:"uri" form:"uri"`
+	Seedtime string `json:"seedtime" form:"seedtime"`
+}
+
+func SetTransmission(c *gin.Context) {
+	var req SetTransmissionReq
+	if err := c.ShouldBind(&req); err != nil {
+		common.ErrorResp(c, err, 400)
+		return
+	}
+	items := []model.SettingItem{
+		{Key: conf.TransmissionUri, Value: req.Uri, Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+		{Key: conf.TransmissionSeedtime, Value: req.Seedtime, Type: conf.TypeNumber, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
+	}
+	if err := op.SaveSettingItems(items); err != nil {
+		common.ErrorResp(c, err, 500)
+		return
+	}
+	_tool, err := tool.Tools.Get("transmission")
+	if err != nil {
+		common.ErrorResp(c, err, 500)
+		return
+	}
+	if _, err := _tool.Init(); err != nil {
+		common.ErrorResp(c, err, 500)
+		return
+	}
+	common.SuccessResp(c, "ok")
+}
+
 type AddOfflineDownloadReq struct {
 	Urls []string `json:"urls"`
 	Path string   `json:"path"`
@ -98,7 +133,7 @@ func AddOfflineDownload(c *gin.Context) {
 		common.ErrorResp(c, err, 403)
 		return
 	}
-	var tasks []tache.TaskWithInfo
+	var tasks []task.TaskInfoWithCreator
 	for _, url := range req.Urls {
 		t, err := tool.AddURL(c, &tool.AddURLArgs{
 			URL: url,
@ -1,6 +1,8 @@
 package handles

 import (
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/internal/task"
 	"math"

 	"github.com/alist-org/alist/v3/internal/fs"
@ -12,15 +14,17 @@ import (
 )

 type TaskInfo struct {
 	ID   string `json:"id"`
 	Name string `json:"name"`
-	State    tache.State `json:"state"`
-	Status   string      `json:"status"`
-	Progress float64     `json:"progress"`
-	Error    string      `json:"error"`
+	Creator     string      `json:"creator"`
+	CreatorRole int         `json:"creator_role"`
+	State       tache.State `json:"state"`
+	Status      string      `json:"status"`
+	Progress    float64     `json:"progress"`
+	Error       string      `json:"error"`
 }

-func getTaskInfo[T tache.TaskWithInfo](task T) TaskInfo {
+func getTaskInfo[T task.TaskInfoWithCreator](task T) TaskInfo {
 	errMsg := ""
 	if task.GetErr() != nil {
 		errMsg = task.GetErr().Error()
@ -30,62 +34,142 @@ func getTaskInfo[T tache.TaskWithInfo](task T) TaskInfo {
 	if math.IsNaN(progress) {
 		progress = 100
 	}
+	creatorName := ""
+	creatorRole := -1
+	if task.GetCreator() != nil {
+		creatorName = task.GetCreator().Username
+		creatorRole = task.GetCreator().Role
+	}
 	return TaskInfo{
 		ID:   task.GetID(),
 		Name: task.GetName(),
-		State:    task.GetState(),
-		Status:   task.GetStatus(),
-		Progress: progress,
-		Error:    errMsg,
+		Creator:     creatorName,
+		CreatorRole: creatorRole,
+		State:       task.GetState(),
+		Status:      task.GetStatus(),
+		Progress:    progress,
+		Error:       errMsg,
 	}
 }

-func getTaskInfos[T tache.TaskWithInfo](tasks []T) []TaskInfo {
+func getTaskInfos[T task.TaskInfoWithCreator](tasks []T) []TaskInfo {
 	return utils.MustSliceConvert(tasks, getTaskInfo[T])
 }

-func taskRoute[T tache.TaskWithInfo](g *gin.RouterGroup, manager *tache.Manager[T]) {
-	g.GET("/undone", func(c *gin.Context) {
-		common.SuccessResp(c, getTaskInfos(manager.GetByState(tache.StatePending, tache.StateRunning,
-			tache.StateCanceling, tache.StateErrored, tache.StateFailing, tache.StateWaitingRetry, tache.StateBeforeRetry)))
-	})
-	g.GET("/done", func(c *gin.Context) {
-		common.SuccessResp(c, getTaskInfos(manager.GetByState(tache.StateCanceled, tache.StateFailed, tache.StateSucceeded)))
-	})
-	g.POST("/info", func(c *gin.Context) {
-		tid := c.Query("tid")
-		task, ok := manager.GetByID(tid)
+func argsContains[T comparable](v T, slice ...T) bool {
+	return utils.SliceContains(slice, v)
+}
+
+func getUserInfo(c *gin.Context) (bool, uint, bool) {
+	if user, ok := c.Value("user").(*model.User); ok {
+		return user.IsAdmin(), user.ID, true
+	} else {
+		return false, 0, false
+	}
+}
+
+func getTargetedHandler[T task.TaskInfoWithCreator](manager *tache.Manager[T], callback func(c *gin.Context, task T)) gin.HandlerFunc {
+	return func(c *gin.Context) {
+		isAdmin, uid, ok := getUserInfo(c)
+		if !ok {
+			// if there is no bug, here is unreachable
+			common.ErrorStrResp(c, "user invalid", 401)
+			return
+		}
+		t, ok := manager.GetByID(c.Query("tid"))
 		if !ok {
 			common.ErrorStrResp(c, "task not found", 404)
 			return
 		}
+		if !isAdmin && uid != t.GetCreator().ID {
+			// to avoid an attacker using error messages to guess valid TID, return a 404 rather than a 403
+			common.ErrorStrResp(c, "task not found", 404)
+			return
+		}
+		callback(c, t)
+	}
+}
+
+func taskRoute[T task.TaskInfoWithCreator](g *gin.RouterGroup, manager *tache.Manager[T]) {
+	g.GET("/undone", func(c *gin.Context) {
+		isAdmin, uid, ok := getUserInfo(c)
+		if !ok {
+			// if there is no bug, here is unreachable
+			common.ErrorStrResp(c, "user invalid", 401)
+			return
+		}
+		common.SuccessResp(c, getTaskInfos(manager.GetByCondition(func(task T) bool {
+			// avoid directly passing the user object into the function to reduce closure size
+			return (isAdmin || uid == task.GetCreator().ID) &&
+				argsContains(task.GetState(), tache.StatePending, tache.StateRunning, tache.StateCanceling,
+					tache.StateErrored, tache.StateFailing, tache.StateWaitingRetry, tache.StateBeforeRetry)
+		})))
+	})
+	g.GET("/done", func(c *gin.Context) {
+		isAdmin, uid, ok := getUserInfo(c)
+		if !ok {
+			// if there is no bug, here is unreachable
+			common.ErrorStrResp(c, "user invalid", 401)
+			return
+		}
+		common.SuccessResp(c, getTaskInfos(manager.GetByCondition(func(task T) bool {
+			return (isAdmin || uid == task.GetCreator().ID) &&
+				argsContains(task.GetState(), tache.StateCanceled, tache.StateFailed, tache.StateSucceeded)
+		})))
+	})
+	g.POST("/info", getTargetedHandler(manager, func(c *gin.Context, task T) {
 		common.SuccessResp(c, getTaskInfo(task))
-	})
-	g.POST("/cancel", func(c *gin.Context) {
-		tid := c.Query("tid")
-		manager.Cancel(tid)
+	}))
+	g.POST("/cancel", getTargetedHandler(manager, func(c *gin.Context, task T) {
+		manager.Cancel(task.GetID())
 		common.SuccessResp(c)
-	})
-	g.POST("/delete", func(c *gin.Context) {
-		tid := c.Query("tid")
-		manager.Remove(tid)
+	}))
+	g.POST("/delete", getTargetedHandler(manager, func(c *gin.Context, task T) {
+		manager.Remove(task.GetID())
 		common.SuccessResp(c)
-	})
-	g.POST("/retry", func(c *gin.Context) {
-		tid := c.Query("tid")
-		manager.Retry(tid)
+	}))
+	g.POST("/retry", getTargetedHandler(manager, func(c *gin.Context, task T) {
+		manager.Retry(task.GetID())
 		common.SuccessResp(c)
-	})
+	}))
 	g.POST("/clear_done", func(c *gin.Context) {
-		manager.RemoveByState(tache.StateCanceled, tache.StateFailed, tache.StateSucceeded)
+		isAdmin, uid, ok := getUserInfo(c)
+		if !ok {
+			// if there is no bug, here is unreachable
+			common.ErrorStrResp(c, "user invalid", 401)
+			return
+		}
+		manager.RemoveByCondition(func(task T) bool {
+			return (isAdmin || uid == task.GetCreator().ID) &&
+				argsContains(task.GetState(), tache.StateCanceled, tache.StateFailed, tache.StateSucceeded)
+		})
 		common.SuccessResp(c)
 	})
 	g.POST("/clear_succeeded", func(c *gin.Context) {
-		manager.RemoveByState(tache.StateSucceeded)
+		isAdmin, uid, ok := getUserInfo(c)
+		if !ok {
+			// if there is no bug, here is unreachable
+			common.ErrorStrResp(c, "user invalid", 401)
+			return
+		}
+		manager.RemoveByCondition(func(task T) bool {
+			return (isAdmin || uid == task.GetCreator().ID) && task.GetState() == tache.StateSucceeded
		})
 		common.SuccessResp(c)
 	})
 	g.POST("/retry_failed", func(c *gin.Context) {
-		manager.RetryAllFailed()
+		isAdmin, uid, ok := getUserInfo(c)
+		if !ok {
+			// if there is no bug, here is unreachable
+			common.ErrorStrResp(c, "user invalid", 401)
+			return
+		}
+		tasks := manager.GetByCondition(func(task T) bool {
+			return (isAdmin || uid == task.GetCreator().ID) && task.GetState() == tache.StateFailed
+		})
+		for _, t := range tasks {
+			manager.Retry(t.GetID())
+		}
 		common.SuccessResp(c)
 	})
 }
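In short, every task listing and every targeted action is now filtered by creator: admins see everything, other users only see tasks whose GetCreator().ID matches their own, and a mismatched tid deliberately yields the same 404 as a missing task. A hedged sketch of the ownership predicate on its own (illustrative, outside gin; a nil-creator guard is added here for clarity and is not present in the handlers above):

    // mayTouch reports whether the caller may see or act on task t,
    // matching the (isAdmin || uid == task.GetCreator().ID) checks above.
    func mayTouch(isAdmin bool, uid uint, t task.TaskInfoWithCreator) bool {
        return isAdmin || (t.GetCreator() != nil && uid == t.GetCreator().ID)
    }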
@ -127,6 +127,16 @@ func Authn(c *gin.Context) {
 	c.Next()
 }

+func AuthNotGuest(c *gin.Context) {
+	user := c.MustGet("user").(*model.User)
+	if user.IsGuest() {
+		common.ErrorStrResp(c, "You are a guest", 403)
+		c.Abort()
+	} else {
+		c.Next()
+	}
+}
+
 func AuthAdmin(c *gin.Context) {
 	user := c.MustGet("user").(*model.User)
 	if !user.IsAdmin() {
@ -62,7 +62,7 @@ func Init(e *gin.Engine) {
 	api.GET("/auth/get_sso_id", handles.SSOLoginCallback)
 	api.GET("/auth/sso_get_token", handles.SSOLoginCallback)

-	//webauthn
+	// webauthn
 	webauthn.GET("/webauthn_begin_registration", handles.BeginAuthnRegistration)
 	webauthn.POST("/webauthn_finish_registration", handles.FinishAuthnRegistration)
 	webauthn.GET("/webauthn_begin_login", handles.BeginAuthnLogin)
@ -76,6 +76,7 @@ func Init(e *gin.Engine) {
 	public.Any("/offline_download_tools", handles.OfflineDownloadTools)

 	_fs(auth.Group("/fs"))
+	_task(auth.Group("/task", middlewares.AuthNotGuest))
 	admin(auth.Group("/admin", middlewares.AuthAdmin))
 	if flags.Debug || flags.Dev {
 		debug(g.Group("/debug"))
@ -125,9 +126,10 @@ func admin(g *gin.RouterGroup) {
 	setting.POST("/reset_token", handles.ResetToken)
 	setting.POST("/set_aria2", handles.SetAria2)
 	setting.POST("/set_qbit", handles.SetQbittorrent)
+	setting.POST("/set_transmission", handles.SetTransmission)

-	task := g.Group("/task")
-	handles.SetupTaskRoute(task)
+	// retain /admin/task API to ensure compatibility with legacy automation scripts
+	_task(g.Group("/task"))

 	ms := g.Group("/message")
 	ms.POST("/get", message.HttpInstance.GetHandle)
@ -159,14 +161,19 @@ func _fs(g *gin.RouterGroup) {
 	g.PUT("/put", middlewares.FsUp, handles.FsStream)
 	g.PUT("/form", middlewares.FsUp, handles.FsForm)
 	g.POST("/link", middlewares.AuthAdmin, handles.Link)
-	//g.POST("/add_aria2", handles.AddOfflineDownload)
-	//g.POST("/add_qbit", handles.AddQbittorrent)
+	// g.POST("/add_aria2", handles.AddOfflineDownload)
+	// g.POST("/add_qbit", handles.AddQbittorrent)
+	// g.POST("/add_transmission", handles.SetTransmission)
 	g.POST("/add_offline_download", handles.AddOfflineDownload)
 }

+func _task(g *gin.RouterGroup) {
+	handles.SetupTaskRoute(g)
+}
+
 func Cors(r *gin.Engine) {
 	config := cors.DefaultConfig()
-	//config.AllowAllOrigins = true
+	// config.AllowAllOrigins = true
 	config.AllowOrigins = conf.Conf.Cors.AllowOrigins
 	config.AllowHeaders = conf.Conf.Cors.AllowHeaders
 	config.AllowMethods = conf.Conf.Cors.AllowMethods