Compare commits
37 Commits
| SHA1 |
| --- |
| 8e2b9c681a |
| 0a8d710e01 |
| d781f7127a |
| 85d743c5d2 |
| 5f60b51cf8 |
| 7013d1b7b8 |
| 9eec872637 |
| 037850bbd5 |
| bbe3d4e19f |
| 78a9676c7c |
| 8bf93562eb |
| b57afd0a98 |
| f261ef50cc |
| 7e7b9b9b48 |
| 2313213f59 |
| 5f28532423 |
| 4cbbda8832 |
| 7bf5014417 |
| b704bba444 |
| eecea3febd |
| 0e246a7b0c |
| b95df1d745 |
| ec08ecdf6c |
| 479fc6d466 |
| 32ddab9b01 |
| 0c9dcec9cd |
| 793a4ea6ca |
| c3c5181847 |
| cd5a8a011d |
| 1756036a21 |
| 58c3cb3cf6 |
| d8e190406a |
| 2880ed70ce |
| 0e86036874 |
| e37465e67e |
| d517adde71 |
| 8a18f47e68 |
.github/workflows/build_docker.yml (vendored): 13 lines changed

```diff
@@ -32,10 +32,21 @@ jobs:
           flavor: |
             suffix=-ffmpeg,onlatest=true

-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v5
        with:
          go-version: 'stable'

+      - name: Cache Musl
+        id: cache-musl
+        uses: actions/cache@v4
+        with:
+          path: build/musl-libs
+          key: docker-musl-libs
+
+      - name: Download Musl Library
+        if: steps.cache-musl.outputs.cache-hit != 'true'
+        run: bash build.sh prepare docker-multiplatform
+
       - name: Build go binary
         run: bash build.sh dev docker-multiplatform

```
.github/workflows/release_docker.yml (vendored): 13 lines changed

```diff
@@ -13,10 +13,21 @@ jobs:
      - name: Checkout
        uses: actions/checkout@v4

-      - uses: actions/setup-go@v4
+      - uses: actions/setup-go@v5
        with:
          go-version: 'stable'

+      - name: Cache Musl
+        id: cache-musl
+        uses: actions/cache@v4
+        with:
+          path: build/musl-libs
+          key: docker-musl-libs
+
+      - name: Download Musl Library
+        if: steps.cache-musl.outputs.cache-hit != 'true'
+        run: bash build.sh prepare docker-multiplatform
+
      - name: Build go binary
        run: bash build.sh release docker-multiplatform

```
```diff
@@ -3,7 +3,7 @@ ARG TARGETPLATFORM
 LABEL MAINTAINER="i@nn.ci"
 VOLUME /opt/alist/data/
 WORKDIR /opt/alist/
-COPY /${TARGETPLATFORM}/alist ./
+COPY /build/${TARGETPLATFORM}/alist ./
 COPY entrypoint.sh /entrypoint.sh
 RUN apk update && \
     apk upgrade --no-cache && \
```
```diff
@@ -1,5 +1,5 @@
 <div align="center">
-  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
   <p><em>🗂️A file list program that supports multiple storages, powered by Gin and Solidjs.</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
@@ -115,7 +115,7 @@ https://alist.nn.ci/guide/sponsor.html

 ### Special sponsors

- - [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
+ - [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
 - [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
 - [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

```
```diff
@@ -1,5 +1,5 @@
 <div align="center">
-  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
   <p><em>🗂一个支持多存储的文件列表程序,使用 Gin 和 Solidjs。</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
@@ -113,7 +113,7 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我

 ### 特别赞助

- - [VidHub](https://zh.okaapps.com/product/1659622164?ref=alist) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
+ - [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
 - [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
 - [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

```
```diff
@@ -1,5 +1,5 @@
 <div align="center">
-  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
   <p><em>🗂️Gin と Solidjs による、複数のストレージをサポートするファイルリストプログラム。</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
@@ -115,7 +115,7 @@ https://alist.nn.ci/guide/sponsor.html

 ### スペシャルスポンサー

- - [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
+ - [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
 - [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
 - [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

```
build.sh: 27 lines changed

```diff
@@ -96,17 +96,24 @@ BuildDocker() {
   go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
 }

-BuildDockerMultiplatform() {
-  PrepareBuildDocker
+PrepareBuildDockerMusl() {
+  mkdir -p build/musl-libs

   BASE="https://musl.cc/"
   FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
   for i in "${FILES[@]}"; do
     url="${BASE}${i}.tgz"
-    curl -L -o "${i}.tgz" "${url}"
-    sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
-    rm -f "${i}.tgz"
+    lib_tgz="build/${i}.tgz"
+    curl -L -o "${lib_tgz}" "${url}"
+    tar xf "${lib_tgz}" --strip-components 1 -C build/musl-libs
+    rm -f "${lib_tgz}"
   done
+}
+
+BuildDockerMultiplatform() {
+  PrepareBuildDocker
+
+  # run PrepareBuildDockerMusl before build
+  export PATH=$PATH:$PWD/build/musl-libs/bin

   docker_lflags="--extldflags '-static -fpic' $ldflags"
   export CGO_ENABLED=1
@@ -122,7 +129,7 @@ BuildDockerMultiplatform() {
     export GOARCH=$arch
     export CC=${cgo_cc}
     echo "building for $os_arch"
-    go build -o ./$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
+    go build -o build/$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
   done

   DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
@@ -136,7 +143,7 @@ BuildDockerMultiplatform() {
     export GOARM=${GO_ARM[$i]}
     export CC=${cgo_cc}
     echo "building for $docker_arch"
-    go build -o ./${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
+    go build -o build/${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
   done
 }

@@ -289,6 +296,10 @@ elif [ "$1" = "release" ]; then
     BuildRelease
     MakeRelease "md5.txt"
   fi
+elif [ "$1" = "prepare" ]; then
+  if [ "$2" = "docker-multiplatform" ]; then
+    PrepareBuildDockerMusl
+  fi
 else
   echo -e "Parameter error"
 fi
```
```diff
@@ -91,10 +91,10 @@ the address is defined in config file`,
                }
            }()
        }
-       s3r := gin.New()
-       s3r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
-       server.InitS3(s3r)
-       if conf.Conf.S3.Port != -1 {
+       if conf.Conf.S3.Port != -1 && conf.Conf.S3.Enable {
+           s3r := gin.New()
+           s3r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
+           server.InitS3(s3r)
            s3Base := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.S3.Port)
            utils.Log.Infof("start S3 server @ %s", s3Base)
            go func() {
```
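This hunk moves the S3 sub-server setup inside a guard, so the Gin engine is only constructed and started when S3 is both enabled and given a usable port. A minimal standalone sketch of that guard pattern, using plain net/http and an invented S3Config struct rather than alist's actual conf types:

```go
package main

import (
	"fmt"
	"log"
	"net/http"
)

// S3Config stands in for the relevant config fields; the names here are
// illustrative, not the project's real schema.
type S3Config struct {
	Enable bool
	Port   int // -1 means "do not listen"
}

func startS3(cfg S3Config, address string) {
	// Mirror the diff's guard: build and start the sub-server only when
	// S3 is enabled and a usable port is configured.
	if cfg.Port == -1 || !cfg.Enable {
		return
	}
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "s3 endpoint placeholder")
	})
	addr := fmt.Sprintf("%s:%d", address, cfg.Port)
	log.Printf("start S3 server @ %s", addr)
	go func() {
		if err := http.ListenAndServe(addr, mux); err != nil {
			log.Printf("s3 server: %v", err)
		}
	}()
}

func main() {
	startS3(S3Config{Enable: true, Port: 5246}, "0.0.0.0")
	select {} // keep the process alive for the background listener
}
```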
```diff
@@ -19,7 +19,7 @@ var config = driver.Config{
    DefaultRoot: "0",
    //OnlyProxy:  true,
    //OnlyLocal:  true,
-   NoOverwriteUpload: true,
+   //NoOverwriteUpload: true,
 }

 func init() {
```
```diff
@@ -194,7 +194,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
    defer func() {
        _ = tempFile.Close()
    }()
-   if _, err = io.Copy(h, tempFile); err != nil {
+   if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
        return err
    }
    _, err = tempFile.Seek(0, io.SeekStart)
```
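Here io.Copy is replaced by utils.CopyWithBuffer while hashing the cached temp file. The helper's body is not part of this diff; the sketch below shows what a buffer-reusing copy helper of that name would plausibly look like (a pooled io.CopyBuffer wrapper, which is an assumption, not alist's actual implementation):

```go
package utilsketch

import (
	"io"
	"sync"
)

// bufPool reuses 32 KiB buffers so repeated large copies (for example,
// hashing a temp file before upload) do not allocate a fresh buffer per call.
var bufPool = sync.Pool{
	New: func() any {
		b := make([]byte, 32*1024)
		return &b
	},
}

// CopyWithBuffer copies src to dst through a pooled buffer. This mirrors what
// a helper named like utils.CopyWithBuffer would typically do; it is only a
// sketch of the idea.
func CopyWithBuffer(dst io.Writer, src io.Reader) (int64, error) {
	bp := bufPool.Get().(*[]byte)
	defer bufPool.Put(bp)
	return io.CopyBuffer(dst, src, *bp)
}
```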
```diff
@@ -4,8 +4,11 @@ import (
    "context"
    "encoding/base64"
    "fmt"
+   "golang.org/x/time/rate"
    "net/http"
    "net/url"
+   "sync"
+   "time"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
@@ -19,6 +22,7 @@ import (
 type Pan123Share struct {
    model.Storage
    Addition
+   apiRateLimit sync.Map
 }

 func (d *Pan123Share) Config() driver.Config {
@@ -146,4 +150,11 @@ func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
 // return nil, errs.NotSupport
 //}

+func (d *Pan123Share) APIRateLimit(api string) bool {
+   limiter, _ := d.apiRateLimit.LoadOrStore(api,
+       rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
+   ins := limiter.(*rate.Limiter)
+   return ins.Allow()
+}
+
 var _ driver.Driver = (*Pan123Share)(nil)
```
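APIRateLimit stores one rate.Limiter per API URL in a sync.Map, allowing roughly one request every 700 ms, and the getFiles change later in this diff polls it with a 200 ms sleep. A self-contained sketch of the same pattern outside the driver (the endpoint URL below just reuses the FileList constant for illustration):

```go
package main

import (
	"fmt"
	"sync"
	"time"

	"golang.org/x/time/rate"
)

// perAPILimiters maps an endpoint URL to its limiter, created lazily on
// first use, exactly as the driver's sync.Map field does.
var perAPILimiters sync.Map

// allow reports whether a call to api may proceed right now, using the same
// 700 ms interval and burst of 1 as the diff.
func allow(api string) bool {
	v, _ := perAPILimiters.LoadOrStore(api,
		rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
	return v.(*rate.Limiter).Allow()
}

func main() {
	const fileList = "https://www.123pan.com/b/api/share/get"
	for i := 0; i < 3; i++ {
		// Poll-and-sleep, as getFiles does in the driver.
		for !allow(fileList) {
			time.Sleep(200 * time.Millisecond)
		}
		fmt.Println("request", i, "allowed at", time.Now().Format(time.StampMilli))
	}
}
```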
```diff
@@ -7,10 +7,11 @@ import (

 type Addition struct {
    ShareKey string `json:"sharekey" required:"true"`
-   SharePwd string `json:"sharepassword" required:"true"`
+   SharePwd string `json:"sharepassword"`
    driver.RootID
    OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
    OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
+   AccessToken    string `json:"accesstoken" type:"text"`
 }

 var config = driver.Config{
```
```diff
@@ -2,8 +2,15 @@ package _123Share

 import (
    "errors"
+   "fmt"
+   "hash/crc32"
+   "math"
+   "math/rand"
    "net/http"
+   "net/url"
    "strconv"
+   "strings"
+   "time"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/pkg/utils"
@@ -15,20 +22,45 @@ const (
    Api          = "https://www.123pan.com/api"
    AApi         = "https://www.123pan.com/a/api"
    BApi         = "https://www.123pan.com/b/api"
-   MainApi      = Api
+   MainApi      = BApi
    FileList     = MainApi + "/share/get"
    DownloadInfo = MainApi + "/share/download/info"
    //AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
 )

+func signPath(path string, os string, version string) (k string, v string) {
+   table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
+   random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
+   now := time.Now().In(time.FixedZone("CST", 8*3600))
+   timestamp := fmt.Sprint(now.Unix())
+   nowStr := []byte(now.Format("200601021504"))
+   for i := 0; i < len(nowStr); i++ {
+       nowStr[i] = table[nowStr[i]-48]
+   }
+   timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
+   data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
+   dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
+   return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
+}
+
+func GetApi(rawUrl string) string {
+   u, _ := url.Parse(rawUrl)
+   query := u.Query()
+   query.Add(signPath(u.Path, "web", "3"))
+   u.RawQuery = query.Encode()
+   return u.String()
+}
+
 func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    req := base.RestyClient.R()
    req.SetHeaders(map[string]string{
        "origin":  "https://www.123pan.com",
        "referer": "https://www.123pan.com/",
-       "user-agent":  "Dart/2.19(dart:io)",
-       "platform":    "android",
-       "app-version": "36",
+       "authorization": "Bearer " + d.AccessToken,
+       "user-agent":    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
+       "platform":      "web",
+       "app-version":   "3",
+       //"user-agent":  base.UserAgent,
    })
    if callback != nil {
        callback(req)
@@ -36,7 +68,7 @@ func (d *Pan123Share) request(url string, method string, callback base.ReqCallba
    if resp != nil {
        req.SetResult(resp)
    }
-   res, err := req.Execute(method, url)
+   res, err := req.Execute(method, GetApi(url))
    if err != nil {
        return nil, err
    }
@@ -52,6 +84,10 @@ func (d *Pan123Share) getFiles(parentId string) ([]File, error) {
    page := 1
    res := make([]File, 0)
    for {
+       if !d.APIRateLimit(FileList) {
+           time.Sleep(time.Millisecond * 200)
+           continue
+       }
        var resp Files
        query := map[string]string{
            "limit": "100",
```
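signPath and GetApi add request signing: signPath builds a key/value pair from CRC32 checksums over a character-substituted timestamp, the request path, an OS tag, and a client version, and GetApi appends that pair to the query string before req.Execute runs. query.Add(signPath(...)) compiles because signPath's two return values feed Add's two parameters directly. A small sketch of the query-decoration step alone, with placeholder signature values instead of a real signPath call:

```go
package main

import (
	"fmt"
	"net/url"
)

// addSign appends one key/value signature pair to a raw URL, the same way
// GetApi in the diff attaches the pair returned by signPath.
func addSign(rawUrl, k, v string) string {
	u, err := url.Parse(rawUrl)
	if err != nil {
		return rawUrl // fall back to the unsigned URL on parse failure
	}
	q := u.Query()
	q.Add(k, v)
	u.RawQuery = q.Encode()
	return u.String()
}

func main() {
	// The key/value here are placeholders; in the driver they come from
	// signPath(u.Path, "web", "3").
	signed := addSign("https://www.123pan.com/b/api/share/get?limit=100",
		"1234567890", "1700000000-4321098-987654321")
	fmt.Println(signed)
}
```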
```diff
@@ -14,12 +14,15 @@ type Addition struct {
 }

 var config = driver.Config{
    Name:      "139Yun",
    LocalSort: true,
+   ProxyRangeOption: true,
 }

 func init() {
    op.RegisterDriver(func() driver.Driver {
-       return &Yun139{}
+       d := &Yun139{}
+       d.ProxyRange = true
+       return d
    })
 }
```
```diff
@@ -1,6 +1,7 @@
 package _189pc

 import (
+   "container/ring"
    "context"
    "net/http"
    "strconv"
@@ -28,6 +29,9 @@ type Cloud189PC struct {

    uploadThread int

+   familyTransferFolder    *ring.Ring
+   cleanFamilyTransferFile func()
+
    storageConfig driver.Config
 }

@@ -52,7 +56,6 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
    }
    if !y.isFamily() && y.RootFolderID == "" {
        y.RootFolderID = "-11"
-       y.FamilyID = ""
    }

    // 限制上传线程数
@@ -79,11 +82,24 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
    }

    // 处理家庭云ID
-   if y.isFamily() && y.FamilyID == "" {
+   if y.FamilyID == "" {
        if y.FamilyID, err = y.getFamilyID(); err != nil {
            return err
        }
    }

+   // 创建中转文件夹,防止重名文件
+   if y.FamilyTransfer {
+       if y.familyTransferFolder, err = y.createFamilyTransferFolder(32); err != nil {
+           return err
+       }
+   }
+
+   y.cleanFamilyTransferFile = utils.NewThrottle2(time.Minute, func() {
+       if err := y.cleanFamilyTransfer(context.TODO()); err != nil {
+           utils.Log.Errorf("cleanFamilyTransferFolderError:%s", err)
+       }
+   })
    return
 }

@@ -92,7 +108,7 @@ func (y *Cloud189PC) Drop(ctx context.Context) error {
 }

 func (y *Cloud189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
-   return y.getFiles(ctx, dir.GetID())
+   return y.getFiles(ctx, dir.GetID(), y.isFamily())
 }

 func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
@@ -100,8 +116,9 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
        URL string `json:"fileDownloadUrl"`
    }

+   isFamily := y.isFamily()
    fullUrl := API_URL
-   if y.isFamily() {
+   if isFamily {
        fullUrl += "/family/file"
    }
    fullUrl += "/getFileDownloadUrl.action"
@@ -109,7 +126,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
    _, err := y.get(fullUrl, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetQueryParam("fileId", file.GetID())
-       if y.isFamily() {
+       if isFamily {
            r.SetQueryParams(map[string]string{
                "familyId": y.FamilyID,
            })
@@ -119,7 +136,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
                "flag": "1",
            })
        }
-   }, &downloadUrl)
+   }, &downloadUrl, isFamily)
    if err != nil {
        return nil, err
    }
@@ -156,8 +173,9 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
 }

 func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
+   isFamily := y.isFamily()
    fullUrl := API_URL
-   if y.isFamily() {
+   if isFamily {
        fullUrl += "/family/file"
    }
    fullUrl += "/createFolder.action"
@@ -169,7 +187,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
            "folderName":   dirName,
            "relativePath": "",
        })
-       if y.isFamily() {
+       if isFamily {
            req.SetQueryParams(map[string]string{
                "familyId": y.FamilyID,
                "parentId": parentDir.GetID(),
@@ -179,7 +197,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
                "parentFolderId": parentDir.GetID(),
            })
        }
-   }, &newFolder)
+   }, &newFolder, isFamily)
    if err != nil {
        return nil, err
    }
@@ -187,27 +205,14 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
 }

 func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
-   var resp CreateBatchTaskResp
-   _, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
-       req.SetContext(ctx)
-       req.SetFormData(map[string]string{
-           "type": "MOVE",
-           "taskInfos": MustString(utils.Json.MarshalToString(
-               []BatchTaskInfo{
-                   {
-                       FileId:   srcObj.GetID(),
-                       FileName: srcObj.GetName(),
-                       IsFolder: BoolToNumber(srcObj.IsDir()),
-                   },
-               })),
-           "targetFolderId": dstDir.GetID(),
-       })
-       if y.isFamily() {
-           req.SetFormData(map[string]string{
-               "familyId": y.FamilyID,
-           })
-       }
-   }, &resp)
+   isFamily := y.isFamily()
+   other := map[string]string{"targetFileName": dstDir.GetName()}
+
+   resp, err := y.CreateBatchTask("MOVE", IF(isFamily, y.FamilyID, ""), dstDir.GetID(), other, BatchTaskInfo{
+       FileId:   srcObj.GetID(),
+       FileName: srcObj.GetName(),
+       IsFolder: BoolToNumber(srcObj.IsDir()),
+   })
    if err != nil {
        return nil, err
    }
@@ -218,10 +223,11 @@ func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.
 }

 func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
+   isFamily := y.isFamily()
    queryParam := make(map[string]string)
    fullUrl := API_URL
    method := http.MethodPost
-   if y.isFamily() {
+   if isFamily {
        fullUrl += "/family/file"
        method = http.MethodGet
        queryParam["familyId"] = y.FamilyID
@@ -245,7 +251,7 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin

    _, err := y.request(fullUrl, method, func(req *resty.Request) {
        req.SetContext(ctx).SetQueryParams(queryParam)
-   }, nil, newObj)
+   }, nil, newObj, isFamily)
    if err != nil {
        return nil, err
    }
@@ -253,28 +259,15 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin
 }

 func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
-   var resp CreateBatchTaskResp
-   _, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
-       req.SetContext(ctx)
-       req.SetFormData(map[string]string{
-           "type": "COPY",
-           "taskInfos": MustString(utils.Json.MarshalToString(
-               []BatchTaskInfo{
-                   {
-                       FileId:   srcObj.GetID(),
-                       FileName: srcObj.GetName(),
-                       IsFolder: BoolToNumber(srcObj.IsDir()),
-                   },
-               })),
-           "targetFolderId": dstDir.GetID(),
-           "targetFileName": dstDir.GetName(),
-       })
-       if y.isFamily() {
-           req.SetFormData(map[string]string{
-               "familyId": y.FamilyID,
-           })
-       }
-   }, &resp)
+   isFamily := y.isFamily()
+   other := map[string]string{"targetFileName": dstDir.GetName()}
+
+   resp, err := y.CreateBatchTask("COPY", IF(isFamily, y.FamilyID, ""), dstDir.GetID(), other, BatchTaskInfo{
+       FileId:   srcObj.GetID(),
+       FileName: srcObj.GetName(),
+       IsFolder: BoolToNumber(srcObj.IsDir()),
+   })
    if err != nil {
        return err
    }
@@ -282,27 +275,13 @@ func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
 }

 func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
-   var resp CreateBatchTaskResp
-   _, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
-       req.SetContext(ctx)
-       req.SetFormData(map[string]string{
-           "type": "DELETE",
-           "taskInfos": MustString(utils.Json.MarshalToString(
-               []*BatchTaskInfo{
-                   {
-                       FileId:   obj.GetID(),
-                       FileName: obj.GetName(),
-                       IsFolder: BoolToNumber(obj.IsDir()),
-                   },
-               })),
-       })
-
-       if y.isFamily() {
-           req.SetFormData(map[string]string{
-               "familyId": y.FamilyID,
-           })
-       }
-   }, &resp)
+   isFamily := y.isFamily()
+
+   resp, err := y.CreateBatchTask("DELETE", IF(isFamily, y.FamilyID, ""), "", nil, BatchTaskInfo{
+       FileId:   obj.GetID(),
+       FileName: obj.GetName(),
+       IsFolder: BoolToNumber(obj.IsDir()),
+   })
    if err != nil {
        return err
    }
@@ -310,25 +289,73 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
    return y.WaitBatchTask("DELETE", resp.TaskID, time.Millisecond*200)
 }

-func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (newObj model.Obj, err error) {
+   overwrite := true
+   isFamily := y.isFamily()
+
    // 响应时间长,按需启用
-   if y.Addition.RapidUpload {
-       if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
+   if y.Addition.RapidUpload && !stream.IsForceStreamUpload() {
+       if newObj, err := y.RapidUpload(ctx, dstDir, stream, isFamily, overwrite); err == nil {
            return newObj, nil
        }
    }

-   switch y.UploadMethod {
-   case "old":
-       return y.OldUpload(ctx, dstDir, stream, up)
+   uploadMethod := y.UploadMethod
+   if stream.IsForceStreamUpload() {
+       uploadMethod = "stream"
+   }
+
+   // 旧版上传家庭云也有限制
+   if uploadMethod == "old" {
+       return y.OldUpload(ctx, dstDir, stream, up, isFamily, overwrite)
+   }
+
+   // 开启家庭云转存
+   if !isFamily && y.FamilyTransfer {
+       // 修改上传目标为家庭云文件夹
+       transferDstDir := dstDir
+       dstDir = (y.familyTransferFolder.Value).(*Cloud189Folder)
+       y.familyTransferFolder = y.familyTransferFolder.Next()
+
+       isFamily = true
+       overwrite = false
+
+       defer func() {
+           if newObj != nil {
+               // 批量任务有概率删不掉
+               y.cleanFamilyTransferFile()
+
+               // 转存家庭云文件到个人云
+               err = y.SaveFamilyFileToPersonCloud(context.TODO(), y.FamilyID, newObj, transferDstDir, true)
+
+               task := BatchTaskInfo{
+                   FileId:   newObj.GetID(),
+                   FileName: newObj.GetName(),
+                   IsFolder: BoolToNumber(newObj.IsDir()),
+               }
+
+               // 删除源文件
+               if resp, err := y.CreateBatchTask("DELETE", y.FamilyID, "", nil, task); err == nil {
+                   y.WaitBatchTask("DELETE", resp.TaskID, time.Second)
+                   // 永久删除
+                   if resp, err := y.CreateBatchTask("CLEAR_RECYCLE", y.FamilyID, "", nil, task); err == nil {
+                       y.WaitBatchTask("CLEAR_RECYCLE", resp.TaskID, time.Second)
+                   }
+               }
+               newObj = nil
+           }
+       }()
+   }
+
+   switch uploadMethod {
    case "rapid":
-       return y.FastUpload(ctx, dstDir, stream, up)
+       return y.FastUpload(ctx, dstDir, stream, up, isFamily, overwrite)
    case "stream":
        if stream.GetSize() == 0 {
-           return y.FastUpload(ctx, dstDir, stream, up)
+           return y.FastUpload(ctx, dstDir, stream, up, isFamily, overwrite)
        }
        fallthrough
    default:
-       return y.StreamUpload(ctx, dstDir, stream, up)
+       return y.StreamUpload(ctx, dstDir, stream, up, isFamily, overwrite)
    }
 }
```
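The upload path now borrows a folder from a ring of pre-created family-cloud transfer folders and advances the ring after each use (y.familyTransferFolder.Next()), so consecutive uploads of same-named files land in different folders. A minimal container/ring rotation sketch with invented folder names:

```go
package main

import (
	"container/ring"
	"fmt"
)

func main() {
	// Pre-create a fixed pool of transfer folders, as the driver does with
	// createFamilyTransferFolder(32); three is enough to illustrate.
	folders := ring.New(3)
	for i := 0; i < folders.Len(); i++ {
		folders.Value = fmt.Sprintf("transfer-folder-%d", i)
		folders = folders.Next()
	}

	// Each upload takes the current folder, then rotates to the next one.
	for upload := 0; upload < 5; upload++ {
		dst := folders.Value.(string)
		folders = folders.Next()
		fmt.Printf("upload %d -> %s\n", upload, dst)
	}
}
```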
```diff
@@ -192,3 +192,19 @@ func partSize(size int64) int64 {
    }
    return DEFAULT
 }
+
+func isBool(bs ...bool) bool {
+   for _, b := range bs {
+       if b {
+           return true
+       }
+   }
+   return false
+}
+
+func IF[V any](o bool, t V, f V) V {
+   if o {
+       return t
+   }
+   return f
+}
```
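These helpers back the isFamily refactor in the surrounding files: IF is a generic ternary used for values like the opertype form field, and isBool collapses the new variadic isFamily ...bool parameter into a single flag so pre-existing callers of request, get, and post compile unchanged. A short usage sketch of that variadic-option pattern (the request function below is illustrative, not the driver's real one):

```go
package main

import "fmt"

// IF and isBool are written as in the diff above.
func IF[V any](o bool, t V, f V) V {
	if o {
		return t
	}
	return f
}

func isBool(bs ...bool) bool {
	for _, b := range bs {
		if b {
			return true
		}
	}
	return false
}

// request mimics the new signatures: the trailing variadic bool lets old
// call sites omit the family flag entirely, defaulting it to false.
func request(url string, isFamily ...bool) string {
	return fmt.Sprintf("%s (family=%v)", url, isBool(isFamily...))
}

func main() {
	fmt.Println(request("/listFiles.action"))       // existing caller, flag defaults to false
	fmt.Println(request("/listFiles.action", true)) // family-cloud caller

	overwrite := false
	fmt.Println("opertype:", IF(overwrite, "3", "1")) // as used when committing uploads
}
```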
```diff
@@ -16,6 +16,7 @@ type Addition struct {
    FamilyID       string `json:"family_id"`
    UploadMethod   string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
    UploadThread   string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
+   FamilyTransfer bool   `json:"family_transfer"`
    RapidUpload    bool   `json:"rapid_upload"`
    NoUseOcr       bool   `json:"no_use_ocr"`
 }
```
```diff
@@ -3,10 +3,11 @@ package _189pc
 import (
    "encoding/xml"
    "fmt"
-   "github.com/alist-org/alist/v3/pkg/utils"
    "sort"
    "strings"
    "time"
+
+   "github.com/alist-org/alist/v3/pkg/utils"
 )

 // 居然有四种返回方式
@@ -142,7 +143,7 @@ type FamilyInfoListResp struct {
 type FamilyInfoResp struct {
    Count      int    `json:"count"`
    CreateTime string `json:"createTime"`
-   FamilyID   int    `json:"familyId"`
+   FamilyID   int64  `json:"familyId"`
    RemarkName string `json:"remarkName"`
    Type       int    `json:"type"`
    UseFlag    int    `json:"useFlag"`
@@ -242,7 +243,12 @@ type BatchTaskInfo struct {
    // IsFolder 是否是文件夹,0-否,1-是
    IsFolder int `json:"isFolder"`
    // SrcParentId 文件所在父目录ID
-   //SrcParentId string `json:"srcParentId"`
+   SrcParentId string `json:"srcParentId,omitempty"`
+
+   /* 冲突管理 */
+   // 1 -> 跳过 2 -> 保留 3 -> 覆盖
+   DealWay    int `json:"dealWay,omitempty"`
+   IsConflict int `json:"isConflict,omitempty"`
 }

 /* 上传部分 */
@@ -355,6 +361,14 @@ type BatchTaskStateResp struct {
    TaskStatus int    `json:"taskStatus"` //1 初始化 2 存在冲突 3 执行中,4 完成
 }

+type BatchTaskConflictTaskInfoResp struct {
+   SessionKey     string `json:"sessionKey"`
+   TargetFolderID int    `json:"targetFolderId"`
+   TaskID         string `json:"taskId"`
+   TaskInfos      []BatchTaskInfo
+   TaskType       int    `json:"taskType"`
+}
+
 /* query 加密参数*/
 type Params map[string]string

```
```diff
@@ -2,6 +2,7 @@ package _189pc

 import (
    "bytes"
+   "container/ring"
    "context"
    "crypto/md5"
    "encoding/base64"
@@ -54,11 +55,11 @@ const (
    CHANNEL_ID = "web_cloud.189.cn"
 )

-func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]string {
+func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool) map[string]string {
    dateOfGmt := getHttpDateStr()
    sessionKey := y.tokenInfo.SessionKey
    sessionSecret := y.tokenInfo.SessionSecret
-   if y.isFamily() {
+   if isFamily {
        sessionKey = y.tokenInfo.FamilySessionKey
        sessionSecret = y.tokenInfo.FamilySessionSecret
    }
@@ -72,9 +73,9 @@ func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]stri
    return header
 }

-func (y *Cloud189PC) EncryptParams(params Params) string {
+func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
    sessionSecret := y.tokenInfo.SessionSecret
-   if y.isFamily() {
+   if isFamily {
        sessionSecret = y.tokenInfo.FamilySessionSecret
    }
    if params != nil {
@@ -83,17 +84,17 @@ func (y *Cloud189PC) EncryptParams(params Params) string {
    return ""
 }

-func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
+func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}, isFamily ...bool) ([]byte, error) {
    req := y.client.R().SetQueryParams(clientSuffix())

    // 设置params
-   paramsData := y.EncryptParams(params)
+   paramsData := y.EncryptParams(params, isBool(isFamily...))
    if paramsData != "" {
        req.SetQueryParam("params", paramsData)
    }

    // Signature
-   req.SetHeaders(y.SignatureHeader(url, method, paramsData))
+   req.SetHeaders(y.SignatureHeader(url, method, paramsData, isBool(isFamily...)))

    var erron RespErr
    req.SetError(&erron)
@@ -129,15 +130,15 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
    return res.Body(), nil
 }

-func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
-   return y.request(url, http.MethodGet, callback, nil, resp)
+func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}, isFamily ...bool) ([]byte, error) {
+   return y.request(url, http.MethodGet, callback, nil, resp, isFamily...)
 }

-func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
-   return y.request(url, http.MethodPost, callback, nil, resp)
+func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}, isFamily ...bool) ([]byte, error) {
+   return y.request(url, http.MethodPost, callback, nil, resp, isFamily...)
 }

-func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader) ([]byte, error) {
+func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader, isFamily bool) ([]byte, error) {
    req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
    if err != nil {
        return nil, err
@@ -154,7 +155,7 @@ func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]str
    }

    if sign {
-       for key, value := range y.SignatureHeader(url, http.MethodPut, "") {
+       for key, value := range y.SignatureHeader(url, http.MethodPut, "", isFamily) {
            req.Header.Add(key, value)
        }
    }
@@ -181,9 +182,9 @@ func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]str
    }
    return body, nil
 }
-func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
+func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool) ([]model.Obj, error) {
    fullUrl := API_URL
-   if y.isFamily() {
+   if isFamily {
        fullUrl += "/family/file"
    }
    fullUrl += "/listFiles.action"
@@ -201,7 +202,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj,
                "pageNum":  fmt.Sprint(pageNum),
                "pageSize": "130",
            })
-           if y.isFamily() {
+           if isFamily {
                r.SetQueryParams(map[string]string{
                    "familyId": y.FamilyID,
                    "orderBy":  toFamilyOrderBy(y.OrderBy),
@@ -214,7 +215,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj,
                    "descending": toDesc(y.OrderDirection),
                })
            }
-       }, &resp)
+       }, &resp, isFamily)
        if err != nil {
            return nil, err
        }
@@ -437,7 +438,7 @@ func (y *Cloud189PC) refreshSession() (err error) {

 // 普通上传
 // 无法上传大小为0的文件
-func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
    var sliceSize = partSize(file.GetSize())
    count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
    lastPartSize := file.GetSize() % sliceSize
@@ -454,7 +455,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
    }

    fullUrl := UPLOAD_URL
-   if y.isFamily() {
+   if isFamily {
        params.Set("familyId", y.FamilyID)
        fullUrl += "/family"
    } else {
@@ -466,7 +467,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
    var initMultiUpload InitMultiUploadResp
    _, err := y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
        req.SetContext(ctx)
-   }, params, &initMultiUpload)
+   }, params, &initMultiUpload, isFamily)
    if err != nil {
        return nil, err
    }
@@ -502,14 +503,14 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
        partInfo := fmt.Sprintf("%d-%s", i, base64.StdEncoding.EncodeToString(md5Bytes))

        threadG.Go(func(ctx context.Context) error {
-           uploadUrls, err := y.GetMultiUploadUrls(ctx, initMultiUpload.Data.UploadFileID, partInfo)
+           uploadUrls, err := y.GetMultiUploadUrls(ctx, isFamily, initMultiUpload.Data.UploadFileID, partInfo)
            if err != nil {
                return err
            }

            // step.4 上传切片
            uploadUrl := uploadUrls[0]
-           _, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData))
+           _, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData), isFamily)
            if err != nil {
                return err
            }
@@ -538,21 +539,21 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
        "sliceMd5":  sliceMd5Hex,
        "lazyCheck": "1",
        "isLog":     "0",
-       "opertype":  "3",
-   }, &resp)
+       "opertype":  IF(overwrite, "3", "1"),
+   }, &resp, isFamily)
    if err != nil {
        return nil, err
    }
    return resp.toFile(), nil
 }

-func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
+func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, isFamily bool, overwrite bool) (model.Obj, error) {
    fileMd5 := stream.GetHash().GetHash(utils.MD5)
    if len(fileMd5) < utils.MD5.Width {
        return nil, errors.New("invalid hash")
    }

-   uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
+   uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()), isFamily)
    if err != nil {
        return nil, err
    }
@@ -561,11 +562,11 @@ func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream m
        return nil, errors.New("rapid upload fail")
    }

-   return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
+   return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId, isFamily, overwrite)
 }

 // 快传
-func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
    tempFile, err := file.CacheFullInTempFile()
    if err != nil {
        return nil, err
@@ -594,7 +595,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
        }

        silceMd5.Reset()
-       if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
+       if _, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
            return nil, err
        }
        md5Byte := silceMd5.Sum(nil)
@@ -609,7 +610,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
    }

    fullUrl := UPLOAD_URL
-   if y.isFamily() {
+   if isFamily {
        fullUrl += "/family"
    } else {
        //params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
@@ -628,13 +629,13 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
        "sliceSize": fmt.Sprint(sliceSize),
        "sliceMd5":  sliceMd5Hex,
    }
-   if y.isFamily() {
+   if isFamily {
        params.Set("familyId", y.FamilyID)
    }
    var uploadInfo InitMultiUploadResp
    _, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
        req.SetContext(ctx)
-   }, params, &uploadInfo)
+   }, params, &uploadInfo, isFamily)
    if err != nil {
        return nil, err
    }
@@ -659,7 +660,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
        i, uploadPart := i, uploadPart
        threadG.Go(func(ctx context.Context) error {
            // step.3 获取上传链接
-           uploadUrls, err := y.GetMultiUploadUrls(ctx, uploadInfo.UploadFileID, uploadPart)
+           uploadUrls, err := y.GetMultiUploadUrls(ctx, isFamily, uploadInfo.UploadFileID, uploadPart)
            if err != nil {
                return err
            }
@@ -671,7 +672,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
            }

            // step.4 上传切片
-           _, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize))
+           _, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize), isFamily)
            if err != nil {
                return err
            }
@@ -698,8 +699,8 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
    }, Params{
        "uploadFileId": uploadInfo.UploadFileID,
        "isLog":        "0",
-       "opertype":     "3",
-   }, &resp)
+       "opertype":     IF(overwrite, "3", "1"),
+   }, &resp, isFamily)
    if err != nil {
        return nil, err
    }
@@ -708,9 +709,9 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode

 // 获取上传切片信息
 // 对http body有大小限制,分片信息太多会出错
-func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
+func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
    fullUrl := UPLOAD_URL
-   if y.isFamily() {
+   if isFamily {
        fullUrl += "/family"
    } else {
        fullUrl += "/person"
@@ -723,7 +724,7 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
    }, Params{
        "uploadFileId": uploadFileId,
        "partInfo":     strings.Join(partInfo, ","),
-   }, &uploadUrlsResp)
+   }, &uploadUrlsResp, isFamily)
    if err != nil {
        return nil, err
    }
@@ -752,7 +753,7 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
 }

 // 旧版本上传,家庭云不支持覆盖
-func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
+func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
    tempFile, err := file.CacheFullInTempFile()
    if err != nil {
        return nil, err
@@ -763,7 +764,7 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
    }

    // 创建上传会话
-   uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
+   uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()), isFamily)
    if err != nil {
        return nil, err
    }
@@ -780,14 +781,14 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
            "Expect": "100-continue",
        }

-       if y.isFamily() {
+       if isFamily {
            header["FamilyId"] = fmt.Sprint(y.FamilyID)
            header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
        } else {
            header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
        }

-       _, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile))
+       _, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile), isFamily)
        if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
            return nil, err
        }
@@ -802,10 +803,10 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
                "uploadFileId": fmt.Sprint(status.UploadFileId),
                "resumePolicy": "1",
            })
-           if y.isFamily() {
+           if isFamily {
                req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
            }
-       }, &status)
+       }, &status, isFamily)
        if err != nil {
            return nil, err
        }
@@ -815,20 +816,20 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
        up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
    }

-   return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
+   return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId, isFamily, overwrite)
 }

 // 创建上传会话
-func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
+func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string, isFamily bool) (*CreateUploadFileResp, error) {
    var uploadInfo CreateUploadFileResp

    fullUrl := API_URL + "/createUploadFile.action"
-   if y.isFamily() {
+   if isFamily {
        fullUrl = API_URL + "/family/file/createFamilyFile.action"
    }
    _, err := y.post(fullUrl, func(req *resty.Request) {
        req.SetContext(ctx)
-       if y.isFamily() {
+       if isFamily {
            req.SetQueryParams(map[string]string{
                "familyId": y.FamilyID,
                "parentId": parentID,
@@ -849,7 +850,7 @@ func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileM
                "isLog":        "0",
            })
        }
-   }, &uploadInfo)
+   }, &uploadInfo, isFamily)

    if err != nil {
        return nil, err
@@ -858,11 +859,11 @@ func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileM
 }

 // 提交上传文件
-func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
+func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64, isFamily bool, overwrite bool) (model.Obj, error) {
    var resp OldCommitUploadFileResp
    _, err := y.post(fileCommitUrl, func(req *resty.Request) {
        req.SetContext(ctx)
-       if y.isFamily() {
+       if isFamily {
            req.SetHeaders(map[string]string{
                "ResumePolicy": "1",
                "UploadFileId": fmt.Sprint(uploadFileID),
@@ -870,13 +871,13 @@ func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string,
            })
        } else {
            req.SetFormData(map[string]string{
-               "opertype":     "3",
+               "opertype":     IF(overwrite, "3", "1"),
                "resumePolicy": "1",
                "uploadFileId": fmt.Sprint(uploadFileID),
                "isLog":        "0",
            })
        }
-   }, &resp)
+   }, &resp, isFamily)
    if err != nil {
        return nil, err
    }
@@ -895,10 +896,100 @@ func (y *Cloud189PC) isLogin() bool {
    return err == nil
 }

+// 创建家庭云中转文件夹
+func (y *Cloud189PC) createFamilyTransferFolder(count int) (*ring.Ring, error) {
```
|
||||||
|
folders := ring.New(count)
|
||||||
|
var rootFolder Cloud189Folder
|
||||||
|
_, err := y.post(API_URL+"/family/file/createFolder.action", func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"folderName": "FamilyTransferFolder",
|
||||||
|
"familyId": y.FamilyID,
|
||||||
|
})
|
||||||
|
}, &rootFolder, true)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
folderCount := 0
|
||||||
|
|
||||||
|
// 获取已有目录
|
||||||
|
files, err := y.getFiles(context.TODO(), rootFolder.GetID(), true)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, file := range files {
|
||||||
|
if folder, ok := file.(*Cloud189Folder); ok {
|
||||||
|
folders.Value = folder
|
||||||
|
folders = folders.Next()
|
||||||
|
folderCount++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 创建新的目录
|
||||||
|
for folderCount < count {
|
||||||
|
var newFolder Cloud189Folder
|
||||||
|
_, err := y.post(API_URL+"/family/file/createFolder.action", func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"folderName": uuid.NewString(),
|
||||||
|
"familyId": y.FamilyID,
|
||||||
|
"parentId": rootFolder.GetID(),
|
||||||
|
})
|
||||||
|
}, &newFolder, true)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
folders.Value = &newFolder
|
||||||
|
folders = folders.Next()
|
||||||
|
folderCount++
|
||||||
|
}
|
||||||
|
return folders, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 清理中转文件夹
|
||||||
|
func (y *Cloud189PC) cleanFamilyTransfer(ctx context.Context) error {
|
||||||
|
var tasks []BatchTaskInfo
|
||||||
|
r := y.familyTransferFolder
|
||||||
|
for p := r.Next(); p != r; p = p.Next() {
|
||||||
|
folder := p.Value.(*Cloud189Folder)
|
||||||
|
|
||||||
|
files, err := y.getFiles(ctx, folder.GetID(), true)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
for _, file := range files {
|
||||||
|
tasks = append(tasks, BatchTaskInfo{
|
||||||
|
FileId: file.GetID(),
|
||||||
|
FileName: file.GetName(),
|
||||||
|
IsFolder: BoolToNumber(file.IsDir()),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(tasks) > 0 {
|
||||||
|
// 删除
|
||||||
|
resp, err := y.CreateBatchTask("DELETE", y.FamilyID, "", nil, tasks...)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = y.WaitBatchTask("DELETE", resp.TaskID, time.Second)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
// 永久删除
|
||||||
|
resp, err = y.CreateBatchTask("CLEAR_RECYCLE", y.FamilyID, "", nil, tasks...)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = y.WaitBatchTask("CLEAR_RECYCLE", resp.TaskID, time.Second)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
// 获取家庭云所有用户信息
|
// 获取家庭云所有用户信息
|
||||||
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
|
||||||
var resp FamilyInfoListResp
|
var resp FamilyInfoListResp
|
||||||
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
|
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -922,6 +1013,73 @@ func (y *Cloud189PC) getFamilyID() (string, error) {
|
|||||||
return fmt.Sprint(infos[0].FamilyID), nil
|
return fmt.Sprint(infos[0].FamilyID), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 保存家庭云中的文件到个人云
|
||||||
|
func (y *Cloud189PC) SaveFamilyFileToPersonCloud(ctx context.Context, familyId string, srcObj, dstDir model.Obj, overwrite bool) error {
|
||||||
|
// _, err := y.post(API_URL+"/family/file/saveFileToMember.action", func(req *resty.Request) {
|
||||||
|
// req.SetQueryParams(map[string]string{
|
||||||
|
// "channelId": "home",
|
||||||
|
// "familyId": familyId,
|
||||||
|
// "destParentId": destParentId,
|
||||||
|
// "fileIdList": familyFileId,
|
||||||
|
// })
|
||||||
|
// }, nil)
|
||||||
|
// return err
|
||||||
|
|
||||||
|
task := BatchTaskInfo{
|
||||||
|
FileId: srcObj.GetID(),
|
||||||
|
FileName: srcObj.GetName(),
|
||||||
|
IsFolder: BoolToNumber(srcObj.IsDir()),
|
||||||
|
}
|
||||||
|
resp, err := y.CreateBatchTask("COPY", familyId, dstDir.GetID(), map[string]string{
|
||||||
|
"groupId": "null",
|
||||||
|
"copyType": "2",
|
||||||
|
"shareId": "null",
|
||||||
|
}, task)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
for {
|
||||||
|
state, err := y.CheckBatchTask("COPY", resp.TaskID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
switch state.TaskStatus {
|
||||||
|
case 2:
|
||||||
|
task.DealWay = IF(overwrite, 3, 2)
|
||||||
|
// 冲突时覆盖文件
|
||||||
|
if err := y.ManageBatchTask("COPY", resp.TaskID, dstDir.GetID(), task); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
case 4:
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
time.Sleep(time.Millisecond * 400)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (y *Cloud189PC) CreateBatchTask(aType string, familyID string, targetFolderId string, other map[string]string, taskInfos ...BatchTaskInfo) (*CreateBatchTaskResp, error) {
|
||||||
|
var resp CreateBatchTaskResp
|
||||||
|
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"type": aType,
|
||||||
|
"taskInfos": MustString(utils.Json.MarshalToString(taskInfos)),
|
||||||
|
})
|
||||||
|
if targetFolderId != "" {
|
||||||
|
req.SetFormData(map[string]string{"targetFolderId": targetFolderId})
|
||||||
|
}
|
||||||
|
if familyID != "" {
|
||||||
|
req.SetFormData(map[string]string{"familyId": familyID})
|
||||||
|
}
|
||||||
|
req.SetFormData(other)
|
||||||
|
}, &resp, familyID != "")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 检测任务状态
|
||||||
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
|
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
|
||||||
var resp BatchTaskStateResp
|
var resp BatchTaskStateResp
|
||||||
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
|
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
|
||||||
@ -936,6 +1094,37 @@ func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStat
|
|||||||
return &resp, nil
|
return &resp, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 获取冲突的任务信息
|
||||||
|
func (y *Cloud189PC) GetConflictTaskInfo(aType string, taskID string) (*BatchTaskConflictTaskInfoResp, error) {
|
||||||
|
var resp BatchTaskConflictTaskInfoResp
|
||||||
|
_, err := y.post(API_URL+"/batch/getConflictTaskInfo.action", func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"type": aType,
|
||||||
|
"taskId": taskID,
|
||||||
|
})
|
||||||
|
}, &resp)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// 处理冲突
|
||||||
|
func (y *Cloud189PC) ManageBatchTask(aType string, taskID string, targetFolderId string, taskInfos ...BatchTaskInfo) error {
|
||||||
|
_, err := y.post(API_URL+"/batch/manageBatchTask.action", func(req *resty.Request) {
|
||||||
|
req.SetFormData(map[string]string{
|
||||||
|
"targetFolderId": targetFolderId,
|
||||||
|
"type": aType,
|
||||||
|
"taskId": taskID,
|
||||||
|
"taskInfos": MustString(utils.Json.MarshalToString(taskInfos)),
|
||||||
|
})
|
||||||
|
}, nil)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var ErrIsConflict = errors.New("there is a conflict with the target object")
|
||||||
|
|
||||||
|
// 等待任务完成
|
||||||
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
|
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
|
||||||
for {
|
for {
|
||||||
state, err := y.CheckBatchTask(aType, taskID)
|
state, err := y.CheckBatchTask(aType, taskID)
|
||||||
@ -944,7 +1133,7 @@ func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration)
|
|||||||
}
|
}
|
||||||
switch state.TaskStatus {
|
switch state.TaskStatus {
|
||||||
case 2:
|
case 2:
|
||||||
return errors.New("there is a conflict with the target object")
|
return ErrIsConflict
|
||||||
case 4:
|
case 4:
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
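The hunks above call two small helpers, IF and BoolToNumber, that are defined elsewhere in the 189pc driver package and are not part of this diff. The sketch below only illustrates the semantics those calls appear to assume (a generic ternary and a bool-to-number flag); it is not the driver's actual implementation.

package main

import "fmt"

// IF is assumed to be a generic ternary helper: it returns a when cond is true, otherwise b.
func IF[T any](cond bool, a, b T) T {
	if cond {
		return a
	}
	return b
}

// BoolToNumber is assumed to map a bool onto the 0/1 flags the 189 API expects.
func BoolToNumber(b bool) int {
	return IF(b, 1, 0)
}

func main() {
	overwrite := true
	// opertype "3" asks the server to overwrite; "1" keeps both copies (assumed meaning).
	fmt.Println(IF(overwrite, "3", "1")) // 3
	fmt.Println(BoolToNumber(false))     // 0
}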
@@ -7,6 +7,7 @@ import (

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
+	"github.com/alist-org/alist/v3/internal/fs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
)
@@ -45,6 +46,9 @@ func (d *Alias) Init(ctx context.Context) error {
			d.oneKey = k
		}
		d.autoFlatten = true
+	} else {
+		d.oneKey = ""
+		d.autoFlatten = false
	}
	return nil
}
@@ -111,4 +115,26 @@ func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
	return nil, errs.ObjectNotFound
}

+func (d *Alias) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+	reqPath, err := d.getReqPath(ctx, srcObj)
+	if err == nil {
+		return fs.Rename(ctx, *reqPath, newName)
+	}
+	if errs.IsNotImplement(err) {
+		return errors.New("same-name files cannot be Rename")
+	}
+	return err
+}
+
+func (d *Alias) Remove(ctx context.Context, obj model.Obj) error {
+	reqPath, err := d.getReqPath(ctx, obj)
+	if err == nil {
+		return fs.Remove(ctx, *reqPath)
+	}
+	if errs.IsNotImplement(err) {
+		return errors.New("same-name files cannot be Delete")
+	}
+	return err
+}
+
var _ driver.Driver = (*Alias)(nil)
@@ -9,19 +9,25 @@ type Addition struct {
	// Usually one of two
	// driver.RootPath
	// define other
	Paths           string `json:"paths" required:"true" type:"text"`
+	ProtectSameName bool   `json:"protect_same_name" default:"true" required:"false" help:"Protects same-name files from Delete or Rename"`
}

var config = driver.Config{
	Name:             "Alias",
	LocalSort:        true,
	NoCache:          true,
	NoUpload:         true,
	DefaultRoot:      "/",
+	ProxyRangeOption: true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
-		return &Alias{}
+		return &Alias{
+			Addition: Addition{
+				ProtectSameName: true,
+			},
+		}
	})
}
@@ -6,6 +6,7 @@ import (
	stdpath "path"
	"strings"

+	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/fs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/sign"
@@ -102,13 +103,49 @@ func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs)
		return nil, err
	}
	if common.ShouldProxy(storage, stdpath.Base(sub)) {
-		return &model.Link{
+		link := &model.Link{
			URL: fmt.Sprintf("%s/p%s?sign=%s",
				common.GetApiUrl(args.HttpReq),
				utils.EncodePath(reqPath, true),
				sign.Sign(reqPath)),
-		}, nil
+		}
+		if args.HttpReq != nil && d.ProxyRange {
+			link.RangeReadCloser = common.NoProxyRange
+		}
+		return link, nil
	}
	link, _, err := fs.Link(ctx, reqPath, args)
	return link, err
}
+
+func (d *Alias) getReqPath(ctx context.Context, obj model.Obj) (*string, error) {
+	root, sub := d.getRootAndPath(obj.GetPath())
+	if sub == "" || sub == "/" {
+		return nil, errs.NotSupport
+	}
+	dsts, ok := d.pathMap[root]
+	if !ok {
+		return nil, errs.ObjectNotFound
+	}
+	var reqPath string
+	var err error
+	for _, dst := range dsts {
+		reqPath = stdpath.Join(dst, sub)
+		_, err = fs.Get(ctx, reqPath, &fs.GetArgs{NoLog: true})
+		if err == nil {
+			if d.ProtectSameName {
+				if ok {
+					ok = false
+				} else {
+					return nil, errs.NotImplement
+				}
+			} else {
+				break
+			}
+		}
+	}
+	if err != nil {
+		return nil, errs.ObjectNotFound
+	}
+	return &reqPath, nil
+}
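getReqPath above refuses to act when ProtectSameName is on and more than one backing storage contains the object. A standalone sketch of that rule over a plain slice, with a hypothetical exists callback standing in for the fs.Get probe:

package main

import (
	"errors"
	"fmt"
	stdpath "path"
)

var errAmbiguous = errors.New("same-name object exists in more than one destination")

// resolveOne returns the single destination that contains sub, or an error if
// none do, or if several do while protection is enabled.
func resolveOne(dsts []string, sub string, protect bool, exists func(string) bool) (string, error) {
	var match string
	found := 0
	for _, dst := range dsts {
		p := stdpath.Join(dst, sub)
		if exists(p) {
			found++
			match = p
			if !protect {
				break // first hit wins when protection is off
			}
			if found > 1 {
				return "", errAmbiguous
			}
		}
	}
	if found == 0 {
		return "", errors.New("object not found")
	}
	return match, nil
}

func main() {
	files := map[string]bool{"/a/x.txt": true, "/b/x.txt": true}
	_, err := resolveOne([]string{"/a", "/b"}, "x.txt", true, func(p string) bool { return files[p] })
	fmt.Println(err) // same-name object exists in more than one destination
}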
@@ -109,11 +109,19 @@ func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs)

func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	var resp common.Resp[FsGetResp]
+	// if PassUAToUpsteam is true, then pass the user-agent to the upstream
+	userAgent := base.UserAgent
+	if d.PassUAToUpsteam {
+		userAgent = args.Header.Get("user-agent")
+		if userAgent == "" {
+			userAgent = base.UserAgent
+		}
+	}
	_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
		req.SetResult(&resp).SetBody(FsGetReq{
			Path:     file.GetPath(),
			Password: d.MetaPassword,
-		})
+		}).SetHeader("user-agent", userAgent)
	})
	if err != nil {
		return nil, err
@@ -7,18 +7,20 @@ import (

type Addition struct {
	driver.RootPath
	Address         string `json:"url" required:"true"`
	MetaPassword    string `json:"meta_password"`
	Username        string `json:"username"`
	Password        string `json:"password"`
	Token           string `json:"token"`
+	PassUAToUpsteam bool   `json:"pass_ua_to_upsteam" default:"true"`
}

var config = driver.Config{
	Name:             "AList V3",
	LocalSort:        true,
	DefaultRoot:      "/",
	CheckStatus:      true,
+	ProxyRangeOption: true,
}

func init() {
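A minimal sketch of the user-agent selection the Link change performs, pulled out as a plain function; the defaultUA constant is only a placeholder for base.UserAgent:

package main

import (
	"fmt"
	"net/http"
)

const defaultUA = "alist-client" // stand-in for base.UserAgent

// pickUserAgent forwards the caller's UA to the upstream AList only when
// pass-through is enabled and the caller actually sent one.
func pickUserAgent(h http.Header, passThrough bool) string {
	if passThrough {
		if ua := h.Get("user-agent"); ua != "" {
			return ua
		}
	}
	return defaultUA
}

func main() {
	h := http.Header{}
	h.Set("User-Agent", "curl/8.5.0")
	fmt.Println(pickUserAgent(h, true))  // curl/8.5.0
	fmt.Println(pickUserAgent(h, false)) // alist-client
}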
@@ -194,7 +194,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
	}
	if d.RapidUpload {
		buf := bytes.NewBuffer(make([]byte, 0, 1024))
-		io.CopyN(buf, file, 1024)
+		utils.CopyWithBufferN(buf, file, 1024)
		reqBody["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
		if localFile != nil {
			if _, err := localFile.Seek(0, io.SeekStart); err != nil {
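Several hunks in this compare swap io.Copy/io.CopyN for utils.CopyWithBuffer/utils.CopyWithBufferN. The actual implementation lives in pkg/utils and is not shown in this diff; the sketch below only illustrates the general idea of copying through a pooled buffer to avoid a fresh allocation per call.

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
	"sync"
)

var bufPool = sync.Pool{
	New: func() any { b := make([]byte, 32*1024); return &b },
}

// copyWithBuffer is an illustrative pooled-buffer copy, not the project's utils implementation.
func copyWithBuffer(dst io.Writer, src io.Reader) (int64, error) {
	buf := bufPool.Get().(*[]byte)
	defer bufPool.Put(buf)
	return io.CopyBuffer(dst, src, *buf)
}

// copyWithBufferN copies exactly n bytes, mirroring io.CopyN semantics.
func copyWithBufferN(dst io.Writer, src io.Reader, n int64) (int64, error) {
	written, err := copyWithBuffer(dst, io.LimitReader(src, n))
	if written == n {
		return n, nil
	}
	if written < n && err == nil {
		err = io.EOF
	}
	return written, err
}

func main() {
	var out bytes.Buffer
	n, err := copyWithBufferN(&out, strings.NewReader("hello world"), 5)
	fmt.Println(n, err, out.String()) // 5 <nil> hello
}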
@@ -136,7 +136,7 @@ func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error
	if err != nil {
		return "", err
	}
-	_, err = io.CopyN(buf, reader, length)
+	_, err = utils.CopyWithBufferN(buf, reader, length)
	if err != nil {
		return "", err
	}
@@ -164,7 +164,7 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
	count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
	createData["part_info_list"] = makePartInfos(count)
	// rapid upload
-	rapidUpload := stream.GetSize() > 100*utils.KB && d.RapidUpload
+	rapidUpload := !stream.IsForceStreamUpload() && stream.GetSize() > 100*utils.KB && d.RapidUpload
	if rapidUpload {
		log.Debugf("[aliyundrive_open] start cal pre_hash")
		// read 1024 bytes to calculate pre hash
@@ -242,13 +242,16 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
		if remain := stream.GetSize() - offset; length > remain {
			length = remain
		}
-		//rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
-		rd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
-		if err != nil {
-			return nil, err
+		rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
+		if rapidUpload {
+			srd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
+			if err != nil {
+				return nil, err
+			}
+			rd = utils.NewMultiReadable(srd)
		}
		err = retry.Do(func() error {
-			//rd.Reset()
+			rd.Reset()
			return d.uploadPart(ctx, rd, createResp.PartInfoList[i])
		},
			retry.Attempts(3),
@@ -32,6 +32,7 @@ import (
	_ "github.com/alist-org/alist/v3/drivers/mediatrack"
	_ "github.com/alist-org/alist/v3/drivers/mega"
	_ "github.com/alist-org/alist/v3/drivers/mopan"
+	_ "github.com/alist-org/alist/v3/drivers/netease_music"
	_ "github.com/alist-org/alist/v3/drivers/onedrive"
	_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
	_ "github.com/alist-org/alist/v3/drivers/pikpak"
@@ -45,6 +46,7 @@ import (
	_ "github.com/alist-org/alist/v3/drivers/teambition"
	_ "github.com/alist-org/alist/v3/drivers/terabox"
	_ "github.com/alist-org/alist/v3/drivers/thunder"
+	_ "github.com/alist-org/alist/v3/drivers/thunderx"
	_ "github.com/alist-org/alist/v3/drivers/trainbit"
	_ "github.com/alist-org/alist/v3/drivers/url_tree"
	_ "github.com/alist-org/alist/v3/drivers/uss"
@@ -165,9 +165,16 @@ func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream mo
	if err != nil {
		return nil, err
	}
+	// 修复时间,具体原因见 Put 方法注释的 **注意**
+	newFile.Ctime = stream.CreateTime().Unix()
+	newFile.Mtime = stream.ModTime().Unix()
	return fileToObj(newFile), nil
}

+// Put
+//
+// **注意**: 截至 2024/04/20 百度云盘 api 接口返回的时间永远是当前时间,而不是文件时间。
+// 而实际上云盘存储的时间是文件时间,所以此处需要覆盖时间,保证缓存与云盘的数据一致
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	// rapid upload
	if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
@@ -204,7 +211,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
		if i == count {
			byteSize = lastBlockSize
		}
-		_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
+		_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
		if err != nil && err != io.EOF {
			return nil, err
		}
@@ -245,9 +252,9 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
	log.Debugf("%+v", precreateResp)
	if precreateResp.ReturnType == 2 {
		//rapid upload, since got md5 match from baidu server
-		if err != nil {
-			return nil, err
-		}
+		// 修复时间,具体原因见 Put 方法注释的 **注意**
+		precreateResp.File.Ctime = ctime
+		precreateResp.File.Mtime = mtime
		return fileToObj(precreateResp.File), nil
	}
}
@@ -298,6 +305,9 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
	if err != nil {
		return nil, err
	}
+	// 修复时间,具体原因见 Put 方法注释的 **注意**
+	newFile.Ctime = ctime
+	newFile.Mtime = mtime
	return fileToObj(newFile), nil
}

@@ -8,15 +8,16 @@ import (
type Addition struct {
	RefreshToken string `json:"refresh_token" required:"true"`
	driver.RootPath
	OrderBy              string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
	OrderDirection       string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
	DownloadAPI          string `json:"download_api" type:"select" options:"official,crack" default:"official"`
	ClientID             string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
	ClientSecret         string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
	CustomCrackUA        string `json:"custom_crack_ua" required:"true" default:"netdisk"`
	AccessToken          string
	UploadThread         string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
	UploadAPI            string `json:"upload_api" default:"https://d.pcs.baidu.com"`
+	CustomUploadPartSize int64  `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
}

var config = driver.Config{
@@ -249,6 +249,9 @@ const (
)

func (d *BaiduNetdisk) getSliceSize() int64 {
+	if d.CustomUploadPartSize != 0 {
+		return d.CustomUploadPartSize
+	}
	switch d.vipType {
	case 1:
		return VipSliceSize
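The getSliceSize change puts a user-supplied part size ahead of the VIP-tier defaults. A compact illustration of that precedence; the byte values below are placeholders, not Baidu's real tier limits:

package main

import "fmt"

const (
	defaultSliceSize  int64 = 4 << 20 // placeholder values, not the driver's constants
	vipSliceSize      int64 = 16 << 20
	superVipSliceSize int64 = 32 << 20
)

// sliceSize returns the upload part size: an explicit override wins, otherwise
// the size is chosen by account tier.
func sliceSize(custom int64, vipType int) int64 {
	if custom != 0 {
		return custom
	}
	switch vipType {
	case 1:
		return vipSliceSize
	case 2:
		return superVipSliceSize
	default:
		return defaultSliceSize
	}
}

func main() {
	fmt.Println(sliceSize(0, 1))     // 16777216
	fmt.Println(sliceSize(8<<20, 1)) // 8388608 – the override beats the tier default
}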
@@ -261,7 +261,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
		if i == count {
			byteSize = lastBlockSize
		}
-		_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
+		_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
		if err != nil && err != io.EOF {
			return nil, err
		}
@@ -229,7 +229,7 @@ func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
	if err != nil {
		return err
	}
-	_, err = io.Copy(filePart, stream)
+	_, err = utils.CopyWithBuffer(filePart, stream)
	if err != nil {
		return err
	}
|
|||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
if strings.HasPrefix(dUrl, "/api") {
|
||||||
|
dUrl = d.Address + dUrl
|
||||||
|
}
|
||||||
return &model.Link{
|
return &model.Link{
|
||||||
URL: dUrl,
|
URL: dUrl,
|
||||||
}, nil
|
}, nil
|
||||||
|
@ -3,7 +3,6 @@ package crypt
|
|||||||
import (
|
import (
|
||||||
"context"
|
"context"
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/alist-org/alist/v3/internal/stream"
|
|
||||||
"io"
|
"io"
|
||||||
stdpath "path"
|
stdpath "path"
|
||||||
"regexp"
|
"regexp"
|
||||||
@ -14,6 +13,7 @@ import (
|
|||||||
"github.com/alist-org/alist/v3/internal/fs"
|
"github.com/alist-org/alist/v3/internal/fs"
|
||||||
"github.com/alist-org/alist/v3/internal/model"
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
"github.com/alist-org/alist/v3/internal/stream"
|
||||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/alist-org/alist/v3/server/common"
|
"github.com/alist-org/alist/v3/server/common"
|
||||||
@ -160,7 +160,7 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
|||||||
// discarding hash as it's encrypted
|
// discarding hash as it's encrypted
|
||||||
}
|
}
|
||||||
if d.Thumbnail && thumb == "" {
|
if d.Thumbnail && thumb == "" {
|
||||||
thumb = utils.EncodePath(common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
thumb = utils.EncodePath(common.GetApiUrl(nil)+stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
||||||
}
|
}
|
||||||
if !ok && !d.Thumbnail {
|
if !ok && !d.Thumbnail {
|
||||||
result = append(result, &objRes)
|
result = append(result, &objRes)
|
||||||
@ -389,10 +389,11 @@ func (d *Crypt) Put(ctx context.Context, dstDir model.Obj, streamer model.FileSt
|
|||||||
Modified: streamer.ModTime(),
|
Modified: streamer.ModTime(),
|
||||||
IsFolder: streamer.IsDir(),
|
IsFolder: streamer.IsDir(),
|
||||||
},
|
},
|
||||||
Reader: wrappedIn,
|
Reader: wrappedIn,
|
||||||
Mimetype: "application/octet-stream",
|
Mimetype: "application/octet-stream",
|
||||||
WebPutAsTask: streamer.NeedStore(),
|
WebPutAsTask: streamer.NeedStore(),
|
||||||
Exist: streamer.GetExist(),
|
ForceStreamUpload: true,
|
||||||
|
Exist: streamer.GetExist(),
|
||||||
}
|
}
|
||||||
err = op.Put(ctx, d.remoteStorage, dstDirActualPath, streamOut, up, false)
|
err = op.Put(ctx, d.remoteStorage, dstDirActualPath, streamOut, up, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -147,7 +147,8 @@ func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
// get the url after redirect
|
// get the url after redirect
|
||||||
res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
|
res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
|
||||||
//"Origin": d.conf.site,
|
//"Origin": d.conf.site,
|
||||||
"Referer": d.conf.site + "/",
|
"Referer": d.conf.site + "/",
|
||||||
|
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
|
||||||
}).Get(realURL)
|
}).Get(realURL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
@ -155,7 +156,12 @@ func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
|||||||
if res.StatusCode() == 302 {
|
if res.StatusCode() == 302 {
|
||||||
realURL = res.Header().Get("location")
|
realURL = res.Header().Get("location")
|
||||||
} else {
|
} else {
|
||||||
return nil, fmt.Errorf("redirect failed, status: %d", res.StatusCode())
|
contentLengthStr := res.Header().Get("Content-Length")
|
||||||
|
contentLength, err := strconv.Atoi(contentLengthStr)
|
||||||
|
if err != nil || contentLength == 0 || contentLength > 1024*10 {
|
||||||
|
return nil, fmt.Errorf("redirect failed, status: %d", res.StatusCode())
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("redirect failed, content: %s", res.String())
|
||||||
}
|
}
|
||||||
link := model.Link{URL: realURL}
|
link := model.Link{URL: realURL}
|
||||||
return &link, nil
|
return &link, nil
|
||||||
@ -271,7 +277,7 @@ func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
|||||||
defer func() {
|
defer func() {
|
||||||
_ = tempFile.Close()
|
_ = tempFile.Close()
|
||||||
}()
|
}()
|
||||||
if _, err = io.Copy(h, tempFile); err != nil {
|
if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
_, err = tempFile.Seek(0, io.SeekStart)
|
_, err = tempFile.Seek(0, io.SeekStart)
|
||||||
|
@ -46,7 +46,7 @@ func init() {
|
|||||||
bucket: "wpanstore-lanzou",
|
bucket: "wpanstore-lanzou",
|
||||||
unproved: "unproved",
|
unproved: "unproved",
|
||||||
proved: "proved",
|
proved: "proved",
|
||||||
devVersion: "122",
|
devVersion: "125",
|
||||||
site: "https://www.ilanzou.com",
|
site: "https://www.ilanzou.com",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
@ -72,7 +72,7 @@ func init() {
|
|||||||
bucket: "wpanstore",
|
bucket: "wpanstore",
|
||||||
unproved: "ws",
|
unproved: "ws",
|
||||||
proved: "app",
|
proved: "app",
|
||||||
devVersion: "121",
|
devVersion: "125",
|
||||||
site: "https://www.feijipan.com",
|
site: "https://www.feijipan.com",
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
|
@ -59,8 +59,9 @@ func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, pr
|
|||||||
"extra": "2",
|
"extra": "2",
|
||||||
})
|
})
|
||||||
req.SetHeaders(map[string]string{
|
req.SetHeaders(map[string]string{
|
||||||
"Origin": d.conf.site,
|
"Origin": d.conf.site,
|
||||||
"Referer": d.conf.site + "/",
|
"Referer": d.conf.site + "/",
|
||||||
|
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
|
||||||
})
|
})
|
||||||
if proved {
|
if proved {
|
||||||
req.SetQueryParam("appToken", d.Token)
|
req.SetQueryParam("appToken", d.Token)
|
||||||
|
drivers/lark.go (new file, +8)
@@ -0,0 +1,8 @@
// +build linux darwin windows
// +build amd64 arm64

package drivers

import (
	_ "github.com/alist-org/alist/v3/drivers/lark"
)
drivers/lark/driver.go (new file, +396)
@@ -0,0 +1,396 @@
package lark

import (
	"context"
	"errors"
	"fmt"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/ipfs/boxo/path"
	lark "github.com/larksuite/oapi-sdk-go/v3"
	larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
	larkdrive "github.com/larksuite/oapi-sdk-go/v3/service/drive/v1"
	"golang.org/x/time/rate"
	"io"
	"net/http"
	"strconv"
	"time"
)

type Lark struct {
	model.Storage
	Addition

	client          *lark.Client
	rootFolderToken string
}

func (c *Lark) Config() driver.Config {
	return config
}

func (c *Lark) GetAddition() driver.Additional {
	return &c.Addition
}

func (c *Lark) Init(ctx context.Context) error {
	c.client = lark.NewClient(c.AppId, c.AppSecret, lark.WithTokenCache(newTokenCache()))

	paths := path.SplitList(c.RootFolderPath)
	token := ""

	var ok bool
	var file *larkdrive.File
	for _, p := range paths {
		if p == "" {
			token = ""
			continue
		}

		resp, err := c.client.Drive.File.ListByIterator(ctx, larkdrive.NewListFileReqBuilder().FolderToken(token).Build())
		if err != nil {
			return err
		}

		for {
			ok, file, err = resp.Next()
			if !ok {
				return errs.ObjectNotFound
			}

			if err != nil {
				return err
			}

			if *file.Type == "folder" && *file.Name == p {
				token = *file.Token
				break
			}
		}
	}

	c.rootFolderToken = token

	return nil
}

func (c *Lark) Drop(ctx context.Context) error {
	return nil
}

func (c *Lark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	token, ok := c.getObjToken(ctx, dir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	if token == emptyFolderToken {
		return nil, nil
	}

	resp, err := c.client.Drive.File.ListByIterator(ctx, larkdrive.NewListFileReqBuilder().FolderToken(token).Build())
	if err != nil {
		return nil, err
	}

	ok = false
	var file *larkdrive.File
	var res []model.Obj

	for {
		ok, file, err = resp.Next()
		if !ok {
			break
		}

		if err != nil {
			return nil, err
		}

		modifiedUnix, _ := strconv.ParseInt(*file.ModifiedTime, 10, 64)
		createdUnix, _ := strconv.ParseInt(*file.CreatedTime, 10, 64)

		f := model.Object{
			ID:       *file.Token,
			Path:     path.Join([]string{c.RootFolderPath, dir.GetPath(), *file.Name}),
			Name:     *file.Name,
			Size:     0,
			Modified: time.Unix(modifiedUnix, 0),
			Ctime:    time.Unix(createdUnix, 0),
			IsFolder: *file.Type == "folder",
		}
		res = append(res, &f)
	}

	return res, nil
}

func (c *Lark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	token, ok := c.getObjToken(ctx, file.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	resp, err := c.client.GetTenantAccessTokenBySelfBuiltApp(ctx, &larkcore.SelfBuiltTenantAccessTokenReq{
		AppID:     c.AppId,
		AppSecret: c.AppSecret,
	})

	if err != nil {
		return nil, err
	}

	if !c.ExternalMode {
		accessToken := resp.TenantAccessToken

		url := fmt.Sprintf("https://open.feishu.cn/open-apis/drive/v1/files/%s/download", token)

		req, err := http.NewRequest(http.MethodGet, url, nil)
		if err != nil {
			return nil, err
		}

		req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", accessToken))
		req.Header.Set("Range", "bytes=0-1")

		ar, err := http.DefaultClient.Do(req)
		if err != nil {
			return nil, err
		}

		if ar.StatusCode != http.StatusPartialContent {
			return nil, errors.New("failed to get download link")
		}

		return &model.Link{
			URL: url,
			Header: http.Header{
				"Authorization": []string{fmt.Sprintf("Bearer %s", accessToken)},
			},
		}, nil
	} else {
		url := path.Join([]string{c.TenantUrlPrefix, "file", token})

		return &model.Link{
			URL: url,
		}, nil
	}
}

func (c *Lark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
	token, ok := c.getObjToken(ctx, parentDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	body, err := larkdrive.NewCreateFolderFilePathReqBodyBuilder().FolderToken(token).Name(dirName).Build()
	if err != nil {
		return nil, err
	}

	resp, err := c.client.Drive.File.CreateFolder(ctx,
		larkdrive.NewCreateFolderFileReqBuilder().Body(body).Build())
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	return &model.Object{
		ID:       *resp.Data.Token,
		Path:     path.Join([]string{c.RootFolderPath, parentDir.GetPath(), dirName}),
		Name:     dirName,
		Size:     0,
		IsFolder: true,
	}, nil
}

func (c *Lark) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	srcToken, ok := c.getObjToken(ctx, srcObj.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	dstDirToken, ok := c.getObjToken(ctx, dstDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	req := larkdrive.NewMoveFileReqBuilder().
		Body(larkdrive.NewMoveFileReqBodyBuilder().
			Type("file").
			FolderToken(dstDirToken).
			Build()).FileToken(srcToken).
		Build()

	// 发起请求
	resp, err := c.client.Drive.File.Move(ctx, req)
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	return nil, nil
}

func (c *Lark) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
	// TODO rename obj, optional
	return nil, errs.NotImplement
}

func (c *Lark) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
	srcToken, ok := c.getObjToken(ctx, srcObj.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	dstDirToken, ok := c.getObjToken(ctx, dstDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	req := larkdrive.NewCopyFileReqBuilder().
		Body(larkdrive.NewCopyFileReqBodyBuilder().
			Name(srcObj.GetName()).
			Type("file").
			FolderToken(dstDirToken).
			Build()).FileToken(srcToken).
		Build()

	// 发起请求
	resp, err := c.client.Drive.File.Copy(ctx, req)
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	return nil, nil
}

func (c *Lark) Remove(ctx context.Context, obj model.Obj) error {
	token, ok := c.getObjToken(ctx, obj.GetPath())
	if !ok {
		return errs.ObjectNotFound
	}

	req := larkdrive.NewDeleteFileReqBuilder().
		FileToken(token).
		Type("file").
		Build()

	// 发起请求
	resp, err := c.client.Drive.File.Delete(ctx, req)
	if err != nil {
		return err
	}

	if !resp.Success() {
		return errors.New(resp.Error())
	}

	return nil
}

var uploadLimit = rate.NewLimiter(rate.Every(time.Second), 5)

func (c *Lark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
	token, ok := c.getObjToken(ctx, dstDir.GetPath())
	if !ok {
		return nil, errs.ObjectNotFound
	}

	// prepare
	req := larkdrive.NewUploadPrepareFileReqBuilder().
		FileUploadInfo(larkdrive.NewFileUploadInfoBuilder().
			FileName(stream.GetName()).
			ParentType(`explorer`).
			ParentNode(token).
			Size(int(stream.GetSize())).
			Build()).
		Build()

	// 发起请求
	uploadLimit.Wait(ctx)
	resp, err := c.client.Drive.File.UploadPrepare(ctx, req)
	if err != nil {
		return nil, err
	}

	if !resp.Success() {
		return nil, errors.New(resp.Error())
	}

	uploadId := *resp.Data.UploadId
	blockSize := *resp.Data.BlockSize
	blockCount := *resp.Data.BlockNum

	// upload
	for i := 0; i < blockCount; i++ {
		length := int64(blockSize)
		if i == blockCount-1 {
			length = stream.GetSize() - int64(i*blockSize)
		}

		reader := io.LimitReader(stream, length)

		req := larkdrive.NewUploadPartFileReqBuilder().
			Body(larkdrive.NewUploadPartFileReqBodyBuilder().
				UploadId(uploadId).
				Seq(i).
				Size(int(length)).
				File(reader).
				Build()).
			Build()

		// 发起请求
		uploadLimit.Wait(ctx)
		resp, err := c.client.Drive.File.UploadPart(ctx, req)

		if err != nil {
			return nil, err
		}

		if !resp.Success() {
			return nil, errors.New(resp.Error())
		}

		up(float64(i) / float64(blockCount))
	}

	//close
	closeReq := larkdrive.NewUploadFinishFileReqBuilder().
		Body(larkdrive.NewUploadFinishFileReqBodyBuilder().
			UploadId(uploadId).
			BlockNum(blockCount).
			Build()).
		Build()

	// 发起请求
	closeResp, err := c.client.Drive.File.UploadFinish(ctx, closeReq)
	if err != nil {
		return nil, err
	}

	if !closeResp.Success() {
		return nil, errors.New(closeResp.Error())
	}

	return &model.Object{
		ID: *closeResp.Data.FileToken,
	}, nil
}

//func (d *Lark) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
//	return nil, errs.NotSupport
//}

var _ driver.Driver = (*Lark)(nil)
drivers/lark/meta.go (new file, +36)
@@ -0,0 +1,36 @@
package lark

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	// Usually one of two
	driver.RootPath
	// define other
	AppId           string `json:"app_id" type:"text" help:"app id"`
	AppSecret       string `json:"app_secret" type:"text" help:"app secret"`
	ExternalMode    bool   `json:"external_mode" type:"bool" help:"external mode"`
	TenantUrlPrefix string `json:"tenant_url_prefix" type:"text" help:"tenant url prefix"`
}

var config = driver.Config{
	Name:              "Lark",
	LocalSort:         false,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          false,
	NeedMs:            false,
	DefaultRoot:       "/",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Lark{}
	})
}
drivers/lark/types.go (new file, +32)
@@ -0,0 +1,32 @@
package lark

import (
	"context"
	"github.com/Xhofe/go-cache"
	"time"
)

type TokenCache struct {
	cache.ICache[string]
}

func (t *TokenCache) Set(_ context.Context, key string, value string, expireTime time.Duration) error {
	t.ICache.Set(key, value, cache.WithEx[string](expireTime))

	return nil
}

func (t *TokenCache) Get(_ context.Context, key string) (string, error) {
	v, ok := t.ICache.Get(key)
	if ok {
		return v, nil
	}

	return "", nil
}

func newTokenCache() *TokenCache {
	c := cache.NewMemCache[string]()

	return &TokenCache{c}
}
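TokenCache adapts the in-memory go-cache store to the getter/setter shape that the SDK's WithTokenCache option consumes. A small round-trip check of the adapter, written as an illustrative in-package test sketch using only what the new file above defines:

package lark

import (
	"context"
	"testing"
	"time"
)

// Values written with a TTL come back through Get until they expire;
// a key that was never set comes back as the empty string, not an error.
func TestTokenCacheRoundTrip(t *testing.T) {
	c := newTokenCache()
	ctx := context.Background()

	if err := c.Set(ctx, "tenant_access_token", "t-secret", time.Minute); err != nil {
		t.Fatalf("set: %v", err)
	}
	got, err := c.Get(ctx, "tenant_access_token")
	if err != nil || got != "t-secret" {
		t.Fatalf("get: %q, %v", got, err)
	}

	if miss, _ := c.Get(ctx, "missing"); miss != "" {
		t.Fatalf("expected empty value for missing key, got %q", miss)
	}
}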
drivers/lark/util.go (new file, +66)
@@ -0,0 +1,66 @@
package lark

import (
	"context"
	"github.com/Xhofe/go-cache"
	larkdrive "github.com/larksuite/oapi-sdk-go/v3/service/drive/v1"
	log "github.com/sirupsen/logrus"
	"path"
	"time"
)

const objTokenCacheDuration = 5 * time.Minute
const emptyFolderToken = "empty"

var objTokenCache = cache.NewMemCache[string]()
var exOpts = cache.WithEx[string](objTokenCacheDuration)

func (c *Lark) getObjToken(ctx context.Context, folderPath string) (string, bool) {
	if token, ok := objTokenCache.Get(folderPath); ok {
		return token, true
	}

	dir, name := path.Split(folderPath)
	// strip the last slash of dir if it exists
	if len(dir) > 0 && dir[len(dir)-1] == '/' {
		dir = dir[:len(dir)-1]
	}
	if name == "" {
		return c.rootFolderToken, true
	}

	var parentToken string
	var found bool
	parentToken, found = c.getObjToken(ctx, dir)
	if !found {
		return emptyFolderToken, false
	}

	req := larkdrive.NewListFileReqBuilder().FolderToken(parentToken).Build()
	resp, err := c.client.Drive.File.ListByIterator(ctx, req)

	if err != nil {
		log.WithError(err).Error("failed to list files")
		return emptyFolderToken, false
	}

	var file *larkdrive.File
	for {
		found, file, err = resp.Next()
		if !found {
			break
		}

		if err != nil {
			log.WithError(err).Error("failed to get next file")
			break
		}

		if *file.Name == name {
			objTokenCache.Set(folderPath, *file.Token, exOpts)
			return *file.Token, true
		}
	}

	return emptyFolderToken, false
}
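getObjToken resolves a path one component at a time, recursing on the parent directory and memoising each result for five minutes. The standalone sketch below shows the same resolve-parent-then-look-up-child pattern over a plain map, to make the recursion easier to follow; the folder data here is hypothetical and stands in for the Lark listing API.

package main

import (
	"fmt"
	"path"
)

// children maps a folder token to its entries (name -> token). Hypothetical data.
var children = map[string]map[string]string{
	"root":     {"docs": "tok-docs"},
	"tok-docs": {"notes.txt": "tok-notes"},
}

var memo = map[string]string{}

// resolve walks the path from the root, looking each component up under its
// parent's token and caching every intermediate result.
func resolve(p string) (string, bool) {
	if p == "" || p == "/" {
		return "root", true
	}
	if tok, ok := memo[p]; ok {
		return tok, true
	}
	dir, name := path.Split(path.Clean(p))
	parent, ok := resolve(path.Clean(dir))
	if !ok {
		return "", false
	}
	tok, ok := children[parent][name]
	if ok {
		memo[p] = tok
	}
	return tok, ok
}

func main() {
	fmt.Println(resolve("/docs/notes.txt")) // tok-notes true
	fmt.Println(resolve("/missing"))        // (empty token) false
}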
@@ -206,7 +206,7 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
		return err
	}
	h := md5.New()
-	_, err = io.Copy(h, tempFile)
+	_, err = utils.CopyWithBuffer(h, tempFile)
	if err != nil {
		return err
	}
@@ -8,6 +8,7 @@ import (
	"time"

	"github.com/alist-org/alist/v3/pkg/http_range"
+	"github.com/pquerna/otp/totp"
	"github.com/rclone/rclone/lib/readers"

	"github.com/alist-org/alist/v3/internal/driver"
@@ -33,8 +34,16 @@ func (d *Mega) GetAddition() driver.Additional {
}

func (d *Mega) Init(ctx context.Context) error {
+	var twoFACode = d.TwoFACode
	d.c = mega.New()
-	return d.c.Login(d.Email, d.Password)
+	if d.TwoFASecret != "" {
+		code, err := totp.GenerateCode(d.TwoFASecret, time.Now())
+		if err != nil {
+			return fmt.Errorf("generate totp code failed: %w", err)
+		}
+		twoFACode = code
+	}
+	return d.c.MultiFactorLogin(d.Email, d.Password, twoFACode)
}

func (d *Mega) Drop(ctx context.Context) error {
@@ -9,8 +9,10 @@ type Addition struct {
	// Usually one of two
	//driver.RootPath
	//driver.RootID
	Email    string `json:"email" required:"true"`
	Password string `json:"password" required:"true"`
+	TwoFACode   string `json:"two_fa_code" required:"false" help:"2FA 6-digit code, filling in the 2FA code alone will not support reloading driver"`
+	TwoFASecret string `json:"two_fa_secret" required:"false" help:"2FA secret"`
}

var config = driver.Config{
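The Mega change derives a one-time code from a stored 2FA secret at login time via totp.GenerateCode. A small standalone sketch of that call; the secret below is a made-up example, and a real setup should take the time from a synced clock:

package main

import (
	"fmt"
	"time"

	"github.com/pquerna/otp/totp"
)

func main() {
	// Example base32 secret for illustration only; never hard-code a real one.
	secret := "JBSWY3DPEHPK3PXP"

	code, err := totp.GenerateCode(secret, time.Now())
	if err != nil {
		fmt.Println("generate totp code failed:", err)
		return
	}
	fmt.Println("current 6-digit code:", code) // changes every 30 seconds
}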
135  drivers/netease_music/crypto.go  Normal file
@ -0,0 +1,135 @@
package netease_music

import (
	"bytes"
	"crypto/aes"
	"crypto/cipher"
	"crypto/md5"
	"crypto/rsa"
	"crypto/x509"
	"encoding/base64"
	"encoding/hex"
	"encoding/pem"
	"math/big"
	"strings"

	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/alist-org/alist/v3/pkg/utils/random"
)

var (
	linuxapiKey = []byte("rFgB&h#%2?^eDg:Q")
	eapiKey     = []byte("e82ckenh8dichen8")
	iv          = []byte("0102030405060708")
	presetKey   = []byte("0CoJUm6Qyw8W8jud")
	publicKey   = []byte("-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDgtQn2JZ34ZC28NWYpAUd98iZ37BUrX/aKzmFbt7clFSs6sXqHauqKWqdtLkF2KexO40H1YTX8z2lSgBBOAxLsvaklV8k4cBFK9snQXE9/DDaFt6Rr7iVZMldczhC0JNgTz+SHXT6CBHuX3e9SdB1Ua44oncaTWz7OBGLbCiK45wIDAQAB\n-----END PUBLIC KEY-----")
	stdChars    = []byte("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
)

func aesKeyPending(key []byte) []byte {
	k := len(key)
	count := 0
	switch true {
	case k <= 16:
		count = 16 - k
	case k <= 24:
		count = 24 - k
	case k <= 32:
		count = 32 - k
	default:
		return key[:32]
	}
	if count == 0 {
		return key
	}

	return append(key, bytes.Repeat([]byte{0}, count)...)
}

func pkcs7Padding(src []byte, blockSize int) []byte {
	padding := blockSize - len(src)%blockSize
	padtext := bytes.Repeat([]byte{byte(padding)}, padding)
	return append(src, padtext...)
}

func aesCBCEncrypt(src, key, iv []byte) []byte {
	block, _ := aes.NewCipher(aesKeyPending(key))
	src = pkcs7Padding(src, block.BlockSize())
	dst := make([]byte, len(src))

	mode := cipher.NewCBCEncrypter(block, iv)
	mode.CryptBlocks(dst, src)

	return dst
}

func aesECBEncrypt(src, key []byte) []byte {
	block, _ := aes.NewCipher(aesKeyPending(key))

	src = pkcs7Padding(src, block.BlockSize())
	dst := make([]byte, len(src))

	ecbCryptBlocks(block, dst, src)

	return dst
}

func ecbCryptBlocks(block cipher.Block, dst, src []byte) {
	bs := block.BlockSize()

	for len(src) > 0 {
		block.Encrypt(dst, src[:bs])
		src = src[bs:]
		dst = dst[bs:]
	}
}

func rsaEncrypt(buffer, key []byte) []byte {
	buffers := make([]byte, 128-16, 128)
	buffers = append(buffers, buffer...)
	block, _ := pem.Decode(key)
	pubInterface, _ := x509.ParsePKIXPublicKey(block.Bytes)
	pub := pubInterface.(*rsa.PublicKey)
	c := new(big.Int).SetBytes([]byte(buffers))
	return c.Exp(c, big.NewInt(int64(pub.E)), pub.N).Bytes()
}

func getSecretKey() ([]byte, []byte) {
	key := make([]byte, 16)
	reversed := make([]byte, 16)
	for i := 0; i < 16; i++ {
		result := stdChars[random.RangeInt64(0, 62)]
		key[i] = result
		reversed[15-i] = result
	}
	return key, reversed
}

func weapi(data map[string]string) map[string]string {
	text, _ := utils.Json.Marshal(data)
	secretKey, reversedKey := getSecretKey()
	params := []byte(base64.StdEncoding.EncodeToString(aesCBCEncrypt(text, presetKey, iv)))
	return map[string]string{
		"params":    base64.StdEncoding.EncodeToString(aesCBCEncrypt(params, reversedKey, iv)),
		"encSecKey": hex.EncodeToString(rsaEncrypt(secretKey, publicKey)),
	}
}

func eapi(url string, data map[string]interface{}) map[string]string {
	text, _ := utils.Json.Marshal(data)
	msg := "nobody" + url + "use" + string(text) + "md5forencrypt"
	h := md5.New()
	h.Write([]byte(msg))
	digest := hex.EncodeToString(h.Sum(nil))
	params := []byte(url + "-36cd479b6b5-" + string(text) + "-36cd479b6b5-" + digest)
	return map[string]string{
		"params": hex.EncodeToString(aesECBEncrypt(params, eapiKey)),
	}
}

func linuxapi(data map[string]interface{}) map[string]string {
	text, _ := utils.Json.Marshal(data)
	return map[string]string{
		"eparams": strings.ToUpper(hex.EncodeToString(aesECBEncrypt(text, linuxapiKey))),
	}
}
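crypto.go above builds the NetEase "weapi" payload by AES-CBC-encrypting the JSON twice (once with the preset key, once with a random reversed key) and RSA-encrypting the random key. A self-contained sketch of just the AES-CBC + PKCS#7 step using only the standard library; the key and IV below are placeholders, not the driver's preset values:

```go
package main

import (
	"bytes"
	"crypto/aes"
	"crypto/cipher"
	"encoding/base64"
	"fmt"
)

func pkcs7Pad(src []byte, blockSize int) []byte {
	padding := blockSize - len(src)%blockSize
	return append(src, bytes.Repeat([]byte{byte(padding)}, padding)...)
}

func aesCBCEncrypt(plaintext, key, iv []byte) ([]byte, error) {
	block, err := aes.NewCipher(key)
	if err != nil {
		return nil, err
	}
	plaintext = pkcs7Pad(plaintext, block.BlockSize())
	dst := make([]byte, len(plaintext))
	cipher.NewCBCEncrypter(block, iv).CryptBlocks(dst, plaintext)
	return dst, nil
}

func main() {
	// Placeholder 16-byte key and IV for illustration.
	key := []byte("0123456789abcdef")
	iv := []byte("fedcba9876543210")
	ct, err := aesCBCEncrypt([]byte(`{"limit":"200","offset":"0"}`), key, iv)
	if err != nil {
		panic(err)
	}
	fmt.Println(base64.StdEncoding.EncodeToString(ct))
}
```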
110  drivers/netease_music/driver.go  Normal file
@ -0,0 +1,110 @@
package netease_music

import (
	"context"
	"strings"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	_ "golang.org/x/image/webp"
)

type NeteaseMusic struct {
	model.Storage
	Addition

	csrfToken     string
	musicU        string
	fileMapByName map[string]model.Obj
}

func (d *NeteaseMusic) Config() driver.Config {
	return config
}

func (d *NeteaseMusic) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *NeteaseMusic) Init(ctx context.Context) error {
	d.csrfToken = d.Addition.getCookie("__csrf")
	d.musicU = d.Addition.getCookie("MUSIC_U")

	if d.csrfToken == "" || d.musicU == "" {
		return errs.EmptyToken
	}

	return nil
}

func (d *NeteaseMusic) Drop(ctx context.Context) error {
	return nil
}

func (d *NeteaseMusic) Get(ctx context.Context, path string) (model.Obj, error) {
	if path == "/" {
		return &model.Object{
			IsFolder: true,
			Path:     path,
		}, nil
	}

	fragments := strings.Split(path, "/")
	if len(fragments) > 1 {
		fileName := fragments[1]
		if strings.HasSuffix(fileName, ".lrc") {
			lrc := d.fileMapByName[fileName]
			return d.getLyricObj(lrc)
		}
		if song, ok := d.fileMapByName[fileName]; ok {
			return song, nil
		} else {
			return nil, errs.ObjectNotFound
		}
	}

	return nil, errs.ObjectNotFound
}

func (d *NeteaseMusic) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	return d.getSongObjs(args)
}

func (d *NeteaseMusic) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	if lrc, ok := file.(*LyricObj); ok {
		if args.Type == "parsed" {
			return lrc.getLyricLink(), nil
		} else {
			return lrc.getProxyLink(args), nil
		}
	}

	return d.getSongLink(file)
}

func (d *NeteaseMusic) Remove(ctx context.Context, obj model.Obj) error {
	return d.removeSongObj(obj)
}

func (d *NeteaseMusic) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	return d.putSongStream(stream)
}

func (d *NeteaseMusic) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotSupport
}

func (d *NeteaseMusic) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotSupport
}

func (d *NeteaseMusic) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
	return errs.NotSupport
}

func (d *NeteaseMusic) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
	return errs.NotSupport
}

var _ driver.Driver = (*NeteaseMusic)(nil)
32  drivers/netease_music/meta.go  Normal file
@ -0,0 +1,32 @@
package netease_music

import (
	"regexp"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	Cookie    string `json:"cookie" type:"text" required:"true" help:""`
	SongLimit uint64 `json:"song_limit" default:"200" type:"number" help:"only get 200 songs by default"`
}

func (ad *Addition) getCookie(name string) string {
	re := regexp.MustCompile(name + "=([^(;|$)]+)")
	matches := re.FindStringSubmatch(ad.Cookie)
	if len(matches) < 2 {
		return ""
	}
	return matches[1]
}

var config = driver.Config{
	Name: "NeteaseMusic",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &NeteaseMusic{}
	})
}
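meta.go above pulls individual values out of the raw Cookie string the user pastes into the driver config. A quick sketch of that extraction, mirroring getCookie; the cookie header is made up for the example:

```go
package main

import (
	"fmt"
	"regexp"
)

// getCookie mirrors Addition.getCookie: pull one value out of a raw Cookie header.
func getCookie(raw, name string) string {
	re := regexp.MustCompile(name + "=([^(;|$)]+)")
	m := re.FindStringSubmatch(raw)
	if len(m) < 2 {
		return ""
	}
	return m[1]
}

func main() {
	// Hypothetical cookie header for illustration.
	raw := "MUSIC_U=abc123; __csrf=def456; other=1"
	fmt.Println(getCookie(raw, "MUSIC_U"), getCookie(raw, "__csrf"))
}
```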
116  drivers/netease_music/types.go  Normal file
@ -0,0 +1,116 @@
package netease_music

import (
	"context"
	"io"
	"net/http"
	"strconv"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/sign"
	"github.com/alist-org/alist/v3/pkg/http_range"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/alist-org/alist/v3/pkg/utils/random"
	"github.com/alist-org/alist/v3/server/common"
)

type HostsResp struct {
	Upload []string `json:"upload"`
}

type SongResp struct {
	Data []struct {
		Url string `json:"url"`
	} `json:"data"`
}

type ListResp struct {
	Size    string `json:"size"`
	MaxSize string `json:"maxSize"`
	Data    []struct {
		AddTime    int64  `json:"addTime"`
		FileName   string `json:"fileName"`
		FileSize   int64  `json:"fileSize"`
		SongId     int64  `json:"songId"`
		SimpleSong struct {
			Al struct {
				PicUrl string `json:"picUrl"`
			} `json:"al"`
		} `json:"simpleSong"`
	} `json:"data"`
}

type LyricObj struct {
	model.Object
	lyric string
}

func (lrc *LyricObj) getProxyLink(args model.LinkArgs) *model.Link {
	rawURL := common.GetApiUrl(args.HttpReq) + "/p" + lrc.Path
	rawURL = utils.EncodePath(rawURL, true) + "?type=parsed&sign=" + sign.Sign(lrc.Path)
	return &model.Link{URL: rawURL}
}

func (lrc *LyricObj) getLyricLink() *model.Link {
	reader := strings.NewReader(lrc.lyric)
	return &model.Link{
		RangeReadCloser: &model.RangeReadCloser{
			RangeReader: func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
				if httpRange.Length < 0 {
					return io.NopCloser(reader), nil
				}
				sr := io.NewSectionReader(reader, httpRange.Start, httpRange.Length)
				return io.NopCloser(sr), nil
			},
			Closers: utils.EmptyClosers(),
		},
	}
}

type ReqOption struct {
	crypto  string
	stream  model.FileStreamer
	data    map[string]string
	headers map[string]string
	cookies []*http.Cookie
	url     string
}

type Characteristic map[string]string

func (ch *Characteristic) fromDriver(d *NeteaseMusic) *Characteristic {
	*ch = map[string]string{
		"osver":       "",
		"deviceId":    "",
		"mobilename":  "",
		"appver":      "6.1.1",
		"versioncode": "140",
		"buildver":    strconv.FormatInt(time.Now().Unix(), 10),
		"resolution":  "1920x1080",
		"os":          "android",
		"channel":     "",
		"requestId":   strconv.FormatInt(time.Now().Unix()*1000, 10) + strconv.Itoa(int(random.RangeInt64(0, 1000))),
		"MUSIC_U":     d.musicU,
	}
	return ch
}

func (ch Characteristic) toCookies() []*http.Cookie {
	cookies := make([]*http.Cookie, 0)
	for k, v := range ch {
		cookies = append(cookies, &http.Cookie{Name: k, Value: v})
	}
	return cookies
}

func (ch *Characteristic) merge(data map[string]string) map[string]interface{} {
	body := map[string]interface{}{
		"header": ch,
	}
	for k, v := range data {
		body[k] = v
	}
	return body
}
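getLyricLink above serves an in-memory lyric string and satisfies range requests with io.NewSectionReader over a strings.Reader. A stdlib-only sketch of that range handling, detached from alist's model types:

```go
package main

import (
	"fmt"
	"io"
	"strings"
)

// readRange returns content[start:start+length], or everything from start
// when length is negative, the same convention the lyric RangeReader uses.
func readRange(content string, start, length int64) (string, error) {
	r := strings.NewReader(content)
	if length < 0 {
		if _, err := r.Seek(start, io.SeekStart); err != nil {
			return "", err
		}
		b, err := io.ReadAll(r)
		return string(b), err
	}
	b, err := io.ReadAll(io.NewSectionReader(r, start, length))
	return string(b), err
}

func main() {
	lyric := "[00:01.00]line one\n[00:05.00]line two\n"
	s, _ := readRange(lyric, 19, 19)
	fmt.Printf("%q\n", s) // second lyric line
}
```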
208  drivers/netease_music/upload.go  Normal file
@ -0,0 +1,208 @@
package netease_music

import (
	"crypto/md5"
	"encoding/hex"
	"io"
	"net/http"
	"strconv"
	"strings"

	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/dhowden/tag"
)

type token struct {
	resourceId string
	objectKey  string
	token      string
}

type songmeta struct {
	needUpload bool
	songId     string
	name       string
	artist     string
	album      string
}

type uploader struct {
	driver   *NeteaseMusic
	file     model.File
	meta     songmeta
	md5      string
	ext      string
	size     string
	filename string
}

func (u *uploader) init(stream model.FileStreamer) error {
	u.filename = stream.GetName()
	u.size = strconv.FormatInt(stream.GetSize(), 10)

	u.ext = "mp3"
	if strings.HasSuffix(stream.GetMimetype(), "flac") {
		u.ext = "flac"
	}

	h := md5.New()
	io.Copy(h, stream)
	u.md5 = hex.EncodeToString(h.Sum(nil))
	_, err := u.file.Seek(0, io.SeekStart)
	if err != nil {
		return err
	}

	if m, err := tag.ReadFrom(u.file); err != nil {
		u.meta = songmeta{}
	} else {
		u.meta = songmeta{
			name:   m.Title(),
			artist: m.Artist(),
			album:  m.Album(),
		}
	}
	if u.meta.name == "" {
		u.meta.name = u.filename
	}
	if u.meta.album == "" {
		u.meta.album = "未知专辑"
	}
	if u.meta.artist == "" {
		u.meta.artist = "未知艺术家"
	}
	_, err = u.file.Seek(0, io.SeekStart)
	if err != nil {
		return err
	}

	return nil
}

func (u *uploader) checkIfExisted() error {
	body, err := u.driver.request("https://interface.music.163.com/api/cloud/upload/check", http.MethodPost,
		ReqOption{
			crypto: "weapi",
			data: map[string]string{
				"ext":     "",
				"songId":  "0",
				"version": "1",
				"bitrate": "999000",
				"length":  u.size,
				"md5":     u.md5,
			},
			cookies: []*http.Cookie{
				{Name: "os", Value: "pc"},
				{Name: "appver", Value: "2.9.7"},
			},
		},
	)
	if err != nil {
		return err
	}

	u.meta.songId = utils.Json.Get(body, "songId").ToString()
	u.meta.needUpload = utils.Json.Get(body, "needUpload").ToBool()

	return nil
}

func (u *uploader) allocToken(bucket ...string) (token, error) {
	if len(bucket) == 0 {
		bucket = []string{""}
	}

	body, err := u.driver.request("https://music.163.com/weapi/nos/token/alloc", http.MethodPost, ReqOption{
		crypto: "weapi",
		data: map[string]string{
			"bucket":      bucket[0],
			"local":       "false",
			"type":        "audio",
			"nos_product": "3",
			"filename":    u.filename,
			"md5":         u.md5,
			"ext":         u.ext,
		},
	})
	if err != nil {
		return token{}, err
	}

	return token{
		resourceId: utils.Json.Get(body, "result", "resourceId").ToString(),
		objectKey:  utils.Json.Get(body, "result", "objectKey").ToString(),
		token:      utils.Json.Get(body, "result", "token").ToString(),
	}, nil
}

func (u *uploader) publishInfo(resourceId string) error {
	body, err := u.driver.request("https://music.163.com/api/upload/cloud/info/v2", http.MethodPost, ReqOption{
		crypto: "weapi",
		data: map[string]string{
			"md5":        u.md5,
			"filename":   u.filename,
			"song":       u.meta.name,
			"album":      u.meta.album,
			"artist":     u.meta.artist,
			"songid":     u.meta.songId,
			"resourceId": resourceId,
			"bitrate":    "999000",
		},
	})
	if err != nil {
		return err
	}

	_, err = u.driver.request("https://interface.music.163.com/api/cloud/pub/v2", http.MethodPost, ReqOption{
		crypto: "weapi",
		data: map[string]string{
			"songid": utils.Json.Get(body, "songId").ToString(),
		},
	})
	if err != nil {
		return err
	}

	return nil
}

func (u *uploader) upload(stream model.FileStreamer) error {
	bucket := "jd-musicrep-privatecloud-audio-public"
	token, err := u.allocToken(bucket)
	if err != nil {
		return err
	}

	body, err := u.driver.request("https://wanproxy.127.net/lbs?version=1.0&bucketname="+bucket, http.MethodGet,
		ReqOption{},
	)
	if err != nil {
		return err
	}
	var resp HostsResp
	err = utils.Json.Unmarshal(body, &resp)
	if err != nil {
		return err
	}

	objectKey := strings.ReplaceAll(token.objectKey, "/", "%2F")
	_, err = u.driver.request(
		resp.Upload[0]+"/"+bucket+"/"+objectKey+"?offset=0&complete=true&version=1.0",
		http.MethodPost,
		ReqOption{
			stream: stream,
			headers: map[string]string{
				"x-nos-token":    token.token,
				"Content-Type":   "audio/mpeg",
				"Content-Length": u.size,
				"Content-MD5":    u.md5,
			},
		},
	)
	if err != nil {
		return err
	}

	return nil
}
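uploader.init above hashes the cached file and then rewinds it so the same handle can be reused for tag parsing and the upload body. A small sketch of that hash-then-rewind pattern on a plain *os.File:

```go
package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
	"io"
	"os"
)

// md5AndRewind hashes the whole file and seeks back to the start so the
// caller can read it again, as the uploader does before tag.ReadFrom.
func md5AndRewind(f *os.File) (string, error) {
	h := md5.New()
	if _, err := io.Copy(h, f); err != nil {
		return "", err
	}
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

func main() {
	f, err := os.CreateTemp("", "song-*.mp3")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())
	if _, err := f.WriteString("fake audio bytes"); err != nil {
		panic(err)
	}
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		panic(err)
	}
	sum, err := md5AndRewind(f)
	if err != nil {
		panic(err)
	}
	fmt.Println(sum)
}
```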
246  drivers/netease_music/util.go  Normal file
@ -0,0 +1,246 @@
package netease_music

import (
	"io"
	"net/http"
	"path"
	"regexp"
	"strconv"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
)

func (d *NeteaseMusic) request(url, method string, opt ReqOption) ([]byte, error) {
	req := base.RestyClient.R()

	req.SetHeader("Cookie", d.Addition.Cookie)

	if strings.Contains(url, "music.163.com") {
		req.SetHeader("Referer", "https://music.163.com")
	}

	if opt.cookies != nil {
		for _, cookie := range opt.cookies {
			req.SetCookie(cookie)
		}
	}

	if opt.headers != nil {
		for header, value := range opt.headers {
			req.SetHeader(header, value)
		}
	}

	data := opt.data
	if opt.crypto == "weapi" {
		data = weapi(data)
		re, _ := regexp.Compile(`/\w*api/`)
		url = re.ReplaceAllString(url, "/weapi/")
	} else if opt.crypto == "eapi" {
		ch := new(Characteristic).fromDriver(d)
		req.SetCookies(ch.toCookies())
		data = eapi(opt.url, ch.merge(data))
		re, _ := regexp.Compile(`/\w*api/`)
		url = re.ReplaceAllString(url, "/eapi/")
	} else if opt.crypto == "linuxapi" {
		re, _ := regexp.Compile(`/\w*api/`)
		data = linuxapi(map[string]interface{}{
			"url":    re.ReplaceAllString(url, "/api/"),
			"method": method,
			"params": data,
		})
		req.Header.Set("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.90 Safari/537.36")
		url = "https://music.163.com/api/linux/forward"
	}

	if method == http.MethodPost {
		if opt.stream != nil {
			req.SetContentLength(true)
			req.SetBody(io.ReadCloser(opt.stream))
		} else {
			req.SetFormData(data)
		}
		res, err := req.Post(url)
		return res.Body(), err
	}

	if method == http.MethodGet {
		res, err := req.Get(url)
		return res.Body(), err
	}

	return nil, errs.NotImplement
}

func (d *NeteaseMusic) getSongObjs(args model.ListArgs) ([]model.Obj, error) {
	body, err := d.request("https://music.163.com/weapi/v1/cloud/get", http.MethodPost, ReqOption{
		crypto: "weapi",
		data: map[string]string{
			"limit":  strconv.FormatUint(d.Addition.SongLimit, 10),
			"offset": "0",
		},
		cookies: []*http.Cookie{
			{Name: "os", Value: "pc"},
		},
	})
	if err != nil {
		return nil, err
	}

	var resp ListResp
	err = utils.Json.Unmarshal(body, &resp)
	if err != nil {
		return nil, err
	}

	d.fileMapByName = make(map[string]model.Obj)
	files := make([]model.Obj, 0, len(resp.Data))
	for _, f := range resp.Data {
		song := &model.ObjThumb{
			Object: model.Object{
				IsFolder: false,
				Size:     f.FileSize,
				Name:     f.FileName,
				Modified: time.UnixMilli(f.AddTime),
				ID:       strconv.FormatInt(f.SongId, 10),
			},
			Thumbnail: model.Thumbnail{Thumbnail: f.SimpleSong.Al.PicUrl},
		}
		d.fileMapByName[song.Name] = song
		files = append(files, song)

		// map song id for lyric
		lrcName := strings.Split(f.FileName, ".")[0] + ".lrc"
		lrc := &model.Object{
			IsFolder: false,
			Name:     lrcName,
			Path:     path.Join(args.ReqPath, lrcName),
			ID:       strconv.FormatInt(f.SongId, 10),
		}
		d.fileMapByName[lrc.Name] = lrc
	}

	return files, nil
}

func (d *NeteaseMusic) getSongLink(file model.Obj) (*model.Link, error) {
	body, err := d.request(
		"https://music.163.com/api/song/enhance/player/url", http.MethodPost, ReqOption{
			crypto: "linuxapi",
			data: map[string]string{
				"ids": "[" + file.GetID() + "]",
				"br":  "999000",
			},
			cookies: []*http.Cookie{
				{Name: "os", Value: "pc"},
			},
		},
	)
	if err != nil {
		return nil, err
	}

	var resp SongResp
	err = utils.Json.Unmarshal(body, &resp)
	if err != nil {
		return nil, err
	}

	if len(resp.Data) < 1 {
		return nil, errs.ObjectNotFound
	}

	return &model.Link{URL: resp.Data[0].Url}, nil
}

func (d *NeteaseMusic) getLyricObj(file model.Obj) (model.Obj, error) {
	if lrc, ok := file.(*LyricObj); ok {
		return lrc, nil
	}

	body, err := d.request(
		"https://music.163.com/api/song/lyric?_nmclfl=1", http.MethodPost, ReqOption{
			data: map[string]string{
				"id": file.GetID(),
				"tv": "-1",
				"lv": "-1",
				"rv": "-1",
				"kv": "-1",
			},
			cookies: []*http.Cookie{
				{Name: "os", Value: "ios"},
			},
		},
	)
	if err != nil {
		return nil, err
	}

	lyric := utils.Json.Get(body, "lrc", "lyric").ToString()

	return &LyricObj{
		lyric: lyric,
		Object: model.Object{
			IsFolder: false,
			ID:       file.GetID(),
			Name:     file.GetName(),
			Path:     file.GetPath(),
			Size:     int64(len(lyric)),
		},
	}, nil
}

func (d *NeteaseMusic) removeSongObj(file model.Obj) error {
	_, err := d.request("http://music.163.com/weapi/cloud/del", http.MethodPost, ReqOption{
		crypto: "weapi",
		data: map[string]string{
			"songIds": "[" + file.GetID() + "]",
		},
	})

	return err
}

func (d *NeteaseMusic) putSongStream(stream model.FileStreamer) error {
	tmp, err := stream.CacheFullInTempFile()
	if err != nil {
		return err
	}
	defer tmp.Close()

	u := uploader{driver: d, file: tmp}

	err = u.init(stream)
	if err != nil {
		return err
	}

	err = u.checkIfExisted()
	if err != nil {
		return err
	}

	token, err := u.allocToken()
	if err != nil {
		return err
	}

	if u.meta.needUpload {
		err = u.upload(stream)
		if err != nil {
			return err
		}
	}

	err = u.publishInfo(token.resourceId)
	if err != nil {
		return err
	}

	return nil
}
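request above rewrites /api/ paths to /weapi/ (or /eapi/, or the linuxapi forward endpoint) and posts the encrypted parameters as form data through resty. A hedged sketch of just that routing and POST with go-resty; the form values here are placeholders rather than real ciphertext:

```go
package main

import (
	"fmt"
	"regexp"

	"github.com/go-resty/resty/v2"
)

func main() {
	url := "https://music.163.com/api/v1/cloud/get"
	// weapi requests go to the /weapi/ variant of the endpoint.
	url = regexp.MustCompile(`/\w*api/`).ReplaceAllString(url, "/weapi/")

	resp, err := resty.New().R().
		SetHeader("Referer", "https://music.163.com").
		SetFormData(map[string]string{
			"params":    "<base64 ciphertext>", // placeholder
			"encSecKey": "<hex rsa blob>",      // placeholder
		}).
		Post(url)
	if err != nil {
		panic(err)
	}
	fmt.Println(resp.StatusCode())
}
```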
@ -118,6 +118,7 @@ func (d *Onedrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName str
		"folder": base.Json{},
		"@microsoft.graph.conflictBehavior": "rename",
	}
+	// TODO: fix folder ctime/mtime; OneDrive accepts a fileSystemInfo field in data, but this endpoint does not provide ctime/mtime
	_, err := d.Request(url, http.MethodPost, func(req *resty.Request) {
		req.SetBody(data)
	}, nil)
@ -24,12 +24,12 @@ type RespErr struct {
}

type File struct {
	Id   string `json:"id"`
	Name string `json:"name"`
	Size int64  `json:"size"`
-	LastModifiedDateTime time.Time `json:"lastModifiedDateTime"`
+	FileSystemInfo *FileSystemInfoFacet `json:"fileSystemInfo"`
	Url  string `json:"@microsoft.graph.downloadUrl"`
	File *struct {
		MimeType string `json:"mimeType"`
	} `json:"file"`
	Thumbnails []struct {
@ -58,7 +58,7 @@ func fileToObj(f File, parentID string) *Object {
		ID:   f.Id,
		Name: f.Name,
		Size: f.Size,
-		Modified: f.LastModifiedDateTime,
+		Modified: f.FileSystemInfo.LastModifiedDateTime,
		IsFolder: f.File == nil,
	},
	Thumbnail: model.Thumbnail{Thumbnail: thumb},
@ -72,3 +72,20 @@ type Files struct {
	Value    []File `json:"value"`
	NextLink string `json:"@odata.nextLink"`
}
+
+// Metadata represents a request to update Metadata.
+// It includes only the writeable properties.
+// omitempty is intentionally included for all, per https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_update?view=odsp-graph-online#request-body
+type Metadata struct {
+	Description    string               `json:"description,omitempty"`    // Provides a user-visible description of the item. Read-write. Only on OneDrive Personal. Undocumented limit of 1024 characters.
+	FileSystemInfo *FileSystemInfoFacet `json:"fileSystemInfo,omitempty"` // File system information on client. Read-write.
+}
+
+// FileSystemInfoFacet contains properties that are reported by the
+// device's local file system for the local version of an item. This
+// facet can be used to specify the last modified date or created date
+// of the item as it was on the local device.
+type FileSystemInfoFacet struct {
+	CreatedDateTime      time.Time `json:"createdDateTime,omitempty"`      // The UTC date and time the file was created on a client.
+	LastModifiedDateTime time.Time `json:"lastModifiedDateTime,omitempty"` // The UTC date and time the file was last modified on a client.
+}
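The new Metadata and FileSystemInfoFacet types above are what the driver PATCHes to the Graph API (and nests under "item" when creating an upload session) so client timestamps survive the upload. A sketch of the JSON body they produce, using local copies of the structs; the dates are arbitrary examples:

```go
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

// Local copies of the structs from the hunk above, for illustration only.
type FileSystemInfoFacet struct {
	CreatedDateTime      time.Time `json:"createdDateTime,omitempty"`
	LastModifiedDateTime time.Time `json:"lastModifiedDateTime,omitempty"`
}

type Metadata struct {
	Description    string               `json:"description,omitempty"`
	FileSystemInfo *FileSystemInfoFacet `json:"fileSystemInfo,omitempty"`
}

func main() {
	m := Metadata{
		FileSystemInfo: &FileSystemInfoFacet{
			CreatedDateTime:      time.Date(2023, 5, 1, 8, 0, 0, 0, time.UTC),
			LastModifiedDateTime: time.Date(2024, 1, 2, 9, 30, 0, 0, time.UTC),
		},
	}
	body, _ := json.MarshalIndent(m, "", "  ")
	fmt.Println(string(body)) // the request body sent with http.MethodPatch
}
```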
@ -127,7 +127,7 @@ func (d *Onedrive) Request(url string, method string, callback base.ReqCallback,

func (d *Onedrive) getFiles(path string) ([]File, error) {
	var res []File
-	nextLink := d.GetMetaUrl(false, path) + "/children?$top=5000&$expand=thumbnails($select=medium)&$select=id,name,size,lastModifiedDateTime,content.downloadUrl,file,parentReference"
+	nextLink := d.GetMetaUrl(false, path) + "/children?$top=5000&$expand=thumbnails($select=medium)&$select=id,name,size,fileSystemInfo,content.downloadUrl,file,parentReference"
	for nextLink != "" {
		var files Files
		_, err := d.Request(nextLink, http.MethodGet, nil, &files)
@ -148,7 +148,10 @@ func (d *Onedrive) GetFile(path string) (*File, error) {
}

func (d *Onedrive) upSmall(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) error {
-	url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/content"
+	filepath := stdpath.Join(dstDir.GetPath(), stream.GetName())
+	// 1. upload new file
+	// ApiDoc: https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_put_content?view=odsp-graph-online
+	url := d.GetMetaUrl(false, filepath) + "/content"
	data, err := io.ReadAll(stream)
	if err != nil {
		return err
@ -156,12 +159,50 @@ func (d *Onedrive) upSmall(ctx context.Context, dstDir model.Obj, stream model.F
	_, err = d.Request(url, http.MethodPut, func(req *resty.Request) {
		req.SetBody(data).SetContext(ctx)
	}, nil)
+	if err != nil {
+		return fmt.Errorf("onedrive: Failed to upload new file(path=%v): %w", filepath, err)
+	}
+
+	// 2. update metadata
+	err = d.updateMetadata(ctx, stream, filepath)
+	if err != nil {
+		return fmt.Errorf("onedrive: Failed to update file(path=%v) metadata: %w", filepath, err)
+	}
+	return nil
+}
+
+func (d *Onedrive) updateMetadata(ctx context.Context, stream model.FileStreamer, filepath string) error {
+	url := d.GetMetaUrl(false, filepath)
+	metadata := toAPIMetadata(stream)
+	// ApiDoc: https://learn.microsoft.com/en-us/onedrive/developer/rest-api/api/driveitem_update?view=odsp-graph-online
+	_, err := d.Request(url, http.MethodPatch, func(req *resty.Request) {
+		req.SetBody(metadata).SetContext(ctx)
+	}, nil)
	return err
}
+
+func toAPIMetadata(stream model.FileStreamer) Metadata {
+	metadata := Metadata{
+		FileSystemInfo: &FileSystemInfoFacet{},
+	}
+	if !stream.ModTime().IsZero() {
+		metadata.FileSystemInfo.LastModifiedDateTime = stream.ModTime()
+	}
+	if !stream.CreateTime().IsZero() {
+		metadata.FileSystemInfo.CreatedDateTime = stream.CreateTime()
+	}
+	if stream.CreateTime().IsZero() && !stream.ModTime().IsZero() {
+		metadata.FileSystemInfo.CreatedDateTime = stream.CreateTime()
+	}
+	return metadata
+}

func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/createUploadSession"
-	res, err := d.Request(url, http.MethodPost, nil, nil)
+	metadata := map[string]interface{}{"item": toAPIMetadata(stream)}
+	res, err := d.Request(url, http.MethodPost, func(req *resty.Request) {
+		req.SetBody(metadata).SetContext(ctx)
+	}, nil)
	if err != nil {
		return err
	}
@ -17,13 +17,14 @@ import (
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
	"github.com/go-resty/resty/v2"
	log "github.com/sirupsen/logrus"
+	"golang.org/x/oauth2"
)

type PikPak struct {
	model.Storage
	Addition
-	RefreshToken string
-	AccessToken  string
+	oauth2Token oauth2.TokenSource
}

func (d *PikPak) Config() driver.Config {
@ -34,8 +35,32 @@ func (d *PikPak) GetAddition() driver.Additional {
	return &d.Addition
}

-func (d *PikPak) Init(ctx context.Context) error {
-	return d.login()
+func (d *PikPak) Init(ctx context.Context) (err error) {
+	if d.ClientID == "" || d.ClientSecret == "" {
+		d.ClientID = "YNxT9w7GMdWvEOKa"
+		d.ClientSecret = "dbw2OtmVEeuUvIptb1Coyg"
+	}
+
+	withClient := func(ctx context.Context) context.Context {
+		return context.WithValue(ctx, oauth2.HTTPClient, base.HttpClient)
+	}
+
+	oauth2Config := &oauth2.Config{
+		ClientID:     d.ClientID,
+		ClientSecret: d.ClientSecret,
+		Endpoint: oauth2.Endpoint{
+			AuthURL:   "https://user.mypikpak.com/v1/auth/signin",
+			TokenURL:  "https://user.mypikpak.com/v1/auth/token",
+			AuthStyle: oauth2.AuthStyleInParams,
+		},
+	}
+
+	oauth2Token, err := oauth2Config.PasswordCredentialsToken(withClient(ctx), d.Username, d.Password)
+	if err != nil {
+		return err
+	}
+	d.oauth2Token = oauth2Config.TokenSource(withClient(context.Background()), oauth2Token)
+	return nil
}

func (d *PikPak) Drop(ctx context.Context) error {
@ -9,6 +9,8 @@ type Addition struct {
	driver.RootID
	Username string `json:"username" required:"true"`
	Password string `json:"password" required:"true"`
+	ClientID     string `json:"client_id" required:"true" default:"YNxT9w7GMdWvEOKa"`
+	ClientSecret string `json:"client_secret" required:"true" default:"dbw2OtmVEeuUvIptb1Coyg"`
	DisableMediaLink bool `json:"disable_media_link"`
}
@ -1,77 +1,24 @@
package pikpak

import (
-	"crypto/sha1"
-	"encoding/hex"
	"errors"
-	"io"
	"net/http"

	"github.com/alist-org/alist/v3/drivers/base"
-	"github.com/alist-org/alist/v3/internal/op"
	"github.com/go-resty/resty/v2"
-	jsoniter "github.com/json-iterator/go"
)

// do others that not defined in Driver interface
-
-func (d *PikPak) login() error {
-	url := "https://user.mypikpak.com/v1/auth/signin"
-	var e RespErr
-	res, err := base.RestyClient.R().SetError(&e).SetBody(base.Json{
-		"captcha_token": "",
-		"client_id":     "YNxT9w7GMdWvEOKa",
-		"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
-		"username":      d.Username,
-		"password":      d.Password,
-	}).Post(url)
-	if err != nil {
-		return err
-	}
-	if e.ErrorCode != 0 {
-		return errors.New(e.Error)
-	}
-	data := res.Body()
-	d.RefreshToken = jsoniter.Get(data, "refresh_token").ToString()
-	d.AccessToken = jsoniter.Get(data, "access_token").ToString()
-	return nil
-}
-
-func (d *PikPak) refreshToken() error {
-	url := "https://user.mypikpak.com/v1/auth/token"
-	var e RespErr
-	res, err := base.RestyClient.R().SetError(&e).
-		SetHeader("user-agent", "").SetBody(base.Json{
-		"client_id":     "YNxT9w7GMdWvEOKa",
-		"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
-		"grant_type":    "refresh_token",
-		"refresh_token": d.RefreshToken,
-	}).Post(url)
-	if err != nil {
-		d.Status = err.Error()
-		op.MustSaveDriverStorage(d)
-		return err
-	}
-	if e.ErrorCode != 0 {
-		if e.ErrorCode == 4126 {
-			// refresh_token invalid, re-login
-			return d.login()
-		}
-		d.Status = e.Error
-		op.MustSaveDriverStorage(d)
-		return errors.New(e.Error)
-	}
-	data := res.Body()
-	d.Status = "work"
-	d.RefreshToken = jsoniter.Get(data, "refresh_token").ToString()
-	d.AccessToken = jsoniter.Get(data, "access_token").ToString()
-	op.MustSaveDriverStorage(d)
-	return nil
-}
-
func (d *PikPak) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
	req := base.RestyClient.R()
-	req.SetHeader("Authorization", "Bearer "+d.AccessToken)
+	token, err := d.oauth2Token.Token()
+	if err != nil {
+		return nil, err
+	}
+	req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)
+
	if callback != nil {
		callback(req)
	}
@ -84,17 +31,9 @@ func (d *PikPak) request(url string, method string, callback base.ReqCallback, r
	if err != nil {
		return nil, err
	}
	if e.ErrorCode != 0 {
-		if e.ErrorCode == 16 {
-			// login / refresh token
-			err = d.refreshToken()
-			if err != nil {
-				return nil, err
-			}
-			return d.request(url, method, callback, resp)
-		} else {
-			return nil, errors.New(e.Error)
-		}
+		return nil, errors.New(e.Error)
	}
	return res.Body(), nil
}
@ -126,28 +65,3 @@ func (d *PikPak) getFiles(id string) ([]File, error) {
	}
	return res, nil
}
-
-func getGcid(r io.Reader, size int64) (string, error) {
-	calcBlockSize := func(j int64) int64 {
-		var psize int64 = 0x40000
-		for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
-			psize = psize << 1
-		}
-		return psize
-	}
-
-	hash1 := sha1.New()
-	hash2 := sha1.New()
-	readSize := calcBlockSize(size)
-	for {
-		hash2.Reset()
-		if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
-			if err != io.EOF {
-				return "", err
-			}
-			break
-		}
-		hash1.Write(hash2.Sum(nil))
-	}
-	return hex.EncodeToString(hash1.Sum(nil)), nil
-}
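The PikPak hunks above drop the hand-rolled login/refreshToken pair and lean on golang.org/x/oauth2: a password-credentials grant produces the first token, and a TokenSource refreshes it transparently before each request. A self-contained sketch of that pattern with placeholder endpoints and credentials:

```go
package main

import (
	"context"
	"fmt"
	"net/http"

	"golang.org/x/oauth2"
)

func main() {
	// Placeholder endpoints and credentials for illustration.
	conf := &oauth2.Config{
		ClientID:     "client-id",
		ClientSecret: "client-secret",
		Endpoint: oauth2.Endpoint{
			AuthURL:   "https://auth.example.com/signin",
			TokenURL:  "https://auth.example.com/token",
			AuthStyle: oauth2.AuthStyleInParams,
		},
	}

	// Route the token exchange through a specific *http.Client, as the
	// driver does with base.HttpClient.
	ctx := context.WithValue(context.Background(), oauth2.HTTPClient, http.DefaultClient)

	tok, err := conf.PasswordCredentialsToken(ctx, "user", "pass")
	if err != nil {
		panic(err)
	}

	// TokenSource caches the token and refreshes it when it expires,
	// replacing the manual refresh-on-error-code-16 logic removed above.
	src := conf.TokenSource(ctx, tok)
	fresh, err := src.Token()
	if err != nil {
		panic(err)
	}
	fmt.Println(fresh.TokenType, len(fresh.AccessToken) > 0)
}
```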
@ -4,17 +4,18 @@ import (
	"context"
	"net/http"

+	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
+	"golang.org/x/oauth2"
)

type PikPakShare struct {
	model.Storage
	Addition
-	RefreshToken  string
-	AccessToken   string
+	oauth2Token   oauth2.TokenSource
	PassCodeToken string
}

@ -27,10 +28,31 @@ func (d *PikPakShare) GetAddition() driver.Additional {
}

func (d *PikPakShare) Init(ctx context.Context) error {
-	err := d.login()
+	if d.ClientID == "" || d.ClientSecret == "" {
+		d.ClientID = "YNxT9w7GMdWvEOKa"
+		d.ClientSecret = "dbw2OtmVEeuUvIptb1Coyg"
+	}
+
+	withClient := func(ctx context.Context) context.Context {
+		return context.WithValue(ctx, oauth2.HTTPClient, base.HttpClient)
+	}
+
+	oauth2Config := &oauth2.Config{
+		ClientID:     d.ClientID,
+		ClientSecret: d.ClientSecret,
+		Endpoint: oauth2.Endpoint{
+			AuthURL:   "https://user.mypikpak.com/v1/auth/signin",
+			TokenURL:  "https://user.mypikpak.com/v1/auth/token",
+			AuthStyle: oauth2.AuthStyleInParams,
+		},
+	}
+
+	oauth2Token, err := oauth2Config.PasswordCredentialsToken(withClient(ctx), d.Username, d.Password)
	if err != nil {
		return err
	}
+	d.oauth2Token = oauth2Config.TokenSource(withClient(context.Background()), oauth2Token)
+
	if d.SharePwd != "" {
		err = d.getSharePassToken()
		if err != nil {
@ -67,8 +89,14 @@ func (d *PikPakShare) Link(ctx context.Context, file model.Obj, args model.LinkA
	if err != nil {
		return nil, err
	}
+
+	downloadUrl := resp.FileInfo.WebContentLink
+	if downloadUrl == "" && len(resp.FileInfo.Medias) > 0 {
+		downloadUrl = resp.FileInfo.Medias[0].Link.Url
+	}
+
	link := model.Link{
-		URL: resp.FileInfo.WebContentLink,
+		URL: downloadUrl,
	}
	return &link, nil
}
@ -7,10 +7,12 @@ import (

type Addition struct {
	driver.RootID
	Username string `json:"username" required:"true"`
	Password string `json:"password" required:"true"`
	ShareId  string `json:"share_id" required:"true"`
	SharePwd string `json:"share_pwd"`
+	ClientID     string `json:"client_id" required:"true" default:"YNxT9w7GMdWvEOKa"`
+	ClientSecret string `json:"client_secret" required:"true" default:"dbw2OtmVEeuUvIptb1Coyg"`
}

var config = driver.Config{
@ -5,70 +5,18 @@ import (
	"net/http"

	"github.com/alist-org/alist/v3/drivers/base"
-	"github.com/alist-org/alist/v3/internal/op"
	"github.com/go-resty/resty/v2"
-	jsoniter "github.com/json-iterator/go"
)

-// do others that not defined in Driver interface
-
-func (d *PikPakShare) login() error {
-	url := "https://user.mypikpak.com/v1/auth/signin"
-	var e RespErr
-	res, err := base.RestyClient.R().SetError(&e).SetBody(base.Json{
-		"captcha_token": "",
-		"client_id":     "YNxT9w7GMdWvEOKa",
-		"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
-		"username":      d.Username,
-		"password":      d.Password,
-	}).Post(url)
-	if err != nil {
-		return err
-	}
-	if e.ErrorCode != 0 {
-		return errors.New(e.Error)
-	}
-	data := res.Body()
-	d.RefreshToken = jsoniter.Get(data, "refresh_token").ToString()
-	d.AccessToken = jsoniter.Get(data, "access_token").ToString()
-	return nil
-}
-
-func (d *PikPakShare) refreshToken() error {
-	url := "https://user.mypikpak.com/v1/auth/token"
-	var e RespErr
-	res, err := base.RestyClient.R().SetError(&e).
-		SetHeader("user-agent", "").SetBody(base.Json{
-		"client_id":     "YNxT9w7GMdWvEOKa",
-		"client_secret": "dbw2OtmVEeuUvIptb1Coyg",
-		"grant_type":    "refresh_token",
-		"refresh_token": d.RefreshToken,
-	}).Post(url)
-	if err != nil {
-		d.Status = err.Error()
-		op.MustSaveDriverStorage(d)
-		return err
-	}
-	if e.ErrorCode != 0 {
-		if e.ErrorCode == 4126 {
-			// refresh_token invalid, re-login
-			return d.login()
-		}
-		d.Status = e.Error
-		op.MustSaveDriverStorage(d)
-		return errors.New(e.Error)
-	}
-	data := res.Body()
-	d.Status = "work"
-	d.RefreshToken = jsoniter.Get(data, "refresh_token").ToString()
-	d.AccessToken = jsoniter.Get(data, "access_token").ToString()
-	op.MustSaveDriverStorage(d)
-	return nil
-}
-
func (d *PikPakShare) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
	req := base.RestyClient.R()
-	req.SetHeader("Authorization", "Bearer "+d.AccessToken)
+	token, err := d.oauth2Token.Token()
+	if err != nil {
+		return nil, err
+	}
+	req.SetAuthScheme(token.TokenType).SetAuthToken(token.AccessToken)
+
	if callback != nil {
		callback(req)
	}
@ -82,14 +30,6 @@ func (d *PikPakShare) request(url string, method string, callback base.ReqCallba
		return nil, err
	}
	if e.ErrorCode != 0 {
-		if e.ErrorCode == 16 {
-			// login / refresh token
-			err = d.refreshToken()
-			if err != nil {
-				return nil, err
-			}
-			return d.request(url, method, callback, resp)
-		}
		return nil, errors.New(e.Error)
	}
	return res.Body(), nil
@ -143,7 +143,7 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
		_ = tempFile.Close()
	}()
	m := md5.New()
-	_, err = io.Copy(m, tempFile)
+	_, err = utils.CopyWithBuffer(m, tempFile)
	if err != nil {
		return err
	}
@ -153,7 +153,7 @@ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.File
	}
	md5Str := hex.EncodeToString(m.Sum(nil))
	s := sha1.New()
-	_, err = io.Copy(s, tempFile)
+	_, err = utils.CopyWithBuffer(s, tempFile)
	if err != nil {
		return err
	}
@ -17,6 +17,7 @@ type TmpTokenResponse struct {
}
type TmpTokenResponseData struct {
	Credentials Credentials `json:"Credentials"`
+	ExpiredAt   int         `json:"ExpiredAt"`
}
type Credentials struct {
	AccessKeyId string `json:"accessKeyId,omitempty"`
@ -11,6 +11,7 @@ import (
	"time"

	"github.com/alist-org/alist/v3/internal/stream"
+	"github.com/alist-org/alist/v3/pkg/cron"

	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
@ -28,6 +29,7 @@ type S3 struct {
	linkClient *s3.S3

	config driver.Config
+	cron   *cron.Cron
}

func (d *S3) Config() driver.Config {
@ -42,6 +44,18 @@ func (d *S3) Init(ctx context.Context) error {
	if d.Region == "" {
		d.Region = "alist"
	}
+	if d.config.Name == "Doge" {
+		// DogeCloud temporary keys are only valid for 2 hours, so regenerate them every 118 minutes
+		d.cron = cron.NewCron(time.Minute * 118)
+		d.cron.Do(func() {
+			err := d.initSession()
+			if err != nil {
+				log.Errorln("Doge init session error:", err)
+			}
+			d.client = d.getClient(false)
+			d.linkClient = d.getClient(true)
+		})
+	}
	err := d.initSession()
	if err != nil {
		return err
@ -52,6 +66,9 @@ func (d *S3) Init(ctx context.Context) error {
}

func (d *S3) Drop(ctx context.Context) error {
+	if d.cron != nil {
+		d.cron.Stop()
+	}
	return nil
}
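The S3 hunks above schedule a credential refresh every 118 minutes because DogeCloud's temporary keys expire after 2 hours. A stdlib sketch of the same loop using time.Ticker, standing in for the pkg/cron helper; the refresh body is a placeholder for initSession plus rebuilding the two S3 clients:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	refresh := func() error {
		// Stand-in for d.initSession() and re-creating d.client / d.linkClient.
		fmt.Println("refreshing temporary credentials at", time.Now().Format(time.RFC3339))
		return nil
	}

	_ = refresh() // initial session, as in Init

	// Refresh slightly inside the 2-hour credential lifetime.
	ticker := time.NewTicker(118 * time.Minute)
	defer ticker.Stop()
	for range ticker.C {
		if err := refresh(); err != nil {
			fmt.Println("refresh failed:", err)
		}
	}
}
```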
@ -9,8 +9,9 @@ type Addition struct {
	driver.RootPath

	Address  string `json:"address" required:"true"`
-	UserName string `json:"username" required:"true"`
-	Password string `json:"password" required:"true"`
+	UserName string `json:"username" required:"false"`
+	Password string `json:"password" required:"false"`
+	Token    string `json:"token" required:"false"`
	RepoId   string `json:"repoId" required:"false"`
	RepoPwd  string `json:"repoPwd" required:"false"`
}
@ -14,6 +14,10 @@ import (
)

func (d *Seafile) getToken() error {
+	if d.Token != "" {
+		d.authorization = fmt.Sprintf("Token %s", d.Token)
+		return nil
+	}
	var authResp AuthTokenResp
	res, err := base.RestyClient.R().
		SetResult(&authResp).
@ -16,7 +16,8 @@ import (
type SFTP struct {
	model.Storage
	Addition
	client *sftp.Client
+	clientConnectionError error
}

func (d *SFTP) Config() driver.Config {
@ -39,6 +40,9 @@ func (d *SFTP) Drop(ctx context.Context) error {
}

func (d *SFTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return nil, err
+	}
	log.Debugf("[sftp] list dir: %s", dir.GetPath())
	files, err := d.client.ReadDir(dir.GetPath())
	if err != nil {
@ -51,6 +55,9 @@ func (d *SFTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
}

func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return nil, err
+	}
	remoteFile, err := d.client.Open(file.GetPath())
	if err != nil {
		return nil, err
@ -62,14 +69,23 @@ func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*
}

func (d *SFTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return err
+	}
	return d.client.MkdirAll(path.Join(parentDir.GetPath(), dirName))
}

func (d *SFTP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return err
+	}
	return d.client.Rename(srcObj.GetPath(), path.Join(dstDir.GetPath(), srcObj.GetName()))
}

func (d *SFTP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return err
+	}
	return d.client.Rename(srcObj.GetPath(), path.Join(path.Dir(srcObj.GetPath()), newName))
}

@ -78,10 +94,16 @@ func (d *SFTP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
}

func (d *SFTP) Remove(ctx context.Context, obj model.Obj) error {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return err
+	}
	return d.remove(obj.GetPath())
}

func (d *SFTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+	if err := d.clientReconnectOnConnectionError(); err != nil {
+		return err
+	}
	dstFile, err := d.client.Create(path.Join(dstDir.GetPath(), stream.GetName()))
	if err != nil {
		return err
@@ -4,6 +4,7 @@ import (
     "path"
 
     "github.com/pkg/sftp"
+    log "github.com/sirupsen/logrus"
     "golang.org/x/crypto/ssh"
 )
 
@@ -30,6 +31,23 @@ func (d *SFTP) initClient() error {
         return err
     }
     d.client, err = sftp.NewClient(conn)
+    if err == nil {
+        d.clientConnectionError = nil
+        go func(d *SFTP) {
+            d.clientConnectionError = d.client.Wait()
+        }(d)
+    }
+    return err
+}
+
+func (d *SFTP) clientReconnectOnConnectionError() error {
+    err := d.clientConnectionError
+    if err == nil {
+        return nil
+    }
+    log.Debugf("[sftp] discarding closed sftp connection: %v", err)
+    _ = d.client.Close()
+    err = d.initClient()
     return err
 }
 
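The SFTP hunks above add lazy reconnection: initClient records the result of client.Wait() (which only returns once the underlying connection dies) in clientConnectionError, and every operation first calls clientReconnectOnConnectionError to rebuild the client when that error is set. The sketch below restates the same pattern outside the driver; the reconnectingSFTP type, its fields, and the password-auth setup are illustrative assumptions, not code from this PR:

```go
package sftpexample

import (
	"log"

	"github.com/pkg/sftp"
	"golang.org/x/crypto/ssh"
)

// reconnectingSFTP keeps an sftp.Client usable across dropped connections by
// watching client.Wait() in the background, like the driver change above.
type reconnectingSFTP struct {
	addr, user, pass string // assumed connection settings, for illustration only
	client           *sftp.Client
	connErr          error // last error reported by client.Wait(), nil while healthy
}

func (r *reconnectingSFTP) connect() error {
	conf := &ssh.ClientConfig{
		User:            r.user,
		Auth:            []ssh.AuthMethod{ssh.Password(r.pass)},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // acceptable in a sketch, not in production
	}
	conn, err := ssh.Dial("tcp", r.addr, conf)
	if err != nil {
		return err
	}
	r.client, err = sftp.NewClient(conn)
	if err == nil {
		r.connErr = nil
		// Wait blocks until the connection closes, so the goroutine only
		// writes connErr once the client has actually died (the driver
		// accepts the same benign race for simplicity).
		go func(c *sftp.Client) { r.connErr = c.Wait() }(r.client)
	}
	return err
}

// ensure mirrors clientReconnectOnConnectionError: reconnect only when the
// previous connection is known to be dead.
func (r *reconnectingSFTP) ensure() error {
	if r.connErr == nil {
		return nil
	}
	log.Printf("sftp connection lost: %v, reconnecting", r.connErr)
	_ = r.client.Close()
	return r.connect()
}
```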
@@ -1,7 +1,7 @@
 package smb
 
 import (
-    "io"
+    "github.com/alist-org/alist/v3/pkg/utils"
     "io/fs"
     "net"
     "os"
@@ -74,7 +74,7 @@ func (d *SMB) CopyFile(src, dst string) error {
     }
     defer dstfd.Close()
 
-    if _, err = io.Copy(dstfd, srcfd); err != nil {
+    if _, err = utils.CopyWithBuffer(dstfd, srcfd); err != nil {
         return err
     }
     if srcinfo, err = d.fs.Stat(src); err != nil {
@@ -190,7 +190,7 @@ func getGcid(r io.Reader, size int64) (string, error) {
     readSize := calcBlockSize(size)
     for {
         hash2.Reset()
-        if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+        if n, err := utils.CopyWithBufferN(hash2, r, readSize); err != nil && n == 0 {
             if err != io.EOF {
                 return "", err
             }
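The SMB and getGcid hunks above replace io.Copy/io.CopyN with utils.CopyWithBuffer/CopyWithBufferN. The helper itself is not shown in this diff; the sketch below is only one plausible shape for it, assuming it wraps io.CopyBuffer around a sync.Pool of reusable buffers so hot copy paths stop allocating a fresh 32 KiB buffer on every call:

```go
package utils // hypothetical sketch, not the actual pkg/utils implementation

import (
	"io"
	"sync"
)

// bufPool hands out fixed-size copy buffers instead of letting io.Copy
// allocate a new one per call.
var bufPool = sync.Pool{
	New: func() any { b := make([]byte, 32*1024); return &b },
}

// CopyWithBuffer copies src to dst using a pooled buffer.
func CopyWithBuffer(dst io.Writer, src io.Reader) (int64, error) {
	buf := bufPool.Get().(*[]byte)
	defer bufPool.Put(buf)
	return io.CopyBuffer(dst, src, *buf)
}

// CopyWithBufferN copies exactly n bytes, mirroring the io.CopyN contract.
func CopyWithBufferN(dst io.Writer, src io.Reader, n int64) (int64, error) {
	written, err := CopyWithBuffer(dst, io.LimitReader(src, n))
	if written == n {
		return written, nil
	}
	if written < n && err == nil {
		// src stopped early; io.CopyN reports this as io.EOF.
		err = io.EOF
	}
	return written, err
}
```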
527  drivers/thunderx/driver.go  Normal file
@@ -0,0 +1,527 @@
package thunderx

import (
    "context"
    "fmt"
    "github.com/go-resty/resty/v2"
    "net/http"
    "strings"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/utils"
    hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
    "github.com/aws/aws-sdk-go/aws"
    "github.com/aws/aws-sdk-go/aws/credentials"
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

type ThunderX struct {
    *XunLeiXCommon
    model.Storage
    Addition

    identity string
}

func (x *ThunderX) Config() driver.Config {
    return config
}

func (x *ThunderX) GetAddition() driver.Additional {
    return &x.Addition
}

func (x *ThunderX) Init(ctx context.Context) (err error) {
    // Initialize the required parameters
    if x.XunLeiXCommon == nil {
        x.XunLeiXCommon = &XunLeiXCommon{
            Common: &Common{
                client: base.NewRestyClient(),
                Algorithms: []string{
                    "lHwINjLeqssT28Ym99p5MvR",
                    "xvFcxvtqPKCa9Ajf",
                    "2ywOP8spKHzfuhZMUYZ9IpsViq0t8vT0",
                    "FTBrJism20SHKQ2m2",
                    "BHrWJsPwjnr5VeLtOUr2191X9uXhWmt",
                    "yu0QgHEjNmDoPNwXN17so2hQlDT83T",
                    "OcaMfLMCGZ7oYlvZGIbTqb4U7cCY",
                    "jBGGu0GzXOjtCXYwkOBb+c6TZ/Nymv",
                    "YLWRjVor2rOuYEL",
                    "94wjoPazejyNC+gRpOj+JOm1XXvxa",
                },
                DeviceID:          utils.GetMD5EncodeStr(x.Username + x.Password),
                ClientID:          "ZQL_zwA4qhHcoe_2",
                ClientSecret:      "Og9Vr1L8Ee6bh0olFxFDRg",
                ClientVersion:     "1.05.0.2115",
                PackageName:       "com.thunder.downloader",
                UserAgent:         "ANDROID-com.thunder.downloader/1.05.0.2115 netWorkType/5G appid/40 deviceName/Xiaomi_M2004j7ac deviceModel/M2004J7AC OSVersion/12 protocolVersion/301 platformVersion/10 sdkVersion/220200 Oauth2Client/0.9 (Linux 4_14_186-perf-gddfs8vbb238b) (JAVA 0)",
                DownloadUserAgent: "Dalvik/2.1.0 (Linux; U; Android 12; M2004J7AC Build/SP1A.210812.016)",
                UseVideoUrl:       x.UseVideoUrl,

                refreshCTokenCk: func(token string) {
                    x.CaptchaToken = token
                    op.MustSaveDriverStorage(x)
                },
            },
            refreshTokenFunc: func() error {
                // Refresh via the refresh token
                token, err := x.RefreshToken(x.TokenResp.RefreshToken)
                if err != nil {
                    // Fall back to logging in again
                    token, err = x.Login(x.Username, x.Password)
                    if err != nil {
                        x.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
                        op.MustSaveDriverStorage(x)
                    }
                }
                x.SetTokenResp(token)
                return err
            },
        }
    }

    // Custom captcha token
    ctoekn := strings.TrimSpace(x.CaptchaToken)
    if ctoekn != "" {
        x.SetCaptchaToken(ctoekn)
    }
    x.XunLeiXCommon.UseVideoUrl = x.UseVideoUrl
    x.Addition.RootFolderID = x.RootFolderID
    // Avoid logging in repeatedly
    identity := x.GetIdentity()
    if x.identity != identity || !x.IsLogin() {
        x.identity = identity
        // Log in
        token, err := x.Login(x.Username, x.Password)
        if err != nil {
            return err
        }
        x.SetTokenResp(token)
    }
    return nil
}

func (x *ThunderX) Drop(ctx context.Context) error {
    return nil
}

type ThunderXExpert struct {
    *XunLeiXCommon
    model.Storage
    ExpertAddition

    identity string
}

func (x *ThunderXExpert) Config() driver.Config {
    return configExpert
}

func (x *ThunderXExpert) GetAddition() driver.Additional {
    return &x.ExpertAddition
}

func (x *ThunderXExpert) Init(ctx context.Context) (err error) {
    // Avoid logging in repeatedly
    identity := x.GetIdentity()
    if identity != x.identity || !x.IsLogin() {
        x.identity = identity
        x.XunLeiXCommon = &XunLeiXCommon{
            Common: &Common{
                client: base.NewRestyClient(),

                DeviceID: func() string {
                    if len(x.DeviceID) != 32 {
                        return utils.GetMD5EncodeStr(x.DeviceID)
                    }
                    return x.DeviceID
                }(),
                ClientID:          x.ClientID,
                ClientSecret:      x.ClientSecret,
                ClientVersion:     x.ClientVersion,
                PackageName:       x.PackageName,
                UserAgent:         x.UserAgent,
                DownloadUserAgent: x.DownloadUserAgent,
                UseVideoUrl:       x.UseVideoUrl,

                refreshCTokenCk: func(token string) {
                    x.CaptchaToken = token
                    op.MustSaveDriverStorage(x)
                },
            },
        }

        if x.CaptchaToken != "" {
            x.SetCaptchaToken(x.CaptchaToken)
        }
        x.XunLeiXCommon.UseVideoUrl = x.UseVideoUrl
        x.ExpertAddition.RootFolderID = x.RootFolderID
        // Signing method
        if x.SignType == "captcha_sign" {
            x.Common.Timestamp = x.Timestamp
            x.Common.CaptchaSign = x.CaptchaSign
        } else {
            x.Common.Algorithms = strings.Split(x.Algorithms, ",")
        }

        // Login method
        if x.LoginType == "refresh_token" {
            // Log in with the refresh token
            token, err := x.XunLeiXCommon.RefreshToken(x.ExpertAddition.RefreshToken)
            if err != nil {
                return err
            }
            x.SetTokenResp(token)

            // Token refresh callback
            x.SetRefreshTokenFunc(func() error {
                token, err := x.XunLeiXCommon.RefreshToken(x.TokenResp.RefreshToken)
                if err != nil {
                    x.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
                }
                x.SetTokenResp(token)
                op.MustSaveDriverStorage(x)
                return err
            })
        } else {
            // Log in with username and password
            token, err := x.Login(x.Username, x.Password)
            if err != nil {
                return err
            }
            x.SetTokenResp(token)
            x.SetRefreshTokenFunc(func() error {
                token, err := x.XunLeiXCommon.RefreshToken(x.TokenResp.RefreshToken)
                if err != nil {
                    token, err = x.Login(x.Username, x.Password)
                    if err != nil {
                        x.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
                    }
                }
                x.SetTokenResp(token)
                op.MustSaveDriverStorage(x)
                return err
            })
        }
    } else {
        // Only update the captcha token
        if x.CaptchaToken != "" {
            x.SetCaptchaToken(x.CaptchaToken)
        }
        x.XunLeiXCommon.UserAgent = x.UserAgent
        x.XunLeiXCommon.DownloadUserAgent = x.DownloadUserAgent
        x.XunLeiXCommon.UseVideoUrl = x.UseVideoUrl
        x.ExpertAddition.RootFolderID = x.RootFolderID
    }
    return nil
}

func (x *ThunderXExpert) Drop(ctx context.Context) error {
    return nil
}

func (x *ThunderXExpert) SetTokenResp(token *TokenResp) {
    x.XunLeiXCommon.SetTokenResp(token)
    if token != nil {
        x.ExpertAddition.RefreshToken = token.RefreshToken
    }
}

type XunLeiXCommon struct {
    *Common
    *TokenResp // login info

    refreshTokenFunc func() error
}

func (xc *XunLeiXCommon) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    return xc.getFiles(ctx, dir.GetID())
}

func (xc *XunLeiXCommon) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    var lFile Files
    _, err := xc.Request(FILE_API_URL+"/{fileID}", http.MethodGet, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetPathParam("fileID", file.GetID())
        //r.SetQueryParam("space", "")
    }, &lFile)
    if err != nil {
        return nil, err
    }
    link := &model.Link{
        URL: lFile.WebContentLink,
        Header: http.Header{
            "User-Agent": {xc.DownloadUserAgent},
        },
    }

    if xc.UseVideoUrl {
        for _, media := range lFile.Medias {
            if media.Link.URL != "" {
                link.URL = media.Link.URL
                break
            }
        }
    }

    /*
        strs := regexp.MustCompile(`e=([0-9]*)`).FindStringSubmatch(lFile.WebContentLink)
        if len(strs) == 2 {
            timestamp, err := strconv.ParseInt(strs[1], 10, 64)
            if err == nil {
                expired := time.Duration(timestamp-time.Now().Unix()) * time.Second
                link.Expiration = &expired
            }
        }
    */
    return link, nil
}

func (xc *XunLeiXCommon) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
    _, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetBody(&base.Json{
            "kind":      FOLDER,
            "name":      dirName,
            "parent_id": parentDir.GetID(),
        })
    }, nil)
    return err
}

func (xc *XunLeiXCommon) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
    _, err := xc.Request(FILE_API_URL+":batchMove", http.MethodPost, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetBody(&base.Json{
            "to":  base.Json{"parent_id": dstDir.GetID()},
            "ids": []string{srcObj.GetID()},
        })
    }, nil)
    return err
}

func (xc *XunLeiXCommon) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
    _, err := xc.Request(FILE_API_URL+"/{fileID}", http.MethodPatch, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetPathParam("fileID", srcObj.GetID())
        r.SetBody(&base.Json{"name": newName})
    }, nil)
    return err
}

func (xc *XunLeiXCommon) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
    _, err := xc.Request(FILE_API_URL+":batchCopy", http.MethodPost, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetBody(&base.Json{
            "to":  base.Json{"parent_id": dstDir.GetID()},
            "ids": []string{srcObj.GetID()},
        })
    }, nil)
    return err
}

func (xc *XunLeiXCommon) Remove(ctx context.Context, obj model.Obj) error {
    _, err := xc.Request(FILE_API_URL+"/{fileID}/trash", http.MethodPatch, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetPathParam("fileID", obj.GetID())
        r.SetBody("{}")
    }, nil)
    return err
}

func (xc *XunLeiXCommon) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    hi := stream.GetHash()
    gcid := hi.GetHash(hash_extend.GCID)
    if len(gcid) < hash_extend.GCID.Width {
        tFile, err := stream.CacheFullInTempFile()
        if err != nil {
            return err
        }

        gcid, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
        if err != nil {
            return err
        }
    }

    var resp UploadTaskResponse
    _, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetBody(&base.Json{
            "kind":        FILE,
            "parent_id":   dstDir.GetID(),
            "name":        stream.GetName(),
            "size":        stream.GetSize(),
            "hash":        gcid,
            "upload_type": UPLOAD_TYPE_RESUMABLE,
        })
    }, &resp)
    if err != nil {
        return err
    }

    param := resp.Resumable.Params
    if resp.UploadType == UPLOAD_TYPE_RESUMABLE {
        param.Endpoint = strings.TrimLeft(param.Endpoint, param.Bucket+".")
        s, err := session.NewSession(&aws.Config{
            Credentials: credentials.NewStaticCredentials(param.AccessKeyID, param.AccessKeySecret, param.SecurityToken),
            Region:      aws.String("xunlei"),
            Endpoint:    aws.String(param.Endpoint),
        })
        if err != nil {
            return err
        }
        uploader := s3manager.NewUploader(s)
        if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
            uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
        }
        _, err = uploader.UploadWithContext(ctx, &s3manager.UploadInput{
            Bucket:  aws.String(param.Bucket),
            Key:     aws.String(param.Key),
            Expires: aws.Time(param.Expiration),
            Body:    stream,
        })
        return err
    }
    return nil
}

func (xc *XunLeiXCommon) getFiles(ctx context.Context, folderId string) ([]model.Obj, error) {
    files := make([]model.Obj, 0)
    var pageToken string
    for {
        var fileList FileList
        _, err := xc.Request(FILE_API_URL, http.MethodGet, func(r *resty.Request) {
            r.SetContext(ctx)
            r.SetQueryParams(map[string]string{
                "space":      "",
                "__type":     "drive",
                "refresh":    "true",
                "__sync":     "true",
                "parent_id":  folderId,
                "page_token": pageToken,
                "with_audit": "true",
                "limit":      "100",
                "filters":    `{"phase":{"eq":"PHASE_TYPE_COMPLETE"},"trashed":{"eq":false}}`,
            })
        }, &fileList)
        if err != nil {
            return nil, err
        }

        for i := 0; i < len(fileList.Files); i++ {
            files = append(files, &fileList.Files[i])
        }

        if fileList.NextPageToken == "" {
            break
        }
        pageToken = fileList.NextPageToken
    }
    return files, nil
}

// Set the token refresh callback
func (xc *XunLeiXCommon) SetRefreshTokenFunc(fn func() error) {
    xc.refreshTokenFunc = fn
}

// Set the token
func (xc *XunLeiXCommon) SetTokenResp(tr *TokenResp) {
    xc.TokenResp = tr
}

// Request that carries the Authorization and X-Captcha-Token headers
func (xc *XunLeiXCommon) Request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    data, err := xc.Common.Request(url, method, func(req *resty.Request) {
        req.SetHeaders(map[string]string{
            "Authorization":   xc.Token(),
            "X-Captcha-Token": xc.GetCaptchaToken(),
        })
        if callback != nil {
            callback(req)
        }
    }, resp)

    errResp, ok := err.(*ErrResp)
    if !ok {
        return nil, err
    }

    switch errResp.ErrorCode {
    case 0:
        return data, nil
    case 4122, 4121, 10, 16:
        if xc.refreshTokenFunc != nil {
            if err = xc.refreshTokenFunc(); err == nil {
                break
            }
        }
        return nil, err
    case 9: // captcha token expired
        if err = xc.RefreshCaptchaTokenAtLogin(GetAction(method, url), xc.UserID); err != nil {
            return nil, err
        }
    default:
        return nil, err
    }
    return xc.Request(url, method, callback, resp)
}

// Refresh the access token
func (xc *XunLeiXCommon) RefreshToken(refreshToken string) (*TokenResp, error) {
    var resp TokenResp
    _, err := xc.Common.Request(XLUSER_API_URL+"/auth/token", http.MethodPost, func(req *resty.Request) {
        req.SetBody(&base.Json{
            "grant_type":    "refresh_token",
            "refresh_token": refreshToken,
            "client_id":     xc.ClientID,
            "client_secret": xc.ClientSecret,
        })
    }, &resp)
    if err != nil {
        return nil, err
    }

    if resp.RefreshToken == "" {
        return nil, errs.EmptyToken
    }
    return &resp, nil
}

// Log in
func (xc *XunLeiXCommon) Login(username, password string) (*TokenResp, error) {
    url := XLUSER_API_URL + "/auth/signin"
    err := xc.RefreshCaptchaTokenInLogin(GetAction(http.MethodPost, url), username)
    if err != nil {
        return nil, err
    }

    var resp TokenResp
    _, err = xc.Common.Request(url, http.MethodPost, func(req *resty.Request) {
        req.SetBody(&SignInRequest{
            CaptchaToken: xc.GetCaptchaToken(),
            ClientID:     xc.ClientID,
            ClientSecret: xc.ClientSecret,
            Username:     username,
            Password:     password,
        })
    }, &resp)
    if err != nil {
        return nil, err
    }
    return &resp, nil
}

func (xc *XunLeiXCommon) IsLogin() bool {
    if xc.TokenResp == nil {
        return false
    }
    _, err := xc.Request(XLUSER_API_URL+"/user/me", http.MethodGet, nil, nil)
    return err == nil
}
103  drivers/thunderx/meta.go  Normal file
@@ -0,0 +1,103 @@
package thunderx

import (
    "crypto/md5"
    "encoding/hex"

    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/utils"
)

// Advanced settings
type ExpertAddition struct {
    driver.RootID

    LoginType string `json:"login_type" type:"select" options:"user,refresh_token" default:"user"`
    SignType  string `json:"sign_type" type:"select" options:"algorithms,captcha_sign" default:"algorithms"`

    // Login method 1
    Username string `json:"username" required:"true" help:"login type is user,this is required"`
    Password string `json:"password" required:"true" help:"login type is user,this is required"`
    // Login method 2
    RefreshToken string `json:"refresh_token" required:"true" help:"login type is refresh_token,this is required"`

    // Signing method 1
    Algorithms string `json:"algorithms" required:"true" help:"sign type is algorithms,this is required" default:"lHwINjLeqssT28Ym99p5MvR,xvFcxvtqPKCa9Ajf,2ywOP8spKHzfuhZMUYZ9IpsViq0t8vT0,FTBrJism20SHKQ2m2,BHrWJsPwjnr5VeLtOUr2191X9uXhWmt,yu0QgHEjNmDoPNwXN17so2hQlDT83T,OcaMfLMCGZ7oYlvZGIbTqb4U7cCY,jBGGu0GzXOjtCXYwkOBb+c6TZ/Nymv,YLWRjVor2rOuYEL,94wjoPazejyNC+gRpOj+JOm1XXvxa"`
    // Signing method 2
    CaptchaSign string `json:"captcha_sign" required:"true" help:"sign type is captcha_sign,this is required"`
    Timestamp   string `json:"timestamp" required:"true" help:"sign type is captcha_sign,this is required"`

    // Captcha
    CaptchaToken string `json:"captcha_token"`

    // Required and affects login; determined by the signature
    DeviceID      string `json:"device_id"  required:"true" default:"9aa5c268e7bcfc197a9ad88e2fb330e5"`
    ClientID      string `json:"client_id" required:"true" default:"ZQL_zwA4qhHcoe_2"`
    ClientSecret  string `json:"client_secret" required:"true" default:"Og9Vr1L8Ee6bh0olFxFDRg"`
    ClientVersion string `json:"client_version" required:"true" default:"1.05.0.2115"`
    PackageName   string `json:"package_name" required:"true" default:"com.thunder.downloader"`

    // Does not affect login, only download speed
    UserAgent         string `json:"user_agent" required:"true" default:"ANDROID-com.thunder.downloader/1.05.0.2115 netWorkType/4G appid/40 deviceName/Xiaomi_M2004j7ac deviceModel/M2004J7AC OSVersion/12 protocolVersion/301 platformVersion/10 sdkVersion/220200 Oauth2Client/0.9 (Linux 4_14_186-perf-gdcf98eab238b) (JAVA 0)"`
    DownloadUserAgent string `json:"download_user_agent" required:"true" default:"Dalvik/2.1.0 (Linux; U; Android 12; M2004J7AC Build/SP1A.210812.016)"`

    // Prefer the video link over the download link
    UseVideoUrl bool `json:"use_video_url"`
}

// Login fingerprint, used to decide whether to log in again
func (i *ExpertAddition) GetIdentity() string {
    hash := md5.New()
    if i.LoginType == "refresh_token" {
        hash.Write([]byte(i.RefreshToken))
    } else {
        hash.Write([]byte(i.Username + i.Password))
    }

    if i.SignType == "captcha_sign" {
        hash.Write([]byte(i.CaptchaSign + i.Timestamp))
    } else {
        hash.Write([]byte(i.Algorithms))
    }

    hash.Write([]byte(i.DeviceID))
    hash.Write([]byte(i.ClientID))
    hash.Write([]byte(i.ClientSecret))
    hash.Write([]byte(i.ClientVersion))
    hash.Write([]byte(i.PackageName))
    return hex.EncodeToString(hash.Sum(nil))
}

type Addition struct {
    driver.RootID
    Username     string `json:"username" required:"true"`
    Password     string `json:"password" required:"true"`
    CaptchaToken string `json:"captcha_token"`
    UseVideoUrl  bool   `json:"use_video_url" default:"true"`
}

// Login fingerprint, used to decide whether to log in again
func (i *Addition) GetIdentity() string {
    return utils.GetMD5EncodeStr(i.Username + i.Password)
}

var config = driver.Config{
    Name:      "ThunderX",
    LocalSort: true,
    OnlyProxy: true,
}

var configExpert = driver.Config{
    Name:      "ThunderXExpert",
    LocalSort: true,
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &ThunderX{}
    })
    op.RegisterDriver(func() driver.Driver {
        return &ThunderXExpert{}
    })
}
206  drivers/thunderx/types.go  Normal file
@@ -0,0 +1,206 @@
package thunderx

import (
    "fmt"
    "strconv"
    "time"

    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)

type ErrResp struct {
    ErrorCode        int64  `json:"error_code"`
    ErrorMsg         string `json:"error"`
    ErrorDescription string `json:"error_description"`
    //  ErrorDetails   interface{} `json:"error_details"`
}

func (e *ErrResp) IsError() bool {
    return e.ErrorCode != 0 || e.ErrorMsg != "" || e.ErrorDescription != ""
}

func (e *ErrResp) Error() string {
    return fmt.Sprintf("ErrorCode: %d ,Error: %s ,ErrorDescription: %s ", e.ErrorCode, e.ErrorMsg, e.ErrorDescription)
}

/*
* Captcha token
**/
type CaptchaTokenRequest struct {
    Action       string            `json:"action"`
    CaptchaToken string            `json:"captcha_token"`
    ClientID     string            `json:"client_id"`
    DeviceID     string            `json:"device_id"`
    Meta         map[string]string `json:"meta"`
    RedirectUri  string            `json:"redirect_uri"`
}

type CaptchaTokenResponse struct {
    CaptchaToken string `json:"captcha_token"`
    ExpiresIn    int64  `json:"expires_in"`
    Url          string `json:"url"`
}

/*
* Login
**/
type TokenResp struct {
    TokenType    string `json:"token_type"`
    AccessToken  string `json:"access_token"`
    RefreshToken string `json:"refresh_token"`
    ExpiresIn    int64  `json:"expires_in"`

    Sub    string `json:"sub"`
    UserID string `json:"user_id"`
}

func (t *TokenResp) Token() string {
    return fmt.Sprint(t.TokenType, " ", t.AccessToken)
}

type SignInRequest struct {
    CaptchaToken string `json:"captcha_token"`

    ClientID     string `json:"client_id"`
    ClientSecret string `json:"client_secret"`

    Username string `json:"username"`
    Password string `json:"password"`
}

/*
* Files
**/
type FileList struct {
    Kind            string  `json:"kind"`
    NextPageToken   string  `json:"next_page_token"`
    Files           []Files `json:"files"`
    Version         string  `json:"version"`
    VersionOutdated bool    `json:"version_outdated"`
}

type Link struct {
    URL    string    `json:"url"`
    Token  string    `json:"token"`
    Expire time.Time `json:"expire"`
    Type   string    `json:"type"`
}

var _ model.Obj = (*Files)(nil)

type Files struct {
    Kind     string `json:"kind"`
    ID       string `json:"id"`
    ParentID string `json:"parent_id"`
    Name     string `json:"name"`
    //UserID         string `json:"user_id"`
    Size string `json:"size"`
    //Revision       string `json:"revision"`
    //FileExtension  string `json:"file_extension"`
    //MimeType       string `json:"mime_type"`
    //Starred        bool   `json:"starred"`
    WebContentLink string    `json:"web_content_link"`
    CreatedTime    time.Time `json:"created_time"`
    ModifiedTime   time.Time `json:"modified_time"`
    IconLink       string    `json:"icon_link"`
    ThumbnailLink  string    `json:"thumbnail_link"`
    // Md5Checksum    string `json:"md5_checksum"`
    Hash string `json:"hash"`
    // Links map[string]Link `json:"links"`
    // Phase string          `json:"phase"`
    // Audit struct {
    //     Status  string `json:"status"`
    //     Message string `json:"message"`
    //     Title   string `json:"title"`
    // } `json:"audit"`
    Medias []struct {
        //Category       string `json:"category"`
        //IconLink       string `json:"icon_link"`
        //IsDefault      bool   `json:"is_default"`
        //IsOrigin       bool   `json:"is_origin"`
        //IsVisible      bool   `json:"is_visible"`
        Link Link `json:"link"`
        //MediaID        string `json:"media_id"`
        //MediaName      string `json:"media_name"`
        //NeedMoreQuota  bool   `json:"need_more_quota"`
        //Priority       int    `json:"priority"`
        //RedirectLink   string `json:"redirect_link"`
        //ResolutionName string `json:"resolution_name"`
        // Video          struct {
        //     AudioCodec string `json:"audio_codec"`
        //     BitRate    int    `json:"bit_rate"`
        //     Duration   int    `json:"duration"`
        //     FrameRate  int    `json:"frame_rate"`
        //     Height     int    `json:"height"`
        //     VideoCodec string `json:"video_codec"`
        //     VideoType  string `json:"video_type"`
        //     Width      int    `json:"width"`
        // } `json:"video"`
        // VipTypes []string `json:"vip_types"`
    } `json:"medias"`
    Trashed     bool   `json:"trashed"`
    DeleteTime  string `json:"delete_time"`
    OriginalURL string `json:"original_url"`
    //Params            struct{} `json:"params"`
    //OriginalFileIndex int      `json:"original_file_index"`
    //Space             string   `json:"space"`
    //Apps              []interface{} `json:"apps"`
    //Writable          bool     `json:"writable"`
    //FolderType        string   `json:"folder_type"`
    //Collection        interface{} `json:"collection"`
}

func (c *Files) GetHash() utils.HashInfo {
    return utils.NewHashInfo(hash_extend.GCID, c.Hash)
}

func (c *Files) GetSize() int64        { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
func (c *Files) GetName() string       { return c.Name }
func (c *Files) CreateTime() time.Time { return c.CreatedTime }
func (c *Files) ModTime() time.Time    { return c.ModifiedTime }
func (c *Files) IsDir() bool           { return c.Kind == FOLDER }
func (c *Files) GetID() string         { return c.ID }
func (c *Files) GetPath() string       { return "" }
func (c *Files) Thumb() string         { return c.ThumbnailLink }

/*
* Upload
**/
type UploadTaskResponse struct {
    UploadType string `json:"upload_type"`

    /*//UPLOAD_TYPE_FORM
    Form struct {
        //Headers struct{} `json:"headers"`
        Kind       string `json:"kind"`
        Method     string `json:"method"`
        MultiParts struct {
            OSSAccessKeyID string `json:"OSSAccessKeyId"`
            Signature      string `json:"Signature"`
            Callback       string `json:"callback"`
            Key            string `json:"key"`
            Policy         string `json:"policy"`
            XUserData      string `json:"x:user_data"`
        } `json:"multi_parts"`
        URL string `json:"url"`
    } `json:"form"`*/

    //UPLOAD_TYPE_RESUMABLE
    Resumable struct {
        Kind   string `json:"kind"`
        Params struct {
            AccessKeyID     string    `json:"access_key_id"`
            AccessKeySecret string    `json:"access_key_secret"`
            Bucket          string    `json:"bucket"`
            Endpoint        string    `json:"endpoint"`
            Expiration      time.Time `json:"expiration"`
            Key             string    `json:"key"`
            SecurityToken   string    `json:"security_token"`
        } `json:"params"`
        Provider string `json:"provider"`
    } `json:"resumable"`

    File Files `json:"file"`
}
202  drivers/thunderx/util.go  Normal file
@@ -0,0 +1,202 @@
package thunderx

import (
    "crypto/sha1"
    "encoding/hex"
    "fmt"
    "io"
    "net/http"
    "regexp"
    "time"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/go-resty/resty/v2"
)

const (
    API_URL        = "https://api-pan.xunleix.com/drive/v1"
    FILE_API_URL   = API_URL + "/files"
    XLUSER_API_URL = "https://xluser-ssl.xunleix.com/v1"
)

const (
    FOLDER    = "drive#folder"
    FILE      = "drive#file"
    RESUMABLE = "drive#resumable"
)

const (
    UPLOAD_TYPE_UNKNOWN = "UPLOAD_TYPE_UNKNOWN"
    //UPLOAD_TYPE_FORM      = "UPLOAD_TYPE_FORM"
    UPLOAD_TYPE_RESUMABLE = "UPLOAD_TYPE_RESUMABLE"
    UPLOAD_TYPE_URL       = "UPLOAD_TYPE_URL"
)

func GetAction(method string, url string) string {
    urlpath := regexp.MustCompile(`://[^/]+((/[^/\s?#]+)*)`).FindStringSubmatch(url)[1]
    return method + ":" + urlpath
}

type Common struct {
    client *resty.Client

    captchaToken string

    // Signature-related, pick one of the two
    Algorithms             []string
    Timestamp, CaptchaSign string

    // Required values, used for signing
    DeviceID          string
    ClientID          string
    ClientSecret      string
    ClientVersion     string
    PackageName       string
    UserAgent         string
    DownloadUserAgent string
    UseVideoUrl       bool

    // Callback invoked when the captcha token is refreshed successfully
    refreshCTokenCk func(token string)
}

func (c *Common) SetCaptchaToken(captchaToken string) {
    c.captchaToken = captchaToken
}
func (c *Common) GetCaptchaToken() string {
    return c.captchaToken
}

// Refresh the captcha token (after login)
func (c *Common) RefreshCaptchaTokenAtLogin(action, userID string) error {
    metas := map[string]string{
        "client_version": c.ClientVersion,
        "package_name":   c.PackageName,
        "user_id":        userID,
    }
    metas["timestamp"], metas["captcha_sign"] = c.GetCaptchaSign()
    return c.refreshCaptchaToken(action, metas)
}

// Refresh the captcha token (during login)
func (c *Common) RefreshCaptchaTokenInLogin(action, username string) error {
    metas := make(map[string]string)
    if ok, _ := regexp.MatchString(`\w+([-+.]\w+)*@\w+([-.]\w+)*\.\w+([-.]\w+)*`, username); ok {
        metas["email"] = username
    } else if len(username) >= 11 && len(username) <= 18 {
        metas["phone_number"] = username
    } else {
        metas["username"] = username
    }
    return c.refreshCaptchaToken(action, metas)
}

// Compute the captcha signature
func (c *Common) GetCaptchaSign() (timestamp, sign string) {
    if len(c.Algorithms) == 0 {
        return c.Timestamp, c.CaptchaSign
    }
    timestamp = fmt.Sprint(time.Now().UnixMilli())
    str := fmt.Sprint(c.ClientID, c.ClientVersion, c.PackageName, c.DeviceID, timestamp)
    for _, algorithm := range c.Algorithms {
        str = utils.GetMD5EncodeStr(str + algorithm)
    }
    sign = "1." + str
    return
}

// Refresh the captcha token
func (c *Common) refreshCaptchaToken(action string, metas map[string]string) error {
    param := CaptchaTokenRequest{
        Action:       action,
        CaptchaToken: c.captchaToken,
        ClientID:     c.ClientID,
        DeviceID:     c.DeviceID,
        Meta:         metas,
        RedirectUri:  "xlaccsdk01://xbase.cloud/callback?state=harbor",
    }
    var e ErrResp
    var resp CaptchaTokenResponse
    _, err := c.Request(XLUSER_API_URL+"/shield/captcha/init", http.MethodPost, func(req *resty.Request) {
        req.SetError(&e).SetBody(param)
    }, &resp)

    if err != nil {
        return err
    }

    if e.IsError() {
        return &e
    }

    if resp.Url != "" {
        return fmt.Errorf(`need verify: <a target="_blank" href="%s">Click Here</a>`, resp.Url)
    }

    if resp.CaptchaToken == "" {
        return fmt.Errorf("empty captchaToken")
    }

    if c.refreshCTokenCk != nil {
        c.refreshCTokenCk(resp.CaptchaToken)
    }
    c.SetCaptchaToken(resp.CaptchaToken)
    return nil
}

// Request carrying only the basic headers
func (c *Common) Request(url, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
    req := c.client.R().SetHeaders(map[string]string{
        "user-agent":       c.UserAgent,
        "accept":           "application/json;charset=UTF-8",
        "x-device-id":      c.DeviceID,
        "x-client-id":      c.ClientID,
        "x-client-version": c.ClientVersion,
    })

    if callback != nil {
        callback(req)
    }
    if resp != nil {
        req.SetResult(resp)
    }
    res, err := req.Execute(method, url)
    if err != nil {
        return nil, err
    }

    var erron ErrResp
    utils.Json.Unmarshal(res.Body(), &erron)
    if erron.IsError() {
        return nil, &erron
    }

    return res.Body(), nil
}

// Compute the file's GCID
func getGcid(r io.Reader, size int64) (string, error) {
    calcBlockSize := func(j int64) int64 {
        var psize int64 = 0x40000
        for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
            psize = psize << 1
        }
        return psize
    }

    hash1 := sha1.New()
    hash2 := sha1.New()
    readSize := calcBlockSize(size)
    for {
        hash2.Reset()
        if n, err := utils.CopyWithBufferN(hash2, r, readSize); err != nil && n == 0 {
            if err != io.EOF {
                return "", err
            }
            break
        }
        hash1.Write(hash2.Sum(nil))
    }
    return hex.EncodeToString(hash1.Sum(nil)), nil
}
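getGcid above computes Thunder's GCID: the input is read in size-dependent blocks, each block is SHA-1 hashed, and the per-block digests are folded into an outer SHA-1. A small usage sketch for hashing a local file (gcidOfFile is a hypothetical helper that simply drives the getGcid function defined in the new util.go):

```go
package thunderx

import "os"

// gcidOfFile computes the GCID of a file on disk using the package's getGcid.
func gcidOfFile(name string) (string, error) {
	f, err := os.Open(name)
	if err != nil {
		return "", err
	}
	defer f.Close()

	info, err := f.Stat()
	if err != nil {
		return "", err
	}
	return getGcid(f, info.Size())
}
```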
7  go.mod
@@ -3,11 +3,11 @@ module github.com/alist-org/alist/v3
 go 1.21
 
 require (
-    github.com/Mikubill/gofakes3 v0.0.3-0.20230622102024-284c0f988700
     github.com/SheltonZhu/115driver v1.0.22
     github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a
     github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4
     github.com/Xhofe/wopan-sdk-go v0.1.2
+    github.com/alist-org/gofakes3 v0.0.4
     github.com/aliyun/aliyun-oss-go-sdk v3.0.2+incompatible
     github.com/avast/retry-go v3.0.0+incompatible
     github.com/aws/aws-sdk-go v1.50.24
@@ -18,11 +18,12 @@ require (
     github.com/charmbracelet/lipgloss v0.9.1
     github.com/coreos/go-oidc v2.2.1+incompatible
     github.com/deckarep/golang-set/v2 v2.6.0
+    github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8
     github.com/disintegration/imaging v1.6.2
     github.com/djherbis/times v1.6.0
     github.com/dlclark/regexp2 v1.10.0
     github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564
-    github.com/foxxorcat/mopan-sdk-go v0.1.5
+    github.com/foxxorcat/mopan-sdk-go v0.1.6
     github.com/foxxorcat/weiyun-sdk-go v0.1.3
     github.com/gaoyb7/115drive-webdav v0.1.8
     github.com/gin-contrib/cors v1.5.0
@@ -36,6 +37,7 @@ require (
     github.com/ipfs/go-ipfs-api v0.7.0
     github.com/jlaffaye/ftp v0.2.0
     github.com/json-iterator/go v1.1.12
+    github.com/larksuite/oapi-sdk-go/v3 v3.2.5
     github.com/maruel/natural v1.1.1
     github.com/meilisearch/meilisearch-go v0.26.1
     github.com/minio/sio v0.3.0
@@ -127,6 +129,7 @@ require (
     github.com/google/go-tpm v0.9.0 // indirect
     github.com/hashicorp/errwrap v1.1.0 // indirect
     github.com/hashicorp/go-multierror v1.1.1 // indirect
+    github.com/hashicorp/go-version v1.6.0 // indirect
     github.com/inconshreveable/mousetrap v1.1.0 // indirect
     github.com/ipfs/boxo v0.12.0 // indirect
     github.com/ipfs/go-cid v0.4.1 // indirect
30  go.sum
@@ -7,8 +7,6 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd h1:nzE1YQBdx1bq9IlZinHa+HVffy+NmVRoKr+wHN8fpLE=
 github.com/Max-Sum/base32768 v0.0.0-20230304063302-18e6ce5945fd/go.mod h1:C8yoIfvESpM3GD07OCHU7fqI7lhwyZ2Td1rbNbTAhnc=
-github.com/Mikubill/gofakes3 v0.0.3-0.20230622102024-284c0f988700 h1:r3fp2/Ro+0RtpjNY0/wsbN7vRmCW//dXTOZDQTct25Q=
-github.com/Mikubill/gofakes3 v0.0.3-0.20230622102024-284c0f988700/go.mod h1:OSXqXEGUe9CmPiwLMMnVrbXonMf4BeLBkBdLufxxiyY=
 github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
 github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
 github.com/SheltonZhu/115driver v1.0.22 h1:Wp8pN7/gK3YwEO5P18ggbIOHM++lo9eP/pBhuvXfI6U=
@@ -25,6 +23,8 @@ github.com/abbot/go-http-auth v0.4.0 h1:QjmvZ5gSC7jm3Zg54DqWE/T5m1t2AfDu6QlXJT0E
 github.com/abbot/go-http-auth v0.4.0/go.mod h1:Cz6ARTIzApMJDzh5bRMSUou6UMSp0IEXg9km/ci7TJM=
 github.com/aead/ecdh v0.2.0 h1:pYop54xVaq/CEREFEcukHRZfTdjiWvYIsZDXXrBapQQ=
 github.com/aead/ecdh v0.2.0/go.mod h1:a9HHtXuSo8J1Js1MwLQx2mBhkXMT6YwUmVVEY4tTB8U=
+github.com/alist-org/gofakes3 v0.0.4 h1:/ID4+1llsiB8EweLcC65rVmgBZKL95e3P7Wa+aJGUiE=
+github.com/alist-org/gofakes3 v0.0.4/go.mod h1:bLPZXt45XYMgaoGGLe5t0d1p13oZTQTptTEDLrku070=
 github.com/aliyun/aliyun-oss-go-sdk v3.0.2+incompatible h1:8psS8a+wKfiLt1iVDX79F7Y6wUM49Lcha2FMXt4UM8g=
 github.com/aliyun/aliyun-oss-go-sdk v3.0.2+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
 github.com/andreburgaud/crypt2go v1.2.0 h1:oly/ENAodeqTYpUafgd4r3v+VKLQnmOKUyfpj+TxHbE=
@@ -123,6 +123,8 @@ github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK
 github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 h1:HbphB4TFFXpv7MNrT52FGrrgVXF1owhMVTHFZIlnvd4=
 github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0/go.mod h1:DZGJHZMqrU4JJqFAWUS2UO1+lbSKsdiOoYi9Zzey7Fc=
+github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8 h1:OtSeLS5y0Uy01jaKK4mA/WVIYtpzVm63vLVAPzJXigg=
+github.com/dhowden/tag v0.0.0-20240417053706-3d75831295e8/go.mod h1:apkPC/CR3s48O2D7Y++n1XWEpgPNNCjXYga3PPbJe2E=
 github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1ei82L+c=
 github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
 github.com/djherbis/times v1.6.0 h1:w2ctJ92J8fBvWPxugmXIv7Nz7Q3iDMKNx9v5ocVH20c=
@@ -131,8 +133,8 @@ github.com/dlclark/regexp2 v1.10.0 h1:+/GIL799phkJqYW+3YbOd8LCcbHzT0Pbo8zl70MHsq
 github.com/dlclark/regexp2 v1.10.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8=
 github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 h1:I6KUy4CI6hHjqnyJLNCEi7YHVMkwwtfSr2k9splgdSM=
 github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564/go.mod h1:yekO+3ZShy19S+bsmnERmznGy9Rfg6dWWWpiGJjNAz8=
-github.com/foxxorcat/mopan-sdk-go v0.1.5 h1:N3LqOvk2aWWxszsFIkArP5udIv74uTei/bH2jM3tfSc=
+github.com/foxxorcat/mopan-sdk-go v0.1.6 h1:6J37oI4wMZLj8EPgSCcSTTIbnI5D6RCNW/srX8vQd1Y=
-github.com/foxxorcat/mopan-sdk-go v0.1.5/go.mod h1:iWHA2JFhzmKR28ySp1ON0g6DjLaYtvb5jhTqPVTDW9A=
+github.com/foxxorcat/mopan-sdk-go v0.1.6/go.mod h1:UaY6D88yBXWGrcu/PcyLWyL4lzrk5pSxSABPHftOvxs=
 github.com/foxxorcat/weiyun-sdk-go v0.1.3 h1:I5c5nfGErhq9DBumyjCVCggRA74jhgriMqRRFu5jeeY=
 github.com/foxxorcat/weiyun-sdk-go v0.1.3/go.mod h1:TPxzN0d2PahweUEHlOBWlwZSA+rELSUlGYMWgXRn9ps=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@@ -182,6 +184,7 @@ github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGF
 github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
 github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
+github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
 github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
 github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
 github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw=
@@ -213,6 +216,7 @@ github.com/googleapis/enterprise-certificate-proxy v0.2.5 h1:UR4rDjcgpgEnqpIEvki
 github.com/googleapis/enterprise-certificate-proxy v0.2.5/go.mod h1:RxW0N9901Cko1VOCW3SXCpWP+mlIEkk2tP7jnHy9a3w=
 github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas=
 github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU=
+github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY=
 github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY=
 github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@@ -220,6 +224,8 @@ github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY
 github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
 github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
 github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
+github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek=
+github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
 github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
 github.com/hirochachacha/go-smb2 v1.1.0 h1:b6hs9qKIql9eVXAiN0M2wSFY5xnhbHAQoCwRKbaRTZI=
 github.com/hirochachacha/go-smb2 v1.1.0/go.mod h1:8F1A4d5EZzrGu5R7PU163UcMRDJQl4FtcxjBfsY8TZE=
@@ -259,6 +265,7 @@ github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHm
 github.com/jzelinskie/whirlpool v0.0.0-20201016144138-0675e54bb004 h1:G+9t9cEtnC9jFiTxyptEKuNIAbiN5ZCQzX2a74lj3xg=
 github.com/jzelinskie/whirlpool v0.0.0-20201016144138-0675e54bb004/go.mod h1:KmHnJWQrgEvbuy0vcvj00gtMqbvNn1L+3YUZLK/B92c=
 github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
+github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/klauspost/compress v1.15.0/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
 github.com/klauspost/compress v1.15.6/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
@@ -279,6 +286,8 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
+github.com/larksuite/oapi-sdk-go/v3 v3.2.5 h1:MkmkfCHzvmi35EId9SeFPJMZ8bUsijnxwneAWHnnk0k=
+github.com/larksuite/oapi-sdk-go/v3 v3.2.5/go.mod h1:ZEplY+kwuIrj/nqw5uSCINNATcH3KdxSN7y+UxYY5fI=
 github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
 github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
 github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
@@ -481,6 +490,8 @@ github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25 h1:eDfebW/yfq9DtG9RO3K
 github.com/xhofe/gsync v0.0.0-20230917091818-2111ceb38a25/go.mod h1:fH4oNm5F9NfI5dLi0oIMtsLNKQOirUDbEMCIBb/7SU0=
 github.com/xhofe/tache v0.1.1 h1:O5QY4cVjIGELx3UGh6LbVAc18MWGXgRNQjMt72x6w/8=
 github.com/xhofe/tache v0.1.1/go.mod h1:iKumPFvywf30FRpAHHCt64G0JHLMzT0K+wyGedHsmTQ=
+github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
+github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||||
github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
|
github.com/yusufpapurcu/wmi v1.2.3 h1:E1ctvB7uKFMOJw3fdOW32DwGE9I7t++CRUEMKvFoFiw=
|
||||||
github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
github.com/yusufpapurcu/wmi v1.2.3/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
|
||||||
@ -495,6 +506,7 @@ golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
|||||||
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190513172903-22d7a77e9e5f/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200728195943-123391ffb6de/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||||
@ -512,10 +524,14 @@ golang.org/x/exp v0.0.0-20240222234643-814bf88cf225/go.mod h1:CxmFvTBINI24O/j8iY
|
|||||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||||
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
|
golang.org/x/image v0.15.0 h1:kOELfmgrmJlw4Cdb7g/QGuB3CvDrXbqEIww/pNtNBm8=
|
||||||
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
|
golang.org/x/image v0.15.0/go.mod h1:HUYqC05R2ZcZ3ejNQsIHQDQiwWM4JBqmm6MKANTp4LE=
|
||||||
|
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
|
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||||
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||||
@ -532,6 +548,8 @@ golang.org/x/oauth2 v0.16.0 h1:aDkGMBSYxElaoP81NpoUoz2oo2R2wHdZpGToUxfyQrQ=
|
|||||||
golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o=
|
golang.org/x/oauth2 v0.16.0/go.mod h1:hqZ+0LWXsiVoZpeld6jVt06P3adbS2Uu911W1SsJv2o=
|
||||||
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
|
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
|
golang.org/x/sync v0.6.0 h1:5BMeUDZ7vkXGfEr1x9B4bRcTH4lpkTkpdh0T/J+qjbQ=
|
||||||
@ -594,12 +612,16 @@ golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGm
|
|||||||
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190829051458-42f498d34c4d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20190829051458-42f498d34c4d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE=
|
||||||
|
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||||
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
|
||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ=
|
golang.org/x/tools v0.18.0 h1:k8NLag8AGHnn+PHbl7g43CtqZAwG60vZkLqgyZgIHgQ=
|
||||||
golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg=
|
golang.org/x/tools v0.18.0/go.mod h1:GL7B4CwcLLeo59yx/9UWWuNOW1n3VZ4f5axWfML7Lcg=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
google.golang.org/api v0.134.0 h1:ktL4Goua+UBgoP1eL1/60LwZJqa1sIzkLmvoR3hR6Gw=
|
google.golang.org/api v0.134.0 h1:ktL4Goua+UBgoP1eL1/60LwZJqa1sIzkLmvoR3hR6Gw=
|
||||||
google.golang.org/api v0.134.0/go.mod h1:sjRL3UnjTx5UqNQS9EWr9N8p7xbHpy1k0XGRLCf3Spk=
|
google.golang.org/api v0.134.0/go.mod h1:sjRL3UnjTx5UqNQS9EWr9N8p7xbHpy1k0XGRLCf3Spk=
|
||||||
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
|
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
|
||||||
@@ -1,6 +1,7 @@
 package authn
 
 import (
+    "fmt"
     "net/http"
     "net/url"
 
@@ -19,7 +20,7 @@ func NewAuthnInstance(r *http.Request) (*webauthn.WebAuthn, error) {
        RPDisplayName: setting.GetStr(conf.SiteTitle),
        RPID: siteUrl.Hostname(),
        //RPOrigin: siteUrl.String(),
-       RPOrigins: []string{siteUrl.String()},
+       RPOrigins: []string{fmt.Sprintf("%s://%s", siteUrl.Scheme, siteUrl.Host)},
        // RPOrigin: "http://localhost:5173"
    })
 }
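A short gloss on the WebAuthn hunk above: `RPOrigins` has to hold the browser origin, i.e. `scheme://host[:port]` with no path, while `siteUrl.String()` reproduces the full configured site URL, which may include a path. Below is a minimal, self-contained sketch of the same derivation using only the standard library; the function and variable names are illustrative, not taken from the repository.

```go
package main

import (
	"fmt"
	"net/url"
)

// originFromSiteURL reduces a configured site URL to the value a WebAuthn
// relying party should use as its origin: scheme plus host (and port).
// Illustrative sketch; not code from the alist repository.
func originFromSiteURL(raw string) (string, error) {
	u, err := url.Parse(raw)
	if err != nil {
		return "", err
	}
	return fmt.Sprintf("%s://%s", u.Scheme, u.Host), nil
}

func main() {
	origin, _ := originFromSiteURL("https://files.example.com:8443/dash")
	fmt.Println(origin) // https://files.example.com:8443
}
```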
@@ -21,8 +21,8 @@ func LoadStorages() {
            if err != nil {
                utils.Log.Errorf("failed get enabled storages: %+v", err)
            } else {
-               utils.Log.Infof("success load storage: [%s], driver: [%s]",
-                   storages[i].MountPath, storages[i].Driver)
+               utils.Log.Infof("success load storage: [%s], driver: [%s], order: [%d]",
+                   storages[i].MountPath, storages[i].Driver, storages[i].Order)
            }
        }
        conf.StoragesLoaded = true
@@ -77,7 +77,7 @@ type Config struct {
    JwtSecret string `json:"jwt_secret" env:"JWT_SECRET"`
    TokenExpiresIn int `json:"token_expires_in" env:"TOKEN_EXPIRES_IN"`
    Database Database `json:"database" envPrefix:"DB_"`
-   Meilisearch Meilisearch `json:"meilisearch" env:"MEILISEARCH"`
+   Meilisearch Meilisearch `json:"meilisearch" envPrefix:"MEILISEARCH_"`
    Scheme Scheme `json:"scheme"`
    TempDir string `json:"temp_dir" env:"TEMP_DIR"`
    BleveDir string `json:"bleve_dir" env:"BLEVE_DIR"`
@@ -2,6 +2,7 @@ package db
 
 import (
    "fmt"
+   "sort"
 
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/pkg/errors"
@@ -65,5 +66,8 @@ func GetEnabledStorages() ([]model.Storage, error) {
    if err := db.Where(fmt.Sprintf("%s = ?", columnName("disabled")), false).Find(&storages).Error; err != nil {
        return nil, errors.WithStack(err)
    }
+   sort.Slice(storages, func(i, j int) bool {
+       return storages[i].Order < storages[j].Order
+   })
    return storages, nil
 }
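With the `sort` import and the `sort.Slice` call above, `GetEnabledStorages` now returns storages in ascending `Order`, which is what makes the extra `order: [%d]` field in the `LoadStorages` log line meaningful. A tiny self-contained sketch of the same ordering behaviour; the `storage` struct here is a stand-in, not the project's `model.Storage`.

```go
package main

import (
	"fmt"
	"sort"
)

// storage is a stand-in struct for this sketch only.
type storage struct {
	MountPath string
	Order     int
}

func main() {
	storages := []storage{
		{MountPath: "/b", Order: 2},
		{MountPath: "/a", Order: 0},
		{MountPath: "/c", Order: 1},
	}
	// Same idea as the diff above: sort ascending by Order before mounting.
	sort.Slice(storages, func(i, j int) bool {
		return storages[i].Order < storages[j].Order
	})
	fmt.Println(storages) // [{/a 0} {/c 1} {/b 2}]
}
```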
@@ -12,6 +12,7 @@ type Config struct {
    CheckStatus bool `json:"-"`
    Alert string `json:"alert"` //info,success,warning,danger
    NoOverwriteUpload bool `json:"-"` // whether to support overwrite upload
+   ProxyRangeOption bool `json:"-"`
 }
 
 func (c Config) MustProxy() bool {
@@ -3,6 +3,7 @@ package errs
 import (
    "errors"
    "fmt"
+
    pkgerr "github.com/pkg/errors"
 )
 
@@ -33,3 +34,6 @@ func IsNotFoundError(err error) bool {
 func IsNotSupportError(err error) bool {
    return errors.Is(pkgerr.Cause(err), NotSupport)
 }
+func IsNotImplement(err error) bool {
+   return errors.Is(pkgerr.Cause(err), NotImplement)
+}
@@ -41,6 +41,7 @@ type FileStreamer interface {
    GetMimetype() string
    //SetReader(io.Reader)
    NeedStore() bool
+   IsForceStreamUpload() bool
    GetExist() Obj
    SetExist(Obj)
    //for a non-seekable Stream, RangeRead supports peeking some data, and CacheFullInTempFile still works
@@ -27,6 +27,7 @@ type Sort struct {
 type Proxy struct {
    WebProxy bool `json:"web_proxy"`
    WebdavPolicy string `json:"webdav_policy"`
+   ProxyRange bool `json:"proxy_range"`
    DownProxyUrl string `json:"down_proxy_url"`
 }
 
@@ -4,6 +4,7 @@ import (
    "bytes"
    "context"
    "fmt"
+   "github.com/alist-org/alist/v3/pkg/utils"
    "io"
    "math"
    "net/http"
@@ -271,7 +272,7 @@ func (d *downloader) tryDownloadChunk(params *HttpRequestParams, ch *chunk) (int
        }
    }
 
-   n, err := io.Copy(ch.buf, resp.Body)
+   n, err := utils.CopyWithBuffer(ch.buf, resp.Body)
 
    if err != nil {
        return n, &errReadingBody{err: err}
@@ -162,7 +162,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
                pw.CloseWithError(err)
                return
            }
-           if _, err := io.CopyN(part, reader, ra.Length); err != nil {
+           if _, err := utils.CopyWithBufferN(part, reader, ra.Length); err != nil {
                pw.CloseWithError(err)
                return
            }
@@ -182,7 +182,7 @@ func ServeHTTP(w http.ResponseWriter, r *http.Request, name string, modTime time
    w.WriteHeader(code)
 
    if r.Method != "HEAD" {
-       written, err := io.CopyN(w, sendContent, sendSize)
+       written, err := utils.CopyWithBufferN(w, sendContent, sendSize)
        if err != nil {
            log.Warnf("ServeHttp error. err: %s ", err)
            if written != sendSize {
@@ -2,6 +2,7 @@ package net
 
 import (
    "fmt"
+   "github.com/alist-org/alist/v3/pkg/utils"
    "io"
    "math"
    "mime/multipart"
@@ -327,10 +328,10 @@ func GetRangedHttpReader(readCloser io.ReadCloser, offset, length int64) (io.Rea
        length_int = int(length)
 
    if offset > 100*1024*1024 {
-       log.Warnf("offset is more than 100MB, if loading data from internet, high-latency and wasting of bandwith is expected")
+       log.Warnf("offset is more than 100MB, if loading data from internet, high-latency and wasting of bandwidth is expected")
    }
 
-   if _, err := io.Copy(io.Discard, io.LimitReader(readCloser, offset)); err != nil {
+   if _, err := utils.CopyWithBuffer(io.Discard, io.LimitReader(readCloser, offset)); err != nil {
        return nil, err
    }
 
@@ -93,6 +93,17 @@ func getMainItems(config driver.Config) []driver.Item {
                Required: true,
            },
        }...)
+       if config.ProxyRangeOption {
+           item := driver.Item{
+               Name: "proxy_range",
+               Type: conf.TypeBool,
+               Help: "Need to enable proxy",
+           }
+           if config.Name == "139Yun" {
+               item.Default = "true"
+           }
+           items = append(items, item)
+       }
    } else {
        items = append(items, driver.Item{
            Name: "webdav_policy",
@@ -60,7 +60,6 @@ func TestGetStorageVirtualFilesByPath(t *testing.T) {
 }
 
 func TestGetBalancedStorage(t *testing.T) {
-   setupStorages(t)
    set := mapset.NewSet[string]()
    for i := 0; i < 5; i++ {
        storage := op.GetBalancedStorage("/a/d/e1")
@@ -18,9 +18,10 @@ type FileStream struct {
    Ctx context.Context
    model.Obj
    io.Reader
    Mimetype string
    WebPutAsTask bool
-   Exist model.Obj //the file existed in the destination, we can reuse some info since we wil overwrite it
+   ForceStreamUpload bool
+   Exist model.Obj //the file existed in the destination, we can reuse some info since we wil overwrite it
    utils.Closers
    tmpFile *os.File //if present, tmpFile has full content, it will be deleted at last
    peekBuff *bytes.Reader
@@ -43,6 +44,11 @@ func (f *FileStream) GetMimetype() string {
 func (f *FileStream) NeedStore() bool {
    return f.WebPutAsTask
 }
 
+func (f *FileStream) IsForceStreamUpload() bool {
+   return f.ForceStreamUpload
+}
+
 func (f *FileStream) Close() error {
    var err1, err2 error
    err1 = f.Closers.Close()
@@ -98,7 +104,7 @@ func (f *FileStream) RangeRead(httpRange http_range.Range) (io.Reader, error) {
    if httpRange.Start == 0 && httpRange.Length <= InMemoryBufMaxSizeBytes && f.peekBuff == nil {
        bufSize := utils.Min(httpRange.Length, f.GetSize())
        newBuf := bytes.NewBuffer(make([]byte, 0, bufSize))
-       n, err := io.CopyN(newBuf, f.Reader, bufSize)
+       n, err := utils.CopyWithBufferN(newBuf, f.Reader, bufSize)
        if err != nil {
            return nil, err
        }
@@ -4,6 +4,7 @@ import (
    "bytes"
    "encoding/xml"
    "fmt"
+   "github.com/alist-org/alist/v3/pkg/utils"
    "io"
    "net/http"
    "net/url"
@@ -419,7 +420,7 @@ func (c *Client) ReadStreamRange(path string, offset, length int64) (io.ReadClos
    // stream in rs.Body
    if rs.StatusCode == 200 {
        // discard first 'offset' bytes.
-       if _, err := io.Copy(io.Discard, io.LimitReader(rs.Body, offset)); err != nil {
+       if _, err := utils.CopyWithBuffer(io.Discard, io.LimitReader(rs.Body, offset)); err != nil {
            return nil, newPathErrorErr("ReadStreamRange", path, err)
        }
 
@@ -32,7 +32,7 @@ func CopyFile(src, dst string) error {
    }
    defer dstfd.Close()
 
-   if _, err = io.Copy(dstfd, srcfd); err != nil {
+   if _, err = CopyWithBuffer(dstfd, srcfd); err != nil {
        return err
    }
    if srcinfo, err = os.Stat(src); err != nil {
@@ -121,7 +121,7 @@ func CreateTempFile(r io.Reader, size int64) (*os.File, error) {
    if err != nil {
        return nil, err
    }
-   readBytes, err := io.Copy(f, r)
+   readBytes, err := CopyWithBuffer(f, r)
    if err != nil {
        _ = os.Remove(f.Name())
        return nil, errs.NewErr(err, "CreateTempFile failed")
@@ -96,7 +96,7 @@ func HashData(hashType *HashType, data []byte, params ...any) string {
 // HashReader get hash of one hashType from a reader
 func HashReader(hashType *HashType, reader io.Reader, params ...any) (string, error) {
    h := hashType.NewFunc(params...)
-   _, err := io.Copy(h, reader)
+   _, err := CopyWithBuffer(h, reader)
    if err != nil {
        return "", errs.NewErr(err, "HashReader error")
    }
@@ -4,7 +4,6 @@ import (
    "bytes"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
-   "io"
    "testing"
 )
 
@@ -36,7 +35,7 @@ var hashTestSet = []hashTest{
 func TestMultiHasher(t *testing.T) {
    for _, test := range hashTestSet {
        mh := NewMultiHasher([]*HashType{MD5, SHA1, SHA256})
-       n, err := io.Copy(mh, bytes.NewBuffer(test.input))
+       n, err := CopyWithBuffer(mh, bytes.NewBuffer(test.input))
        require.NoError(t, err)
        assert.Len(t, test.input, int(n))
        hashInfo := mh.GetHashInfo()
@@ -6,6 +6,7 @@ import (
    "errors"
    "fmt"
    "io"
+   "sync"
    "time"
 
    "golang.org/x/exp/constraints"
@@ -29,7 +30,7 @@ func CopyWithCtx(ctx context.Context, out io.Writer, in io.Reader, size int64, p
    // possible in the call process.
    var finish int64 = 0
    s := size / 100
-   _, err := io.Copy(out, readerFunc(func(p []byte) (int, error) {
+   _, err := CopyWithBuffer(out, readerFunc(func(p []byte) (int, error) {
        // golang non-blocking channel: https://gobyexample.com/non-blocking-channel-operations
        select {
        // if context has been canceled
@@ -204,3 +205,31 @@ func Max[T constraints.Ordered](a, b T) T {
    }
    return a
 }
+
+var IoBuffPool = &sync.Pool{
+   New: func() interface{} {
+       return make([]byte, 32*1024*2) // Two times of size in io package
+   },
+}
+
+func CopyWithBuffer(dst io.Writer, src io.Reader) (written int64, err error) {
+   buff := IoBuffPool.Get().([]byte)
+   defer IoBuffPool.Put(buff)
+   written, err = io.CopyBuffer(dst, src, buff)
+   if err != nil {
+       return
+   }
+   return written, nil
+}
+
+func CopyWithBufferN(dst io.Writer, src io.Reader, n int64) (written int64, err error) {
+   written, err = CopyWithBuffer(dst, io.LimitReader(src, n))
+   if written == n {
+       return n, nil
+   }
+   if written < n && err == nil {
+       // src stopped early; must have been EOF.
+       err = io.EOF
+   }
+   return
+}
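The new `CopyWithBuffer`/`CopyWithBufferN` helpers above draw a 64 KiB scratch buffer from a `sync.Pool` and hand it to `io.CopyBuffer`, instead of letting every plain `io.Copy` allocate its own 32 KiB buffer; with many concurrent transfers (the downloader, WebDAV client, hashing, and HTTP range serving all switch to it in the hunks above) that trims allocations and GC pressure. A standalone sketch of the same pattern, independent of the repository's `utils` package:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
	"sync"
)

// bufPool hands out reusable copy buffers, mirroring the IoBuffPool idea above.
// Illustrative sketch; not code from the alist repository.
var bufPool = sync.Pool{
	New: func() interface{} { return make([]byte, 64*1024) },
}

func copyWithBuffer(dst io.Writer, src io.Reader) (int64, error) {
	buf := bufPool.Get().([]byte)
	defer bufPool.Put(buf)
	// io.CopyBuffer uses buf for the copy when neither src nor dst
	// short-circuits via WriterTo/ReaderFrom.
	return io.CopyBuffer(dst, src, buf)
}

func main() {
	var out bytes.Buffer
	n, err := copyWithBuffer(&out, strings.NewReader("hello world"))
	fmt.Println(n, err, out.String()) // 11 <nil> hello world
}
```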
@@ -37,3 +37,28 @@ func NewDebounce2(interval time.Duration, f func()) func() {
        (*time.Timer)(timer).Reset(interval)
    }
 }
+
+func NewThrottle(interval time.Duration) func(func()) {
+   var lastCall time.Time
+
+   return func(fn func()) {
+       now := time.Now()
+       if now.Sub(lastCall) < interval {
+           return
+       }
+       time.AfterFunc(interval, fn)
+       lastCall = now
+   }
+}
+
+func NewThrottle2(interval time.Duration, fn func()) func() {
+   var lastCall time.Time
+   return func() {
+       now := time.Now()
+       if now.Sub(lastCall) < interval {
+           return
+       }
+       time.AfterFunc(interval, fn)
+       lastCall = now
+   }
+}
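The `NewThrottle`/`NewThrottle2` helpers above drop any call that arrives within `interval` of the last accepted one and run the accepted call via `time.AfterFunc`, so the wrapped function executes at most roughly once per interval, and always delayed by one interval. A small self-contained usage sketch of the same shape (not code from the repository):

```go
package main

import (
	"fmt"
	"sync"
	"time"
)

// newThrottle mirrors the NewThrottle2 helper above: at most one delayed
// invocation of fn per interval; calls inside the window are dropped.
// Illustrative sketch; not code from the alist repository.
func newThrottle(interval time.Duration, fn func()) func() {
	var lastCall time.Time
	return func() {
		now := time.Now()
		if now.Sub(lastCall) < interval {
			return
		}
		time.AfterFunc(interval, fn)
		lastCall = now
	}
}

func main() {
	var mu sync.Mutex
	count := 0
	tick := newThrottle(100*time.Millisecond, func() {
		mu.Lock()
		count++
		mu.Unlock()
	})
	for i := 0; i < 10; i++ { // ten rapid calls collapse into one scheduled run
		tick()
		time.Sleep(5 * time.Millisecond)
	}
	time.Sleep(200 * time.Millisecond)
	mu.Lock()
	fmt.Println("executions:", count) // expected: 1
	mu.Unlock()
}
```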
@@ -9,8 +9,10 @@ import (
 
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/net"
+   "github.com/alist-org/alist/v3/internal/stream"
    "github.com/alist-org/alist/v3/pkg/http_range"
    "github.com/alist-org/alist/v3/pkg/utils"
+   log "github.com/sirupsen/logrus"
 )
 
 func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.Obj) error {
@@ -82,3 +84,21 @@ func attachFileName(w http.ResponseWriter, file model.Obj) {
    w.Header().Set("Content-Disposition", fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`, fileName, url.PathEscape(fileName)))
    w.Header().Set("Content-Type", utils.GetMimeType(fileName))
 }
+
+var NoProxyRange = &model.RangeReadCloser{}
+
+func ProxyRange(link *model.Link, size int64) {
+   if link.MFile != nil {
+       return
+   }
+   if link.RangeReadCloser == nil {
+       var rrc, err = stream.GetRangeReadCloserFromLink(size, link)
+       if err != nil {
+           log.Warnf("ProxyRange error: %s", err)
+           return
+       }
+       link.RangeReadCloser = rrc
+   } else if link.RangeReadCloser == NoProxyRange {
+       link.RangeReadCloser = nil
+   }
+}
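To summarise the new helper above: `ProxyRange` is a lazy initialiser for range-proxying. If the link already exposes a local file (`link.MFile`) it does nothing; if no `RangeReadCloser` is attached it builds one from the link via `stream.GetRangeReadCloserFromLink`, logging a warning and leaving the link untouched on failure; and if the field holds the `NoProxyRange` sentinel it is reset to nil so no range proxying is attempted. The handler change further below only invokes it when the storage's new `proxy_range` option is enabled.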
@@ -106,6 +106,9 @@ func Proxy(c *gin.Context) {
            return
        }
    }
+   if storage.GetStorage().ProxyRange {
+       common.ProxyRange(link, file.GetSize())
+   }
    err = common.Proxy(c.Writer, c.Request, link, file)
    if err != nil {
        common.ErrorResp(c, err, 500, true)
@@ -398,7 +398,7 @@ func SSOLoginCallback(c *gin.Context) {
    }
    userID := utils.Json.Get(resp.Body(), idField).ToString()
    if utils.SliceContains([]string{"", "0"}, userID) {
-       common.ErrorResp(c, errors.New("error occured"), 400)
+       common.ErrorResp(c, errors.New("error occurred"), 400)
        return
    }
    if argument == "get_sso_id" {
@@ -34,12 +34,6 @@ func S3(g *gin.RouterGroup) {
 }
 
 func S3Server(g *gin.RouterGroup) {
-   if !conf.Conf.S3.Enable {
-       g.Any("/*path", func(c *gin.Context) {
-           common.ErrorStrResp(c, "S3 server is not enabled", 403)
-       })
-       return
-   }
    h, _ := s3.NewServer(context.Background())
    g.Any("/*path", gin.WrapH(h))
 }
@@ -12,7 +12,6 @@ import (
    "sync"
    "time"
 
-   "github.com/Mikubill/gofakes3"
    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/fs"
    "github.com/alist-org/alist/v3/internal/model"
@@ -20,12 +19,13 @@ import (
    "github.com/alist-org/alist/v3/internal/stream"
    "github.com/alist-org/alist/v3/pkg/http_range"
    "github.com/alist-org/alist/v3/pkg/utils"
+   "github.com/alist-org/gofakes3"
    "github.com/ncw/swift/v2"
 )
 
 var (
    emptyPrefix = &gofakes3.Prefix{}
-   timeFormat = "Mon, 2 Jan 2006 15:04:05.999999999 GMT"
+   timeFormat = "Mon, 2 Jan 2006 15:04:05 GMT"
 )
 
 // s3Backend implements the gofacess3.Backend interface to make an S3
@@ -42,13 +42,12 @@ func newBackend() gofakes3.Backend {
 }
 
 // ListBuckets always returns the default bucket.
-func (b *s3Backend) ListBuckets() ([]gofakes3.BucketInfo, error) {
+func (b *s3Backend) ListBuckets(ctx context.Context) ([]gofakes3.BucketInfo, error) {
    buckets, err := getAndParseBuckets()
    if err != nil {
        return nil, err
    }
    var response []gofakes3.BucketInfo
-   ctx := context.Background()
    for _, b := range buckets {
        node, _ := fs.Get(ctx, b.Path, &fs.GetArgs{})
        response = append(response, gofakes3.BucketInfo{
@@ -61,7 +60,7 @@ func (b *s3Backend) ListBuckets() ([]gofakes3.BucketInfo, error) {
 }
 
 // ListBucket lists the objects in the given bucket.
-func (b *s3Backend) ListBucket(bucketName string, prefix *gofakes3.Prefix, page gofakes3.ListBucketPage) (*gofakes3.ObjectList, error) {
+func (b *s3Backend) ListBucket(ctx context.Context, bucketName string, prefix *gofakes3.Prefix, page gofakes3.ListBucketPage) (*gofakes3.ObjectList, error) {
    bucket, err := getBucketByName(bucketName)
    if err != nil {
        return nil, err
@@ -97,8 +96,7 @@ func (b *s3Backend) ListBucket(bucketName string, prefix *gofakes3.Prefix, page
 // HeadObject returns the fileinfo for the given object name.
 //
 // Note that the metadata is not supported yet.
-func (b *s3Backend) HeadObject(bucketName, objectName string) (*gofakes3.Object, error) {
-   ctx := context.Background()
+func (b *s3Backend) HeadObject(ctx context.Context, bucketName, objectName string) (*gofakes3.Object, error) {
    bucket, err := getBucketByName(bucketName)
    if err != nil {
        return nil, err
@@ -141,8 +139,7 @@ func (b *s3Backend) HeadObject(bucketName, objectName string) (*gofakes3.Object,
 }
 
 // GetObject fetchs the object from the filesystem.
-func (b *s3Backend) GetObject(bucketName, objectName string, rangeRequest *gofakes3.ObjectRangeRequest) (obj *gofakes3.Object, err error) {
-   ctx := context.Background()
+func (b *s3Backend) GetObject(ctx context.Context, bucketName, objectName string, rangeRequest *gofakes3.ObjectRangeRequest) (obj *gofakes3.Object, err error) {
    bucket, err := getBucketByName(bucketName)
    if err != nil {
        return nil, err
@@ -251,18 +248,17 @@ func (b *s3Backend) GetObject(bucketName, objectName string, rangeRequest *gofak
 }
 
 // TouchObject creates or updates meta on specified object.
-func (b *s3Backend) TouchObject(fp string, meta map[string]string) (result gofakes3.PutObjectResult, err error) {
+func (b *s3Backend) TouchObject(ctx context.Context, fp string, meta map[string]string) (result gofakes3.PutObjectResult, err error) {
    //TODO: implement
    return result, gofakes3.ErrNotImplemented
 }
 
 // PutObject creates or overwrites the object with the given name.
 func (b *s3Backend) PutObject(
-   bucketName, objectName string,
+   ctx context.Context, bucketName, objectName string,
    meta map[string]string,
    input io.Reader, size int64,
 ) (result gofakes3.PutObjectResult, err error) {
-   ctx := context.Background()
    bucket, err := getBucketByName(bucketName)
    if err != nil {
        return result, err
@@ -316,9 +312,9 @@ func (b *s3Backend) PutObject(
 }
 
 // DeleteMulti deletes multiple objects in a single request.
-func (b *s3Backend) DeleteMulti(bucketName string, objects ...string) (result gofakes3.MultiDeleteResult, rerr error) {
+func (b *s3Backend) DeleteMulti(ctx context.Context, bucketName string, objects ...string) (result gofakes3.MultiDeleteResult, rerr error) {
    for _, object := range objects {
-       if err := b.deleteObject(bucketName, object); err != nil {
+       if err := b.deleteObject(ctx, bucketName, object); err != nil {
            utils.Log.Errorf("serve s3", "delete object failed: %v", err)
            result.Error = append(result.Error, gofakes3.ErrorResult{
                Code: gofakes3.ErrInternal,
@@ -336,13 +332,12 @@ func (b *s3Backend) DeleteMulti(bucketName string, objects ...string) (result go
 }
 
 // DeleteObject deletes the object with the given name.
-func (b *s3Backend) DeleteObject(bucketName, objectName string) (result gofakes3.ObjectDeleteResult, rerr error) {
-   return result, b.deleteObject(bucketName, objectName)
+func (b *s3Backend) DeleteObject(ctx context.Context, bucketName, objectName string) (result gofakes3.ObjectDeleteResult, rerr error) {
+   return result, b.deleteObject(ctx, bucketName, objectName)
 }
 
 // deleteObject deletes the object from the filesystem.
-func (b *s3Backend) deleteObject(bucketName, objectName string) error {
-   ctx := context.Background()
+func (b *s3Backend) deleteObject(ctx context.Context, bucketName, objectName string) error {
    bucket, err := getBucketByName(bucketName)
    if err != nil {
        return err
@@ -362,17 +357,17 @@ func (b *s3Backend) deleteObject(bucketName, objectName string) error {
 }
 
 // CreateBucket creates a new bucket.
-func (b *s3Backend) CreateBucket(name string) error {
+func (b *s3Backend) CreateBucket(ctx context.Context, name string) error {
    return gofakes3.ErrNotImplemented
 }
 
 // DeleteBucket deletes the bucket with the given name.
-func (b *s3Backend) DeleteBucket(name string) error {
+func (b *s3Backend) DeleteBucket(ctx context.Context, name string) error {
    return gofakes3.ErrNotImplemented
 }
 
 // BucketExists checks if the bucket exists.
-func (b *s3Backend) BucketExists(name string) (exists bool, err error) {
+func (b *s3Backend) BucketExists(ctx context.Context, name string) (exists bool, err error) {
    buckets, err := getAndParseBuckets()
    if err != nil {
        return false, err
@@ -386,13 +381,12 @@ func (b *s3Backend) BucketExists(name string) (exists bool, err error) {
 }
 
 // CopyObject copy specified object from srcKey to dstKey.
-func (b *s3Backend) CopyObject(srcBucket, srcKey, dstBucket, dstKey string, meta map[string]string) (result gofakes3.CopyObjectResult, err error) {
+func (b *s3Backend) CopyObject(ctx context.Context, srcBucket, srcKey, dstBucket, dstKey string, meta map[string]string) (result gofakes3.CopyObjectResult, err error) {
    if srcBucket == dstBucket && srcKey == dstKey {
        //TODO: update meta
        return result, nil
    }
 
-   ctx := context.Background()
    srcB, err := getBucketByName(srcBucket)
    if err != nil {
        return result, err
@@ -403,7 +397,7 @@ func (b *s3Backend) CopyObject(srcBucket, srcKey, dstBucket, dstKey string, meta
    fmeta, _ := op.GetNearestMeta(srcFp)
    srcNode, err := fs.Get(context.WithValue(ctx, "meta", fmeta), srcFp, &fs.GetArgs{})
 
-   c, err := b.GetObject(srcBucket, srcKey, nil)
+   c, err := b.GetObject(ctx, srcBucket, srcKey, nil)
    if err != nil {
        return
    }
@@ -420,7 +414,7 @@ func (b *s3Backend) CopyObject(srcBucket, srcKey, dstBucket, dstKey string, meta
        meta["mtime"] = swift.TimeToFloatString(srcNode.ModTime())
    }
 
-   _, err = b.PutObject(dstBucket, dstKey, meta, c.Contents, c.Size)
+   _, err = b.PutObject(ctx, dstBucket, dstKey, meta, c.Contents, c.Size)
    if err != nil {
        return
    }
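Every change in the backend above follows one mechanical pattern: the `github.com/Mikubill/gofakes3` import is swapped for the `github.com/alist-org/gofakes3` fork, each `gofakes3.Backend` method gains a leading `ctx context.Context` parameter, and the local `ctx := context.Background()` declarations are dropped so the caller's context flows through to `fs.Get` and friends. A generic, self-contained sketch of that refactor follows; the `backend` interface below is deliberately hypothetical and not the real gofakes3 interface.

```go
package main

import (
	"context"
	"fmt"
)

// backend is a hypothetical stand-in for an interface whose methods
// previously took no context. Illustrative sketch only.
type backend interface {
	HeadObject(ctx context.Context, bucket, object string) (string, error)
}

type myBackend struct{}

// Before the refactor this method would have started with
// ctx := context.Background(); now it uses the caller's ctx directly,
// so cancellation and deadlines propagate from the request.
func (myBackend) HeadObject(ctx context.Context, bucket, object string) (string, error) {
	if err := ctx.Err(); err != nil {
		return "", err
	}
	return fmt.Sprintf("%s/%s", bucket, object), nil
}

func main() {
	var b backend = myBackend{}
	info, err := b.HeadObject(context.Background(), "bucket", "key")
	fmt.Println(info, err) // bucket/key <nil>
}
```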
Some files were not shown because too many files have changed in this diff.