Compare commits
39 Commits
feat/dropb...v3.22.1
SHA1
afcfbf02ea
cad04e07dd
30f732138c
04034bd03b
6ec9a8d4c7
3f7882b467
a4511c1963
9d1f122717
5dd73d80d8
fce872bc1b
df6c4c80c2
d2ff040cf8
a31af209cc
3f8b3da52b
6887f14ec6
3e0de5eaac
61101a60f4
3529023bf9
d1d1a089a4
fa66358b1e
2b533e4b91
d3530a8d80
6052eb3512
d17f7f7cad
8bdc67ec3d
4fabc27366
e4c7b0f17c
5e8bfb017e
7d20a01dba
59dbf4496f
12f40608e6
89832c296f
f09bb88846
c518f59528
e9c74f9959
21b8e7f6e5
2ae9cd8634
cfee536b96
1c8fe3b24c
@@ -73,6 +73,7 @@ English | [中文](./README_cn.md) | [Contributing](./CONTRIBUTING.md) | [CODE_O
- [x] SMB
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode

@@ -72,6 +72,7 @@
- [x] SMB
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] 部署方便,开箱即用
- [x] 文件预览(PDF、markdown、代码、纯文本……)
- [x] 画廊模式下的图像预览
@@ -3,9 +3,11 @@ package cmd

import (
"context"
"fmt"
"net"
"net/http"
"os"
"os/signal"
"strconv"
"sync"
"syscall"
"time"

@@ -42,26 +44,50 @@ the address is defined in config file`,
r := gin.New()
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
server.Init(r)
var httpSrv, httpsSrv *http.Server
if !conf.Conf.Scheme.DisableHttp {
httpBase := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
var httpSrv, httpsSrv, unixSrv *http.Server
if conf.Conf.Scheme.HttpPort != -1 {
httpBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpPort)
utils.Log.Infof("start HTTP server @ %s", httpBase)
httpSrv = &http.Server{Addr: httpBase, Handler: r}
go func() {
err := httpSrv.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start: %s", err.Error())
utils.Log.Fatalf("failed to start http: %s", err.Error())
}
}()
}
if conf.Conf.Scheme.Https {
httpsBase := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.HttpsPort)
if conf.Conf.Scheme.HttpsPort != -1 {
httpsBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpsPort)
utils.Log.Infof("start HTTPS server @ %s", httpsBase)
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
go func() {
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start: %s", err.Error())
utils.Log.Fatalf("failed to start https: %s", err.Error())
}
}()
}
if conf.Conf.Scheme.UnixFile != "" {
utils.Log.Infof("start unix server @ %s", conf.Conf.Scheme.UnixFile)
unixSrv = &http.Server{Handler: r}
go func() {
listener, err := net.Listen("unix", conf.Conf.Scheme.UnixFile)
if err != nil {
utils.Log.Fatalf("failed to listen unix: %+v", err)
}
// set socket file permission
mode, err := strconv.ParseUint(conf.Conf.Scheme.UnixFilePerm, 8, 32)
if err != nil {
utils.Log.Errorf("failed to parse socket file permission: %+v", err)
} else {
err = os.Chmod(conf.Conf.Scheme.UnixFile, os.FileMode(mode))
if err != nil {
utils.Log.Errorf("failed to chmod socket file: %+v", err)
}
}
err = unixSrv.Serve(listener)
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start unix: %s", err.Error())
}
}()
}

@@ -78,21 +104,30 @@ the address is defined in config file`,
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
defer cancel()
var wg sync.WaitGroup
if !conf.Conf.Scheme.DisableHttp {
if conf.Conf.Scheme.HttpPort != -1 {
wg.Add(1)
go func() {
defer wg.Done()
if err := httpSrv.Shutdown(ctx); err != nil {
utils.Log.Fatal("HTTP server shutdown:", err)
utils.Log.Fatal("HTTP server shutdown err: ", err)
}
}()
}
if conf.Conf.Scheme.Https {
if conf.Conf.Scheme.HttpsPort != -1 {
wg.Add(1)
go func() {
defer wg.Done()
if err := httpsSrv.Shutdown(ctx); err != nil {
utils.Log.Fatal("HTTPS server shutdown:", err)
utils.Log.Fatal("HTTPS server shutdown err: ", err)
}
}()
}
if conf.Conf.Scheme.UnixFile != "" {
wg.Add(1)
go func() {
defer wg.Done()
if err := unixSrv.Shutdown(ctx); err != nil {
utils.Log.Fatal("Unix server shutdown err: ", err)
}
}()
}
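For reference, a minimal client sketch (not part of this diff) showing how the new unix listener would be exercised from Go; the socket path "/tmp/alist.sock" and the "/ping" path are placeholders — use whatever conf.Conf.Scheme.UnixFile actually points at and any route registered by server.Init.

package main

import (
	"context"
	"fmt"
	"io"
	"net"
	"net/http"
)

func main() {
	client := &http.Client{
		Transport: &http.Transport{
			// Dial the unix socket instead of TCP; the host in the URL is ignored.
			DialContext: func(ctx context.Context, _, _ string) (net.Conn, error) {
				return (&net.Dialer{}).DialContext(ctx, "unix", "/tmp/alist.sock")
			},
		},
	}
	resp, err := client.Get("http://unix/ping") // placeholder route
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}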
@@ -1,11 +1,9 @@
package _123

import (
"bytes"
"context"
"crypto/md5"
"encoding/base64"
"encoding/binary"
"encoding/hex"
"fmt"
"io"

@@ -45,6 +43,9 @@ func (d *Pan123) Init(ctx context.Context) error {
}

func (d *Pan123) Drop(ctx context.Context) error {
_, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{})
}, nil)
return nil
}

@@ -98,7 +99,7 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
}
u_ := u.String()
log.Debug("download url: ", u_)
res, err := base.NoRedirectClient.R().Get(u_)
res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
if err != nil {
return nil, err
}

@@ -109,9 +110,12 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
log.Debugln("res code: ", res.StatusCode())
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
} else if res.StatusCode() == 200 {
} else if res.StatusCode() < 300 {
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
}
link.Header = http.Header{
"Referer": []string{"https://www.123pan.com/"},
}
return &link, nil
} else {
return nil, fmt.Errorf("can't convert obj")

@@ -177,24 +181,9 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
}

func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
const DEFAULT int64 = 10485760
var uploadFile io.Reader
// const DEFAULT int64 = 10485760
h := md5.New()
if d.StreamUpload && stream.GetSize() > DEFAULT {
// only hash the first 10 MiB
buf := bytes.NewBuffer(make([]byte, 0, DEFAULT))
if n, err := io.CopyN(io.MultiWriter(buf, h), stream, DEFAULT); err != io.EOF && n == 0 {
return err
}
// add extra parameters to avoid MD5 collisions
h.Write([]byte(stream.GetName()))
num := make([]byte, 8)
binary.BigEndian.PutUint64(num, uint64(stream.GetSize()))
h.Write(num)
// reassemble the buffered head and the rest of the stream
uploadFile = io.MultiReader(buf, stream)
} else {
// calculate the MD5 of the full file
// need to calculate md5 of the full content
tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
if err != nil {
return err

@@ -210,8 +199,6 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
if err != nil {
return err
}
uploadFile = tempFile
}
etag := hex.EncodeToString(h.Sum(nil))
data := base.Json{
"driveId": 0,

@@ -234,7 +221,8 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return nil
}
if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
err = d.newUpload(ctx, &resp, stream, uploadFile, up)
err = d.newUpload(ctx, &resp, stream, tempFile, up)
return err
} else {
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),

@@ -250,7 +238,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
Body: uploadFile,
Body: tempFile,
}
_, err = uploader.UploadWithContext(ctx, input)
}
@@ -11,7 +11,6 @@ type Addition struct {
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
StreamUpload bool `json:"stream_upload"`
AccessToken string
}
@@ -1,7 +1,10 @@
package _123

import (
"net/url"
"path"
"strconv"
"strings"
"time"

"github.com/alist-org/alist/v3/internal/model"

@@ -42,7 +45,30 @@ func (f File) GetID() string {
return strconv.FormatInt(f.FileId, 10)
}

func (f File) Thumb() string {
if f.DownloadUrl == "" {
return ""
}
du, err := url.Parse(f.DownloadUrl)
if err != nil {
return ""
}
du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
query := du.Query()
query.Set("w", "70")
query.Set("h", "70")
if !query.Has("type") {
query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
}
if !query.Has("trade_key") {
query.Set("trade_key", "123pan-thumbnail")
}
du.RawQuery = query.Encode()
return du.String()
}

var _ model.Obj = (*File)(nil)
var _ model.Thumb = (*File)(nil)

//func (f File) Thumb() string {
//
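Worked example (illustration only; the host, path, and query below are made up, not a real 123pan URL): the rewrite Thumb() performs on a DownloadUrl, swapping the "_24_24" suffix for "_70_70" and forcing the w/h/type/trade_key query parameters.

package main

import (
	"fmt"
	"net/url"
	"path"
	"strings"
)

func main() {
	downloadUrl := "https://example.123pan.invalid/video/foo.mp4_24_24?e=123" // hypothetical
	fileName := "foo.mp4"

	du, _ := url.Parse(downloadUrl)
	du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
	q := du.Query()
	q.Set("w", "70")
	q.Set("h", "70")
	if !q.Has("type") {
		q.Set("type", strings.TrimPrefix(path.Base(fileName), "."))
	}
	if !q.Has("trade_key") {
		q.Set("trade_key", "123pan-thumbnail")
	}
	du.RawQuery = q.Encode()
	// prints: https://example.123pan.invalid/video/foo.mp4_70_70?e=123&h=70&trade_key=123pan-thumbnail&type=foo.mp4&w=70
	fmt.Println(du.String())
}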
@@ -34,14 +34,17 @@ func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, star
return &s3PreSignedUrls, nil
}

func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp) error {
func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.FileStreamer, isMultipart bool) error {
data := base.Json{
"StorageNode": upReq.Data.StorageNode,
"bucket": upReq.Data.Bucket,
"fileId": upReq.Data.FileId,
"fileSize": file.GetSize(),
"isMultipart": isMultipart,
"key": upReq.Data.Key,
"uploadId": upReq.Data.UploadId,
"StorageNode": upReq.Data.StorageNode,
}
_, err := d.request(S3Complete, http.MethodPost, func(req *resty.Request) {
_, err := d.request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, nil)
return err

@@ -83,7 +86,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
}
}
// complete s3 upload
return d.completeS3(ctx, upReq)
return d.completeS3(ctx, upReq, file, chunkCount > 1)
}

func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader io.Reader, curSize int64, retry bool) error {
@@ -15,19 +15,24 @@ import (
// do others that not defined in Driver interface

const (
API = "https://www.123pan.com/b/api"
SignIn = API + "/user/sign_in"
UserInfo = API + "/user/info"
FileList = API + "/file/list/new"
DownloadInfo = "https://www.123pan.com/a/api/file/download_info"
Mkdir = API + "/file/upload_request"
Move = API + "/file/mod_pid"
Rename = API + "/file/rename"
Trash = API + "/file/trash"
UploadRequest = API + "/file/upload_request"
UploadComplete = API + "/file/upload_complete"
S3PreSignedUrls = API + "/file/s3_repare_upload_parts_batch"
S3Complete = API + "/file/s3_complete_multipart_upload"
AApi = "https://www.123pan.com/a/api"
BApi = "https://www.123pan.com/b/api"
MainApi = AApi
SignIn = MainApi + "/user/sign_in"
Logout = MainApi + "/user/logout"
UserInfo = MainApi + "/user/info"
FileList = MainApi + "/file/list/new"
DownloadInfo = MainApi + "/file/download_info"
Mkdir = MainApi + "/file/upload_request"
Move = MainApi + "/file/mod_pid"
Rename = MainApi + "/file/rename"
Trash = MainApi + "/file/trash"
UploadRequest = MainApi + "/file/upload_request"
UploadComplete = MainApi + "/file/upload_complete"
S3PreSignedUrls = MainApi + "/file/s3_repare_upload_parts_batch"
S3Auth = MainApi + "/file/s3_upload_object/auth"
UploadCompleteV2 = MainApi + "/file/upload_complete/v2"
S3Complete = MainApi + "/file/s3_complete_multipart_upload"
)

func (d *Pan123) login() error {

@@ -42,9 +47,17 @@ func (d *Pan123) login() error {
body = base.Json{
"passport": d.Username,
"password": d.Password,
"remember": true,
}
}
res, err := base.RestyClient.R().
SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"platform": "web",
"app-version": "3",
"user-agent": base.UserAgent,
}).
SetBody(body).Post(SignIn)
if err != nil {
return err

@@ -61,9 +74,11 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"authorization": "Bearer " + d.AccessToken,
"platform": "web",
"app-version": "1.2",
"app-version": "3",
"user-agent": base.UserAgent,
})
if callback != nil {
callback(req)
@@ -42,8 +42,8 @@ func calSign(body, ts, randStr string) string {
sort.Strings(strs)
body = strings.Join(strs, "")
body = base64.StdEncoding.EncodeToString([]byte(body))
res := utils.GetMD5Encode(body) + utils.GetMD5Encode(ts+":"+randStr)
res = strings.ToUpper(utils.GetMD5Encode(res))
res := utils.GetMD5EncodeStr(body) + utils.GetMD5EncodeStr(ts+":"+randStr)
res = strings.ToUpper(utils.GetMD5EncodeStr(res))
return res
}
@@ -385,7 +385,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
sliceMd5 := fileMd5
if file.GetSize() > DEFAULT {
sliceMd5 = utils.GetMD5Encode(strings.Join(md5s, "\n"))
sliceMd5 = utils.GetMD5EncodeStr(strings.Join(md5s, "\n"))
}
res, err = d.uploadRequest("/person/commitMultiUploadFile", map[string]string{
"uploadFileId": uploadFileId,
@@ -51,7 +51,7 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
}

// avoid logging in repeatedly
identity := utils.GetMD5Encode(y.Username + y.Password)
identity := utils.GetMD5EncodeStr(y.Username + y.Password)
if !y.isLogin() || y.identity != identity {
y.identity = identity
if err = y.login(); err != nil {

@@ -519,7 +519,7 @@ func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mo
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if file.GetSize() > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
}

// commit the upload

@@ -577,7 +577,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if file.GetSize() > DEFAULT {
sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
}

// check whether rapid upload is supported
@@ -67,7 +67,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
return nil
}
// init deviceID
deviceID := utils.GetSHA256Encode(d.UserID)
deviceID := utils.GetSHA256Encode([]byte(d.UserID))
// init privateKey
privateKey, _ := NewPrivateKeyFromHex(deviceID)
state := State{

@@ -193,7 +193,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
if d.RapidUpload {
buf := bytes.NewBuffer(make([]byte, 0, 1024))
io.CopyN(buf, file, 1024)
reqBody["pre_hash"] = utils.GetSHA1Encode(buf.String())
reqBody["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
if localFile != nil {
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
return err

@@ -259,7 +259,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
(t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
*/
buf := make([]byte, 8)
r, _ := new(big.Int).SetString(utils.GetMD5Encode(d.AccessToken)[:16], 16)
r, _ := new(big.Int).SetString(utils.GetMD5EncodeStr(d.AccessToken)[:16], 16)
i := new(big.Int).SetInt64(file.GetSize())
o := new(big.Int).SetInt64(0)
if file.GetSize() > 0 {
@@ -2,8 +2,7 @@ package aliyundrive_open

import (
"context"
"io"
"math"
"fmt"
"net/http"
"time"

@@ -50,6 +49,9 @@ func (d *AliyundriveOpen) Drop(ctx context.Context) error {
}

func (d *AliyundriveOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.limitList == nil {
return nil, fmt.Errorf("driver not init")
}
files, err := d.getFiles(ctx, dir.GetID())
if err != nil {
return nil, err

@@ -79,6 +81,9 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
}

func (d *AliyundriveOpen) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if d.limitLink == nil {
return nil, fmt.Errorf("driver not init")
}
return d.limitLink(ctx, file)
}

@@ -146,59 +151,7 @@ func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
}

func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
// rapid_upload is not currently supported
// 1. create
const DEFAULT int64 = 20971520
createData := base.Json{
"drive_id": d.DriveId,
"parent_file_id": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"check_name_mode": "ignore",
}
count := 1
if stream.GetSize() > DEFAULT {
count = int(math.Ceil(float64(stream.GetSize()) / float64(DEFAULT)))
createData["part_info_list"] = makePartInfos(count)
}
var createResp CreateResp
_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
if err != nil {
return err
}
// 2. upload
preTime := time.Now()
for i := 1; i <= len(createResp.PartInfoList); i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
err = d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, DEFAULT)), &createResp, true)
if err != nil {
return err
}
if count > 0 {
up(i * 100 / count)
}
// refresh upload url if 50 minutes passed
if time.Since(preTime) > 50*time.Minute {
createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
if err != nil {
return err
}
preTime = time.Now()
}
}
// 3. complete
_, err = d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": createResp.FileId,
"upload_id": createResp.UploadId,
})
})
return err
return d.upload(ctx, dstDir, stream, up)
}

func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {

@@ -14,6 +14,7 @@ type Addition struct {
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
RapidUpload bool `json:"rapid_upload" help:"If you enable this option, the file will be uploaded to the server first, so the progress will be incorrect"`
InternalUpload bool `json:"internal_upload" help:"If you are using Aliyun ECS is located in Beijing, you can turn it on to boost the upload speed"`
AccessToken string
}
drivers/aliyundrive_open/upload.go (new file, 268 lines)
@@ -0,0 +1,268 @@
package aliyundrive_open

import (
"bytes"
"context"
"crypto/sha1"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"math"
"net/http"
"os"
"strconv"
"strings"
"time"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)

func makePartInfos(size int) []base.Json {
partInfoList := make([]base.Json, size)
for i := 0; i < size; i++ {
partInfoList[i] = base.Json{"part_number": 1 + i}
}
return partInfoList
}

func calPartSize(fileSize int64) int64 {
var partSize int64 = 20 * 1024 * 1024
if fileSize > partSize {
if fileSize > 1*1024*1024*1024*1024 { // file Size over 1TB
partSize = 5 * 1024 * 1024 * 1024 // file part size 5GB
} else if fileSize > 768*1024*1024*1024 { // over 768GB
partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 part
} else if fileSize > 512*1024*1024*1024 { // over 512GB
partSize = 82463373 // ≈ 78.6432MB
} else if fileSize > 384*1024*1024*1024 { // over 384GB
partSize = 54975582 // ≈ 52.4288MB
} else if fileSize > 256*1024*1024*1024 { // over 256GB
partSize = 41231687 // ≈ 39.3216MB
} else if fileSize > 128*1024*1024*1024 { // over 128GB
partSize = 27487791 // ≈ 26.2144MB
}
}
return partSize
}

func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
partInfoList := makePartInfos(count)
var resp CreateResp
_, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": fileId,
"part_info_list": partInfoList,
"upload_id": uploadId,
}).SetResult(&resp)
})
return resp.PartInfoList, err
}

func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
partInfo := resp.PartInfoList[i-1]
uploadUrl := partInfo.UploadUrl
if d.InternalUpload {
uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
}
req, err := http.NewRequest("PUT", uploadUrl, reader)
if err != nil {
return err
}
req = req.WithContext(ctx)
res, err := base.HttpClient.Do(req)
if err != nil {
if retry {
reader.Reset()
return d.uploadPart(ctx, i, count, reader, resp, false)
}
return err
}
res.Body.Close()
if retry && res.StatusCode == http.StatusForbidden {
resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
if err != nil {
return err
}
reader.Reset()
return d.uploadPart(ctx, i, count, reader, resp, false)
}
if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
return fmt.Errorf("upload status: %d", res.StatusCode)
}
return nil
}

func (d *AliyundriveOpen) normalUpload(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress, createResp CreateResp, count int, partSize int64) error {
log.Debugf("[aliyundive_open] normal upload")
// 2. upload
preTime := time.Now()
for i := 1; i <= len(createResp.PartInfoList); i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
err := d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, partSize)), &createResp, true)
if err != nil {
return err
}
if count > 0 {
up(i * 100 / count)
}
// refresh upload url if 50 minutes passed
if time.Since(preTime) > 50*time.Minute {
createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
if err != nil {
return err
}
preTime = time.Now()
}
}
// 3. complete
_, err := d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": createResp.FileId,
"upload_id": createResp.UploadId,
})
})
return err
}

type ProofRange struct {
Start int64
End int64
}

func getProofRange(input string, size int64) (*ProofRange, error) {
if size == 0 {
return &ProofRange{}, nil
}
tmpStr := utils.GetMD5EncodeStr(input)[0:16]
tmpInt, err := strconv.ParseUint(tmpStr, 16, 64)
if err != nil {
return nil, err
}
index := tmpInt % uint64(size)
pr := &ProofRange{
Start: int64(index),
End: int64(index) + 8,
}
if pr.End >= size {
pr.End = size
}
return pr, nil
}

func (d *AliyundriveOpen) calProofCode(file *os.File, fileSize int64) (string, error) {
proofRange, err := getProofRange(d.AccessToken, fileSize)
if err != nil {
return "", err
}
buf := make([]byte, proofRange.End-proofRange.Start)
_, err = file.ReadAt(buf, proofRange.Start)
if err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(buf), nil
}

func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
// 1. create
// Part Size Unit: Bytes, Default: 20MB,
// Maximum number of slices 10,000, ≈195.3125GB
var partSize = calPartSize(stream.GetSize())
createData := base.Json{
"drive_id": d.DriveId,
"parent_file_id": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"check_name_mode": "ignore",
}
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
createData["part_info_list"] = makePartInfos(count)
// rapid upload
rapidUpload := stream.GetSize() > 100*1024 && d.RapidUpload
if rapidUpload {
log.Debugf("[aliyundrive_open] start cal pre_hash")
// read 1024 bytes to calculate pre hash
buf := bytes.NewBuffer(make([]byte, 0, 1024))
_, err := io.CopyN(buf, stream, 1024)
if err != nil {
return err
}
createData["size"] = stream.GetSize()
createData["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
// if support seek, seek to start
if localFile, ok := stream.(io.Seeker); ok {
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
return err
}
} else {
// Put spliced head back to stream
stream.SetReadCloser(struct {
io.Reader
io.Closer
}{
Reader: io.MultiReader(buf, stream.GetReadCloser()),
Closer: stream.GetReadCloser(),
})
}
}
var createResp CreateResp
_, err, e := d.requestReturnErrResp("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
if err != nil {
if e.Code != "PreHashMatched" || !rapidUpload {
return err
}
log.Debugf("[aliyundrive_open] pre_hash matched, start rapid upload")
// convert to local file
file, err := utils.CreateTempFile(stream)
if err != nil {
return err
}
_ = stream.GetReadCloser().Close()
stream.SetReadCloser(file)
// calculate full hash
h := sha1.New()
_, err = io.Copy(h, file)
if err != nil {
return err
}
delete(createData, "pre_hash")
createData["proof_version"] = "v1"
createData["content_hash_name"] = "sha1"
createData["content_hash"] = hex.EncodeToString(h.Sum(nil))
// seek to start
if _, err = file.Seek(0, io.SeekStart); err != nil {
return err
}
createData["proof_code"], err = d.calProofCode(file, stream.GetSize())
if err != nil {
return fmt.Errorf("cal proof code error: %s", err.Error())
}
_, err = d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
if err != nil {
return err
}
if createResp.RapidUpload {
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
return nil
}
// failed to rapid upload, try normal upload
if _, err = file.Seek(0, io.SeekStart); err != nil {
return err
}
}
log.Debugf("[aliyundrive_open] create file success, resp: %+v", createResp)
return d.normalUpload(ctx, stream, up, createResp, count, partSize)
}
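Worked example (the access token and file size below are assumed values, not real credentials or data): how getProofRange and calProofCode above pick the 8-byte proof window — hex(md5(token))[0:16] parsed as a uint64, taken modulo the file size, then the bytes in that window are base64-encoded as proof_code.

package main

import (
	"crypto/md5"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"strconv"
)

func main() {
	accessToken := "example-access-token" // hypothetical token
	var fileSize int64 = 1 << 20          // a 1 MiB file

	// utils.GetMD5EncodeStr(input)[0:16] is just the first 16 hex chars of md5(input)
	sum := md5.Sum([]byte(accessToken))
	tmpStr := hex.EncodeToString(sum[:])[:16]
	tmpInt, _ := strconv.ParseUint(tmpStr, 16, 64)

	start := int64(tmpInt % uint64(fileSize))
	end := start + 8
	if end >= fileSize {
		end = fileSize
	}
	fmt.Printf("proof window: bytes [%d, %d)\n", start, end)

	// calProofCode base64-encodes exactly those bytes read from the temp file;
	// with an all-zero window the proof_code would be:
	fmt.Println(base64.StdEncoding.EncodeToString(make([]byte, end-start)))
}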
@@ -5,7 +5,6 @@ import (
"errors"
"fmt"
"net/http"
"strings"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"

@@ -48,6 +47,11 @@ func (d *AliyundriveOpen) refreshToken() error {
}

func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
b, err, _ := d.requestReturnErrResp(uri, method, callback, retry...)
return b, err
}

func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
req := base.RestyClient.R()
// TODO check whether access_token is expired
req.SetHeader("Authorization", "Bearer "+d.AccessToken)

@@ -61,20 +65,20 @@ func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback,
req.SetError(&e)
res, err := req.Execute(method, d.base+uri)
if err != nil {
return nil, err
return nil, err, nil
}
isRetry := len(retry) > 0 && retry[0]
if e.Code != "" {
if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
err = d.refreshToken()
if err != nil {
return nil, err
return nil, err, nil
}
return d.request(uri, method, callback, true)
return d.requestReturnErrResp(uri, method, callback, true)
}
return nil, fmt.Errorf("%s:%s", e.Code, e.Message)
return nil, fmt.Errorf("%s:%s", e.Code, e.Message), &e
}
return res.Body(), nil
return res.Body(), nil, nil
}

func (d *AliyundriveOpen) list(ctx context.Context, data base.Json) (*Files, error) {

@@ -117,59 +121,3 @@ func (d *AliyundriveOpen) getFiles(ctx context.Context, fileId string) ([]File,
}
return res, nil
}

func makePartInfos(size int) []base.Json {
partInfoList := make([]base.Json, size)
for i := 0; i < size; i++ {
partInfoList[i] = base.Json{"part_number": 1 + i}
}
return partInfoList
}

func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
partInfoList := makePartInfos(count)
var resp CreateResp
_, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": fileId,
"part_info_list": partInfoList,
"upload_id": uploadId,
}).SetResult(&resp)
})
return resp.PartInfoList, err
}

func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
partInfo := resp.PartInfoList[i-1]
uploadUrl := partInfo.UploadUrl
if d.InternalUpload {
uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
}
req, err := http.NewRequest("PUT", uploadUrl, reader)
if err != nil {
return err
}
req = req.WithContext(ctx)
res, err := base.HttpClient.Do(req)
if err != nil {
if retry {
reader.Reset()
return d.uploadPart(ctx, i, count, reader, resp, false)
}
return err
}
res.Body.Close()
if retry && res.StatusCode == http.StatusForbidden {
resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
if err != nil {
return err
}
reader.Reset()
return d.uploadPart(ctx, i, count, reader, resp, false)
}
if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
return fmt.Errorf("upload status: %d", res.StatusCode)
}
return nil
}
@@ -2,6 +2,7 @@ package aliyundrive_share

import (
"context"
"fmt"
"net/http"
"time"

@@ -22,6 +23,9 @@ type AliyundriveShare struct {
ShareToken string
DriveId string
cron *cron.Cron

limitList func(ctx context.Context, dir model.Obj) ([]model.Obj, error)
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
}

func (d *AliyundriveShare) Config() driver.Config {

@@ -48,6 +52,8 @@ func (d *AliyundriveShare) Init(ctx context.Context) error {
log.Errorf("%+v", err)
}
})
d.limitList = utils.LimitRateCtx(d.list, time.Second/4)
d.limitLink = utils.LimitRateCtx(d.link, time.Second)
return nil
}

@@ -60,6 +66,13 @@ func (d *AliyundriveShare) Drop(ctx context.Context) error {
}

func (d *AliyundriveShare) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.limitList == nil {
return nil, fmt.Errorf("driver not init")
}
return d.limitList(ctx, dir)
}

func (d *AliyundriveShare) list(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err

@@ -70,6 +83,13 @@ func (d *AliyundriveShare) List(ctx context.Context, dir model.Obj, args model.L
}

func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if d.limitLink == nil {
return nil, fmt.Errorf("driver not init")
}
return d.limitLink(ctx, file)
}

func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Link, error) {
data := base.Json{
"drive_id": d.DriveId,
"file_id": file.GetID(),

@@ -79,7 +99,7 @@ func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.
}
var resp ShareLinkResp
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetResult(&resp)
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
})
if err != nil {
return nil, err
@@ -9,6 +9,12 @@ import (
log "github.com/sirupsen/logrus"
)

const (
// CanaryHeaderKey CanaryHeaderValue for lifting rate limit restrictions
CanaryHeaderKey = "X-Canary"
CanaryHeaderValue = "client=web,app=share,version=v2.3.1"
)

func (d *AliyundriveShare) refreshToken() error {
url := "https://auth.aliyundrive.com/v2/account/token"
var resp base.TokenResp

@@ -58,6 +64,7 @@ func (d *AliyundriveShare) request(url, method string, callback base.ReqCallback
SetError(&e).
SetHeader("content-type", "application/json").
SetHeader("Authorization", "Bearer\t"+d.AccessToken).
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
SetHeader("x-share-token", d.ShareToken)
if callback != nil {
callback(req)

@@ -107,6 +114,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
var resp ListResp
res, err := base.RestyClient.R().
SetHeader("x-share-token", d.ShareToken).
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
SetResult(&resp).SetError(&e).SetBody(data).
Post("https://api.aliyundrive.com/adrive/v3/file/list")
if err != nil {
@@ -16,6 +16,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
_ "github.com/alist-org/alist/v3/drivers/dropbox"
_ "github.com/alist-org/alist/v3/drivers/ftp"
_ "github.com/alist-org/alist/v3/drivers/google_drive"
_ "github.com/alist-org/alist/v3/drivers/google_photo"

@@ -24,6 +25,7 @@ import (
_ "github.com/alist-org/alist/v3/drivers/local"
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
_ "github.com/alist-org/alist/v3/drivers/mega"
_ "github.com/alist-org/alist/v3/drivers/mopan"
_ "github.com/alist-org/alist/v3/drivers/onedrive"
_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
_ "github.com/alist-org/alist/v3/drivers/pikpak"
drivers/dropbox/driver.go (new file, 222 lines)
@@ -0,0 +1,222 @@
package dropbox

import (
"context"
"fmt"
"io"
"math"
"net/http"
"time"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)

type Dropbox struct {
model.Storage
Addition
base string
contentBase string
}

func (d *Dropbox) Config() driver.Config {
return config
}

func (d *Dropbox) GetAddition() driver.Additional {
return &d.Addition
}

func (d *Dropbox) Init(ctx context.Context) error {
query := "foo"
res, err := d.request("/2/check/user", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"query": query,
})
})
if err != nil {
return err
}
result := utils.Json.Get(res, "result").ToString()
if result != query {
return fmt.Errorf("failed to check user: %s", string(res))
}
return nil
}

func (d *Dropbox) Drop(ctx context.Context) error {
return nil
}

func (d *Dropbox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(ctx, dir.GetPath())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}

func (d *Dropbox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
res, err := d.request("/2/files/get_temporary_link", http.MethodPost, func(req *resty.Request) {
req.SetContext(ctx).SetBody(base.Json{
"path": file.GetPath(),
})
})
if err != nil {
return nil, err
}
url := utils.Json.Get(res, "link").ToString()
exp := time.Hour
return &model.Link{
URL: url,
Expiration: &exp,
}, nil
}

func (d *Dropbox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err := d.request("/2/files/create_folder_v2", http.MethodPost, func(req *resty.Request) {
req.SetContext(ctx).SetBody(base.Json{
"autorename": false,
"path": parentDir.GetPath() + "/" + dirName,
})
})
return err
}

func (d *Dropbox) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
toPath := dstDir.GetPath() + "/" + srcObj.GetName()

_, err := d.request("/2/files/move_v2", http.MethodPost, func(req *resty.Request) {
req.SetContext(ctx).SetBody(base.Json{
"allow_ownership_transfer": false,
"allow_shared_folder": false,
"autorename": false,
"from_path": srcObj.GetID(),
"to_path": toPath,
})
})
return err
}

func (d *Dropbox) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
path := srcObj.GetPath()
fileName := srcObj.GetName()
toPath := path[:len(path)-len(fileName)] + newName

_, err := d.request("/2/files/move_v2", http.MethodPost, func(req *resty.Request) {
req.SetContext(ctx).SetBody(base.Json{
"allow_ownership_transfer": false,
"allow_shared_folder": false,
"autorename": false,
"from_path": srcObj.GetID(),
"to_path": toPath,
})
})
return err
}

func (d *Dropbox) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
toPath := dstDir.GetPath() + "/" + srcObj.GetName()
_, err := d.request("/2/files/copy_v2", http.MethodPost, func(req *resty.Request) {
req.SetContext(ctx).SetBody(base.Json{
"allow_ownership_transfer": false,
"allow_shared_folder": false,
"autorename": false,
"from_path": srcObj.GetID(),
"to_path": toPath,
})
})
return err
}

func (d *Dropbox) Remove(ctx context.Context, obj model.Obj) error {
uri := "/2/files/delete_v2"
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
req.SetContext(ctx).SetBody(base.Json{
"path": obj.GetID(),
})
})
return err
}

func (d *Dropbox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
// 1. start
sessionId, err := d.startUploadSession(ctx)
if err != nil {
return err
}

// 2.append
// A single request should not upload more than 150 MB, and each call must be multiple of 4MB (except for last call)
const PartSize = 20971520
count := 1
if stream.GetSize() > PartSize {
count = int(math.Ceil(float64(stream.GetSize()) / float64(PartSize)))
}
offset := int64(0)

for i := 0; i < count; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}

start := i * PartSize
byteSize := stream.GetSize() - int64(start)
if byteSize > PartSize {
byteSize = PartSize
}

url := d.contentBase + "/2/files/upload_session/append_v2"
reader := io.LimitReader(stream, PartSize)
req, err := http.NewRequest(http.MethodPost, url, reader)
if err != nil {
log.Errorf("failed to update file when append to upload session, err: %+v", err)
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/octet-stream")
req.Header.Set("Authorization", "Bearer "+d.AccessToken)

args := UploadAppendArgs{
Close: false,
Cursor: UploadCursor{
Offset: offset,
SessionID: sessionId,
},
}
argsJson, err := utils.Json.MarshalToString(args)
if err != nil {
return err
}
req.Header.Set("Dropbox-API-Arg", argsJson)

res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()

if count > 0 {
up((i + 1) * 100 / count)
}

offset += byteSize

}
// 3.finish
toPath := dstDir.GetPath() + "/" + stream.GetName()
err2 := d.finishUploadSession(ctx, toPath, offset, sessionId)
if err2 != nil {
return err2
}

return err
}

var _ driver.Driver = (*Dropbox)(nil)
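For illustration, a minimal sketch of the Dropbox-API-Arg header that Put attaches to each append_v2 call, produced by marshalling the UploadAppendArgs/UploadCursor types defined in types.go below; the session id value is made up.

package main

import (
	"encoding/json"
	"fmt"
)

type UploadCursor struct {
	Offset    int64  `json:"offset"`
	SessionID string `json:"session_id"`
}

type UploadAppendArgs struct {
	Close  bool         `json:"close"`
	Cursor UploadCursor `json:"cursor"`
}

func main() {
	// Second part of a 20 MiB-chunked upload, using a hypothetical session id.
	args := UploadAppendArgs{
		Cursor: UploadCursor{Offset: 20971520, SessionID: "pid_upload_session:EXAMPLE"},
	}
	b, _ := json.Marshal(args)
	fmt.Println(string(b))
	// {"close":false,"cursor":{"offset":20971520,"session_id":"pid_upload_session:EXAMPLE"}}
}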
drivers/dropbox/meta.go (new file, 42 lines)
@@ -0,0 +1,42 @@
package dropbox

import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)

const (
DefaultClientID = "76lrwrklhdn1icb"
)

type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
driver.RootPath

OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/dropbox/token"`
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`

AccessToken string
}

var config = driver.Config{
Name: "Dropbox",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "",
NoOverwriteUpload: true,
}

func init() {
op.RegisterDriver(func() driver.Driver {
return &Dropbox{
base: "https://api.dropboxapi.com",
contentBase: "https://content.dropboxapi.com",
}
})
}
drivers/dropbox/types.go (new file, 79 lines)
@@ -0,0 +1,79 @@
package dropbox

import (
"github.com/alist-org/alist/v3/internal/model"
"time"
)

type TokenResp struct {
AccessToken string `json:"access_token"`
TokenType string `json:"token_type"`
ExpiresIn int `json:"expires_in"`
}

type ErrorResp struct {
Error struct {
Tag string `json:".tag"`
} `json:"error"`
ErrorSummary string `json:"error_summary"`
}

type RefreshTokenErrorResp struct {
Error string `json:"error"`
ErrorDescription string `json:"error_description"`
}

type File struct {
Tag string `json:".tag"`
Name string `json:"name"`
PathLower string `json:"path_lower"`
PathDisplay string `json:"path_display"`
ID string `json:"id"`
ClientModified time.Time `json:"client_modified"`
ServerModified time.Time `json:"server_modified"`
Rev string `json:"rev"`
Size int `json:"size"`
IsDownloadable bool `json:"is_downloadable"`
ContentHash string `json:"content_hash"`
}

type ListResp struct {
Entries []File `json:"entries"`
Cursor string `json:"cursor"`
HasMore bool `json:"has_more"`
}

type UploadCursor struct {
Offset int64 `json:"offset"`
SessionID string `json:"session_id"`
}

type UploadAppendArgs struct {
Close bool `json:"close"`
Cursor UploadCursor `json:"cursor"`
}

type UploadFinishArgs struct {
Commit struct {
Autorename bool `json:"autorename"`
Mode string `json:"mode"`
Mute bool `json:"mute"`
Path string `json:"path"`
StrictConflict bool `json:"strict_conflict"`
} `json:"commit"`
Cursor UploadCursor `json:"cursor"`
}

func fileToObj(f File) *model.ObjThumb {
return &model.ObjThumb{
Object: model.Object{
ID: f.ID,
Path: f.PathDisplay,
Name: f.Name,
Size: int64(f.Size),
Modified: f.ServerModified,
IsFolder: f.Tag == "folder",
},
Thumbnail: model.Thumbnail{},
}
}
199
drivers/dropbox/util.go
Normal file
199
drivers/dropbox/util.go
Normal file
@ -0,0 +1,199 @@
|
||||
package dropbox
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func (d *Dropbox) refreshToken() error {
|
||||
url := d.base + "/oauth2/token"
|
||||
if utils.SliceContains([]string{"", DefaultClientID}, d.ClientID) {
|
||||
url = d.OauthTokenURL
|
||||
}
|
||||
var tokenResp TokenResp
|
||||
resp, err := base.RestyClient.R().
|
||||
//ForceContentType("application/x-www-form-urlencoded").
|
||||
//SetBasicAuth(d.ClientID, d.ClientSecret).
|
||||
SetFormData(map[string]string{
|
||||
"grant_type": "refresh_token",
|
||||
"refresh_token": d.RefreshToken,
|
||||
"client_id": d.ClientID,
|
||||
"client_secret": d.ClientSecret,
|
||||
}).
|
||||
Post(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debugf("[dropbox] refresh token response: %s", resp.String())
|
||||
if resp.StatusCode() != 200 {
|
||||
return fmt.Errorf("failed to refresh token: %s", resp.String())
|
||||
}
|
||||
_ = utils.Json.UnmarshalFromString(resp.String(), &tokenResp)
|
||||
d.AccessToken = tokenResp.AccessToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||
if method == http.MethodPost {
|
||||
req.SetHeader("Content-Type", "application/json")
|
||||
}
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
var e ErrorResp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, d.base+uri)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debugf("[dropbox] request (%s) response: %s", uri, res.String())
|
||||
isRetry := len(retry) > 0 && retry[0]
|
||||
if res.StatusCode() != 200 {
|
||||
body := res.String()
|
||||
if !isRetry && (utils.SliceMeet([]string{"expired_access_token", "invalid_access_token", "authorization"}, body,
|
||||
func(item string, v string) bool {
|
||||
return strings.Contains(v, item)
|
||||
}) || d.AccessToken == "") {
|
||||
err = d.refreshToken()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(uri, method, callback, true)
|
||||
}
|
||||
return nil, fmt.Errorf("%s:%s", e.Error, e.ErrorSummary)
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *Dropbox) list(ctx context.Context, data base.Json, isContinue bool) (*ListResp, error) {
    var resp ListResp
    uri := "/2/files/list_folder"
    if isContinue {
        uri += "/continue"
    }
    _, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
        req.SetContext(ctx).SetBody(data).SetResult(&resp)
    })
    if err != nil {
        return nil, err
    }
    return &resp, nil
}

func (d *Dropbox) getFiles(ctx context.Context, path string) ([]File, error) {
    hasMore := true
    var marker string
    res := make([]File, 0)

    data := base.Json{
        "include_deleted":                     false,
        "include_has_explicit_shared_members": false,
        "include_mounted_folders":             false,
        "include_non_downloadable_files":      false,
        "limit":                               2000,
        "path":                                path,
        "recursive":                           false,
    }
    resp, err := d.list(ctx, data, false)
    if err != nil {
        return nil, err
    }
    marker = resp.Cursor
    hasMore = resp.HasMore
    res = append(res, resp.Entries...)

    for hasMore {
        data := base.Json{
            "cursor": marker,
        }
        resp, err := d.list(ctx, data, true)
        if err != nil {
            return nil, err
        }
        marker = resp.Cursor
        hasMore = resp.HasMore
        res = append(res, resp.Entries...)
    }
    return res, nil
}

func (d *Dropbox) finishUploadSession(ctx context.Context, toPath string, offset int64, sessionId string) error {
    url := d.contentBase + "/2/files/upload_session/finish"
    req, err := http.NewRequest(http.MethodPost, url, nil)
    if err != nil {
        return err
    }
    req = req.WithContext(ctx)
    req.Header.Set("Content-Type", "application/octet-stream")
    req.Header.Set("Authorization", "Bearer "+d.AccessToken)

    uploadFinishArgs := UploadFinishArgs{
        Commit: struct {
            Autorename     bool   `json:"autorename"`
            Mode           string `json:"mode"`
            Mute           bool   `json:"mute"`
            Path           string `json:"path"`
            StrictConflict bool   `json:"strict_conflict"`
        }{
            Autorename:     true,
            Mode:           "add",
            Mute:           false,
            Path:           toPath,
            StrictConflict: false,
        },
        Cursor: UploadCursor{
            Offset:    offset,
            SessionID: sessionId,
        },
    }

    argsJson, err := utils.Json.MarshalToString(uploadFinishArgs)
    if err != nil {
        return err
    }
    req.Header.Set("Dropbox-API-Arg", argsJson)

    res, err := base.HttpClient.Do(req)
    if err != nil {
        log.Errorf("failed to update file when finish session, err: %+v", err)
        return err
    }
    _ = res.Body.Close()
    return nil
}

func (d *Dropbox) startUploadSession(ctx context.Context) (string, error) {
    url := d.contentBase + "/2/files/upload_session/start"
    req, err := http.NewRequest(http.MethodPost, url, nil)
    if err != nil {
        return "", err
    }
    req = req.WithContext(ctx)
    req.Header.Set("Content-Type", "application/octet-stream")
    req.Header.Set("Authorization", "Bearer "+d.AccessToken)
    req.Header.Set("Dropbox-API-Arg", "{\"close\":false}")

    res, err := base.HttpClient.Do(req)
    if err != nil {
        log.Errorf("failed to update file when start session, err: %+v", err)
        return "", err
    }

    body, err := io.ReadAll(res.Body)
    sessionId := utils.Json.Get(body, "session_id").ToString()

    _ = res.Body.Close()
    return sessionId, nil
}
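The two helpers above only open and close a Dropbox upload session; the chunk bodies are sent in between. A minimal sketch of how a caller could chain them is shown below. The append_v2 endpoint, the 4 MiB chunk size, and the uploadChunked name are assumptions for illustration and are not part of this change.

// Illustrative sketch only, not part of the diff. Assumes the standard
// /2/files/upload_session/append_v2 endpoint and a 4 MiB chunk size.
func (d *Dropbox) uploadChunked(ctx context.Context, toPath string, r io.Reader, size int64) error {
    const chunkSize int64 = 4 * 1024 * 1024 // assumed chunk size
    sessionId, err := d.startUploadSession(ctx)
    if err != nil {
        return err
    }
    for offset := int64(0); offset < size; offset += chunkSize {
        n := chunkSize
        if size-offset < n {
            n = size - offset
        }
        // Append one chunk at the current offset.
        url := d.contentBase + "/2/files/upload_session/append_v2" // assumed endpoint
        req, err := http.NewRequest(http.MethodPost, url, io.LimitReader(r, n))
        if err != nil {
            return err
        }
        req = req.WithContext(ctx)
        req.Header.Set("Content-Type", "application/octet-stream")
        req.Header.Set("Authorization", "Bearer "+d.AccessToken)
        req.Header.Set("Dropbox-API-Arg",
            fmt.Sprintf(`{"cursor":{"session_id":"%s","offset":%d},"close":false}`, sessionId, offset))
        res, err := base.HttpClient.Do(req)
        if err != nil {
            return err
        }
        _ = res.Body.Close()
    }
    // The final offset passed to finish equals the total bytes appended.
    return d.finishUploadSession(ctx, toPath, size, sessionId)
}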
@@ -64,7 +64,7 @@ func readDir(dirname string) ([]fs.FileInfo, error) {
 func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
     fullPath := file.GetPath()
     thumbPrefix := "alist_thumb_"
-    thumbName := thumbPrefix + utils.GetMD5Encode(fullPath) + ".png"
+    thumbName := thumbPrefix + utils.GetMD5EncodeStr(fullPath) + ".png"
     if d.ThumbCacheFolder != "" {
         // skip if the file is a thumbnail
         if strings.HasPrefix(file.GetName(), thumbPrefix) {
@@ -91,7 +91,7 @@ func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
         srcBuf = imgBuf
     }
 
-    image, err := imaging.Decode(srcBuf)
+    image, err := imaging.Decode(srcBuf, imaging.AutoOrientation(true))
     if err != nil {
         return nil, nil, err
     }
295	drivers/mopan/driver.go	Normal file
@@ -0,0 +1,295 @@
package mopan

import (
    "context"
    "errors"
    "fmt"
    "io"
    "net/http"
    "os"
    "time"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/avast/retry-go"
    "github.com/foxxorcat/mopan-sdk-go"
)

type MoPan struct {
    model.Storage
    Addition
    client *mopan.MoClient

    userID string
}

func (d *MoPan) Config() driver.Config {
    return config
}

func (d *MoPan) GetAddition() driver.Additional {
    return &d.Addition
}

func (d *MoPan) Init(ctx context.Context) error {
    login := func() error {
        data, err := d.client.Login(d.Phone, d.Password)
        if err != nil {
            return err
        }
        d.client.SetAuthorization(data.Token)

        info, err := d.client.GetUserInfo()
        if err != nil {
            return err
        }
        d.userID = info.UserID
        return nil
    }
    d.client = mopan.NewMoClient().
        SetRestyClient(base.RestyClient).
        SetOnAuthorizationExpired(func(_ error) error {
            err := login()
            if err != nil {
                d.Status = err.Error()
                op.MustSaveDriverStorage(d)
            }
            return err
        }).SetDeviceInfo(d.DeviceInfo)
    d.DeviceInfo = d.client.GetDeviceInfo()
    return login()
}

func (d *MoPan) Drop(ctx context.Context) error {
    d.client = nil
    d.userID = ""
    return nil
}

func (d *MoPan) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    var files []model.Obj
    for page := 1; ; page++ {
        data, err := d.client.QueryFiles(dir.GetID(), page, mopan.WarpParamOption(
            func(j mopan.Json) {
                j["orderBy"] = d.OrderBy
                j["descending"] = d.OrderDirection == "desc"
            },
            mopan.ParamOptionShareFile(d.CloudID),
        ))
        if err != nil {
            return nil, err
        }

        if len(data.FileListAO.FileList)+len(data.FileListAO.FolderList) == 0 {
            break
        }

        files = append(files, utils.MustSliceConvert(data.FileListAO.FolderList, folderToObj)...)
        files = append(files, utils.MustSliceConvert(data.FileListAO.FileList, fileToObj)...)
    }
    return files, nil
}

func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    data, err := d.client.GetFileDownloadUrl(file.GetID(), mopan.WarpParamOption(mopan.ParamOptionShareFile(d.CloudID)))
    if err != nil {
        return nil, err
    }

    return &model.Link{
        URL: data.DownloadUrl,
    }, nil
}

func (d *MoPan) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
    f, err := d.client.CreateFolder(dirName, parentDir.GetID(), mopan.WarpParamOption(
        mopan.ParamOptionShareFile(d.CloudID),
    ))
    if err != nil {
        return nil, err
    }
    return folderToObj(*f), nil
}

func (d *MoPan) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
    return d.newTask(srcObj, dstDir, mopan.TASK_MOVE)
}

func (d *MoPan) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
    if srcObj.IsDir() {
        _, err := d.client.RenameFolder(srcObj.GetID(), newName, mopan.WarpParamOption(
            mopan.ParamOptionShareFile(d.CloudID),
        ))
        if err != nil {
            return nil, err
        }
    } else {
        _, err := d.client.RenameFile(srcObj.GetID(), newName, mopan.WarpParamOption(
            mopan.ParamOptionShareFile(d.CloudID),
        ))
        if err != nil {
            return nil, err
        }
    }
    return CloneObj(srcObj, srcObj.GetID(), newName), nil
}

func (d *MoPan) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
    return d.newTask(srcObj, dstDir, mopan.TASK_COPY)
}

func (d *MoPan) newTask(srcObj, dstDir model.Obj, taskType mopan.TaskType) (model.Obj, error) {
    param := mopan.TaskParam{
        UserOrCloudID:       d.userID,
        Source:              1,
        TaskType:            taskType,
        TargetSource:        1,
        TargetUserOrCloudID: d.userID,
        TargetType:          1,
        TargetFolderID:      dstDir.GetID(),
        TaskStatusDetailDTOList: []mopan.TaskFileParam{
            {
                FileID:   srcObj.GetID(),
                IsFolder: srcObj.IsDir(),
                FileName: srcObj.GetName(),
            },
        },
    }
    if d.CloudID != "" {
        param.UserOrCloudID = d.CloudID
        param.Source = 2
        param.TargetSource = 2
        param.TargetUserOrCloudID = d.CloudID
    }

    task, err := d.client.AddBatchTask(param)
    if err != nil {
        return nil, err
    }

    for count := 0; count < 5; count++ {
        stat, err := d.client.CheckBatchTask(mopan.TaskCheckParam{
            TaskId:              task.TaskIDList[0],
            TaskType:            task.TaskType,
            TargetType:          1,
            TargetFolderID:      task.TargetFolderID,
            TargetSource:        param.TargetSource,
            TargetUserOrCloudID: param.TargetUserOrCloudID,
        })
        if err != nil {
            return nil, err
        }

        switch stat.TaskStatus {
        case 2:
            if err := d.client.CancelBatchTask(stat.TaskID, task.TaskType); err != nil {
                return nil, err
            }
            return nil, errors.New("file name conflict")
        case 4:
            if task.TaskType == mopan.TASK_MOVE {
                return CloneObj(srcObj, srcObj.GetID(), srcObj.GetName()), nil
            }
            return CloneObj(srcObj, stat.SuccessedFileIDList[0], srcObj.GetName()), nil
        }
        time.Sleep(time.Second)
    }
    return nil, nil
}

func (d *MoPan) Remove(ctx context.Context, obj model.Obj) error {
    _, err := d.client.DeleteToRecycle([]mopan.TaskFileParam{
        {
            FileID:   obj.GetID(),
            IsFolder: obj.IsDir(),
            FileName: obj.GetName(),
        },
    }, mopan.WarpParamOption(mopan.ParamOptionShareFile(d.CloudID)))
    return err
}

func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
    file, err := utils.CreateTempFile(stream)
    if err != nil {
        return nil, err
    }
    defer func() {
        _ = file.Close()
        _ = os.Remove(file.Name())
    }()

    initUpdload, err := d.client.InitMultiUpload(ctx, mopan.UpdloadFileParam{
        ParentFolderId: dstDir.GetID(),
        FileName:       stream.GetName(),
        FileSize:       stream.GetSize(),
        File:           file,
    }, mopan.WarpParamOption(
        mopan.ParamOptionShareFile(d.CloudID),
    ))
    if err != nil {
        return nil, err
    }

    if !initUpdload.FileDataExists {
        parts, err := d.client.GetAllMultiUploadUrls(initUpdload.UploadFileID, initUpdload.PartInfo)
        if err != nil {
            return nil, err
        }
        d.client.CloudDiskStartBusiness()
        for i, part := range parts {
            if utils.IsCanceled(ctx) {
                return nil, ctx.Err()
            }

            err := retry.Do(func() error {
                if _, err := file.Seek(int64(part.PartNumber-1)*int64(initUpdload.PartSize), io.SeekStart); err != nil {
                    return retry.Unrecoverable(err)
                }

                req, err := part.NewRequest(ctx, io.LimitReader(file, int64(initUpdload.PartSize)))
                if err != nil {
                    return err
                }

                resp, err := base.HttpClient.Do(req)
                if err != nil {
                    return err
                }

                if resp.StatusCode != http.StatusOK {
                    return fmt.Errorf("upload err,code=%d", resp.StatusCode)
                }
                return nil
            },
                retry.Context(ctx),
                retry.Attempts(3),
                retry.Delay(time.Second),
                retry.MaxDelay(5*time.Second))
            if err != nil {
                return nil, err
            }
            up(100 * (i + 1) / len(parts))
        }
    }
    uFile, err := d.client.CommitMultiUploadFile(initUpdload.UploadFileID, nil)
    if err != nil {
        return nil, err
    }
    return &model.Object{
        ID:       uFile.UserFileID,
        Name:     uFile.FileName,
        Size:     int64(uFile.FileSize),
        Modified: time.Time(uFile.CreateDate),
    }, nil
}

var _ driver.Driver = (*MoPan)(nil)
var _ driver.MkdirResult = (*MoPan)(nil)
var _ driver.MoveResult = (*MoPan)(nil)
var _ driver.RenameResult = (*MoPan)(nil)
var _ driver.Remove = (*MoPan)(nil)
var _ driver.CopyResult = (*MoPan)(nil)
var _ driver.PutResult = (*MoPan)(nil)
37	drivers/mopan/meta.go	Normal file
@@ -0,0 +1,37 @@
package mopan

import (
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
    Phone    string `json:"phone" required:"true"`
    Password string `json:"password" required:"true"`

    RootFolderID string `json:"root_folder_id" default:"-11" required:"true" help:"be careful when using the -11 value, some operations may cause system errors"`

    CloudID string `json:"cloud_id"`

    OrderBy        string `json:"order_by" type:"select" options:"filename,filesize,lastOpTime" default:"filename"`
    OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`

    DeviceInfo string `json:"device_info"`
}

func (a *Addition) GetRootId() string {
    return a.RootFolderID
}

var config = driver.Config{
    Name: "MoPan",
    // DefaultRoot: "root, / or other",
    CheckStatus: true,
    Alert:       "warning|This network disk may store your password in clear text. Please set your password carefully",
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &MoPan{}
    })
}
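The json tags above define the form fields a user fills in when adding a MoPan storage, and GetRootId feeds root_folder_id back to the driver framework. A minimal sketch of the resulting Addition values — every value below is a placeholder, not real data:

// Illustrative only: placeholder values showing how the form fields map onto Addition.
addition := Addition{
    Phone:          "13800000000",       // placeholder
    Password:       "example-password",  // placeholder
    RootFolderID:   "-11",               // the tagged default
    OrderBy:        "filename",
    OrderDirection: "asc",
}
_ = addition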
1	drivers/mopan/types.go	Normal file
@@ -0,0 +1 @@
package mopan
58	drivers/mopan/util.go	Normal file
@@ -0,0 +1,58 @@
package mopan

import (
    "time"

    "github.com/alist-org/alist/v3/internal/model"
    "github.com/foxxorcat/mopan-sdk-go"
)

func fileToObj(f mopan.File) model.Obj {
    return &model.ObjThumb{
        Object: model.Object{
            ID:       string(f.ID),
            Name:     f.Name,
            Size:     int64(f.Size),
            Modified: time.Time(f.LastOpTime),
        },
        Thumbnail: model.Thumbnail{
            Thumbnail: f.Icon.SmallURL,
        },
    }
}

func folderToObj(f mopan.Folder) model.Obj {
    return &model.Object{
        ID:       string(f.ID),
        Name:     f.Name,
        Modified: time.Time(f.LastOpTime),
        IsFolder: true,
    }
}

func CloneObj(o model.Obj, newID, newName string) model.Obj {
    if o.IsDir() {
        return &model.Object{
            ID:       newID,
            Name:     newName,
            IsFolder: true,
            Modified: o.ModTime(),
        }
    }

    thumb := ""
    if o, ok := o.(model.Thumb); ok {
        thumb = o.Thumb()
    }
    return &model.ObjThumb{
        Object: model.Object{
            ID:       newID,
            Name:     newName,
            Size:     o.GetSize(),
            Modified: o.ModTime(),
        },
        Thumbnail: model.Thumbnail{
            Thumbnail: thumb,
        },
    }
}
@@ -2,8 +2,6 @@ package pikpak
 
 import (
     "context"
-    "crypto/sha1"
-    "encoding/hex"
     "fmt"
     "io"
     "net/http"
@@ -19,7 +17,6 @@ import (
     "github.com/aws/aws-sdk-go/aws/session"
     "github.com/aws/aws-sdk-go/service/s3/s3manager"
     "github.com/go-resty/resty/v2"
-    jsoniter "github.com/json-iterator/go"
     log "github.com/sirupsen/logrus"
 )
 
@@ -66,7 +63,7 @@ func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
     link := model.Link{
         URL: resp.WebContentLink,
     }
-    if len(resp.Medias) > 0 && resp.Medias[0].Link.Url != "" {
+    if !d.DisableMediaLink && len(resp.Medias) > 0 && resp.Medias[0].Link.Url != "" {
         log.Debugln("use media link")
         link.URL = resp.Medias[0].Link.Url
     }
@@ -135,9 +132,8 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         _ = tempFile.Close()
         _ = os.Remove(tempFile.Name())
     }()
-    // cal sha1
-    s := sha1.New()
-    _, err = io.Copy(s, tempFile)
+    // cal gcid
+    sha1Str, err := getGcid(tempFile, stream.GetSize())
     if err != nil {
         return err
     }
@@ -145,8 +141,9 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     if err != nil {
         return err
     }
-    sha1Str := hex.EncodeToString(s.Sum(nil))
-    data := base.Json{
+    var resp UploadTaskData
+    res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
+        req.SetBody(base.Json{
             "kind": "drive#file",
             "name": stream.GetName(),
             "size": stream.GetSize(),
@@ -154,28 +151,23 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
             "upload_type": "UPLOAD_TYPE_RESUMABLE",
             "objProvider": base.Json{"provider": "UPLOAD_TYPE_UNKNOWN"},
             "parent_id":   dstDir.GetID(),
-    }
-    res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
-        req.SetBody(data)
-    }, nil)
+            "folder_type": "NORMAL",
+        })
+    }, &resp)
     if err != nil {
         return err
     }
-    if stream.GetSize() == 0 {
+
+    // 秒传成功 (instant/rapid upload succeeded)
+    if resp.Resumable == nil {
         log.Debugln(string(res))
         return nil
     }
-    params := jsoniter.Get(res, "resumable").Get("params")
-    endpoint := params.Get("endpoint").ToString()
-    endpointS := strings.Split(endpoint, ".")
-    endpoint = strings.Join(endpointS[1:], ".")
-    accessKeyId := params.Get("access_key_id").ToString()
-    accessKeySecret := params.Get("access_key_secret").ToString()
-    securityToken := params.Get("security_token").ToString()
-    key := params.Get("key").ToString()
-    bucket := params.Get("bucket").ToString()
+
+    params := resp.Resumable.Params
+    endpoint := strings.Join(strings.Split(params.Endpoint, ".")[1:], ".")
     cfg := &aws.Config{
-        Credentials: credentials.NewStaticCredentials(accessKeyId, accessKeySecret, securityToken),
+        Credentials: credentials.NewStaticCredentials(params.AccessKeyID, params.AccessKeySecret, params.SecurityToken),
         Region:      aws.String("pikpak"),
         Endpoint:    &endpoint,
     }
@@ -185,8 +177,8 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
     }
     uploader := s3manager.NewUploader(ss)
     input := &s3manager.UploadInput{
-        Bucket: &bucket,
-        Key:    &key,
+        Bucket: &params.Bucket,
+        Key:    &params.Key,
         Body:   tempFile,
     }
     _, err = uploader.UploadWithContext(ctx, input)
@@ -9,6 +9,7 @@ type Addition struct {
     driver.RootID
     Username string `json:"username" required:"true"`
     Password string `json:"password" required:"true"`
+    DisableMediaLink bool `json:"disable_media_link"`
 }
 
 var config = driver.Config{
@@ -73,3 +73,23 @@ type Media struct {
     IsVisible bool   `json:"is_visible"`
     Category  string `json:"category"`
 }
+
+type UploadTaskData struct {
+    UploadType string `json:"upload_type"`
+    //UPLOAD_TYPE_RESUMABLE
+    Resumable *struct {
+        Kind   string `json:"kind"`
+        Params struct {
+            AccessKeyID     string    `json:"access_key_id"`
+            AccessKeySecret string    `json:"access_key_secret"`
+            Bucket          string    `json:"bucket"`
+            Endpoint        string    `json:"endpoint"`
+            Expiration      time.Time `json:"expiration"`
+            Key             string    `json:"key"`
+            SecurityToken   string    `json:"security_token"`
+        } `json:"params"`
+        Provider string `json:"provider"`
+    } `json:"resumable"`
+
+    File File `json:"file"`
+}
@@ -1,7 +1,10 @@
 package pikpak
 
 import (
+    "crypto/sha1"
+    "encoding/hex"
     "errors"
+    "io"
     "net/http"
 
     "github.com/alist-org/alist/v3/drivers/base"
@@ -123,3 +126,28 @@ func (d *PikPak) getFiles(id string) ([]File, error) {
     }
     return res, nil
 }
+
+func getGcid(r io.Reader, size int64) (string, error) {
+    calcBlockSize := func(j int64) int64 {
+        var psize int64 = 0x40000
+        for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
+            psize = psize << 1
+        }
+        return psize
+    }
+
+    hash1 := sha1.New()
+    hash2 := sha1.New()
+    readSize := calcBlockSize(size)
+    for {
+        hash2.Reset()
+        if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+            if err != io.EOF {
+                return "", err
+            }
+            break
+        }
+        hash1.Write(hash2.Sum(nil))
+    }
+    return hex.EncodeToString(hash1.Sum(nil)), nil
+}
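getGcid is a hash-of-hashes: the file is read in fixed-size blocks, each block is SHA-1 hashed, and the per-block digests are fed into an outer SHA-1. The block size starts at 0x40000 bytes (256 KiB) and doubles while the file would span more than 0x200 (512) blocks, capping at 0x200000 (2 MiB). The standalone sketch below only reproduces calcBlockSize to show the resulting sizes; it is illustrative and not part of this change.

// Illustrative only: prints the block size chosen for a few file sizes.
package main

import "fmt"

func calcBlockSize(j int64) int64 {
    var psize int64 = 0x40000 // 256 KiB starting block
    for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
        psize = psize << 1
    }
    return psize
}

func main() {
    for _, size := range []int64{
        100 << 20, // 100 MiB -> 256 KiB blocks
        128 << 20, // 128 MiB -> 256 KiB blocks (exactly 512 blocks)
        512 << 20, // 512 MiB -> 1 MiB blocks
        1 << 30,   // 1 GiB   -> 2 MiB blocks (cap reached)
        100 << 30, // 100 GiB -> 2 MiB blocks (cap)
    } {
        fmt.Printf("size=%d blockSize=%d\n", size, calcBlockSize(size))
    }
}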
@@ -53,9 +53,9 @@ func (d *S3) Drop(ctx context.Context) error {
 
 func (d *S3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
     if d.ListObjectVersion == "v2" {
-        return d.listV2(dir.GetPath())
+        return d.listV2(dir.GetPath(), args)
     }
-    return d.listV1(dir.GetPath())
+    return d.listV1(dir.GetPath(), args)
 }
 
 func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
@@ -12,6 +12,7 @@ type Addition struct {
     Region          string `json:"region"`
     AccessKeyID     string `json:"access_key_id" required:"true"`
     SecretAccessKey string `json:"secret_access_key" required:"true"`
+    SessionToken    string `json:"session_token"`
     CustomHost      string `json:"custom_host"`
     SignURLExpire   int    `json:"sign_url_expire" type:"number" default:"4"`
     Placeholder     string `json:"placeholder"`
@@ -22,7 +22,7 @@ import (
 
 func (d *S3) initSession() error {
     cfg := &aws.Config{
-        Credentials:      credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, ""),
+        Credentials:      credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, d.SessionToken),
         Region:           &d.Region,
         Endpoint:         &d.Endpoint,
         S3ForcePathStyle: aws.Bool(d.ForcePathStyle),
@@ -69,7 +69,7 @@ func getPlaceholderName(placeholder string) string {
     return placeholder
 }
 
-func (d *S3) listV1(prefix string) ([]model.Obj, error) {
+func (d *S3) listV1(prefix string, args model.ListArgs) ([]model.Obj, error) {
     prefix = getKey(prefix, true)
     log.Debugf("list: %s", prefix)
     files := make([]model.Obj, 0)
@@ -97,7 +97,7 @@ func (d *S3) listV1(prefix string) ([]model.Obj, error) {
         }
         for _, object := range listObjectsResult.Contents {
             name := path.Base(*object.Key)
-            if name == getPlaceholderName(d.Placeholder) || name == d.Placeholder {
+            if !args.S3ShowPlaceholder && (name == getPlaceholderName(d.Placeholder) || name == d.Placeholder) {
                 continue
             }
             file := model.Object{
@@ -120,7 +120,7 @@ func (d *S3) listV1(prefix string) ([]model.Obj, error) {
     return files, nil
 }
 
-func (d *S3) listV2(prefix string) ([]model.Obj, error) {
+func (d *S3) listV2(prefix string, args model.ListArgs) ([]model.Obj, error) {
     prefix = getKey(prefix, true)
     files := make([]model.Obj, 0)
     var continuationToken, startAfter *string
@@ -152,7 +152,7 @@ func (d *S3) listV2(prefix string) ([]model.Obj, error) {
             continue
         }
         name := path.Base(*object.Key)
-        if name == getPlaceholderName(d.Placeholder) || name == d.Placeholder {
+        if !args.S3ShowPlaceholder && (name == getPlaceholderName(d.Placeholder) || name == d.Placeholder) {
             continue
         }
         file := model.Object{
@@ -198,7 +198,7 @@ func (d *S3) copyFile(ctx context.Context, src string, dst string) error {
 }
 
 func (d *S3) copyDir(ctx context.Context, src string, dst string) error {
-    objs, err := op.List(ctx, d, src, model.ListArgs{})
+    objs, err := op.List(ctx, d, src, model.ListArgs{S3ShowPlaceholder: true})
     if err != nil {
         return err
     }
@@ -11,6 +11,7 @@ import (
     "github.com/alist-org/alist/v3/internal/model"
     "github.com/alist-org/alist/v3/pkg/utils"
     "github.com/pkg/sftp"
+    log "github.com/sirupsen/logrus"
 )
 
 type SFTP struct {
@@ -39,13 +40,15 @@ func (d *SFTP) Drop(ctx context.Context) error {
 }
 
 func (d *SFTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+    log.Debugf("[sftp] list dir: %s", dir.GetPath())
     files, err := d.client.ReadDir(dir.GetPath())
     if err != nil {
         return nil, err
     }
-    return utils.SliceConvert(files, func(src os.FileInfo) (model.Obj, error) {
-        return fileToObj(src), nil
+    objs, err := utils.SliceConvert(files, func(src os.FileInfo) (model.Obj, error) {
+        return d.fileToObj(src, dir.GetPath())
     })
+    return objs, err
 }
 
 func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
@@ -2,15 +2,44 @@ package sftp
 
 import (
     "os"
+    stdpath "path"
+    "strings"
 
     "github.com/alist-org/alist/v3/internal/model"
+    log "github.com/sirupsen/logrus"
 )
 
-func fileToObj(f os.FileInfo) model.Obj {
+func (d *SFTP) fileToObj(f os.FileInfo, dir string) (model.Obj, error) {
+    symlink := f.Mode()&os.ModeSymlink != 0
+    if !symlink {
         return &model.Object{
             Name:     f.Name(),
             Size:     f.Size(),
             Modified: f.ModTime(),
             IsFolder: f.IsDir(),
-    }
+        }, nil
     }
+    path := stdpath.Join(dir, f.Name())
+    // set target path
+    target, err := d.client.ReadLink(path)
+    if err != nil {
+        return nil, err
+    }
+    if !strings.HasPrefix(target, "/") {
+        target = stdpath.Join(dir, target)
+    }
+    _f, err := d.client.Stat(target)
+    if err != nil {
+        return nil, err
+    }
+    // set basic info
+    obj := &model.Object{
+        Name:     f.Name(),
+        Size:     _f.Size(),
+        Modified: _f.ModTime(),
+        IsFolder: _f.IsDir(),
+        Path:     target,
+    }
+    log.Debugf("[sftp] obj: %+v, is symlink: %v", obj, symlink)
+    return obj, nil
 }
@@ -3,7 +3,9 @@ package thunder
 import (
     "context"
     "fmt"
+    "io"
     "net/http"
+    "os"
     "strings"
 
     "github.com/alist-org/alist/v3/drivers/base"
@@ -54,7 +56,7 @@ func (x *Thunder) Init(ctx context.Context) (err error) {
             "j",
             "4scKJNdd7F27Hv7tbt",
         },
-        DeviceID:      utils.GetMD5Encode(x.Username + x.Password),
+        DeviceID:      utils.GetMD5EncodeStr(x.Username + x.Password),
         ClientID:      "Xp6vsxz_7IYVw2BB",
         ClientSecret:  "Xp6vsy4tN9toTVdMSpomVdXpRmES",
         ClientVersion: "7.51.0.8196",
@@ -135,7 +137,7 @@ func (x *ThunderExpert) Init(ctx context.Context) (err error) {
 
         DeviceID: func() string {
             if len(x.DeviceID) != 32 {
-                return utils.GetMD5Encode(x.DeviceID)
+                return utils.GetMD5EncodeStr(x.DeviceID)
             }
             return x.DeviceID
         }(),
@@ -331,15 +333,32 @@ func (xc *XunLeiCommon) Remove(ctx context.Context, obj model.Obj) error {
 }
 
 func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+    tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
+    if err != nil {
+        return err
+    }
+    defer func() {
+        _ = tempFile.Close()
+        _ = os.Remove(tempFile.Name())
+    }()
+
+    gcid, err := getGcid(tempFile, stream.GetSize())
+    if err != nil {
+        return err
+    }
+    if _, err := tempFile.Seek(0, io.SeekStart); err != nil {
+        return err
+    }
+
     var resp UploadTaskResponse
-    _, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
+    _, err = xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
         r.SetContext(ctx)
         r.SetBody(&base.Json{
             "kind":        FILE,
             "parent_id":   dstDir.GetID(),
             "name":        stream.GetName(),
             "size":        stream.GetSize(),
-            "hash":        "1CF254FBC456E1B012CD45C546636AA62CF8350E",
+            "hash":        gcid,
             "upload_type": UPLOAD_TYPE_RESUMABLE,
         })
     }, &resp)
@@ -362,7 +381,7 @@ func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.
             Bucket:  aws.String(param.Bucket),
             Key:     aws.String(param.Key),
             Expires: aws.Time(param.Expiration),
-            Body:    stream,
+            Body:    tempFile,
         })
     return err
 }
@@ -78,7 +78,7 @@ type Addition struct {
 
 // 登录特征,用于判断是否重新登录 (login signature, used to decide whether to log in again)
 func (i *Addition) GetIdentity() string {
-    return utils.GetMD5Encode(i.Username + i.Password)
+    return utils.GetMD5EncodeStr(i.Username + i.Password)
 }
 
 var config = driver.Config{
@@ -1,7 +1,10 @@
 package thunder
 
 import (
+    "crypto/sha1"
+    "encoding/hex"
     "fmt"
+    "io"
     "net/http"
     "regexp"
     "time"
@@ -97,7 +100,7 @@ func (c *Common) GetCaptchaSign() (timestamp, sign string) {
     timestamp = fmt.Sprint(time.Now().UnixMilli())
     str := fmt.Sprint(c.ClientID, c.ClientVersion, c.PackageName, c.DeviceID, timestamp)
     for _, algorithm := range c.Algorithms {
-        str = utils.GetMD5Encode(str + algorithm)
+        str = utils.GetMD5EncodeStr(str + algorithm)
     }
     sign = "1." + str
     return
@@ -171,3 +174,29 @@ func (c *Common) Request(url, method string, callback base.ReqCallback, resp int
 
     return res.Body(), nil
 }
+
+// 计算文件Gcid (compute the file's gcid)
+func getGcid(r io.Reader, size int64) (string, error) {
+    calcBlockSize := func(j int64) int64 {
+        var psize int64 = 0x40000
+        for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
+            psize = psize << 1
+        }
+        return psize
+    }
+
+    hash1 := sha1.New()
+    hash2 := sha1.New()
+    readSize := calcBlockSize(size)
+    for {
+        hash2.Reset()
+        if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+            if err != io.EOF {
+                return "", err
+            }
+            break
+        }
+        hash1.Write(hash2.Sum(nil))
+    }
+    return hex.EncodeToString(hash1.Sum(nil)), nil
+}
@@ -81,7 +81,7 @@ func (d *USS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
     expireAt := time.Now().Add(downExp).Unix()
     upd := url.QueryEscape(path.Base(file.GetPath()))
     signStr := strings.Join([]string{d.OperatorPassword, fmt.Sprint(expireAt), fmt.Sprintf("/%s", key)}, "&")
-    upt := utils.GetMD5Encode(signStr)[12:20] + fmt.Sprint(expireAt)
+    upt := utils.GetMD5EncodeStr(signStr)[12:20] + fmt.Sprint(expireAt)
     link := fmt.Sprintf("%s?_upd=%s&_upt=%s", u, upd, upt)
     return &model.Link{URL: link}, nil
 }
43	go.mod
@ -8,12 +8,13 @@ require (
|
||||
github.com/Xhofe/wopan-sdk-go v0.1.1
|
||||
github.com/avast/retry-go v3.0.0+incompatible
|
||||
github.com/aws/aws-sdk-go v1.44.262
|
||||
github.com/blevesearch/bleve/v2 v2.3.8
|
||||
github.com/caarlos0/env/v7 v7.1.0
|
||||
github.com/blevesearch/bleve/v2 v2.3.9
|
||||
github.com/caarlos0/env/v9 v9.0.0
|
||||
github.com/coreos/go-oidc v2.2.1+incompatible
|
||||
github.com/deckarep/golang-set/v2 v2.3.0
|
||||
github.com/disintegration/imaging v1.6.2
|
||||
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564
|
||||
github.com/foxxorcat/mopan-sdk-go v0.1.1
|
||||
github.com/gin-contrib/cors v1.4.0
|
||||
github.com/gin-gonic/gin v1.9.1
|
||||
github.com/go-resty/resty/v2 v2.7.0
|
||||
@ -22,23 +23,23 @@ require (
|
||||
github.com/gorilla/websocket v1.5.0
|
||||
github.com/hirochachacha/go-smb2 v1.1.0
|
||||
github.com/ipfs/go-ipfs-api v0.6.0
|
||||
github.com/jlaffaye/ftp v0.1.0
|
||||
github.com/jlaffaye/ftp v0.2.0
|
||||
github.com/json-iterator/go v1.1.12
|
||||
github.com/maruel/natural v1.1.0
|
||||
github.com/natefinch/lumberjack v2.0.0+incompatible
|
||||
github.com/pkg/errors v0.9.1
|
||||
github.com/pkg/sftp v1.13.5
|
||||
github.com/pquerna/otp v1.4.0
|
||||
github.com/sirupsen/logrus v1.9.2
|
||||
github.com/sirupsen/logrus v1.9.3
|
||||
github.com/spf13/cobra v1.7.0
|
||||
github.com/t3rm1n4l/go-mega v0.0.0-20230228171823-a01a2cda13ca
|
||||
github.com/u2takey/ffmpeg-go v0.4.1
|
||||
github.com/upyun/go-sdk/v3 v3.0.4
|
||||
github.com/winfsp/cgofuse v1.5.0
|
||||
golang.org/x/crypto v0.10.0
|
||||
golang.org/x/image v0.7.0
|
||||
golang.org/x/net v0.11.0
|
||||
golang.org/x/oauth2 v0.4.0
|
||||
golang.org/x/crypto v0.11.0
|
||||
golang.org/x/image v0.9.0
|
||||
golang.org/x/net v0.12.0
|
||||
golang.org/x/oauth2 v0.10.0
|
||||
gorm.io/driver/mysql v1.4.7
|
||||
gorm.io/driver/postgres v1.4.8
|
||||
gorm.io/driver/sqlite v1.4.4
|
||||
@ -47,7 +48,7 @@ require (
|
||||
|
||||
require (
|
||||
github.com/BurntSushi/toml v0.3.1 // indirect
|
||||
github.com/RoaringBitmap/roaring v0.9.4 // indirect
|
||||
github.com/RoaringBitmap/roaring v1.2.3 // indirect
|
||||
github.com/aead/ecdh v0.2.0 // indirect
|
||||
github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible // indirect
|
||||
github.com/andreburgaud/crypt2go v1.1.0 // indirect
|
||||
@ -58,16 +59,16 @@ require (
|
||||
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
|
||||
github.com/blevesearch/gtreap v0.1.1 // indirect
|
||||
github.com/blevesearch/mmap-go v1.0.4 // indirect
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.1.4 // indirect
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.1.5 // indirect
|
||||
github.com/blevesearch/segment v0.9.1 // indirect
|
||||
github.com/blevesearch/snowballstem v0.9.0 // indirect
|
||||
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
|
||||
github.com/blevesearch/vellum v1.0.9 // indirect
|
||||
github.com/blevesearch/zapx/v11 v11.3.7 // indirect
|
||||
github.com/blevesearch/zapx/v12 v12.3.7 // indirect
|
||||
github.com/blevesearch/zapx/v13 v13.3.7 // indirect
|
||||
github.com/blevesearch/zapx/v14 v14.3.7 // indirect
|
||||
github.com/blevesearch/zapx/v15 v15.3.10 // indirect
|
||||
github.com/blevesearch/vellum v1.0.10 // indirect
|
||||
github.com/blevesearch/zapx/v11 v11.3.9 // indirect
|
||||
github.com/blevesearch/zapx/v12 v12.3.9 // indirect
|
||||
github.com/blevesearch/zapx/v13 v13.3.9 // indirect
|
||||
github.com/blevesearch/zapx/v14 v14.3.9 // indirect
|
||||
github.com/blevesearch/zapx/v15 v15.3.12 // indirect
|
||||
github.com/bluele/gcache v0.0.2 // indirect
|
||||
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect
|
||||
github.com/bytedance/sonic v1.9.1 // indirect
|
||||
@ -84,7 +85,7 @@ require (
|
||||
github.com/go-sql-driver/mysql v1.7.0 // indirect
|
||||
github.com/goccy/go-json v0.10.2 // indirect
|
||||
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/golang/protobuf v1.5.3 // indirect
|
||||
github.com/golang/snappy v0.0.4 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
@ -130,13 +131,13 @@ require (
|
||||
github.com/u2takey/go-utils v0.3.1 // indirect
|
||||
github.com/ugorji/go/codec v1.2.11 // indirect
|
||||
github.com/whyrusleeping/tar-utils v0.0.0-20180509141711-8c6c8ba81d5c // indirect
|
||||
go.etcd.io/bbolt v1.3.5 // indirect
|
||||
go.etcd.io/bbolt v1.3.7 // indirect
|
||||
golang.org/x/arch v0.3.0 // indirect
|
||||
golang.org/x/sys v0.9.0 // indirect
|
||||
golang.org/x/text v0.10.0 // indirect
|
||||
golang.org/x/sys v0.10.0 // indirect
|
||||
golang.org/x/text v0.11.0 // indirect
|
||||
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/protobuf v1.30.0 // indirect
|
||||
google.golang.org/protobuf v1.31.0 // indirect
|
||||
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
|
||||
gopkg.in/square/go-jose.v2 v2.6.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
|
92	go.sum
@ -1,7 +1,7 @@
|
||||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/RoaringBitmap/roaring v0.9.4 h1:ckvZSX5gwCRaJYBNe7syNawCU5oruY9gQmjXlp4riwo=
|
||||
github.com/RoaringBitmap/roaring v0.9.4/go.mod h1:icnadbWcNyfEHlYdr+tDlOTih1Bf/h+rzPpv4sbomAA=
|
||||
github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
|
||||
github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
|
||||
github.com/SheltonZhu/115driver v1.0.14 h1:uW3dl8J9KDMw+3gPxQdhTysoGhw0/uI1484GT9xhfU4=
|
||||
github.com/SheltonZhu/115driver v1.0.14/go.mod h1:00ixivHH5HqDj4S7kAWbkuUrjtsJTxc7cGv5RMw3RVs=
|
||||
github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a h1:RenIAa2q4H8UcS/cqmwdT1WCWIAH5aumP8m8RpbqVsE=
|
||||
@ -23,8 +23,8 @@ github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT
|
||||
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
|
||||
github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjLyS07ChA=
|
||||
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
|
||||
github.com/blevesearch/bleve/v2 v2.3.8 h1:IqFyMJ73n4gY8AmVqM8Sa6EtAZ5beE8yramVqCvs2kQ=
|
||||
github.com/blevesearch/bleve/v2 v2.3.8/go.mod h1:Lh9aZEHrLKxwPnW4z4lsBEGnflZQ1V/aWP/t+htsiDw=
|
||||
github.com/blevesearch/bleve/v2 v2.3.9 h1:pUMvK0mxAexqasZcVj8lazmWnEW5XiV0tASIqANiNTQ=
|
||||
github.com/blevesearch/bleve/v2 v2.3.9/go.mod h1:1PibElcjlQMQHF9uS9mRv58ODQgj4pCWHA1Wfd+qagU=
|
||||
github.com/blevesearch/bleve_index_api v1.0.5 h1:Lc986kpC4Z0/n1g3gg8ul7H+lxgOQPcXb9SxvQGu+tw=
|
||||
github.com/blevesearch/bleve_index_api v1.0.5/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
|
||||
github.com/blevesearch/geo v0.1.17 h1:AguzI6/5mHXapzB0gE9IKWo+wWPHZmXZoscHcjFgAFA=
|
||||
@ -35,26 +35,26 @@ github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZG
|
||||
github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
|
||||
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
|
||||
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.1.4 h1:LmGmo5twU3gV+natJbKmOktS9eMhokPGKWuR+jX84vk=
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.1.4/go.mod h1:PgVnbbg/t1UkgezPDu8EHLi1BHQ17xUwsFdU6NnOYS0=
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.1.5 h1:1g713kpCQZ8u4a3stRGBfrwVOuGRnmxOVU5MQkUPrHU=
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.1.5/go.mod h1:f2nOkKS1HcjgIWZgDAErgBdxmr2eyt0Kn7IY+FU1Xe4=
|
||||
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
|
||||
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
|
||||
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
|
||||
github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
|
||||
github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A=
|
||||
github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
|
||||
github.com/blevesearch/vellum v1.0.9 h1:PL+NWVk3dDGPCV0hoDu9XLLJgqU4E5s/dOeEJByQ2uQ=
|
||||
github.com/blevesearch/vellum v1.0.9/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
|
||||
github.com/blevesearch/zapx/v11 v11.3.7 h1:Y6yIAF/DVPiqZUA/jNgSLXmqewfzwHzuwfKyfdG+Xaw=
|
||||
github.com/blevesearch/zapx/v11 v11.3.7/go.mod h1:Xk9Z69AoAWIOvWudNDMlxJDqSYGf90LS0EfnaAIvXCA=
|
||||
github.com/blevesearch/zapx/v12 v12.3.7 h1:DfQ6rsmZfEK4PzzJJRXjiM6AObG02+HWvprlXQ1Y7eI=
|
||||
github.com/blevesearch/zapx/v12 v12.3.7/go.mod h1:SgEtYIBGvM0mgIBn2/tQE/5SdrPXaJUaT/kVqpAPxm0=
|
||||
github.com/blevesearch/zapx/v13 v13.3.7 h1:igIQg5eKmjw168I7av0Vtwedf7kHnQro/M+ubM4d2l8=
|
||||
github.com/blevesearch/zapx/v13 v13.3.7/go.mod h1:yyrB4kJ0OT75UPZwT/zS+Ru0/jYKorCOOSY5dBzAy+s=
|
||||
github.com/blevesearch/zapx/v14 v14.3.7 h1:gfe+fbWslDWP/evHLtp/GOvmNM3sw1BbqD7LhycBX20=
|
||||
github.com/blevesearch/zapx/v14 v14.3.7/go.mod h1:9J/RbOkqZ1KSjmkOes03AkETX7hrXT0sFMpWH4ewC4w=
|
||||
github.com/blevesearch/zapx/v15 v15.3.10 h1:bQ9ZxJCj6rKp873EuVJu2JPxQ+EWQZI1cjJGeroovaQ=
|
||||
github.com/blevesearch/zapx/v15 v15.3.10/go.mod h1:m7Y6m8soYUvS7MjN9eKlz1xrLCcmqfFadmu7GhWIrLY=
|
||||
github.com/blevesearch/vellum v1.0.10 h1:HGPJDT2bTva12hrHepVT3rOyIKFFF4t7Gf6yMxyMIPI=
|
||||
github.com/blevesearch/vellum v1.0.10/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
|
||||
github.com/blevesearch/zapx/v11 v11.3.9 h1:y3ijS4h4MJdmQ07MHASxat4owAixreK2xdo76w9ncrw=
|
||||
github.com/blevesearch/zapx/v11 v11.3.9/go.mod h1:jcAYnQwlr+LqD2vLjDWjWiZDXDXGFqPbpPDRTd3XmS4=
|
||||
github.com/blevesearch/zapx/v12 v12.3.9 h1:MXGLlZ03oxXH3DMJTZaBaRj2xb6t4wQVZeZK/wu1M6w=
|
||||
github.com/blevesearch/zapx/v12 v12.3.9/go.mod h1:QXCMwmOkdLnMDgTN1P4CcuX5F851iUOtOwXbw0HMBYs=
|
||||
github.com/blevesearch/zapx/v13 v13.3.9 h1:+VAz9V0VmllHXlZV4DCvfYj0nqaZHgF3MeEHwOyRBwQ=
|
||||
github.com/blevesearch/zapx/v13 v13.3.9/go.mod h1:s+WjNp4WSDtrBVBpa37DUOd7S/Gr/jTZ7ST/MbCVj/0=
|
||||
github.com/blevesearch/zapx/v14 v14.3.9 h1:wuqxATgsTCNHM9xsOFOeFp8H2heZ/gMX/tsl9lRK8U4=
|
||||
github.com/blevesearch/zapx/v14 v14.3.9/go.mod h1:MWZ4v8AzFBRurhDzkLvokFW8ljcq9Evm27mkWe8OGbM=
|
||||
github.com/blevesearch/zapx/v15 v15.3.12 h1:w/kU9aHyfMDEdwHGZzCiakC3HZ9z5gYlXaALDC4Dct8=
|
||||
github.com/blevesearch/zapx/v15 v15.3.12/go.mod h1:tx53gDJS/7Oa3Je820cmVurqCuJ4dqdAy1kiDMV/IUo=
|
||||
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
|
||||
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
|
||||
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
|
||||
@ -62,8 +62,8 @@ github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBW
|
||||
github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
|
||||
github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
|
||||
github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
|
||||
github.com/caarlos0/env/v7 v7.1.0 h1:9lzTF5amyQeWHZzuZeKlCb5FWSUxpG1js43mhbY8ozg=
|
||||
github.com/caarlos0/env/v7 v7.1.0/go.mod h1:LPPWniDUq4JaO6Q41vtlyikhMknqymCLBw0eX4dcH1E=
|
||||
github.com/caarlos0/env/v9 v9.0.0 h1:SI6JNsOA+y5gj9njpgybykATIylrRMklbs5ch6wO6pc=
|
||||
github.com/caarlos0/env/v9 v9.0.0/go.mod h1:ye5mlCVMYh6tZ+vCgrs/B95sj88cg5Tlnc0XIzgZ020=
|
||||
github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
|
||||
@ -86,6 +86,8 @@ github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1
|
||||
github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
|
||||
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 h1:I6KUy4CI6hHjqnyJLNCEi7YHVMkwwtfSr2k9splgdSM=
|
||||
github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564/go.mod h1:yekO+3ZShy19S+bsmnERmznGy9Rfg6dWWWpiGJjNAz8=
|
||||
github.com/foxxorcat/mopan-sdk-go v0.1.1 h1:JYMeCu4PFpqgHapvOz4jPMT7CxR6Yebu3aWkgGMDeIU=
|
||||
github.com/foxxorcat/mopan-sdk-go v0.1.1/go.mod h1:LpBPmwezjQNyhaNo3HGzgFtQbhvxmF5ZybSVuKi7OVA=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
|
||||
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
|
||||
@ -127,12 +129,12 @@ github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFig
|
||||
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
|
||||
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
|
||||
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
|
||||
github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
|
||||
github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
|
||||
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
|
||||
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
@ -167,8 +169,8 @@ github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkr
|
||||
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
|
||||
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jlaffaye/ftp v0.1.0 h1:DLGExl5nBoSFoNshAUHwXAezXwXBvFdx7/qwhucWNSE=
|
||||
github.com/jlaffaye/ftp v0.1.0/go.mod h1:hhq4G4crv+nW2qXtNYcuzLeOudG92Ps37HEKeg2e3lE=
|
||||
github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg=
|
||||
github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI=
|
||||
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
|
||||
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
|
||||
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
|
||||
@ -266,8 +268,8 @@ github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUA
|
||||
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
|
||||
github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y=
|
||||
github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
|
||||
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
|
||||
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0=
|
||||
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M=
|
||||
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
|
||||
@ -313,8 +315,8 @@ github.com/whyrusleeping/tar-utils v0.0.0-20180509141711-8c6c8ba81d5c/go.mod h1:
|
||||
github.com/winfsp/cgofuse v1.5.0 h1:MsBP7Mi/LiJf/7/F3O/7HjjR009ds6KCdqXzKpZSWxI=
|
||||
github.com/winfsp/cgofuse v1.5.0/go.mod h1:h3awhoUOcn2VYVKCwDaYxSLlZwnyK+A8KaDoLUp2lbU=
|
||||
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
|
||||
go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=
|
||||
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
|
||||
go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
|
||||
go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
|
||||
gocv.io/x/gocv v0.25.0/go.mod h1:Rar2PS6DV+T4FL+PM535EImD/h13hGVaHhnCu1xarBs=
|
||||
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
|
||||
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
|
||||
@ -329,11 +331,11 @@ golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0
|
||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
|
||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||
golang.org/x/crypto v0.10.0 h1:LKqV2xt9+kDzSTfOhx4FrkEBcMrAgHSYgzywV9zcGmM=
|
||||
golang.org/x/crypto v0.10.0/go.mod h1:o4eNf7Ede1fv+hwOwZsTHl9EsPFO6q6ZvYR8vYfY45I=
|
||||
golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
|
||||
golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
|
||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/image v0.7.0 h1:gzS29xtG1J5ybQlv0PuyfE3nmc6R4qB73m6LUUmvFuw=
|
||||
golang.org/x/image v0.7.0/go.mod h1:nd/q4ef1AKKYl/4kft7g+6UyGbdiqWqTP1ZAbRoV7Rg=
|
||||
golang.org/x/image v0.9.0 h1:QrzfX26snvCM20hIhBwuHI/ThTg18b/+kcKdXHvnR+g=
|
||||
golang.org/x/image v0.9.0/go.mod h1:jtrku+n79PfroUbvDdeUWMAI+heR786BofxrbiSF+J0=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
@ -346,17 +348,16 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
|
||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
|
||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||
golang.org/x/net v0.11.0 h1:Gi2tvZIJyBtO9SDr1q9h5hEQCp/4L2RQ+ar0qjx2oNU=
|
||||
golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ=
|
||||
golang.org/x/oauth2 v0.4.0 h1:NF0gk8LVPg1Ml7SSbGyySuoxdsXitj7TvgvuRxIMc/M=
|
||||
golang.org/x/oauth2 v0.4.0/go.mod h1:RznEsdpjGAINPTOF0UH/t+xJ75L18YO3Ho6Pyn+uRec=
|
||||
golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
|
||||
golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
|
||||
golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8=
|
||||
golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@ -373,13 +374,13 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
|
||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.9.0 h1:KS/R3tvhPqvJvwcKfnBHJwwthS11LRhmM5D59eEXa0s=
|
||||
golang.org/x/sys v0.9.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
|
||||
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.9.0 h1:GRRCnKYhdQrD8kfRAdQ6Zcw1P0OcELxGLKJvtjVMZ28=
|
||||
golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
@ -387,9 +388,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
||||
golang.org/x/text v0.10.0 h1:UpjohKhiEgNc0CSauXmwYftY1+LlaC75SJwh0SgCX58=
|
||||
golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
|
||||
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af h1:Yx9k8YCG3dvF87UAn2tu2HQLf2dt/eR1bXxpLMWeH+Y=
|
||||
@ -406,8 +406,8 @@ google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCID
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng=
|
||||
google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
||||
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
|
@@ -10,7 +10,7 @@ import (
     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/conf"
     "github.com/alist-org/alist/v3/pkg/utils"
-    "github.com/caarlos0/env/v7"
+    "github.com/caarlos0/env/v9"
     log "github.com/sirupsen/logrus"
 )
 
@@ -87,7 +87,7 @@ func confFromEnv() {
         prefix = ""
     }
     log.Infof("load config from env with prefix: %s", prefix)
-    if err := env.Parse(conf.Conf, env.Options{
+    if err := env.ParseWithOptions(conf.Conf, env.Options{
         Prefix: prefix,
     }); err != nil {
         log.Fatalf("load config from env error: %+v", err)
@ -20,11 +20,14 @@ type Database struct {
}

type Scheme struct {
    DisableHttp bool `json:"disable_http" env:"DISABLE_HTTP"`
    Https bool `json:"https" env:"HTTPS"`
    Address string `json:"address" env:"ADDR"`
    HttpPort int `json:"http_port" env:"HTTP_PORT"`
    HttpsPort int `json:"https_port" env:"HTTPS_PORT"`
    ForceHttps bool `json:"force_https" env:"FORCE_HTTPS"`
    CertFile string `json:"cert_file" env:"CERT_FILE"`
    KeyFile string `json:"key_file" env:"KEY_FILE"`
    UnixFile string `json:"unix_file" env:"UNIX_FILE"`
    UnixFilePerm string `json:"unix_file_perm" env:"UNIX_FILE_PERM"`
}

type LogConfig struct {
@ -38,9 +41,6 @@ type LogConfig struct {

type Config struct {
    Force bool `json:"force" env:"FORCE"`
    Address string `json:"address" env:"ADDR"`
    Port int `json:"port" env:"PORT"`
    HttpsPort int `json:"https_port" env:"HTTPS_PORT"`
    SiteURL string `json:"site_url" env:"SITE_URL"`
    Cdn string `json:"cdn" env:"CDN"`
    JwtSecret string `json:"jwt_secret" env:"JWT_SECRET"`
@ -61,9 +61,15 @@ func DefaultConfig() *Config {
    logPath := filepath.Join(flags.DataDir, "log/log.log")
    dbPath := filepath.Join(flags.DataDir, "data.db")
    return &Config{
        Scheme: Scheme{
            Address: "0.0.0.0",
            Port: 5244,
            HttpsPort: 5245,
            UnixFile: "",
            HttpPort: 5244,
            HttpsPort: -1,
            ForceHttps: false,
            CertFile: "",
            KeyFile: "",
        },
        JwtSecret: random.String(16),
        TokenExpiresIn: 48,
        TempDir: tempDir,

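For readers migrating an existing config.json, a rough sketch of what the reworked scheme block looks like with the defaults above. The struct below is a local mirror for illustration only (the real type lives in internal/conf), and judging by the server start-up logic, -1 appears to disable the corresponding listener:

package main

import (
    "encoding/json"
    "fmt"
)

// schemeMirror mirrors the new Scheme struct above; illustration only.
type schemeMirror struct {
    Address      string `json:"address"`
    HttpPort     int    `json:"http_port"`
    HttpsPort    int    `json:"https_port"`
    ForceHttps   bool   `json:"force_https"`
    CertFile     string `json:"cert_file"`
    KeyFile      string `json:"key_file"`
    UnixFile     string `json:"unix_file"`
    UnixFilePerm string `json:"unix_file_perm"`
}

func main() {
    // Defaults as set in DefaultConfig above: HTTP on 5244, HTTPS disabled.
    def := schemeMirror{Address: "0.0.0.0", HttpPort: 5244, HttpsPort: -1}
    out, _ := json.MarshalIndent(def, "", "  ")
    fmt.Println(string(out))
}
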
@ -8,6 +8,7 @@ import (

type ListArgs struct {
    ReqPath string
    S3ShowPlaceholder bool
}

type LinkArgs struct {

@ -9,24 +9,28 @@ import (
    "strings"
)

func GetSHA1Encode(data string) string {
func GetSHA1Encode(data []byte) string {
    h := sha1.New()
    h.Write([]byte(data))
    h.Write(data)
    return hex.EncodeToString(h.Sum(nil))
}

func GetSHA256Encode(data string) string {
func GetSHA256Encode(data []byte) string {
    h := sha256.New()
    h.Write([]byte(data))
    h.Write(data)
    return hex.EncodeToString(h.Sum(nil))
}

func GetMD5Encode(data string) string {
func GetMD5Encode(data []byte) string {
    h := md5.New()
    h.Write([]byte(data))
    h.Write(data)
    return hex.EncodeToString(h.Sum(nil))
}

func GetMD5EncodeStr(data string) string {
    return GetMD5Encode([]byte(data))
}

var DEC = map[string]string{
    "-": "+",
    "_": "/",

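Call sites change accordingly: the hash helpers now take []byte directly, and the new GetMD5EncodeStr keeps a string-based convenience path. A small usage sketch, assuming these helpers live in the public pkg/utils package and with made-up input data:

package main

import (
    "fmt"

    "github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
    data := []byte("hello")
    // New signatures: the helpers hash raw bytes without an extra conversion.
    fmt.Println(utils.GetSHA1Encode(data))
    fmt.Println(utils.GetSHA256Encode(data))
    fmt.Println(utils.GetMD5Encode(data))
    // String-based convenience wrapper added above.
    fmt.Println(utils.GetMD5EncodeStr("hello"))
}
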
@ -46,9 +46,9 @@ func IsSubPath(path string, subPath string) bool {
func Ext(path string) string {
    ext := stdpath.Ext(path)
    if strings.HasPrefix(ext, ".") {
        return ext[1:]
        ext = ext[1:]
    }
    return ext
    return strings.ToLower(ext)
}

func EncodePath(path string, all ...bool) string {

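With the change above, Ext now also lower-cases the extension it returns, so extension comparisons elsewhere can drop their own ToLower calls. A tiny sketch, assuming the helper is the one exported from pkg/utils and with made-up paths:

package main

import (
    "fmt"

    "github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
    // Previously "JPG"; now normalized to lower case, still without the dot.
    fmt.Println(utils.Ext("/photos/IMG_0001.JPG")) // jpg
    fmt.Println(utils.Ext("archive.tar.GZ"))       // gz
}
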
@ -60,3 +60,12 @@ func MergeErrors(errs ...error) error {
    }
    return nil
}

func SliceMeet[T1, T2 any](arr []T1, v T2, meet func(item T1, v T2) bool) bool {
    for _, item := range arr {
        if meet(item, v) {
            return true
        }
    }
    return false
}

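SliceMeet is a small generic helper: it returns true as soon as any element of arr satisfies meet(item, v). A usage sketch, assuming it is exported from pkg/utils alongside MergeErrors; the slice and probe value are made up:

package main

import (
    "fmt"
    "strings"

    "github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
    exts := []string{"jpg", "png", "gif"}
    // True because one element matches the probe value case-insensitively.
    ok := utils.SliceMeet(exts, "PNG", func(item string, v string) bool {
        return strings.EqualFold(item, v)
    })
    fmt.Println(ok) // true
}
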
@ -58,6 +58,9 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.
    } else {
        w.WriteHeader(link.Status)
    }
    if r.Method == http.MethodHead {
        return nil
    }
    _, err = io.Copy(w, link.Data)
    if err != nil {
        return err
@ -95,6 +98,9 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.
    } else {
        w.WriteHeader(link.Status)
    }
    if r.Method == http.MethodHead {
        return nil
    }
    return link.Writer(w)
} else {
    req, err := http.NewRequest(r.Method, link.URL, nil)
@ -132,6 +138,9 @@ func Proxy(w http.ResponseWriter, r *http.Request, link *model.Link, file model.
        log.Debugln(msg)
        return errors.New(msg)
    }
    if r.Method == http.MethodHead {
        return nil
    }
    _, err = io.Copy(w, res.Body)
    if err != nil {
        return err

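Together with the HEAD routes registered in the router changes further below, these early returns let clients probe a proxied file's headers without transferring the body. A hedged client-side sketch; the local address and path are placeholders, not a guarantee about any particular deployment:

package main

import (
    "fmt"
    "log"
    "net/http"
)

func main() {
    // HEAD against a proxied download path: headers only, no body copy.
    resp, err := http.Head("http://localhost:5244/d/local/movie.mkv")
    if err != nil {
        log.Fatal(err)
    }
    defer resp.Body.Close()
    fmt.Println(resp.StatusCode, resp.Header.Get("Content-Type"), resp.ContentLength)
}
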
@ -48,9 +48,17 @@ func AddAria2(c *gin.Context) {
        return
    }
    if !aria2.IsAria2Ready() {
        common.ErrorStrResp(c, "aria2 not ready", 500)
        // try to init client
        _, err := aria2.InitClient(2)
        if err != nil {
            common.ErrorResp(c, err, 500)
            return
        }
        if !aria2.IsAria2Ready() {
            common.ErrorStrResp(c, "aria2 still not ready after init", 500)
            return
        }
    }
    var req AddAria2Req
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)

server/handles/fsbatch.go (new file, 211 lines)
@ -0,0 +1,211 @@
package handles

import (
    "fmt"
    "regexp"

    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/fs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/generic"
    "github.com/alist-org/alist/v3/server/common"
    "github.com/gin-gonic/gin"
    "github.com/pkg/errors"
)

type BatchRenameReq struct {
    SrcDir string `json:"src_dir"`
    RenameObjects []struct {
        SrcName string `json:"src_name"`
        NewName string `json:"new_name"`
    } `json:"rename_objects"`
}

func FsBatchRename(c *gin.Context) {
    var req BatchRenameReq
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)
        return
    }
    user := c.MustGet("user").(*model.User)
    if !user.CanRename() {
        common.ErrorResp(c, errs.PermissionDenied, 403)
        return
    }

    reqPath, err := user.JoinPath(req.SrcDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }

    meta, err := op.GetNearestMeta(reqPath)
    if err != nil {
        if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
            common.ErrorResp(c, err, 500, true)
            return
        }
    }
    c.Set("meta", meta)
    for _, renameObject := range req.RenameObjects {
        if renameObject.SrcName == "" || renameObject.NewName == "" {
            continue
        }
        filePath := fmt.Sprintf("%s/%s", reqPath, renameObject.SrcName)
        if err := fs.Rename(c, filePath, renameObject.NewName); err != nil {
            common.ErrorResp(c, err, 500)
            return
        }
    }
    common.SuccessResp(c)
}

type RecursiveMoveReq struct {
    SrcDir string `json:"src_dir"`
    DstDir string `json:"dst_dir"`
}

func FsRecursiveMove(c *gin.Context) {
    var req RecursiveMoveReq
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)
        return
    }

    user := c.MustGet("user").(*model.User)
    if !user.CanMove() {
        common.ErrorResp(c, errs.PermissionDenied, 403)
        return
    }
    srcDir, err := user.JoinPath(req.SrcDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }
    dstDir, err := user.JoinPath(req.DstDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }

    meta, err := op.GetNearestMeta(srcDir)
    if err != nil {
        if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
            common.ErrorResp(c, err, 500, true)
            return
        }
    }
    c.Set("meta", meta)

    rootFiles, err := fs.List(c, srcDir, &fs.ListArgs{})
    if err != nil {
        common.ErrorResp(c, err, 500)
        return
    }

    // record the file path
    filePathMap := make(map[model.Obj]string)
    movingFiles := generic.NewQueue[model.Obj]()
    for _, file := range rootFiles {
        movingFiles.Push(file)
        filePathMap[file] = srcDir
    }

    for !movingFiles.IsEmpty() {

        movingFile := movingFiles.Pop()
        movingFilePath := filePathMap[movingFile]
        movingFileName := fmt.Sprintf("%s/%s", movingFilePath, movingFile.GetName())
        if movingFile.IsDir() {
            // directory, recursive move
            subFilePath := movingFileName
            subFiles, err := fs.List(c, movingFileName, &fs.ListArgs{Refresh: true})
            if err != nil {
                common.ErrorResp(c, err, 500)
                return
            }
            for _, subFile := range subFiles {
                movingFiles.Push(subFile)
                filePathMap[subFile] = subFilePath
            }
        } else {

            if movingFilePath == dstDir {
                // same directory, don't move
                continue
            }

            // move
            err := fs.Move(c, movingFileName, dstDir, movingFiles.IsEmpty())
            if err != nil {
                common.ErrorResp(c, err, 500)
                return
            }
        }

    }

    common.SuccessResp(c)
}

type RegexRenameReq struct {
    SrcDir string `json:"src_dir"`
    SrcNameRegex string `json:"src_name_regex"`
    NewNameRegex string `json:"new_name_regex"`
}

func FsRegexRename(c *gin.Context) {
    var req RegexRenameReq
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)
        return
    }
    user := c.MustGet("user").(*model.User)
    if !user.CanRename() {
        common.ErrorResp(c, errs.PermissionDenied, 403)
        return
    }

    reqPath, err := user.JoinPath(req.SrcDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }

    meta, err := op.GetNearestMeta(reqPath)
    if err != nil {
        if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
            common.ErrorResp(c, err, 500, true)
            return
        }
    }
    c.Set("meta", meta)

    srcRegexp, err := regexp.Compile(req.SrcNameRegex)
    if err != nil {
        common.ErrorResp(c, err, 500)
        return
    }

    files, err := fs.List(c, reqPath, &fs.ListArgs{})
    if err != nil {
        common.ErrorResp(c, err, 500)
        return
    }

    for _, file := range files {

        if srcRegexp.MatchString(file.GetName()) {
            filePath := fmt.Sprintf("%s/%s", reqPath, file.GetName())
            newFileName := srcRegexp.ReplaceAllString(file.GetName(), req.NewNameRegex)
            if err := fs.Rename(c, filePath, newFileName); err != nil {
                common.ErrorResp(c, err, 500)
                return
            }
        }

    }

    common.SuccessResp(c)
}

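Since FsRegexRename feeds new_name_regex straight into Go's regexp.ReplaceAllString, capture groups in the replacement are referenced with $1, $2, and so on. A standalone illustration of that replacement semantics; the file name and pattern are made up:

package main

import (
    "fmt"
    "regexp"
)

func main() {
    src := regexp.MustCompile(`^IMG_(\d+)\.jpg$`)
    // What the handler above would compute as newFileName for a matching file.
    fmt.Println(src.ReplaceAllString("IMG_0042.jpg", "holiday-$1.jpg")) // holiday-0042.jpg
}
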
@ -4,7 +4,6 @@ import (
    "fmt"
    "io"
    stdpath "path"
    "regexp"

    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/fs"
@ -96,94 +95,6 @@ func FsMove(c *gin.Context) {
    common.SuccessResp(c)
}

type RecursiveMoveReq struct {
    SrcDir string `json:"src_dir"`
    DstDir string `json:"dst_dir"`
}

func FsRecursiveMove(c *gin.Context) {
    var req RecursiveMoveReq
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)
        return
    }

    user := c.MustGet("user").(*model.User)
    if !user.CanMove() {
        common.ErrorResp(c, errs.PermissionDenied, 403)
        return
    }
    srcDir, err := user.JoinPath(req.SrcDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }
    dstDir, err := user.JoinPath(req.DstDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }

    meta, err := op.GetNearestMeta(srcDir)
    if err != nil {
        if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
            common.ErrorResp(c, err, 500, true)
            return
        }
    }
    c.Set("meta", meta)

    rootFiles, err := fs.List(c, srcDir, &fs.ListArgs{})
    if err != nil {
        common.ErrorResp(c, err, 500)
        return
    }

    // record the file path
    filePathMap := make(map[model.Obj]string)
    movingFiles := generic.NewQueue[model.Obj]()
    for _, file := range rootFiles {
        movingFiles.Push(file)
        filePathMap[file] = srcDir
    }

    for !movingFiles.IsEmpty() {

        movingFile := movingFiles.Pop()
        movingFilePath := filePathMap[movingFile]
        movingFileName := fmt.Sprintf("%s/%s", movingFilePath, movingFile.GetName())
        if movingFile.IsDir() {
            // directory, recursive move
            subFilePath := movingFileName
            subFiles, err := fs.List(c, movingFileName, &fs.ListArgs{Refresh: true})
            if err != nil {
                common.ErrorResp(c, err, 500)
                return
            }
            for _, subFile := range subFiles {
                movingFiles.Push(subFile)
                filePathMap[subFile] = subFilePath
            }
        } else {

            if movingFilePath == dstDir {
                // same directory, don't move
                continue
            }

            // move
            err := fs.Move(c, movingFileName, dstDir, movingFiles.IsEmpty())
            if err != nil {
                common.ErrorResp(c, err, 500)
                return
            }
        }

    }

    common.SuccessResp(c)
}

func FsCopy(c *gin.Context) {
    var req MoveCopyReq
    if err := c.ShouldBind(&req); err != nil {
@ -255,67 +166,6 @@ func FsRename(c *gin.Context) {
    common.SuccessResp(c)
}

type RegexRenameReq struct {
    SrcDir string `json:"src_dir"`
    SrcNameRegex string `json:"src_name_regex"`
    NewNameRegex string `json:"new_name_regex"`
}

func FsRegexRename(c *gin.Context) {
    var req RegexRenameReq
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)
        return
    }
    user := c.MustGet("user").(*model.User)
    if !user.CanRename() {
        common.ErrorResp(c, errs.PermissionDenied, 403)
        return
    }

    reqPath, err := user.JoinPath(req.SrcDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }

    meta, err := op.GetNearestMeta(reqPath)
    if err != nil {
        if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
            common.ErrorResp(c, err, 500, true)
            return
        }
    }
    c.Set("meta", meta)

    srcRegexp, err := regexp.Compile(req.SrcNameRegex)
    if err != nil {
        common.ErrorResp(c, err, 500)
        return
    }

    files, err := fs.List(c, reqPath, &fs.ListArgs{})
    if err != nil {
        common.ErrorResp(c, err, 500)
        return
    }

    for _, file := range files {

        if srcRegexp.MatchString(file.GetName()) {
            filePath := fmt.Sprintf("%s/%s", reqPath, file.GetName())
            newFileName := srcRegexp.ReplaceAllString(file.GetName(), req.NewNameRegex)
            if err := fs.Rename(c, filePath, newFileName); err != nil {
                common.ErrorResp(c, err, 500)
                return
            }
        }

    }

    common.SuccessResp(c)
}

type RemoveReq struct {
    Dir string `json:"dir"`
    Names []string `json:"names"`

@ -47,9 +47,17 @@ func AddQbittorrent(c *gin.Context) {
        return
    }
    if !qbittorrent.IsQbittorrentReady() {
        common.ErrorStrResp(c, "qbittorrent not ready", 500)
        // try to init client
        err := qbittorrent.InitClient()
        if err != nil {
            common.ErrorResp(c, err, 500)
            return
        }
        if !qbittorrent.IsQbittorrentReady() {
            common.ErrorStrResp(c, "qbittorrent still not ready after init", 500)
            return
        }
    }
    var req AddQbittorrentReq
    if err := c.ShouldBind(&req); err != nil {
        common.ErrorResp(c, err, 400)

@ -1,11 +1,13 @@
package handles

import (
    "encoding/base32"
    "errors"
    "fmt"
    "net/http"
    "net/url"
    "strings"
    "time"

    "github.com/alist-org/alist/v3/internal/conf"
    "github.com/alist-org/alist/v3/internal/db"
@ -15,9 +17,20 @@ import (
    "github.com/coreos/go-oidc"
    "github.com/gin-gonic/gin"
    "github.com/go-resty/resty/v2"
    "github.com/pquerna/otp"
    "github.com/pquerna/otp/totp"
    "golang.org/x/oauth2"
)

var opts = totp.ValidateOpts{
    // the state won't expire within 30 secs, which is quite enough for the callback
    Period: 30,
    Skew: 1,
    // in some OIDC providers (such as Authelia), the state parameter must be at least 8 characters
    Digits: otp.DigitsEight,
    Algorithm: otp.AlgorithmSHA1,
}

func SSOLoginRedirect(c *gin.Context) {
    method := c.Query("method")
    enabled := setting.GetBool(conf.SSOLoginEnabled)
@ -62,7 +75,13 @@ func SSOLoginRedirect(c *gin.Context) {
        common.ErrorStrResp(c, err.Error(), 400)
        return
    }
    c.Redirect(http.StatusFound, oauth2Config.AuthCodeURL("state"))
    // generate state parameter
    state, err := totp.GenerateCodeCustom(base32.StdEncoding.EncodeToString([]byte(oauth2Config.ClientSecret)), time.Now(), opts)
    if err != nil {
        common.ErrorStrResp(c, err.Error(), 400)
        return
    }
    c.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
    return
default:
    common.ErrorStrResp(c, "invalid platform", 400)
@ -117,6 +136,17 @@ func OIDCLoginCallback(c *gin.Context) {
        common.ErrorResp(c, err, 400)
        return
    }
    // add state verify process
    stateVerification, err := totp.ValidateCustom(c.Query("state"), base32.StdEncoding.EncodeToString([]byte(oauth2Config.ClientSecret)), time.Now(), opts)
    if err != nil {
        common.ErrorResp(c, err, 400)
        return
    }
    if !stateVerification {
        common.ErrorStrResp(c, "incorrect or expired state parameter", 400)
        return
    }

    oauth2Token, err := oauth2Config.Exchange(c, c.Query("code"))
    if err != nil {
        common.ErrorResp(c, err, 400)

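The state parameter above is effectively a short-lived TOTP code derived from the OIDC client secret, so the callback can verify it without keeping any server-side session. A self-contained sketch of that generate/verify round trip; "client-secret" is a placeholder, and the options mirror the opts block in the diff:

package main

import (
    "encoding/base32"
    "fmt"
    "time"

    "github.com/pquerna/otp"
    "github.com/pquerna/otp/totp"
)

func main() {
    // Same validation options as the handler above: 30s period, one step of
    // skew, eight digits, SHA1.
    opts := totp.ValidateOpts{
        Period:    30,
        Skew:      1,
        Digits:    otp.DigitsEight,
        Algorithm: otp.AlgorithmSHA1,
    }
    // The client secret stands in for oauth2Config.ClientSecret.
    secret := base32.StdEncoding.EncodeToString([]byte("client-secret"))
    state, err := totp.GenerateCodeCustom(secret, time.Now(), opts)
    if err != nil {
        panic(err)
    }
    ok, err := totp.ValidateCustom(state, secret, time.Now(), opts)
    fmt.Println(state, ok, err) // e.g. 12345678 true <nil>
}
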
@ -12,7 +12,7 @@ func ForceHttps(c *gin.Context) {
    if c.Request.TLS == nil {
        host := c.Request.Host
        // change port to https port
        host = strings.Replace(host, fmt.Sprintf(":%d", conf.Conf.Port), fmt.Sprintf(":%d", conf.Conf.HttpsPort), 1)
        host = strings.Replace(host, fmt.Sprintf(":%d", conf.Conf.Scheme.HttpPort), fmt.Sprintf(":%d", conf.Conf.Scheme.HttpsPort), 1)
        c.Redirect(302, "https://"+host+c.Request.RequestURI)
        c.Abort()
        return

@ -21,7 +21,7 @@ func Init(e *gin.Engine) {
    }
    Cors(e)
    g := e.Group(conf.URL.Path)
    if conf.Conf.Scheme.Https && conf.Conf.Scheme.ForceHttps && !conf.Conf.Scheme.DisableHttp {
    if conf.Conf.Scheme.HttpPort != -1 && conf.Conf.Scheme.HttpsPort != -1 && conf.Conf.Scheme.ForceHttps {
        g.Use(middlewares.ForceHttps)
    }
    g.Any("/ping", func(c *gin.Context) {
@ -39,6 +39,8 @@ func Init(e *gin.Engine) {

    g.GET("/d/*path", middlewares.Down, handles.Down)
    g.GET("/p/*path", middlewares.Down, handles.Proxy)
    g.HEAD("/d/*path", middlewares.Down, handles.Down)
    g.HEAD("/p/*path", middlewares.Down, handles.Proxy)

    api := g.Group("/api")
    auth := api.Group("", middlewares.Auth)
@ -130,6 +132,7 @@ func _fs(g *gin.RouterGroup) {
    g.Any("/dirs", handles.FsDirs)
    g.POST("/mkdir", handles.FsMkdir)
    g.POST("/rename", handles.FsRename)
    g.POST("/batch_rename", handles.FsBatchRename)
    g.POST("/regex_rename", handles.FsRegexRename)
    g.POST("/move", handles.FsMove)
    g.POST("/recursive_move", handles.FsRecursiveMove)

@ -71,6 +71,10 @@ func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
    status, err = h.handleUnlock(brw, r)
case "PROPFIND":
    status, err = h.handlePropfind(brw, r)
    // if PROPFIND returns an error, present the path to the client as an empty folder
    if err != nil {
        status = http.StatusNotFound
    }
case "PROPPATCH":
    status, err = h.handleProppatch(brw, r)
}
