Compare commits

...

30 Commits

SHA1 Message Date
a64dd4885e fix(139): fixed time zone (close #5263) 2023-09-22 16:54:16 +08:00
0f03a747d8 ci: cancel previous workflow run 2023-09-22 16:53:07 +08:00
30977cdc6d feat: sso compatibility mode (#5260) 2023-09-22 16:45:51 +08:00
106cf720c1 fix(baidu_netdisk): retry logic in request (close #5262) 2023-09-22 16:27:44 +08:00
882112ed1c feat: add hash_info field to /fs/get (close #5259) 2023-09-22 15:20:04 +08:00
2a6ab77295 fix(115): data race in Link (#5253) 2023-09-21 13:39:07 +08:00
f0981a0c8d chore(virtual): implement the driver interface with result 2023-09-20 09:02:56 +08:00
57eea4db17 fix(deps): update module github.com/go-resty/resty/v2 to v2.8.0 (#5244)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-20 08:51:34 +08:00
234852ca61 fix(deps): update module github.com/pkg/sftp to v1.13.6 (#5041)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-19 20:02:42 +08:00
809105b67e fix(deps): update module github.com/blevesearch/bleve/v2 to v2.3.10 (#5232)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-17 15:57:29 +08:00
02e8c31506 fix(deps): update golang.org/x/exp digest to 9212866 (#5205)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-16 23:21:42 +08:00
19b39a5c04 fix(onedrive): overwrite upload big file (close #5217 in #5218)
See https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
2023-09-14 13:38:07 +08:00
28e2731594 fix: clear cache recursively on deleting the folder (close #5209) 2023-09-13 16:06:17 +08:00
b1a279cbcc feat(139): implement MoveResult interface (close #5130) 2023-09-13 15:56:13 +08:00
352a6a741a feat(webdav): support copy directly without task (close #5206) 2023-09-13 15:45:57 +08:00
109015567a fix(deps): update module golang.org/x/oauth2 to v0.12.0 (#5058)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2023-09-12 12:52:48 +08:00
9e0fa77ca2 feat: add 123 link driver (close #4924) 2023-09-10 16:50:10 +08:00
335b11c698 chore: implement the driver interface with obj return [skip ci] 2023-09-08 15:25:49 +08:00
8e433355e6 fix(terabox): missing JsToken field on request (close #5189) 2023-09-08 15:18:56 +08:00
3504f017b9 fix(upload): memory leak on form upload as task (close #5185) 2023-09-07 15:51:52 +08:00
cd2f8077fa chore: enable all pprof handle on debug 2023-09-07 14:56:50 +08:00
d5b68a91d2 fix(webdav): optimize HEAD request (close #5182) 2023-09-06 16:32:51 +08:00
623c7dcea5 fix(189pc): get real link after redirect 2023-09-06 16:02:28 +08:00
ecbd6d86cd fix(lanzou): sub file in share folder need pwd (#5184) 2023-09-06 14:48:12 +08:00
7200344ace feat: adapt hash feature for some drivers (#5180)
* feat(pikpak,thunder): adaptation gcid hash

* chore(weiyun): add note

* feat(baidu_netdisk): adaptation rapid

* feat(baidu_photo): adaptation hash

* feat(189pc): adaptation rapid

* feat(mopan):adaptation ctime

* feat(139):adaptation hash and ctime

---------

Co-authored-by: Andy Hsu <i@nn.ci>
2023-09-06 14:46:35 +08:00
b313ac4daa fix(crypt): fix 139cloud hack (#5178)
(cherry picked from commit 18bf64af47e58cc69cdd2e598de9c19538a7bf78)
2023-09-06 14:12:01 +08:00
f2f312b43a fix: http response body not close on status >= 400 (close #5163) 2023-09-05 15:46:16 +08:00
6f6d20e1ba fix: force_https not take effect on noRoute (close #5167) 2023-09-05 13:05:46 +08:00
3231c3d930 perf(db): release database before exit 2023-09-05 13:04:27 +08:00
b604e21c69 feat(webdav): support http chunked request (close #5161 in #5162)
However, we still recommend sending a Content-Length header when putting files (a minimal Go sketch follows the commit list)
2023-09-05 13:03:29 +08:00
66 changed files with 1172 additions and 416 deletions
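As promised above for commit b604e21c69, here is a minimal Go sketch of the two PUT shapes the WebDAV handler now accepts. The endpoint http://localhost:5244/dav/hello.txt and the credentials are hypothetical and not taken from the repository; the point is only that net/http sets Content-Length automatically for a *strings.Reader body, while wrapping the reader hides its length and makes the client fall back to chunked transfer encoding.

package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func put(body io.Reader) {
	// Hypothetical alist WebDAV endpoint and credentials.
	req, err := http.NewRequest(http.MethodPut, "http://localhost:5244/dav/hello.txt", body)
	if err != nil {
		panic(err)
	}
	req.SetBasicAuth("admin", "password")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}

func main() {
	data := "hello webdav"

	// Content-Length is set automatically for a *strings.Reader body
	// (still the recommended way to PUT files).
	put(strings.NewReader(data))

	// Wrapping the reader hides its concrete type, so net/http cannot
	// determine the length and sends the body with chunked encoding.
	put(io.NopCloser(strings.NewReader(data)))
}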

View File

@ -11,6 +11,10 @@ on:
- 'cmd/lang.go'
workflow_dispatch:
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
auto_lang:
strategy:

View File

@ -6,6 +6,10 @@ on:
pull_request:
branches: [ 'main' ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build:
strategy:

View File

@ -4,6 +4,10 @@ on:
push:
branches: [ main ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true
jobs:
build_docker:
name: Build docker

View File

@ -19,6 +19,7 @@ var AdminCmd = &cobra.Command{
Short: "Show admin user's info and some operations about admin user's password",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
@ -57,6 +58,7 @@ var ShowTokenCmd = &cobra.Command{
Short: "Show admin token",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
token := setting.GetStr(conf.Token)
utils.Log.Infof("Admin token: %s", token)
},
@ -64,6 +66,7 @@ var ShowTokenCmd = &cobra.Command{
func setAdminPassword(pwd string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)

View File

@ -15,6 +15,7 @@ var Cancel2FACmd = &cobra.Command{
Short: "Delete 2FA of admin user",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed to get admin user: %+v", err)

View File

@ -7,6 +7,7 @@ import (
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
@ -19,6 +20,10 @@ func Init() {
bootstrap.InitIndex()
}
func Release() {
db.Close()
}
var pid = -1
var pidFile string

View File

@ -100,7 +100,7 @@ the address is defined in config file`,
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit
utils.Log.Println("Shutdown server...")
Release()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
defer cancel()
var wg sync.WaitGroup

View File

@ -31,6 +31,7 @@ var disableStorageCmd = &cobra.Command{
}
mountPath := args[0]
Init()
defer Release()
storage, err := db.GetStorageByMountPath(mountPath)
if err != nil {
utils.Log.Errorf("failed to query storage: %+v", err)
@ -89,6 +90,7 @@ var listStorageCmd = &cobra.Command{
Short: "List all storages",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
storages, _, err := db.GetStorages(1, -1)
if err != nil {
utils.Log.Errorf("failed to query storages: %+v", err)

View File

@ -45,10 +45,7 @@ func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
downloadInfo, err := d.client.
SetUserAgent(driver115.UA115Browser).
Download(file.(*FileObj).PickCode)
// recover for upload
d.client.SetUserAgent(driver115.UA115Desktop)
DownloadWithUA(file.(*FileObj).PickCode, driver115.UA115Browser)
if err != nil {
return nil, err
}

View File

@ -0,0 +1,77 @@
package _123Link
import (
"context"
stdpath "path"
"time"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
)
type Pan123Link struct {
model.Storage
Addition
root *Node
}
func (d *Pan123Link) Config() driver.Config {
return config
}
func (d *Pan123Link) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan123Link) Init(ctx context.Context) error {
node, err := BuildTree(d.OriginURLs)
if err != nil {
return err
}
node.calSize()
d.root = node
return nil
}
func (d *Pan123Link) Drop(ctx context.Context) error {
return nil
}
func (d *Pan123Link) Get(ctx context.Context, path string) (model.Obj, error) {
node := GetNodeFromRootByPath(d.root, path)
return nodeToObj(node, path)
}
func (d *Pan123Link) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
node := GetNodeFromRootByPath(d.root, dir.GetPath())
if node == nil {
return nil, errs.ObjectNotFound
}
if node.isFile() {
return nil, errs.NotFolder
}
return utils.SliceConvert(node.Children, func(node *Node) (model.Obj, error) {
return nodeToObj(node, stdpath.Join(dir.GetPath(), node.Name))
})
}
func (d *Pan123Link) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
node := GetNodeFromRootByPath(d.root, file.GetPath())
if node == nil {
return nil, errs.ObjectNotFound
}
if node.isFile() {
signUrl, err := SignURL(node.Url, d.PrivateKey, d.UID, time.Duration(d.ValidDuration)*time.Minute)
if err != nil {
return nil, err
}
return &model.Link{
URL: signUrl,
}, nil
}
return nil, errs.NotFile
}
var _ driver.Driver = (*Pan123Link)(nil)

23
drivers/123_link/meta.go Normal file
View File

@ -0,0 +1,23 @@
package _123Link
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
OriginURLs string `json:"origin_urls" type:"text" required:"true" default:"https://vip.123pan.com/29/folder/file.mp3" help:"structure:FolderName:\n [FileSize:][Modified:]Url"`
PrivateKey string `json:"private_key"`
UID uint64 `json:"uid" type:"number"`
ValidDuration int64 `json:"valid_duration" type:"number" default:"30" help:"minutes"`
}
var config = driver.Config{
Name: "123PanLink",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan123Link{}
})
}

152
drivers/123_link/parse.go Normal file
View File

@ -0,0 +1,152 @@
package _123Link
import (
"fmt"
url2 "net/url"
stdpath "path"
"strconv"
"strings"
"time"
)
// build tree from text, text structure definition:
/**
* FolderName:
* [FileSize:][Modified:]Url
*/
/**
* For example:
* folder1:
* name1:url1
* url2
* folder2:
* url3
* url4
* url5
* folder3:
* url6
* url7
* url8
*/
// if there is no name, use the last segment of the url as the name
func BuildTree(text string) (*Node, error) {
lines := strings.Split(text, "\n")
var root = &Node{Level: -1, Name: "root"}
stack := []*Node{root}
for _, line := range lines {
// calculate indent
indent := 0
for i := 0; i < len(line); i++ {
if line[i] != ' ' {
break
}
indent++
}
// if indent is not a multiple of 2, it is an error
if indent%2 != 0 {
return nil, fmt.Errorf("the line '%s' is not a multiple of 2", line)
}
// calculate level
level := indent / 2
line = strings.TrimSpace(line[indent:])
// if the line is empty, skip
if line == "" {
continue
}
// if level isn't greater than the level of the top of the stack
// it is not the child of the top of the stack
for level <= stack[len(stack)-1].Level {
// pop the top of the stack
stack = stack[:len(stack)-1]
}
// if the line is a folder
if isFolder(line) {
// create a new node
node := &Node{
Level: level,
Name: strings.TrimSuffix(line, ":"),
}
// add the node to the top of the stack
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
// push the node to the stack
stack = append(stack, node)
} else {
// if the line is a file
// create a new node
node, err := parseFileLine(line)
if err != nil {
return nil, err
}
node.Level = level
// add the node to the top of the stack
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
}
}
return root, nil
}
func isFolder(line string) bool {
return strings.HasSuffix(line, ":")
}
// line definition:
// [FileSize:][Modified:]Url
func parseFileLine(line string) (*Node, error) {
// if there is no url, it is an error
if !strings.Contains(line, "http://") && !strings.Contains(line, "https://") {
return nil, fmt.Errorf("invalid line: %s, because url is required for file", line)
}
index := strings.Index(line, "http://")
if index == -1 {
index = strings.Index(line, "https://")
}
url := line[index:]
info := line[:index]
node := &Node{
Url: url,
}
name := stdpath.Base(url)
unescape, err := url2.PathUnescape(name)
if err == nil {
name = unescape
}
node.Name = name
if index > 0 {
if !strings.HasSuffix(info, ":") {
return nil, fmt.Errorf("invalid line: %s, because file info must end with ':'", line)
}
info = info[:len(info)-1]
if info == "" {
return nil, fmt.Errorf("invalid line: %s, because file name can't be empty", line)
}
infoParts := strings.Split(info, ":")
size, err := strconv.ParseInt(infoParts[0], 10, 64)
if err != nil {
return nil, fmt.Errorf("invalid line: %s, because file size must be an integer", line)
}
node.Size = size
if len(infoParts) > 1 {
modified, err := strconv.ParseInt(infoParts[1], 10, 64)
if err != nil {
return nil, fmt.Errorf("invalid line: %s, because file modified must be an unix timestamp", line)
}
node.Modified = modified
} else {
node.Modified = time.Now().Unix()
}
}
return node, nil
}
func splitPath(path string) []string {
if path == "/" {
return []string{"root"}
}
parts := strings.Split(path, "/")
parts[0] = "root"
return parts
}
func GetNodeFromRootByPath(root *Node, path string) *Node {
return root.getByPath(splitPath(path))
}
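For orientation, a small example-style sketch (a hypothetical drivers/123_link/parse_example_test.go in the same package, with made-up URLs and sizes) of how the indented origin_urls text described in the comments above is parsed by BuildTree and then resolved by path:

package _123Link

import "fmt"

func ExampleBuildTree() {
	// Two-space indentation; the first file line carries a size prefix,
	// the second is a bare URL whose last path segment becomes the name.
	text := "music:\n" +
		"  3210:https://example.com/music/song.flac\n" +
		"  https://example.com/music/intro.mp3"

	root, err := BuildTree(text)
	if err != nil {
		panic(err)
	}
	root.calSize() // propagate file sizes up to the folders

	node := GetNodeFromRootByPath(root, "/music/song.flac")
	fmt.Println(node.Name, node.Size, node.isFile())
	// Output: song.flac 3210 true
}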

66
drivers/123_link/types.go Normal file
View File

@ -0,0 +1,66 @@
package _123Link
import (
"time"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
)
// Node is a node in the folder tree
type Node struct {
Url string
Name string
Level int
Modified int64
Size int64
Children []*Node
}
func (node *Node) getByPath(paths []string) *Node {
if len(paths) == 0 || node == nil {
return nil
}
if node.Name != paths[0] {
return nil
}
if len(paths) == 1 {
return node
}
for _, child := range node.Children {
tmp := child.getByPath(paths[1:])
if tmp != nil {
return tmp
}
}
return nil
}
func (node *Node) isFile() bool {
return node.Url != ""
}
func (node *Node) calSize() int64 {
if node.isFile() {
return node.Size
}
var size int64 = 0
for _, child := range node.Children {
size += child.calSize()
}
node.Size = size
return size
}
func nodeToObj(node *Node, path string) (model.Obj, error) {
if node == nil {
return nil, errs.ObjectNotFound
}
return &model.Object{
Name: node.Name,
Size: node.Size,
Modified: time.Unix(node.Modified, 0),
IsFolder: !node.isFile(),
Path: path,
}, nil
}

30
drivers/123_link/util.go Normal file
View File

@ -0,0 +1,30 @@
package _123Link
import (
"crypto/md5"
"fmt"
"math/rand"
"net/url"
"time"
)
func SignURL(originURL, privateKey string, uid uint64, validDuration time.Duration) (newURL string, err error) {
if privateKey == "" {
return originURL, nil
}
var (
ts = time.Now().Add(validDuration).Unix() // expiration timestamp
rInt = rand.Int() // random non-negative integer
objURL *url.URL
)
objURL, err = url.Parse(originURL)
if err != nil {
return "", err
}
authKey := fmt.Sprintf("%d-%d-%d-%x", ts, rInt, uid, md5.Sum([]byte(fmt.Sprintf("%s-%d-%d-%d-%s",
objURL.Path, ts, rInt, uid, privateKey))))
v := objURL.Query()
v.Add("auth_key", authKey)
objURL.RawQuery = v.Encode()
return objURL.String(), nil
}
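A short usage sketch of SignURL above, with a hypothetical private key and UID (the URL is the default from meta.go). The appended query parameter has the form auth_key=<expire-ts>-<rand>-<uid>-md5(<path>-<expire-ts>-<rand>-<uid>-<privateKey>), matching the Sprintf above.

package main

import (
	"fmt"
	"time"

	_123Link "github.com/alist-org/alist/v3/drivers/123_link"
)

func main() {
	signed, err := _123Link.SignURL(
		"https://vip.123pan.com/29/folder/file.mp3",
		"my-123pan-private-key", // hypothetical private key
		10001,                   // hypothetical UID
		30*time.Minute,          // matches the default valid_duration of 30 minutes
	)
	if err != nil {
		panic(err)
	}
	// Prints the origin URL with the auth_key query parameter appended.
	fmt.Println(signed)
}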

View File

@ -103,9 +103,9 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
return err
}
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
if d.isFamily() {
return errs.NotImplement
return nil, errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
@ -131,7 +131,10 @@ func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
if err != nil {
return nil, err
}
return srcObj, nil
}
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {

View File

@ -10,7 +10,7 @@ type Catalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CatalogType int `json:"catalogType"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//IsShared bool `json:"isShared"`
//CatalogLevel int `json:"catalogLevel"`
@ -63,7 +63,7 @@ type Content struct {
//ParentCatalogID string `json:"parentCatalogId"`
//Channel string `json:"channel"`
//GeoLocFlag string `json:"geoLocFlag"`
//Digest string `json:"digest"`
Digest string `json:"digest"`
//Version string `json:"version"`
//FileEtag string `json:"fileEtag"`
//FileVersion string `json:"fileVersion"`
@ -141,7 +141,7 @@ type CloudContent struct {
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
//Shottime interface{} `json:"shottime"`
LastUpdateTime string `json:"lastUpdateTime"`
ThumbnailURL string `json:"thumbnailURL"`
@ -165,7 +165,7 @@ type CloudCatalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CloudID string `json:"cloudID"`
//CreateTime string `json:"createTime"`
CreateTime string `json:"createTime"`
LastUpdateTime string `json:"lastUpdateTime"`
//Creator string `json:"creator"`
//CreatorNickname string `json:"creatorNickname"`

View File

@ -48,7 +48,7 @@ func calSign(body, ts, randStr string) string {
}
func getTime(t string) time.Time {
stamp, _ := time.ParseInLocation("20060102150405", t, time.Local)
stamp, _ := time.ParseInLocation("20060102150405", t, utils.CNLoc)
return stamp
}
@ -139,6 +139,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: catalog.CatalogName,
Size: 0,
Modified: getTime(catalog.UpdateTime),
Ctime: getTime(catalog.CreateTime),
IsFolder: true,
}
files = append(files, &f)
@ -150,6 +151,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.UpdateTime),
HashInfo: utils.NewHashInfo(utils.MD5, content.Digest),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
@ -202,6 +204,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Size: 0,
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
Ctime: getTime(catalog.CreateTime),
}
files = append(files, &f)
}
@ -212,6 +215,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
Ctime: getTime(content.CreateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
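Commit a64dd4885e switches getTime above from time.Local to utils.CNLoc. A standalone sketch of why (time.FixedZone is used here as a stand-in for utils.CNLoc, which is assumed to be a fixed UTC+8 location): 139 returns bare wall-clock timestamps in China Standard Time, so parsing them in the host's local zone skews them on any machine outside UTC+8.

package main

import (
	"fmt"
	"time"
)

func main() {
	cn := time.FixedZone("CST", 8*60*60) // stand-in for utils.CNLoc

	const layout = "20060102150405"
	raw := "20230922165416" // wall-clock time as returned by 139

	inLocal, _ := time.ParseInLocation(layout, raw, time.Local)
	inCN, _ := time.ParseInLocation(layout, raw, cn)

	fmt.Println(inLocal.UTC()) // wrong unless the host runs in UTC+8
	fmt.Println(inCN.UTC())    // always 2023-09-22 08:54:16 +0000 UTC
}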

View File

@ -27,10 +27,15 @@ type Cloud189PC struct {
tokenInfo *AppSessionResp
uploadThread int
storageConfig driver.Config
}
func (y *Cloud189PC) Config() driver.Config {
return config
if y.storageConfig.Name == "" {
y.storageConfig = config
}
return y.storageConfig
}
func (y *Cloud189PC) GetAddition() driver.Additional {
@ -38,6 +43,9 @@ func (y *Cloud189PC) GetAddition() driver.Additional {
}
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
// compatibility with the legacy upload API
y.storageConfig.NoOverwriteUpload = y.isFamily() && (y.Addition.RapidUpload || y.Addition.UploadMethod == "old")
// handle personal-cloud and family-cloud parameters
if y.isFamily() && y.RootFolderID == "-11" {
y.RootFolderID = ""
@ -118,10 +126,11 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
// follow the redirect to get the real download link
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
res, err := base.NoRedirectClient.R().SetContext(ctx).SetDoNotParseResponse(true).Get(downloadUrl.URL)
if err != nil {
return nil, err
}
defer res.RawBody().Close()
if res.StatusCode() == 302 {
downloadUrl.URL = res.Header().Get("location")
}
@ -302,6 +311,13 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
}
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// long response time; enabled on demand
if y.Addition.RapidUpload {
if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
return newObj, nil
}
}
switch y.UploadMethod {
case "old":
return y.OldUpload(ctx, dstDir, stream, up)

View File

@ -16,6 +16,7 @@ type Addition struct {
FamilyID string `json:"family_id"`
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
RapidUpload bool `json:"rapid_upload"`
NoUseOcr bool `json:"no_use_ocr"`
}

View File

@ -546,16 +546,30 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
return resp.toFile(), nil
}
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
fileMd5 := stream.GetHash().GetHash(utils.MD5)
if len(fileMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
if err != nil {
return nil, err
}
if uploadInfo.FileDataExists != 1 {
return nil, errors.New("rapid upload fail")
}
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
}
// fast upload
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// the full file md5 is required, so the reader must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
@ -739,68 +753,24 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
// legacy upload; the family cloud does not support overwrite
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// the full file md5 is required, so the reader must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
// calculate md5
fileMd5 := md5.New()
if _, err := io.Copy(fileMd5, tempFile); err != nil {
fileMd5, err := utils.HashFile(utils.MD5, tempFile)
if err != nil {
return nil, err
}
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return nil, err
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
// create an upload session
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err = y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"fileMd5": fileMd5Hex,
"fileName": file.GetName(),
"fileSize": fmt.Sprint(file.GetSize()),
"parentId": dstDir.GetID(),
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": file.GetName(),
"size": fmt.Sprint(file.GetSize()),
"md5": fileMd5Hex,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
// "baseFileId": "",
// "lastWrite":"",
// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
// "fileExt": "",
})
}
}, &uploadInfo)
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
if err != nil {
return nil, err
}
// the file does not exist in the cloud drive yet; start uploading
status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
for status.Size < file.GetSize() && status.FileDataExists != 1 {
status := GetUploadFileStatusResp{CreateUploadFileResp: *uploadInfo}
for status.GetSize() < file.GetSize() && status.FileDataExists != 1 {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
@ -839,28 +809,70 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
if err != nil {
return nil, err
}
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
return nil, err
}
up(int(status.Size / file.GetSize()))
up(int(status.GetSize()/file.GetSize()) * 100)
}
// commit
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
}
// create an upload session
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentID,
"fileMd5": fileMd5,
"fileName": fileName,
"fileSize": fileSize,
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": parentID,
"fileName": fileName,
"size": fileSize,
"md5": fileMd5,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
})
}
}, &uploadInfo)
if err != nil {
return nil, err
}
return &uploadInfo, nil
}
// commit the uploaded file
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
var resp OldCommitUploadFileResp
_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
"UploadFileId": fmt.Sprint(status.UploadFileId),
"UploadFileId": fmt.Sprint(uploadFileID),
"FamilyId": fmt.Sprint(y.FamilyID),
})
} else {
req.SetFormData(map[string]string{
"opertype": "3",
"resumePolicy": "1",
"uploadFileId": fmt.Sprint(status.UploadFileId),
"uploadFileId": fmt.Sprint(uploadFileID),
"isLog": "0",
})
}
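The RapidUpload helper above, like the pikpak, thunder and baidu_netdisk changes later in this diff, follows the pattern introduced by commit 7200344ace: ask the incoming stream for a pre-computed hash and only fall back to hashing a cached temp file when none is attached. A hedged standalone sketch of that pattern, assuming only the utils helpers visible in these hunks (NewHashInfo, HashInfo.GetHash, MD5.Width, HashFile); the cached file name is hypothetical.

package main

import (
	"fmt"
	"os"

	"github.com/alist-org/alist/v3/pkg/utils"
)

func main() {
	// Stand-in for stream.GetHash() on an incoming model.FileStreamer:
	// here the uploader already supplied the file's MD5.
	hi := utils.NewHashInfo(utils.MD5, "0cc175b9c0f1b6a831c399e269772661")

	fileMd5 := hi.GetHash(utils.MD5)
	if len(fileMd5) < utils.MD5.Width {
		// No usable hash attached: hash a cached copy instead,
		// as the drivers do after stream.CacheFullInTempFile().
		f, err := os.Open("cached-upload.tmp") // hypothetical cached file
		if err != nil {
			panic(err)
		}
		defer f.Close()
		if fileMd5, err = utils.HashFile(utils.MD5, f); err != nil {
			panic(err)
		}
	}

	// With a valid MD5 the driver can attempt a rapid upload
	// (OldUploadCreate / precreate) before streaming any bytes.
	fmt.Println("md5 used for rapid upload:", fileMd5)
}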

View File

@ -3,6 +3,7 @@ package drivers
import (
_ "github.com/alist-org/alist/v3/drivers/115"
_ "github.com/alist-org/alist/v3/drivers/123"
_ "github.com/alist-org/alist/v3/drivers/123_link"
_ "github.com/alist-org/alist/v3/drivers/123_share"
_ "github.com/alist-org/alist/v3/drivers/139"
_ "github.com/alist-org/alist/v3/drivers/189"

View File

@ -146,7 +146,39 @@ func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
return err
}
func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
contentMd5 := stream.GetHash().GetHash(utils.MD5)
if len(contentMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
streamSize := stream.GetSize()
rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
path := encodeURIComponent(rawPath)
mtime := stream.ModTime().Unix()
ctime := stream.CreateTime().Unix()
blockList, _ := utils.Json.MarshalToString([]string{contentMd5})
data := fmt.Sprintf("path=%s&size=%d&isdir=0&rtype=3&block_list=%s&local_mtime=%d&local_ctime=%d",
path, streamSize, blockList, mtime, ctime)
params := map[string]string{
"method": "create",
}
log.Debugf("[baidu_netdisk] precreate data: %s", data)
var newFile File
_, err := d.post("/xpan/file", params, data, &newFile)
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// rapid upload
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
return newObj, nil
}
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
@ -264,6 +296,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
}
return fileToObj(newFile), nil
}
func (d *BaiduNetdisk) uploadSlice(ctx context.Context, params map[string]string, fileName string, file io.Reader) error {
res, err := base.RestyClient.R().
SetContext(ctx).

View File

@ -1,6 +1,7 @@
package baidu_netdisk
import (
"errors"
"fmt"
"net/http"
"net/url"
@ -22,7 +23,7 @@ import (
func (d *BaiduNetdisk) refreshToken() error {
err := d._refreshToken()
if err != nil && err == errs.EmptyToken {
if err != nil && errors.Is(err, errs.EmptyToken) {
err = d._refreshToken()
}
return err
@ -74,21 +75,16 @@ func (d *BaiduNetdisk) request(furl string, method string, callback base.ReqCall
log.Info("refreshing baidu_netdisk token.")
err2 := d.refreshToken()
if err2 != nil {
return err2
return retry.Unrecoverable(err2)
}
}
err2 := fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
if !utils.SliceContains([]int{2}, errno) {
err2 = retry.Unrecoverable(err2)
}
return err2
return fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
}
result = res.Body()
return nil
},
retry.LastErrorOnly(true),
retry.Attempts(5),
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
return result, err
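For reference, a standalone sketch of how the trimmed retry loop above behaves (assuming github.com/avast/retry-go, which go.mod already lists): a plain error is retried with back-off, while an error wrapped in retry.Unrecoverable aborts the loop, which is why a failed token refresh now returns immediately instead of burning the remaining attempts. The error messages are made up.

package main

import (
	"errors"
	"fmt"
	"time"

	"github.com/avast/retry-go"
)

func main() {
	attempts := 0
	err := retry.Do(
		func() error {
			attempts++
			if attempts == 1 {
				return errors.New("errno 111: access token invalid") // retried
			}
			return retry.Unrecoverable(errors.New("refresh token failed")) // stops retrying
		},
		retry.LastErrorOnly(true),
		retry.Attempts(3),
		retry.Delay(time.Second),
		retry.DelayType(retry.BackOffDelay),
	)
	// attempts == 2: the unrecoverable error ends the loop before attempt 3.
	fmt.Println(attempts, err)
}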

View File

@ -227,14 +227,14 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
return nil, fmt.Errorf("file size cannot be zero")
}
// TODO:
// no rapid-upload method has been found yet
// the full file md5 is required, so the reader must support io.Seek
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
const DEFAULT int64 = 1 << 22
const SliceSize int64 = 1 << 18

View File

@ -2,9 +2,10 @@ package baiduphoto
import (
"fmt"
"github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/internal/model"
)
@ -52,34 +53,26 @@ type (
Ctime int64 `json:"ctime"` // creation time (s)
Mtime int64 `json:"mtime"` // modification time (s)
Thumburl []string `json:"thumburl"`
parseTime *time.Time
Md5 string `json:"md5"`
}
)
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) ModTime() time.Time {
if c.parseTime == nil {
c.parseTime = toTime(c.Mtime)
}
return *c.parseTime
}
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return "" }
func (c *File) GetPath() string { return "" }
func (c *File) GetSize() int64 { return c.Size }
func (c *File) GetName() string { return getFileName(c.Path) }
func (c *File) CreateTime() time.Time { return time.Unix(c.Ctime, 0) }
func (c *File) ModTime() time.Time { return time.Unix(c.Mtime, 0) }
func (c *File) IsDir() bool { return false }
func (c *File) GetID() string { return "" }
func (c *File) GetPath() string { return "" }
func (c *File) Thumb() string {
if len(c.Thumburl) > 0 {
return c.Thumburl[0]
}
return ""
}
func (c *File) CreateTime() time.Time {
return time.Unix(c.Ctime, 0)
}
func (c *File) GetHash() utils.HashInfo {
return utils.HashInfo{}
return utils.NewHashInfo(utils.MD5, c.Md5)
}
/* album section */
@ -117,25 +110,17 @@ type (
}
)
func (a *Album) CreateTime() time.Time {
return time.Unix(a.CreationTime, 0)
}
func (a *Album) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return a.Title }
func (a *Album) ModTime() time.Time {
if a.parseTime == nil {
a.parseTime = toTime(a.Mtime)
}
return *a.parseTime
}
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return "" }
func (a *Album) GetPath() string { return "" }
func (a *Album) GetSize() int64 { return 0 }
func (a *Album) GetName() string { return a.Title }
func (a *Album) CreateTime() time.Time { return time.Unix(a.CreationTime, 0) }
func (a *Album) ModTime() time.Time { return time.Unix(a.Mtime, 0) }
func (a *Album) IsDir() bool { return true }
func (a *Album) GetID() string { return "" }
func (a *Album) GetPath() string { return "" }
type (
CopyFileResp struct {

View File

@ -258,7 +258,7 @@ var sizeFindReg = regexp.MustCompile(`(?i)大小\W*([0-9.]+\s*[bkm]+)`)
var timeFindReg = regexp.MustCompile(`\d+\s*[秒天分小][钟时]?前|[昨前]天|\d{4}-\d{2}-\d{2}`)
// find the IDs and names of sub-folders in a shared folder
var findSubFolaerReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
var findSubFolderReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
// get the download page link
var findDownPageParamReg = regexp.MustCompile(`<iframe.*?src="(.+?)"`)
@ -455,7 +455,7 @@ func (d *LanZou) getFolderByShareUrl(pwd string, sharePageData string) ([]FileOr
files := make([]FileOrFolderByShareUrl, 0)
// vip: fetch sub-folders
floders := findSubFolaerReg.FindAllStringSubmatch(sharePageData, -1)
floders := findSubFolderReg.FindAllStringSubmatch(sharePageData, -1)
for _, floder := range floders {
if len(floder) == 3 {
files = append(files, FileOrFolderByShareUrl{
@ -476,10 +476,10 @@ func (d *LanZou) getFolderByShareUrl(pwd string, sharePageData string) ([]FileOr
if err != nil {
return nil, err
}
/*// files in the shared folder are also not password-protected
// files in the shared folder are password-protected
for i := 0; i < len(resp.Text); i++ {
resp.Text[i].Pwd = pwd
}*/
}
if len(resp.Text) == 0 {
break
}

View File

@ -7,6 +7,7 @@ import (
"io"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
@ -117,6 +118,15 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
return nil, err
}
data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Head(data.DownloadUrl)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
data.DownloadUrl = res.Header().Get("location")
}
return &model.Link{
URL: data.DownloadUrl,
}, nil
@ -262,7 +272,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
}
if !initUpdload.FileDataExists {
fmt.Println(d.client.CloudDiskStartBusiness())
utils.Log.Error(d.client.CloudDiskStartBusiness())
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
retry.Attempts(3),

View File

@ -4,6 +4,7 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
)
@ -14,6 +15,8 @@ func fileToObj(f mopan.File) model.Obj {
Name: f.Name,
Size: int64(f.Size),
Modified: time.Time(f.LastOpTime),
Ctime: time.Time(f.CreateDate),
HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.Icon.SmallURL,
@ -26,6 +29,7 @@ func folderToObj(f mopan.Folder) model.Obj {
ID: string(f.ID),
Name: f.Name,
Modified: time.Time(f.LastOpTime),
Ctime: time.Time(f.CreateDate),
IsFolder: true,
}
}
@ -37,6 +41,7 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
IsFolder: true,
Modified: o.ModTime(),
Ctime: o.CreateTime(),
}
}
@ -50,6 +55,8 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
Name: newName,
Size: o.GetSize(),
Modified: o.ModTime(),
Ctime: o.CreateTime(),
HashInfo: o.GetHash(),
},
Thumbnail: model.Thumbnail{
Thumbnail: thumb,

View File

@ -196,7 +196,8 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
if err != nil {
return err
}
if res.StatusCode != 201 && res.StatusCode != 202 {
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))

View File

@ -187,7 +187,8 @@ func (d *OnedriveAPP) upBig(ctx context.Context, dstDir model.Obj, stream model.
if err != nil {
return err
}
if res.StatusCode != 201 && res.StatusCode != 202 {
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
data, _ := io.ReadAll(res.Body)
res.Body.Close()
return errors.New(string(data))

View File

@ -3,7 +3,6 @@ package pikpak
import (
"context"
"fmt"
"io"
"net/http"
"strings"
@ -11,6 +10,7 @@ import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@ -123,22 +123,20 @@ func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
// cal gcid
sha1Str, err := getGcid(tempFile, stream.GetSize())
if err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
hi := stream.GetHash()
sha1Str := hi.GetHash(hash_extend.GCID)
if len(sha1Str) < hash_extend.GCID.Width {
tFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
sha1Str, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
if err != nil {
return err
}
}
var resp UploadTaskData
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
@ -177,7 +175,7 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
input := &s3manager.UploadInput{
Bucket: &params.Bucket,
Key: &params.Key,
Body: tempFile,
Body: stream,
}
_, err = uploader.UploadWithContext(ctx, input)
return err

View File

@ -5,6 +5,8 @@ import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)
type RespErr struct {
@ -21,7 +23,9 @@ type File struct {
Id string `json:"id"`
Kind string `json:"kind"`
Name string `json:"name"`
CreatedTime time.Time `json:"created_time"`
ModifiedTime time.Time `json:"modified_time"`
Hash string `json:"hash"`
Size string `json:"size"`
ThumbnailLink string `json:"thumbnail_link"`
WebContentLink string `json:"web_content_link"`
@ -35,8 +39,10 @@ func fileToObj(f File) *model.ObjThumb {
ID: f.Id,
Name: f.Name,
Size: size,
Ctime: f.CreatedTime,
Modified: f.ModifiedTime,
IsFolder: f.Kind == "drive#folder",
HashInfo: utils.NewHashInfo(hash_extend.GCID, f.Hash),
},
Thumbnail: model.Thumbnail{
Thumbnail: f.ThumbnailLink,

View File

@ -41,24 +41,24 @@ func (d *Template) Link(ctx context.Context, file model.Obj, args model.LinkArgs
return nil, errs.NotImplement
}
func (d *Template) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
func (d *Template) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
// TODO create folder, optional
return errs.NotImplement
return nil, errs.NotImplement
}
func (d *Template) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
func (d *Template) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// TODO move obj, optional
return errs.NotImplement
return nil, errs.NotImplement
}
func (d *Template) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
func (d *Template) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
// TODO rename obj, optional
return errs.NotImplement
return nil, errs.NotImplement
}
func (d *Template) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
func (d *Template) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// TODO copy obj, optional
return errs.NotImplement
return nil, errs.NotImplement
}
func (d *Template) Remove(ctx context.Context, obj model.Obj) error {
@ -66,9 +66,9 @@ func (d *Template) Remove(ctx context.Context, obj model.Obj) error {
return errs.NotImplement
}
func (d *Template) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
func (d *Template) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// TODO upload file, optional
return errs.NotImplement
return nil, errs.NotImplement
}
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {

View File

@ -6,15 +6,16 @@ import (
"crypto/md5"
"encoding/hex"
"fmt"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
"io"
"math"
stdpath "path"
"strconv"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
)
@ -119,9 +120,6 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
var Default int64 = 4 * 1024 * 1024
defaultByteData := make([]byte, Default)
count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
@ -168,6 +166,9 @@ func (d *Terabox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
return err
}
log.Debugf("%+v", precreateResp)
if precreateResp.Errno != 0 {
return fmt.Errorf("[terabox] failed to precreate file, errno: %s", precreateResp.Errno)
}
if precreateResp.ReturnType == 2 {
return nil
}

View File

@ -8,6 +8,7 @@ import (
type Addition struct {
driver.RootPath
Cookie string `json:"cookie" required:"true"`
JsToken string `json:"js_token" type:"string" required:"true"`
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`

View File

@ -24,10 +24,13 @@ func (d *Terabox) request(furl string, method string, callback base.ReqCallback,
"User-Agent": base.UserAgent,
"X-Requested-With": "XMLHttpRequest",
})
req.SetQueryParam("app_id", "250528")
req.SetQueryParam("web", "1")
req.SetQueryParam("channel", "dubox")
req.SetQueryParam("clienttype", "0")
req.SetQueryParams(map[string]string{
"app_id": "250528",
"web": "1",
"channel": "dubox",
"clienttype": "0",
"jsToken": d.JsToken,
})
if callback != nil {
callback(req)
}

View File

@ -3,7 +3,6 @@ package thunder
import (
"context"
"fmt"
"io"
"net/http"
"strings"
@ -13,6 +12,7 @@ import (
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
@ -332,24 +332,22 @@ func (xc *XunLeiCommon) Remove(ctx context.Context, obj model.Obj) error {
}
func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
hi := stream.GetHash()
gcid := hi.GetHash(hash_extend.GCID)
if len(gcid) < hash_extend.GCID.Width {
tFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
gcid, err := getGcid(tempFile, stream.GetSize())
if err != nil {
return err
}
if _, err := tempFile.Seek(0, io.SeekStart); err != nil {
return err
gcid, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
if err != nil {
return err
}
}
var resp UploadTaskResponse
_, err = xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
_, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
r.SetContext(ctx)
r.SetBody(&base.Json{
"kind": FILE,
@ -379,7 +377,7 @@ func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.
Bucket: aws.String(param.Bucket),
Key: aws.String(param.Key),
Expires: aws.Time(param.Expiration),
Body: tempFile,
Body: stream,
})
return err
}

View File

@ -2,10 +2,12 @@ package thunder
import (
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)
type ErrResp struct {
@ -104,39 +106,39 @@ type Files struct {
ModifiedTime time.Time `json:"modified_time"`
IconLink string `json:"icon_link"`
ThumbnailLink string `json:"thumbnail_link"`
//Md5Checksum string `json:"md5_checksum"`
//Hash string `json:"hash"`
Links map[string]Link `json:"links"`
Phase string `json:"phase"`
Audit struct {
Status string `json:"status"`
Message string `json:"message"`
Title string `json:"title"`
} `json:"audit"`
// Md5Checksum string `json:"md5_checksum"`
Hash string `json:"hash"`
// Links map[string]Link `json:"links"`
// Phase string `json:"phase"`
// Audit struct {
// Status string `json:"status"`
// Message string `json:"message"`
// Title string `json:"title"`
// } `json:"audit"`
Medias []struct {
Category string `json:"category"`
IconLink string `json:"icon_link"`
IsDefault bool `json:"is_default"`
IsOrigin bool `json:"is_origin"`
IsVisible bool `json:"is_visible"`
Link Link `json:"link"`
MediaID string `json:"media_id"`
MediaName string `json:"media_name"`
NeedMoreQuota bool `json:"need_more_quota"`
Priority int `json:"priority"`
RedirectLink string `json:"redirect_link"`
ResolutionName string `json:"resolution_name"`
Video struct {
AudioCodec string `json:"audio_codec"`
BitRate int `json:"bit_rate"`
Duration int `json:"duration"`
FrameRate int `json:"frame_rate"`
Height int `json:"height"`
VideoCodec string `json:"video_codec"`
VideoType string `json:"video_type"`
Width int `json:"width"`
} `json:"video"`
VipTypes []string `json:"vip_types"`
//Category string `json:"category"`
//IconLink string `json:"icon_link"`
//IsDefault bool `json:"is_default"`
//IsOrigin bool `json:"is_origin"`
//IsVisible bool `json:"is_visible"`
Link Link `json:"link"`
//MediaID string `json:"media_id"`
//MediaName string `json:"media_name"`
//NeedMoreQuota bool `json:"need_more_quota"`
//Priority int `json:"priority"`
//RedirectLink string `json:"redirect_link"`
//ResolutionName string `json:"resolution_name"`
// Video struct {
// AudioCodec string `json:"audio_codec"`
// BitRate int `json:"bit_rate"`
// Duration int `json:"duration"`
// FrameRate int `json:"frame_rate"`
// Height int `json:"height"`
// VideoCodec string `json:"video_codec"`
// VideoType string `json:"video_type"`
// Width int `json:"width"`
// } `json:"video"`
// VipTypes []string `json:"vip_types"`
} `json:"medias"`
Trashed bool `json:"trashed"`
DeleteTime string `json:"delete_time"`
@ -150,21 +152,18 @@ type Files struct {
//Collection interface{} `json:"collection"`
}
func (c *Files) CreateTime() time.Time {
return c.CreatedTime
}
func (c *Files) GetHash() utils.HashInfo {
return utils.HashInfo{}
return utils.NewHashInfo(hash_extend.GCID, c.Hash)
}
func (c *Files) GetSize() int64 { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
func (c *Files) GetName() string { return c.Name }
func (c *Files) ModTime() time.Time { return c.ModifiedTime }
func (c *Files) IsDir() bool { return c.Kind == FOLDER }
func (c *Files) GetID() string { return c.ID }
func (c *Files) GetPath() string { return "" }
func (c *Files) Thumb() string { return c.ThumbnailLink }
func (c *Files) GetSize() int64 { size, _ := strconv.ParseInt(c.Size, 10, 64); return size }
func (c *Files) GetName() string { return c.Name }
func (c *Files) CreateTime() time.Time { return c.CreatedTime }
func (c *Files) ModTime() time.Time { return c.ModifiedTime }
func (c *Files) IsDir() bool { return c.Kind == FOLDER }
func (c *Files) GetID() string { return c.ID }
func (c *Files) GetPath() string { return "" }
func (c *Files) Thumb() string { return c.ThumbnailLink }
/*
* upload

View File

@ -34,20 +34,10 @@ func (d *Virtual) GetAddition() driver.Additional {
func (d *Virtual) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var res []model.Obj
for i := 0; i < d.NumFile; i++ {
res = append(res, &model.Object{
Name: random.String(10),
Size: random.RangeInt64(d.MinFileSize, d.MaxFileSize),
IsFolder: false,
Modified: time.Now(),
})
res = append(res, d.genObj(false))
}
for i := 0; i < d.NumFolder; i++ {
res = append(res, &model.Object{
Name: random.String(10),
Size: 0,
IsFolder: true,
Modified: time.Now(),
})
res = append(res, d.genObj(true))
}
return res, nil
}
@ -78,28 +68,45 @@ func (d *Virtual) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
}, nil
}
func (d *Virtual) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return nil
func (d *Virtual) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
dir := &model.Object{
Name: dirName,
Size: 0,
IsFolder: true,
Modified: time.Now(),
}
return dir, nil
}
func (d *Virtual) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return nil
func (d *Virtual) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
return srcObj, nil
}
func (d *Virtual) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return nil
func (d *Virtual) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
obj := &model.Object{
Name: newName,
Size: srcObj.GetSize(),
IsFolder: srcObj.IsDir(),
Modified: time.Now(),
}
return obj, nil
}
func (d *Virtual) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return nil
func (d *Virtual) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
return srcObj, nil
}
func (d *Virtual) Remove(ctx context.Context, obj model.Obj) error {
return nil
}
func (d *Virtual) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return nil
func (d *Virtual) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
file := &model.Object{
Name: stream.GetName(),
Size: stream.GetSize(),
Modified: time.Now(),
}
return file, nil
}
var _ driver.Driver = (*Virtual)(nil)

22
drivers/virtual/util.go Normal file
View File

@ -0,0 +1,22 @@
package virtual
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils/random"
)
func (d *Virtual) genObj(dir bool) model.Obj {
obj := &model.Object{
Name: random.String(10),
Size: 0,
IsFolder: true,
Modified: time.Now(),
}
if !dir {
obj.Size = random.RangeInt64(d.MinFileSize, d.MaxFileSize)
obj.IsFolder = false
}
return obj
}

View File

@ -308,14 +308,14 @@ func (d *WeiYun) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *WeiYun) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// NOTE:
// rapid upload needs the final sha1 state, but sha1 cannot be reversed, so the whole file would have to be read (or maybe it can??)
// the server supports resuming upload progress, so no extra handling is needed
if folder, ok := dstDir.(*Folder); ok {
file, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = file.Close()
}()
// step 1.
preData, err := d.client.PreUpload(ctx, weiyunsdkgo.UpdloadFileParam{
@ -333,7 +333,7 @@ func (d *WeiYun) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return nil, err
}
// fast upload
// not fast upload
if !preData.FileExist {
// step 2: add upload channels
if len(preData.ChannelList) < d.uploadThread {

36
go.mod
View File

@ -10,7 +10,7 @@ require (
github.com/aliyun/aliyun-oss-go-sdk v2.2.7+incompatible
github.com/avast/retry-go v3.0.0+incompatible
github.com/aws/aws-sdk-go v1.44.327
github.com/blevesearch/bleve/v2 v2.3.9
github.com/blevesearch/bleve/v2 v2.3.10
github.com/caarlos0/env/v9 v9.0.0
github.com/charmbracelet/bubbles v0.16.1
github.com/charmbracelet/bubbletea v0.24.2
@ -24,7 +24,7 @@ require (
github.com/foxxorcat/weiyun-sdk-go v0.1.2
github.com/gin-contrib/cors v1.4.0
github.com/gin-gonic/gin v1.9.1
github.com/go-resty/resty/v2 v2.7.0
github.com/go-resty/resty/v2 v2.8.0
github.com/go-webauthn/webauthn v0.8.6
github.com/golang-jwt/jwt/v4 v4.5.0
github.com/google/uuid v1.3.1
@ -37,7 +37,7 @@ require (
github.com/natefinch/lumberjack v2.0.0+incompatible
github.com/orzogc/fake115uploader v0.3.3-0.20230715111618-58f9eb76f831
github.com/pkg/errors v0.9.1
github.com/pkg/sftp v1.13.6-0.20230213180117-971c283182b6
github.com/pkg/sftp v1.13.6
github.com/pquerna/otp v1.4.0
github.com/rclone/rclone v1.63.1
github.com/sirupsen/logrus v1.9.3
@ -47,11 +47,11 @@ require (
github.com/u2takey/ffmpeg-go v0.5.0
github.com/upyun/go-sdk/v3 v3.0.4
github.com/winfsp/cgofuse v1.5.1-0.20230130140708-f87f5db493b5
golang.org/x/crypto v0.12.0
golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63
golang.org/x/crypto v0.13.0
golang.org/x/exp v0.0.0-20230905200255-921286631fa9
golang.org/x/image v0.11.0
golang.org/x/net v0.14.0
golang.org/x/oauth2 v0.10.0
golang.org/x/net v0.15.0
golang.org/x/oauth2 v0.12.0
gorm.io/driver/mysql v1.4.7
gorm.io/driver/postgres v1.4.8
gorm.io/driver/sqlite v1.4.4
@ -70,21 +70,21 @@ require (
github.com/benbjohnson/clock v1.3.0 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.2.0 // indirect
github.com/blevesearch/bleve_index_api v1.0.5 // indirect
github.com/blevesearch/geo v0.1.17 // indirect
github.com/blevesearch/bleve_index_api v1.0.6 // indirect
github.com/blevesearch/geo v0.1.18 // indirect
github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
github.com/blevesearch/gtreap v0.1.1 // indirect
github.com/blevesearch/mmap-go v1.0.4 // indirect
github.com/blevesearch/scorch_segment_api/v2 v2.1.5 // indirect
github.com/blevesearch/scorch_segment_api/v2 v2.1.6 // indirect
github.com/blevesearch/segment v0.9.1 // indirect
github.com/blevesearch/snowballstem v0.9.0 // indirect
github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
github.com/blevesearch/vellum v1.0.10 // indirect
github.com/blevesearch/zapx/v11 v11.3.9 // indirect
github.com/blevesearch/zapx/v12 v12.3.9 // indirect
github.com/blevesearch/zapx/v13 v13.3.9 // indirect
github.com/blevesearch/zapx/v14 v14.3.9 // indirect
github.com/blevesearch/zapx/v15 v15.3.12 // indirect
github.com/blevesearch/zapx/v11 v11.3.10 // indirect
github.com/blevesearch/zapx/v12 v12.3.10 // indirect
github.com/blevesearch/zapx/v13 v13.3.10 // indirect
github.com/blevesearch/zapx/v14 v14.3.10 // indirect
github.com/blevesearch/zapx/v15 v15.3.13 // indirect
github.com/bluele/gcache v0.0.2 // indirect
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect
github.com/bytedance/sonic v1.9.1 // indirect
@ -185,9 +185,9 @@ require (
go.etcd.io/bbolt v1.3.7 // indirect
golang.org/x/arch v0.3.0 // indirect
golang.org/x/sync v0.3.0 // indirect
golang.org/x/sys v0.11.0 // indirect
golang.org/x/term v0.11.0 // indirect
golang.org/x/text v0.12.0 // indirect
golang.org/x/sys v0.12.0 // indirect
golang.org/x/term v0.12.0 // indirect
golang.org/x/text v0.13.0 // indirect
golang.org/x/time v0.3.0 // indirect
google.golang.org/api v0.134.0 // indirect
google.golang.org/appengine v1.6.7 // indirect

39
go.sum
View File

@ -46,10 +46,16 @@ github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjL
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/blevesearch/bleve/v2 v2.3.9 h1:pUMvK0mxAexqasZcVj8lazmWnEW5XiV0tASIqANiNTQ=
github.com/blevesearch/bleve/v2 v2.3.9/go.mod h1:1PibElcjlQMQHF9uS9mRv58ODQgj4pCWHA1Wfd+qagU=
github.com/blevesearch/bleve/v2 v2.3.10 h1:z8V0wwGoL4rp7nG/O3qVVLYxUqCbEwskMt4iRJsPLgg=
github.com/blevesearch/bleve/v2 v2.3.10/go.mod h1:RJzeoeHC+vNHsoLR54+crS1HmOWpnH87fL70HAUCzIA=
github.com/blevesearch/bleve_index_api v1.0.5 h1:Lc986kpC4Z0/n1g3gg8ul7H+lxgOQPcXb9SxvQGu+tw=
github.com/blevesearch/bleve_index_api v1.0.5/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
github.com/blevesearch/bleve_index_api v1.0.6 h1:gyUUxdsrvmW3jVhhYdCVL6h9dCjNT/geNU7PxGn37p8=
github.com/blevesearch/bleve_index_api v1.0.6/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
github.com/blevesearch/geo v0.1.17 h1:AguzI6/5mHXapzB0gE9IKWo+wWPHZmXZoscHcjFgAFA=
github.com/blevesearch/geo v0.1.17/go.mod h1:uRMGWG0HJYfWfFJpK3zTdnnr1K+ksZTuWKhXeSokfnM=
github.com/blevesearch/geo v0.1.18 h1:Np8jycHTZ5scFe7VEPLrDoHnnb9C4j636ue/CGrhtDw=
github.com/blevesearch/geo v0.1.18/go.mod h1:uRMGWG0HJYfWfFJpK3zTdnnr1K+ksZTuWKhXeSokfnM=
github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+DdXTAn4YRcHCo=
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y=
@ -58,6 +64,8 @@ github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCD
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
github.com/blevesearch/scorch_segment_api/v2 v2.1.5 h1:1g713kpCQZ8u4a3stRGBfrwVOuGRnmxOVU5MQkUPrHU=
github.com/blevesearch/scorch_segment_api/v2 v2.1.5/go.mod h1:f2nOkKS1HcjgIWZgDAErgBdxmr2eyt0Kn7IY+FU1Xe4=
github.com/blevesearch/scorch_segment_api/v2 v2.1.6 h1:CdekX/Ob6YCYmeHzD72cKpwzBjvkOGegHOqhAkXp6yA=
github.com/blevesearch/scorch_segment_api/v2 v2.1.6/go.mod h1:nQQYlp51XvoSVxcciBjtvuHPIVjlWrN1hX4qwK2cqdc=
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
@ -68,14 +76,24 @@ github.com/blevesearch/vellum v1.0.10 h1:HGPJDT2bTva12hrHepVT3rOyIKFFF4t7Gf6yMxy
github.com/blevesearch/vellum v1.0.10/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
github.com/blevesearch/zapx/v11 v11.3.9 h1:y3ijS4h4MJdmQ07MHASxat4owAixreK2xdo76w9ncrw=
github.com/blevesearch/zapx/v11 v11.3.9/go.mod h1:jcAYnQwlr+LqD2vLjDWjWiZDXDXGFqPbpPDRTd3XmS4=
github.com/blevesearch/zapx/v11 v11.3.10 h1:hvjgj9tZ9DeIqBCxKhi70TtSZYMdcFn7gDb71Xo/fvk=
github.com/blevesearch/zapx/v11 v11.3.10/go.mod h1:0+gW+FaE48fNxoVtMY5ugtNHHof/PxCqh7CnhYdnMzQ=
github.com/blevesearch/zapx/v12 v12.3.9 h1:MXGLlZ03oxXH3DMJTZaBaRj2xb6t4wQVZeZK/wu1M6w=
github.com/blevesearch/zapx/v12 v12.3.9/go.mod h1:QXCMwmOkdLnMDgTN1P4CcuX5F851iUOtOwXbw0HMBYs=
github.com/blevesearch/zapx/v12 v12.3.10 h1:yHfj3vXLSYmmsBleJFROXuO08mS3L1qDCdDK81jDl8s=
github.com/blevesearch/zapx/v12 v12.3.10/go.mod h1:0yeZg6JhaGxITlsS5co73aqPtM04+ycnI6D1v0mhbCs=
github.com/blevesearch/zapx/v13 v13.3.9 h1:+VAz9V0VmllHXlZV4DCvfYj0nqaZHgF3MeEHwOyRBwQ=
github.com/blevesearch/zapx/v13 v13.3.9/go.mod h1:s+WjNp4WSDtrBVBpa37DUOd7S/Gr/jTZ7ST/MbCVj/0=
github.com/blevesearch/zapx/v13 v13.3.10 h1:0KY9tuxg06rXxOZHg3DwPJBjniSlqEgVpxIqMGahDE8=
github.com/blevesearch/zapx/v13 v13.3.10/go.mod h1:w2wjSDQ/WBVeEIvP0fvMJZAzDwqwIEzVPnCPrz93yAk=
github.com/blevesearch/zapx/v14 v14.3.9 h1:wuqxATgsTCNHM9xsOFOeFp8H2heZ/gMX/tsl9lRK8U4=
github.com/blevesearch/zapx/v14 v14.3.9/go.mod h1:MWZ4v8AzFBRurhDzkLvokFW8ljcq9Evm27mkWe8OGbM=
github.com/blevesearch/zapx/v14 v14.3.10 h1:SG6xlsL+W6YjhX5N3aEiL/2tcWh3DO75Bnz77pSwwKU=
github.com/blevesearch/zapx/v14 v14.3.10/go.mod h1:qqyuR0u230jN1yMmE4FIAuCxmahRQEOehF78m6oTgns=
github.com/blevesearch/zapx/v15 v15.3.12 h1:w/kU9aHyfMDEdwHGZzCiakC3HZ9z5gYlXaALDC4Dct8=
github.com/blevesearch/zapx/v15 v15.3.12/go.mod h1:tx53gDJS/7Oa3Je820cmVurqCuJ4dqdAy1kiDMV/IUo=
github.com/blevesearch/zapx/v15 v15.3.13 h1:6EkfaZiPlAxqXz0neniq35my6S48QI94W/wyhnpDHHQ=
github.com/blevesearch/zapx/v15 v15.3.13/go.mod h1:Turk/TNRKj9es7ZpKK95PS7f6D44Y7fAFy8F4LXQtGg=
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
@ -162,6 +180,8 @@ github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg
github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
github.com/go-resty/resty/v2 v2.8.0 h1:J29d0JFWwSWrDCysnOK/YjsPMLQTx0TvgJEHVGvf2L8=
github.com/go-resty/resty/v2 v2.8.0/go.mod h1:UCui0cMHekLrSntoMyofdSTaPpinlRHFtPpizuyDW2w=
github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc=
github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
github.com/go-webauthn/webauthn v0.8.6 h1:bKMtL1qzd2WTFkf1mFTVbreYrwn7dsYmEPjTq6QN90E=
@ -356,6 +376,8 @@ github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/sftp v1.13.6-0.20230213180117-971c283182b6 h1:5TvW1dv00Y13njmQ1AWkxSWtPkwE7ZEF6yDuv9q+Als=
github.com/pkg/sftp v1.13.6-0.20230213180117-971c283182b6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo=
github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
github.com/pkg/xattr v0.4.9 h1:5883YPCtkSd8LFbs13nXplj9g9tlrwoJRjgpgMu1/fE=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
@ -472,10 +494,14 @@ golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
golang.org/x/crypto v0.12.0 h1:tFM/ta59kqch6LlvYnPa0yx5a83cL2nHflFhYKvv9Yk=
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
golang.org/x/crypto v0.13.0 h1:mvySKfSWJ+UKUii46M40LOvyWfN0s2U+46/jDd0e6Ck=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b h1:r+vk0EmXNmekl0S0BascoeeoHk/L7wmaW2QF90K+kYI=
golang.org/x/exp v0.0.0-20230801115018-d63ba01acd4b/go.mod h1:FXUEEKJgO7OQYeo8N01OfiKP8RXMtf6e8aTskBGqWdc=
golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63 h1:m64FZMko/V45gv0bNmrNYoDEq8U5YUhetc9cBWKS1TQ=
golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63/go.mod h1:0v4NqG35kSWCMzLaMeX+IQrlSnVE/bqGSyC2cz/9Le8=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g=
golang.org/x/exp v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k=
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.10.0 h1:gXjUUtwtx5yOE0VKWq1CH4IJAClq4UGgUA3i+rpON9M=
golang.org/x/image v0.10.0/go.mod h1:jtrku+n79PfroUbvDdeUWMAI+heR786BofxrbiSF+J0=
@ -493,12 +519,17 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.13.0 h1:Nvo8UFsZ8X3BhAC9699Z1j7XQ3rsZnUUm7jfBEk1ueY=
golang.org/x/net v0.13.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
golang.org/x/net v0.14.0/go.mod h1:PpSgVXXLK0OxS0F31C1/tv6XNguvCrnXIDrFMspZIUI=
golang.org/x/net v0.15.0 h1:ugBLEUaxABaB5AJqW9enI0ACdci2RUd4eP51NTBvuJ8=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8=
golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI=
golang.org/x/oauth2 v0.12.0 h1:smVPGxink+n1ZI5pkQa8y6fZT0RW0MgCO5bFpepy4B4=
golang.org/x/oauth2 v0.12.0/go.mod h1:A74bZ3aGXgCY0qaIC9Ahg6Lglin4AMAco8cIv9baba4=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -530,14 +561,19 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.11.0 h1:eG7RXZHdqOJ1i+0lgLgCpSXAp6M3LYlAo6osgSi0xOM=
golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0 h1:CM0HF96J0hcLAwsHPJZjfdNzs0gftsLfgKt57wWHJ0o=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
golang.org/x/term v0.10.0/go.mod h1:lpqdcUyK/oCiQxvxVrppt5ggO2KCZ5QblwqPnfZ6d5o=
golang.org/x/term v0.11.0 h1:F9tnn/DA/Im8nCwm+fX+1/eBwi4qFjRT++MhtVC4ZX0=
golang.org/x/term v0.11.0/go.mod h1:zC9APTIj3jG3FdV/Ons+XE1riIZXG4aZ4GTHiPZJPIU=
golang.org/x/term v0.12.0 h1:/ZfYdc3zq+q02Rv9vGqTeSItdzZTSNDmfTi0mBAuidU=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
@ -545,10 +581,13 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.12.0 h1:k+n5B8goJNdU7hSvEtMUz3d1Q6D/XW4COJSJR6fN0mc=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=


@ -165,6 +165,7 @@ func InitialSettings() []model.SettingItem {
{Key: conf.SSOAutoRegister, Value: "false", Type: conf.TypeBool, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSODefaultDir, Value: "/", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSODefaultPermission, Value: "0", Type: conf.TypeNumber, Group: model.SSO, Flag: model.PRIVATE},
{Key: conf.SSOCompatibilityMode, Value: "false", Type: conf.TypeBool, Group: model.SSO, Flag: model.PUBLIC},
// qbittorrent settings
{Key: conf.QbittorrentUrl, Value: "http://admin:adminadmin@localhost:8080/", Type: conf.TypeString, Group: model.SINGLE, Flag: model.PRIVATE},


@ -41,7 +41,7 @@ const (
OcrApi = "ocr_api"
FilenameCharMapping = "filename_char_mapping"
ForwardDirectLinkParams = "forward_direct_link_params"
WebauthnLoginEnabled = "webauthn_login_enabled"
WebauthnLoginEnabled = "webauthn_login_enabled"
// index
SearchIndex = "search_index"
@ -69,6 +69,7 @@ const (
SSOAutoRegister = "sso_auto_register"
SSODefaultDir = "sso_default_dir"
SSODefaultPermission = "sso_default_permission"
SSOCompatibilityMode = "sso_compatibility_mode"
// qbittorrent
QbittorrentUrl = "qbittorrent_url"
@ -84,3 +85,8 @@ const (
TEXT
IMAGE
)
// ContextKey is the type of context keys.
const (
NoTaskKey = "no_task"
)


@ -31,3 +31,17 @@ func AutoMigrate(dst ...interface{}) error {
func GetDb() *gorm.DB {
return db
}
func Close() {
log.Info("closing db")
sqlDB, err := db.DB()
if err != nil {
log.Errorf("failed to get db: %s", err.Error())
return
}
err = sqlDB.Close()
if err != nil {
log.Errorf("failed to close db: %s", err.Error())
return
}
}


@ -7,6 +7,7 @@ import (
stdpath "path"
"sync/atomic"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
@ -36,6 +37,31 @@ func _copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool
if srcStorage.GetStorage() == dstStorage.GetStorage() {
return false, op.Copy(ctx, srcStorage, srcObjActualPath, dstDirActualPath, lazyCache...)
}
if ctx.Value(conf.NoTaskKey) != nil {
srcObj, err := op.Get(ctx, srcStorage, srcObjActualPath)
if err != nil {
return false, errors.WithMessagef(err, "failed get src [%s] file", srcObjPath)
}
if !srcObj.IsDir() {
// copy file directly
link, _, err := op.Link(ctx, srcStorage, srcObjActualPath, model.LinkArgs{
Header: http.Header{},
})
if err != nil {
return false, errors.WithMessagef(err, "failed get [%s] link", srcObjPath)
}
fs := stream.FileStream{
Obj: srcObj,
Ctx: ctx,
}
// any link provided is seekable
ss, err := stream.NewSeekableStream(fs, link)
if err != nil {
return false, errors.WithMessagef(err, "failed get [%s] stream", srcObjPath)
}
return false, op.Put(ctx, dstStorage, dstDirActualPath, ss, nil, false)
}
}
// not in the same storage
CopyTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
Name: fmt.Sprintf("copy [%s](%s) to [%s](%s)", srcStorage.GetStorage().MountPath, srcObjActualPath, dstStorage.GetStorage().MountPath, dstDirActualPath),


@ -224,9 +224,10 @@ func RequestHttp(ctx context.Context, httpMethod string, headerOverride http.Hea
res.Header.Del("set-cookie")
if res.StatusCode >= 400 {
all, _ := io.ReadAll(res.Body)
_ = res.Body.Close()
msg := string(all)
log.Debugln(msg)
return res, errors.New(msg)
return nil, fmt.Errorf("http request [%s] failure,status: %d response:%s", URL, res.StatusCode, msg)
}
return res, nil
}


@ -84,6 +84,14 @@ func addCacheObj(storage driver.Driver, path string, newObj model.Obj) {
}
func ClearCache(storage driver.Driver, path string) {
objs, ok := listCache.Get(Key(storage, path))
if ok {
for _, obj := range objs {
if obj.IsDir() {
ClearCache(storage, stdpath.Join(path, obj.GetName()))
}
}
}
listCache.Del(Key(storage, path))
}
@ -473,6 +481,10 @@ func Remove(ctx context.Context, storage driver.Driver, path string) error {
err = s.Remove(ctx, model.UnwrapObj(rawObj))
if err == nil {
delCacheObj(storage, dirPath, rawObj)
// clear folder cache recursively
if rawObj.IsDir() {
ClearCache(storage, path)
}
}
default:
return errs.NotImplement


@ -26,6 +26,16 @@ type FileStream struct {
peekBuff *bytes.Reader
}
func (f *FileStream) GetSize() int64 {
if f.tmpFile != nil {
info, err := f.tmpFile.Stat()
if err == nil {
return info.Size()
}
}
return f.Obj.GetSize()
}
func (f *FileStream) GetMimetype() string {
return f.Mimetype
}


@ -43,7 +43,7 @@ func GetRangeReadCloserFromLink(size int64, link *model.Link) (model.RangeReadCl
return nil, fmt.Errorf("http request failure,status: %d err:%s", response.StatusCode, err)
}
if r.Start == 0 && (r.Length == -1 || r.Length == size) || response.StatusCode == http.StatusPartialContent ||
checkContentRange(&response.Header, size, r.Start) {
checkContentRange(&response.Header, r.Start) {
return response.Body, nil
} else if response.StatusCode == http.StatusOK {
log.Warnf("remote http server not supporting range request, expect low perfromace!")
@ -72,12 +72,12 @@ func RequestRangedHttp(ctx context.Context, link *model.Link, offset, length int
}
// 139 cloud does not properly return 206 http status code, add a hack here
func checkContentRange(header *http.Header, size, offset int64) bool {
r, err2 := http_range.ParseRange(header.Get("Content-Range"), size)
if err2 != nil {
log.Warnf("exception trying to parse Content-Range, will ignore,err=%s", err2)
func checkContentRange(header *http.Header, offset int64) bool {
start, _, err := http_range.ParseContentRange(header.Get("Content-Range"))
if err != nil {
log.Warnf("exception trying to parse Content-Range, will ignore,err=%s", err)
}
if len(r) == 1 && r[0].Start == offset {
if start == offset {
return true
}
return false


@ -107,6 +107,27 @@ func ParseRange(s string, size int64) ([]Range, error) { // nolint:gocognit
return ranges, nil
}
// ParseContentRange this function parse content-range in http response
func ParseContentRange(s string) (start, end int64, err error) {
if s == "" {
return 0, 0, ErrInvalid
}
const b = "bytes "
if !strings.HasPrefix(s, b) {
return 0, 0, ErrInvalid
}
p1 := strings.Index(s, "-")
p2 := strings.Index(s, "/")
if p1 < 0 || p2 < 0 {
return 0, 0, ErrInvalid
}
startStr, endStr := textproto.TrimString(s[len(b):p1]), textproto.TrimString(s[p1+1:p2])
start, startErr := strconv.ParseInt(startStr, 10, 64)
end, endErr := strconv.ParseInt(endStr, 10, 64)
return start, end, errors.Join(startErr, endErr)
}
func (r Range) MimeHeader(contentType string, size int64) textproto.MIMEHeader {
return textproto.MIMEHeader{
"Content-Range": {r.ContentRange(size)},


@ -126,7 +126,7 @@ func CreateTempFile(r io.Reader, size int64) (*os.File, error) {
_ = os.Remove(f.Name())
return nil, errs.NewErr(err, "CreateTempFile failed")
}
if size != 0 && readBytes != size {
if size > 0 && readBytes != size {
_ = os.Remove(f.Name())
return nil, errs.NewErr(err, "CreateTempFile failed, incoming stream actual size= %d, expect = %d ", readBytes, size)
}


@ -8,10 +8,11 @@ import (
"encoding/hex"
"encoding/json"
"errors"
"github.com/alist-org/alist/v3/internal/errs"
log "github.com/sirupsen/logrus"
"hash"
"io"
"github.com/alist-org/alist/v3/internal/errs"
log "github.com/sirupsen/logrus"
)
func GetMD5EncodeStr(data string) string {
@ -29,7 +30,7 @@ type HashType struct {
Width int
Name string
Alias string
NewFunc func() hash.Hash
NewFunc func(...any) hash.Hash
}
func (ht *HashType) MarshalJSON() ([]byte, error) {
@ -57,7 +58,10 @@ var (
// RegisterHash adds a new Hash to the list and returns its Type
func RegisterHash(name, alias string, width int, newFunc func() hash.Hash) *HashType {
return RegisterHashWithParam(name, alias, width, func(a ...any) hash.Hash { return newFunc() })
}
func RegisterHashWithParam(name, alias string, width int, newFunc func(...any) hash.Hash) *HashType {
newType := &HashType{
Name: name,
Alias: alias,
@ -83,15 +87,15 @@ var (
)
// HashData get hash of one hashType
func HashData(hashType *HashType, data []byte) string {
h := hashType.NewFunc()
func HashData(hashType *HashType, data []byte, params ...any) string {
h := hashType.NewFunc(params...)
h.Write(data)
return hex.EncodeToString(h.Sum(nil))
}
// HashReader get hash of one hashType from a reader
func HashReader(hashType *HashType, reader io.Reader) (string, error) {
h := hashType.NewFunc()
func HashReader(hashType *HashType, reader io.Reader, params ...any) (string, error) {
h := hashType.NewFunc(params...)
_, err := io.Copy(h, reader)
if err != nil {
return "", errs.NewErr(err, "HashReader error")
@ -100,8 +104,8 @@ func HashReader(hashType *HashType, reader io.Reader) (string, error) {
}
// HashFile get hash of one hashType from a model.File
func HashFile(hashType *HashType, file io.ReadSeeker) (string, error) {
str, err := HashReader(hashType, file)
func HashFile(hashType *HashType, file io.ReadSeeker, params ...any) (string, error) {
str, err := HashReader(hashType, file, params...)
if err != nil {
return "", err
}
@ -214,3 +218,7 @@ func FromString(str string) HashInfo {
func (hi HashInfo) GetHash(ht *HashType) string {
return hi.h[ht]
}
func (hi HashInfo) Export() map[*HashType]string {
return hi.h
}

pkg/utils/hash/gcid.go (new file)

@ -0,0 +1,96 @@
package hash_extend
import (
"crypto/sha1"
"encoding"
"fmt"
"hash"
"strconv"
"github.com/alist-org/alist/v3/pkg/utils"
)
var GCID = utils.RegisterHashWithParam("gcid", "GCID", 40, func(a ...any) hash.Hash {
var (
size int64
err error
)
if len(a) > 0 {
size, err = strconv.ParseInt(fmt.Sprint(a[0]), 10, 64)
if err != nil {
panic(err)
}
}
return NewGcid(size)
})
func NewGcid(size int64) hash.Hash {
calcBlockSize := func(j int64) int64 {
var psize int64 = 0x40000
for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
psize = psize << 1
}
return psize
}
return &gcid{
hash: sha1.New(),
hashState: sha1.New(),
blockSize: int(calcBlockSize(size)),
}
}
type gcid struct {
hash hash.Hash
hashState hash.Hash
blockSize int
offset int
}
func (h *gcid) Write(p []byte) (n int, err error) {
n = len(p)
for len(p) > 0 {
if h.offset < h.blockSize {
var lastSize = h.blockSize - h.offset
if lastSize > len(p) {
lastSize = len(p)
}
h.hashState.Write(p[:lastSize])
h.offset += lastSize
p = p[lastSize:]
}
if h.offset >= h.blockSize {
h.hash.Write(h.hashState.Sum(nil))
h.hashState.Reset()
h.offset = 0
}
}
return
}
func (h *gcid) Sum(b []byte) []byte {
if hashm, ok := h.hash.(encoding.BinaryMarshaler); ok {
if hashum, ok := h.hash.(encoding.BinaryUnmarshaler); ok {
tempData, _ := hashm.MarshalBinary()
h.hash.Write(h.hashState.Sum(nil))
defer hashum.UnmarshalBinary(tempData)
}
}
return h.hash.Sum(b)
}
func (h *gcid) Reset() {
h.hash.Reset()
h.hashState.Reset()
}
func (h *gcid) Size() int {
return h.hash.Size()
}
func (h *gcid) BlockSize() int {
return h.blockSize
}
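
A minimal sketch of computing a GCID through the registered hash type; the import paths and passing the total size as the variadic parameter are assumptions based on RegisterHashWithParam and HashReader above:

package main

import (
    "bytes"
    "fmt"

    "github.com/alist-org/alist/v3/pkg/utils"
    hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
)

func main() {
    data := []byte("example payload")
    // The GCID block size depends on the total stream size, so the size is
    // forwarded through HashReader's variadic params down to NewGcid.
    gcid, err := utils.HashReader(hash_extend.GCID, bytes.NewReader(data), int64(len(data)))
    if err != nil {
        fmt.Println("hash error:", err)
        return
    }
    fmt.Println("gcid:", gcid)
}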


@ -5,8 +5,10 @@ import (
"time"
)
var CNLoc = time.FixedZone("UTC", 8*60*60)
func MustParseCNTime(str string) time.Time {
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", time.Local)
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", CNLoc)
return lastOpTime
}
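
A quick standard-library check of the parse above, with illustrative values: with the zone pinned to UTC+8, "2023-09-22 16:54:16" maps to 08:54:16 UTC.

package main

import (
    "fmt"
    "time"
)

func main() {
    cn := time.FixedZone("UTC", 8*60*60)
    t, err := time.ParseInLocation("2006-01-02 15:04:05 -07", "2023-09-22 16:54:16 +08", cn)
    if err != nil {
        fmt.Println("parse error:", err)
        return
    }
    fmt.Println(t.UTC()) // 2023-09-22 08:54:16 +0000 UTC
}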


@ -1,12 +1,20 @@
package server
import (
"net/http"
_ "net/http/pprof"
"runtime"
"github.com/alist-org/alist/v3/server/common"
"github.com/alist-org/alist/v3/server/middlewares"
"github.com/gin-gonic/gin"
)
func dev(g *gin.RouterGroup) {
func _pprof(g *gin.RouterGroup) {
g.Any("/*name", gin.WrapH(http.DefaultServeMux))
}
func debug(g *gin.RouterGroup) {
g.GET("/path/*path", middlewares.Down, func(ctx *gin.Context) {
rawPath := ctx.MustGet("path").(string)
ctx.JSON(200, gin.H{
@ -16,4 +24,9 @@ func dev(g *gin.RouterGroup) {
g.GET("/hide_privacy", func(ctx *gin.Context) {
common.ErrorStrResp(ctx, "This is ip: 1.1.1.1", 400)
})
g.GET("/gc", func(c *gin.Context) {
runtime.GC()
c.String(http.StatusOK, "ok")
})
_pprof(g.Group("/pprof"))
}


@ -33,15 +33,16 @@ type DirReq struct {
}
type ObjResp struct {
Name string `json:"name"`
Size int64 `json:"size"`
IsDir bool `json:"is_dir"`
Modified time.Time `json:"modified"`
Created time.Time `json:"created"`
Sign string `json:"sign"`
Thumb string `json:"thumb"`
Type int `json:"type"`
HashInfo string `json:"hashinfo"`
Name string `json:"name"`
Size int64 `json:"size"`
IsDir bool `json:"is_dir"`
Modified time.Time `json:"modified"`
Created time.Time `json:"created"`
Sign string `json:"sign"`
Thumb string `json:"thumb"`
Type int `json:"type"`
HashInfoStr string `json:"hashinfo"`
HashInfo map[*utils.HashType]string `json:"hash_info"`
}
type FsListResp struct {
@ -200,15 +201,16 @@ func toObjsResp(objs []model.Obj, parent string, encrypt bool) []ObjResp {
for _, obj := range objs {
thumb, _ := model.GetThumb(obj)
resp = append(resp, ObjResp{
Name: obj.GetName(),
Size: obj.GetSize(),
IsDir: obj.IsDir(),
Modified: obj.ModTime(),
Created: obj.CreateTime(),
HashInfo: obj.GetHash().String(),
Sign: common.Sign(obj, parent, encrypt),
Thumb: thumb,
Type: utils.GetObjType(obj.GetName(), obj.IsDir()),
Name: obj.GetName(),
Size: obj.GetSize(),
IsDir: obj.IsDir(),
Modified: obj.ModTime(),
Created: obj.CreateTime(),
HashInfoStr: obj.GetHash().String(),
HashInfo: obj.GetHash().Export(),
Sign: common.Sign(obj, parent, encrypt),
Thumb: thumb,
Type: utils.GetObjType(obj.GetName(), obj.IsDir()),
})
}
return resp
@ -313,15 +315,16 @@ func FsGet(c *gin.Context) {
thumb, _ := model.GetThumb(obj)
common.SuccessResp(c, FsGetResp{
ObjResp: ObjResp{
Name: obj.GetName(),
Size: obj.GetSize(),
IsDir: obj.IsDir(),
Modified: obj.ModTime(),
Created: obj.CreateTime(),
HashInfo: obj.GetHash().String(),
Sign: common.Sign(obj, parentPath, isEncrypt(meta, reqPath)),
Type: utils.GetFileType(obj.GetName()),
Thumb: thumb,
Name: obj.GetName(),
Size: obj.GetSize(),
IsDir: obj.IsDir(),
Modified: obj.ModTime(),
Created: obj.CreateTime(),
HashInfoStr: obj.GetHash().String(),
HashInfo: obj.GetHash().Export(),
Sign: common.Sign(obj, parentPath, isEncrypt(meta, reqPath)),
Type: utils.GetFileType(obj.GetName()),
Thumb: thumb,
},
RawURL: rawURL,
Readme: getReadme(meta, reqPath),


@ -1,13 +1,9 @@
package handles
import (
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/pkg/utils/random"
"io"
"net/url"
"os"
stdpath "path"
"reflect"
"strconv"
"time"
@ -102,28 +98,12 @@ func FsForm(c *gin.Context) {
common.ErrorResp(c, err, 500)
return
}
tmpFile, tmpInSys := "", ""
fv := reflect.ValueOf(*file)
tmpInSys = fv.FieldByName("tmpfile").String()
var f io.Reader
var osFile *os.File
if len(tmpInSys) > 0 {
tmpFile = conf.Conf.TempDir + "file-" + random.String(8)
err = os.Rename(tmpInSys, tmpFile)
if err != nil {
common.ErrorResp(c, err, 500)
return
}
osFile, err = os.Open(tmpFile)
f = osFile
} else {
f, err = file.Open()
}
f, err := file.Open()
if err != nil {
common.ErrorResp(c, err, 500)
return
}
defer f.Close()
dir, name := stdpath.Split(path)
s := stream.FileStream{
Obj: &model.Object{
@ -135,21 +115,19 @@ func FsForm(c *gin.Context) {
Mimetype: file.Header.Get("Content-Type"),
WebPutAsTask: asTask,
}
ss, err := stream.NewSeekableStream(s, nil)
if err != nil {
common.ErrorResp(c, err, 500)
return
}
if osFile != nil {
ss.SetTmpFile(osFile)
}
if asTask {
err = fs.PutAsTask(dir, ss)
s.Reader = struct {
io.Reader
}{f}
err = fs.PutAsTask(dir, &s)
} else {
ss, err := stream.NewSeekableStream(s, nil)
if err != nil {
common.ErrorResp(c, err, 500)
return
}
err = fs.PutDirectly(c, dir, ss, true)
}
if err != nil {
common.ErrorResp(c, err, 500)
return


@ -6,6 +6,7 @@ import (
"fmt"
"net/http"
"net/url"
"path"
"strings"
"time"
@ -36,71 +37,85 @@ var opts = totp.ValidateOpts{
func SSOLoginRedirect(c *gin.Context) {
method := c.Query("method")
usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
enabled := setting.GetBool(conf.SSOLoginEnabled)
clientId := setting.GetStr(conf.SSOClientId)
platform := setting.GetStr(conf.SSOLoginPlatform)
var r_url string
var redirect_uri string
if enabled {
urlValues := url.Values{}
if method == "" {
common.ErrorStrResp(c, "no method provided", 400)
return
}
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + method
urlValues.Add("response_type", "code")
urlValues.Add("redirect_uri", redirect_uri)
urlValues.Add("client_id", clientId)
switch platform {
case "Github":
r_url = "https://github.com/login/oauth/authorize?"
urlValues.Add("scope", "read:user")
case "Microsoft":
r_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize?"
urlValues.Add("scope", "user.read")
urlValues.Add("response_mode", "query")
case "Google":
r_url = "https://accounts.google.com/o/oauth2/v2/auth?"
urlValues.Add("scope", "https://www.googleapis.com/auth/userinfo.profile")
case "Dingtalk":
r_url = "https://login.dingtalk.com/oauth2/auth?"
urlValues.Add("scope", "openid")
urlValues.Add("prompt", "consent")
urlValues.Add("response_type", "code")
case "Casdoor":
endpoint := strings.TrimSuffix(setting.GetStr(conf.SSOEndpointName), "/")
r_url = endpoint + "/login/oauth/authorize?"
urlValues.Add("scope", "profile")
urlValues.Add("state", endpoint)
case "OIDC":
oauth2Config, err := GetOIDCClient(c)
if err != nil {
common.ErrorStrResp(c, err.Error(), 400)
return
}
// generate state parameter
state, err := totp.GenerateCodeCustom(base32.StdEncoding.EncodeToString([]byte(oauth2Config.ClientSecret)), time.Now(), opts)
if err != nil {
common.ErrorStrResp(c, err.Error(), 400)
return
}
c.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
return
default:
common.ErrorStrResp(c, "invalid platform", 400)
return
}
c.Redirect(302, r_url+urlValues.Encode())
} else {
if !enabled {
common.ErrorStrResp(c, "Single sign-on is not enabled", 403)
return
}
urlValues := url.Values{}
if method == "" {
common.ErrorStrResp(c, "no method provided", 400)
return
}
if usecompatibility {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/" + method
} else {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + method
}
urlValues.Add("response_type", "code")
urlValues.Add("redirect_uri", redirect_uri)
urlValues.Add("client_id", clientId)
switch platform {
case "Github":
r_url = "https://github.com/login/oauth/authorize?"
urlValues.Add("scope", "read:user")
case "Microsoft":
r_url = "https://login.microsoftonline.com/common/oauth2/v2.0/authorize?"
urlValues.Add("scope", "user.read")
urlValues.Add("response_mode", "query")
case "Google":
r_url = "https://accounts.google.com/o/oauth2/v2/auth?"
urlValues.Add("scope", "https://www.googleapis.com/auth/userinfo.profile")
case "Dingtalk":
r_url = "https://login.dingtalk.com/oauth2/auth?"
urlValues.Add("scope", "openid")
urlValues.Add("prompt", "consent")
urlValues.Add("response_type", "code")
case "Casdoor":
endpoint := strings.TrimSuffix(setting.GetStr(conf.SSOEndpointName), "/")
r_url = endpoint + "/login/oauth/authorize?"
urlValues.Add("scope", "profile")
urlValues.Add("state", endpoint)
case "OIDC":
oauth2Config, err := GetOIDCClient(c)
if err != nil {
common.ErrorStrResp(c, err.Error(), 400)
return
}
// generate state parameter
state, err := totp.GenerateCodeCustom(base32.StdEncoding.EncodeToString([]byte(oauth2Config.ClientSecret)), time.Now(), opts)
if err != nil {
common.ErrorStrResp(c, err.Error(), 400)
return
}
c.Redirect(http.StatusFound, oauth2Config.AuthCodeURL(state))
return
default:
common.ErrorStrResp(c, "invalid platform", 400)
return
}
c.Redirect(302, r_url+urlValues.Encode())
}
var ssoClient = resty.New().SetRetryCount(3)
func GetOIDCClient(c *gin.Context) (*oauth2.Config, error) {
var redirect_uri string
usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
argument := c.Query("method")
redirect_uri := common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + argument
if usecompatibility {
argument = path.Base(c.Request.URL.Path)
}
if usecompatibility {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/" + argument
} else {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + argument
}
endpoint := setting.GetStr(conf.SSOEndpointName)
provider, err := oidc.NewProvider(c, endpoint)
if err != nil {
@ -152,7 +167,11 @@ func autoRegister(username, userID string, err error) (*model.User, error) {
}
func OIDCLoginCallback(c *gin.Context) {
usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
argument := c.Query("method")
if usecompatibility {
argument = path.Base(c.Request.URL.Path)
}
clientId := setting.GetStr(conf.SSOClientId)
endpoint := setting.GetStr(conf.SSOEndpointName)
provider, err := oidc.NewProvider(c, endpoint)
@ -204,6 +223,10 @@ func OIDCLoginCallback(c *gin.Context) {
}
UserID := claims.Name
if argument == "get_sso_id" {
if usecompatibility {
c.Redirect(302, common.GetApiUrl(c.Request)+"/@manage?sso_id="+UserID)
return
}
html := fmt.Sprintf(`<!DOCTYPE html>
<head></head>
<body>
@ -227,6 +250,10 @@ func OIDCLoginCallback(c *gin.Context) {
if err != nil {
common.ErrorResp(c, err, 400)
}
if usecompatibility {
c.Redirect(302, common.GetApiUrl(c.Request)+"/@login?token="+token)
return
}
html := fmt.Sprintf(`<!DOCTYPE html>
<head></head>
<body>
@ -242,12 +269,18 @@ func OIDCLoginCallback(c *gin.Context) {
func SSOLoginCallback(c *gin.Context) {
enabled := setting.GetBool(conf.SSOLoginEnabled)
usecompatibility := setting.GetBool(conf.SSOCompatibilityMode)
if !enabled {
common.ErrorResp(c, errors.New("sso login is disabled"), 500)
return
}
argument := c.Query("method")
if usecompatibility {
argument = path.Base(c.Request.URL.Path)
}
if !utils.SliceContains([]string{"get_sso_id", "sso_get_token"}, argument) {
common.ErrorResp(c, errors.New("invalid request"), 500)
return
}
clientId := setting.GetStr(conf.SSOClientId)
platform := setting.GetStr(conf.SSOLoginPlatform)
@ -317,12 +350,18 @@ func SSOLoginCallback(c *gin.Context) {
}).
Post(tokenUrl)
} else {
var redirect_uri string
if usecompatibility {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/" + argument
} else {
redirect_uri = common.GetApiUrl(c.Request) + "/api/auth/sso_callback" + "?method=" + argument
}
resp, err = ssoClient.R().SetHeader("Accept", "application/json").
SetFormData(map[string]string{
"client_id": clientId,
"client_secret": clientSecret,
"code": callbackCode,
"redirect_uri": common.GetApiUrl(c.Request) + "/api/auth/sso_callback?method=" + argument,
"redirect_uri": redirect_uri,
"scope": scope,
}).SetFormData(additionalForm).Post(tokenUrl)
}
@ -349,6 +388,10 @@ func SSOLoginCallback(c *gin.Context) {
return
}
if argument == "get_sso_id" {
if usecompatibility {
c.Redirect(302, common.GetApiUrl(c.Request)+"/@manage?sso_id="+userID)
return
}
html := fmt.Sprintf(`<!DOCTYPE html>
<head></head>
<body>
@ -373,6 +416,10 @@ func SSOLoginCallback(c *gin.Context) {
if err != nil {
common.ErrorResp(c, err, 400)
}
if usecompatibility {
c.Redirect(302, common.GetApiUrl(c.Request)+"/@login?token="+token)
return
}
html := fmt.Sprintf(`<!DOCTYPE html>
<head></head>
<body>


@ -22,7 +22,7 @@ func Init(e *gin.Engine) {
Cors(e)
g := e.Group(conf.URL.Path)
if conf.Conf.Scheme.HttpPort != -1 && conf.Conf.Scheme.HttpsPort != -1 && conf.Conf.Scheme.ForceHttps {
g.Use(middlewares.ForceHttps)
e.Use(middlewares.ForceHttps)
}
g.Any("/ping", func(c *gin.Context) {
c.String(200, "pong")
@ -56,6 +56,8 @@ func Init(e *gin.Engine) {
// auth
api.GET("/auth/sso", handles.SSOLoginRedirect)
api.GET("/auth/sso_callback", handles.SSOLoginCallback)
api.GET("/auth/get_sso_id", handles.SSOLoginCallback)
api.GET("/auth/sso_get_token", handles.SSOLoginCallback)
//webauthn
webauthn.GET("/webauthn_begin_registration", handles.BeginAuthnRegistration)
@ -71,8 +73,8 @@ func Init(e *gin.Engine) {
_fs(auth.Group("/fs"))
admin(auth.Group("/admin", middlewares.AuthAdmin))
if flags.Dev {
dev(g.Group("/dev"))
if flags.Debug || flags.Dev {
debug(g.Group("/debug"))
}
static.Static(g, func(handlers ...gin.HandlerFunc) {
e.NoRoute(handlers...)


@ -5,10 +5,8 @@ import (
"fmt"
"io/fs"
"net/http"
"net/http/pprof"
"strings"
"github.com/alist-org/alist/v3/cmd/flags"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
@ -85,8 +83,6 @@ func Static(r *gin.RouterGroup, noRoute func(handlers ...gin.HandlerFunc)) {
c.Status(200)
if strings.HasPrefix(c.Request.URL.Path, "/@manage") {
_, _ = c.Writer.WriteString(conf.ManageHtml)
} else if strings.HasPrefix(c.Request.URL.Path, "/debug/pprof") && flags.Debug {
pprof.Index(c.Writer, c.Request)
} else {
_, _ = c.Writer.WriteString(conf.IndexHtml)
}


@ -51,6 +51,7 @@ func WebDAVAuth(c *gin.Context) {
username, password, ok := c.Request.BasicAuth()
if !ok {
bt := c.GetHeader("Authorization")
log.Debugf("[webdav auth] token: %s", bt)
if strings.HasPrefix(bt, "Bearer") {
bt = strings.TrimPrefix(bt, "Bearer ")
token := setting.GetStr(conf.Token)


@ -10,6 +10,7 @@ import (
"path"
"path/filepath"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
@ -54,7 +55,8 @@ func moveFiles(ctx context.Context, src, dst string, overwrite bool) (status int
//
// See section 9.8.5 for when various HTTP status codes apply.
func copyFiles(ctx context.Context, src, dst string, overwrite bool) (status int, err error) {
_, err = fs.Copy(ctx, src, dst)
dstDir := path.Dir(dst)
_, err = fs.Copy(context.WithValue(ctx, conf.NoTaskKey, struct{}{}), src, dstDir)
if err != nil {
return http.StatusInternalServerError, err
}
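
The mechanism reduced to its core: WebDAV COPY stores a marker value in the request context, and fs.Copy checks for it to copy synchronously instead of submitting a task. The sketch below mirrors conf.NoTaskKey with a local constant so it stays self-contained; the function names are illustrative.

package main

import (
    "context"
    "fmt"
)

// noTaskKey mirrors conf.NoTaskKey ("no_task") from the changes above.
const noTaskKey = "no_task"

func copyObj(ctx context.Context) {
    if ctx.Value(noTaskKey) != nil {
        fmt.Println("copy directly, no background task")
        return
    }
    fmt.Println("submit copy task")
}

func main() {
    copyObj(context.Background())                                           // submit copy task
    copyObj(context.WithValue(context.Background(), noTaskKey, struct{}{})) // copy directly
}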


@ -460,7 +460,7 @@ type ETager interface {
func findETag(ctx context.Context, ls LockSystem, name string, fi model.Obj) (string, error) {
if do, ok := fi.(ETager); ok {
etag, err := do.ETag(ctx)
if err != ErrNotImplemented {
if !errors.Is(err, ErrNotImplemented) {
return etag, err
}
}


@ -8,7 +8,6 @@ package webdav // import "golang.org/x/net/webdav"
import (
"errors"
"fmt"
"github.com/alist-org/alist/v3/internal/stream"
"net/http"
"net/url"
"os"
@ -16,6 +15,8 @@ import (
"strings"
"time"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
@ -219,20 +220,24 @@ func (h *Handler) handleGetHeadPost(w http.ResponseWriter, r *http.Request) (sta
user := ctx.Value("user").(*model.User)
reqPath, err = user.JoinPath(reqPath)
if err != nil {
return 403, err
return http.StatusForbidden, err
}
fi, err := fs.Get(ctx, reqPath, &fs.GetArgs{})
if err != nil {
return http.StatusNotFound, err
}
if fi.IsDir() {
return http.StatusMethodNotAllowed, nil
}
etag, err := findETag(ctx, h.LockSystem, reqPath, fi)
if err != nil {
return http.StatusInternalServerError, err
}
w.Header().Set("ETag", etag)
if r.Method == http.MethodHead {
w.Header().Set("Content-Length", fmt.Sprintf("%d", fi.GetSize()))
return http.StatusOK, nil
}
if fi.IsDir() {
return http.StatusMethodNotAllowed, nil
}
// Let ServeContent determine the Content-Type header.
storage, _ := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
downProxyUrl := storage.GetStorage().DownProxyUrl