feat(archive): archive manage (#7817)
* feat(archive): archive management
* fix(ftp-server): remove duplicate ReadAtSeeker implementation
* fix(archive): bad seeking of SeekableStream
* fix(archive): split internal and driver extraction api
* feat(archive): patch
* fix(shutdown): clear decompress upload tasks
* chore
* feat(archive): support .iso format
* chore
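For orientation, here is a minimal client-side sketch of calling the archive metadata endpoint this commit adds under /api/fs/archive (see the handler and router hunks below). The base URL, port, token header scheme, and the sample archive path are placeholders, not part of the commit; only the route and the ArchiveMetaReq field names come from the diff.

// Sketch only: queries archive metadata via the new /api/fs/archive/meta route.
// Base URL, token, and the sample path are hypothetical.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body, _ := json.Marshal(map[string]any{
		"path":         "/local/books.zip", // archive inside a mounted storage (placeholder)
		"password":     "",                 // folder password, if the path is protected
		"archive_pass": "",                 // password of the archive itself
		"refresh":      false,
	})
	req, _ := http.NewRequest("POST", "http://localhost:5244/api/fs/archive/meta", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "<token>") // assumed auth scheme for the /api group
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	// On success the data carries comment, encrypted, content tree, raw_url and sign,
	// matching ArchiveMetaResp below.
	fmt.Println(resp.Status)
}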
@@ -8,10 +8,8 @@ import (
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/alist-org/alist/v3/pkg/http_range"
	"github.com/alist-org/alist/v3/server/common"
	"github.com/pkg/errors"
	"io"
	fs2 "io/fs"
	"net/http"
	"os"
@@ -20,9 +18,7 @@ import (

type FileDownloadProxy struct {
	ftpserver.FileTransfer
-	ss     *stream.SeekableStream
-	reader io.Reader
-	cur    int64
+	reader stream.SStreamReadAtSeeker
}

func OpenDownload(ctx context.Context, reqPath string, offset int64) (*FileDownloadProxy, error) {
@@ -55,22 +51,16 @@ func OpenDownload(ctx context.Context, reqPath string, offset int64) (*FileDownl
	if err != nil {
		return nil, err
	}
-	var reader io.Reader
-	if offset != 0 {
-		reader, err = ss.RangeRead(http_range.Range{Start: offset, Length: -1})
-		if err != nil {
-			return nil, err
-		}
-	} else {
-		reader = ss
+	reader, err := stream.NewReadAtSeeker(ss, offset)
+	if err != nil {
+		_ = ss.Close()
+		return nil, err
	}
-	return &FileDownloadProxy{ss: ss, reader: reader}, nil
+	return &FileDownloadProxy{reader: reader}, nil
}

func (f *FileDownloadProxy) Read(p []byte) (n int, err error) {
-	n, err = f.reader.Read(p)
-	f.cur += int64(n)
-	return n, err
+	return f.reader.Read(p)
}

func (f *FileDownloadProxy) Write(p []byte) (n int, err error) {
@@ -78,32 +68,11 @@ func (f *FileDownloadProxy) Write(p []byte) (n int, err error) {
}

func (f *FileDownloadProxy) Seek(offset int64, whence int) (int64, error) {
-	switch whence {
-	case io.SeekStart:
-		break
-	case io.SeekCurrent:
-		offset += f.cur
-		break
-	case io.SeekEnd:
-		offset += f.ss.GetSize()
-		break
-	default:
-		return 0, errs.NotSupport
-	}
-	if offset < 0 {
-		return 0, errors.New("Seek: negative position")
-	}
-	reader, err := f.ss.RangeRead(http_range.Range{Start: offset, Length: -1})
-	if err != nil {
-		return f.cur, err
-	}
-	f.cur = offset
-	f.reader = reader
-	return offset, nil
+	return f.reader.Seek(offset, whence)
}

func (f *FileDownloadProxy) Close() error {
-	return f.ss.Close()
+	return f.reader.Close()
}

type OsFileInfoAdapter struct {
server/handles/archive.go (new file, 381 lines)
@@ -0,0 +1,381 @@
package handles

import (
	"fmt"
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/conf"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/fs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/alist-org/alist/v3/internal/setting"
	"github.com/alist-org/alist/v3/internal/sign"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/alist-org/alist/v3/server/common"
	"github.com/gin-gonic/gin"
	"github.com/pkg/errors"
	log "github.com/sirupsen/logrus"
	"mime"
	stdpath "path"
	"strings"
)

type ArchiveMetaReq struct {
	Path        string `json:"path" form:"path"`
	Password    string `json:"password" form:"password"`
	Refresh     bool   `json:"refresh" form:"refresh"`
	ArchivePass string `json:"archive_pass" form:"archive_pass"`
}

type ArchiveMetaResp struct {
	Comment     string               `json:"comment"`
	IsEncrypted bool                 `json:"encrypted"`
	Content     []ArchiveContentResp `json:"content"`
	RawURL      string               `json:"raw_url"`
	Sign        string               `json:"sign"`
}

type ArchiveContentResp struct {
	ObjResp
	Children []ArchiveContentResp `json:"children,omitempty"`
}

func toObjsRespWithoutSignAndThumb(obj model.Obj) ObjResp {
	return ObjResp{
		Name:        obj.GetName(),
		Size:        obj.GetSize(),
		IsDir:       obj.IsDir(),
		Modified:    obj.ModTime(),
		Created:     obj.CreateTime(),
		HashInfoStr: obj.GetHash().String(),
		HashInfo:    obj.GetHash().Export(),
		Sign:        "",
		Thumb:       "",
		Type:        utils.GetObjType(obj.GetName(), obj.IsDir()),
	}
}

func toContentResp(objs []model.ObjTree) []ArchiveContentResp {
	if objs == nil {
		return nil
	}
	ret, _ := utils.SliceConvert(objs, func(src model.ObjTree) (ArchiveContentResp, error) {
		return ArchiveContentResp{
			ObjResp:  toObjsRespWithoutSignAndThumb(src),
			Children: toContentResp(src.GetChildren()),
		}, nil
	})
	return ret
}

func FsArchiveMeta(c *gin.Context) {
	var req ArchiveMetaReq
	if err := c.ShouldBind(&req); err != nil {
		common.ErrorResp(c, err, 400)
		return
	}
	user := c.MustGet("user").(*model.User)
	if !user.CanReadArchives() {
		common.ErrorResp(c, errs.PermissionDenied, 403)
		return
	}
	reqPath, err := user.JoinPath(req.Path)
	if err != nil {
		common.ErrorResp(c, err, 403)
		return
	}
	meta, err := op.GetNearestMeta(reqPath)
	if err != nil {
		if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
			common.ErrorResp(c, err, 500, true)
			return
		}
	}
	c.Set("meta", meta)
	if !common.CanAccess(user, meta, reqPath, req.Password) {
		common.ErrorStrResp(c, "password is incorrect or you have no permission", 403)
		return
	}
	archiveArgs := model.ArchiveArgs{
		LinkArgs: model.LinkArgs{
			Header:  c.Request.Header,
			Type:    c.Query("type"),
			HttpReq: c.Request,
		},
		Password: req.ArchivePass,
	}
	ret, err := fs.ArchiveMeta(c, reqPath, model.ArchiveMetaArgs{
		ArchiveArgs: archiveArgs,
		Refresh:     req.Refresh,
	})
	if err != nil {
		if errors.Is(err, errs.WrongArchivePassword) {
			common.ErrorResp(c, err, 202)
		} else {
			common.ErrorResp(c, err, 500)
		}
		return
	}
	s := ""
	if isEncrypt(meta, reqPath) || setting.GetBool(conf.SignAll) {
		s = sign.Sign(reqPath)
	}
	api := "/ae"
	if ret.DriverProviding {
		api = "/ad"
	}
	common.SuccessResp(c, ArchiveMetaResp{
		Comment:     ret.GetComment(),
		IsEncrypted: ret.IsEncrypted(),
		Content:     toContentResp(ret.GetTree()),
		RawURL:      fmt.Sprintf("%s%s%s", common.GetApiUrl(c.Request), api, utils.EncodePath(reqPath, true)),
		Sign:        s,
	})
}

type ArchiveListReq struct {
	ArchiveMetaReq
	model.PageReq
	InnerPath string `json:"inner_path" form:"inner_path"`
}

type ArchiveListResp struct {
	Content []ObjResp `json:"content"`
	Total   int64     `json:"total"`
}

func FsArchiveList(c *gin.Context) {
	var req ArchiveListReq
	if err := c.ShouldBind(&req); err != nil {
		common.ErrorResp(c, err, 400)
		return
	}
	req.Validate()
	user := c.MustGet("user").(*model.User)
	if !user.CanReadArchives() {
		common.ErrorResp(c, errs.PermissionDenied, 403)
		return
	}
	reqPath, err := user.JoinPath(req.Path)
	if err != nil {
		common.ErrorResp(c, err, 403)
		return
	}
	meta, err := op.GetNearestMeta(reqPath)
	if err != nil {
		if !errors.Is(errors.Cause(err), errs.MetaNotFound) {
			common.ErrorResp(c, err, 500, true)
			return
		}
	}
	c.Set("meta", meta)
	if !common.CanAccess(user, meta, reqPath, req.Password) {
		common.ErrorStrResp(c, "password is incorrect or you have no permission", 403)
		return
	}
	objs, err := fs.ArchiveList(c, reqPath, model.ArchiveListArgs{
		ArchiveInnerArgs: model.ArchiveInnerArgs{
			ArchiveArgs: model.ArchiveArgs{
				LinkArgs: model.LinkArgs{
					Header:  c.Request.Header,
					Type:    c.Query("type"),
					HttpReq: c.Request,
				},
				Password: req.ArchivePass,
			},
			InnerPath: utils.FixAndCleanPath(req.InnerPath),
		},
		Refresh: req.Refresh,
	})
	if err != nil {
		if errors.Is(err, errs.WrongArchivePassword) {
			common.ErrorResp(c, err, 202)
		} else {
			common.ErrorResp(c, err, 500)
		}
		return
	}
	total, objs := pagination(objs, &req.PageReq)
	ret, _ := utils.SliceConvert(objs, func(src model.Obj) (ObjResp, error) {
		return toObjsRespWithoutSignAndThumb(src), nil
	})
	common.SuccessResp(c, ArchiveListResp{
		Content: ret,
		Total:   int64(total),
	})
}

type ArchiveDecompressReq struct {
	SrcDir        string `json:"src_dir" form:"src_dir"`
	DstDir        string `json:"dst_dir" form:"dst_dir"`
	Name          string `json:"name" form:"name"`
	ArchivePass   string `json:"archive_pass" form:"archive_pass"`
	InnerPath     string `json:"inner_path" form:"inner_path"`
	CacheFull     bool   `json:"cache_full" form:"cache_full"`
	PutIntoNewDir bool   `json:"put_into_new_dir" form:"put_into_new_dir"`
}

func FsArchiveDecompress(c *gin.Context) {
	var req ArchiveDecompressReq
	if err := c.ShouldBind(&req); err != nil {
		common.ErrorResp(c, err, 400)
		return
	}
	user := c.MustGet("user").(*model.User)
	if !user.CanDecompress() {
		common.ErrorResp(c, errs.PermissionDenied, 403)
		return
	}
	srcPath, err := user.JoinPath(stdpath.Join(req.SrcDir, req.Name))
	if err != nil {
		common.ErrorResp(c, err, 403)
		return
	}
	dstDir, err := user.JoinPath(req.DstDir)
	if err != nil {
		common.ErrorResp(c, err, 403)
		return
	}
	t, err := fs.ArchiveDecompress(c, srcPath, dstDir, model.ArchiveDecompressArgs{
		ArchiveInnerArgs: model.ArchiveInnerArgs{
			ArchiveArgs: model.ArchiveArgs{
				LinkArgs: model.LinkArgs{
					Header:  c.Request.Header,
					Type:    c.Query("type"),
					HttpReq: c.Request,
				},
				Password: req.ArchivePass,
			},
			InnerPath: utils.FixAndCleanPath(req.InnerPath),
		},
		CacheFull:     req.CacheFull,
		PutIntoNewDir: req.PutIntoNewDir,
	})
	if err != nil {
		if errors.Is(err, errs.WrongArchivePassword) {
			common.ErrorResp(c, err, 202)
		} else {
			common.ErrorResp(c, err, 500)
		}
		return
	}
	common.SuccessResp(c, gin.H{
		"task": getTaskInfo(t),
	})
}

func ArchiveDown(c *gin.Context) {
	archiveRawPath := c.MustGet("path").(string)
	innerPath := utils.FixAndCleanPath(c.Query("inner"))
	password := c.Query("pass")
	filename := stdpath.Base(innerPath)
	storage, err := fs.GetStorage(archiveRawPath, &fs.GetStoragesArgs{})
	if err != nil {
		common.ErrorResp(c, err, 500)
		return
	}
	if common.ShouldProxy(storage, filename) {
		ArchiveProxy(c)
		return
	} else {
		link, _, err := fs.ArchiveDriverExtract(c, archiveRawPath, model.ArchiveInnerArgs{
			ArchiveArgs: model.ArchiveArgs{
				LinkArgs: model.LinkArgs{
					IP:      c.ClientIP(),
					Header:  c.Request.Header,
					Type:    c.Query("type"),
					HttpReq: c.Request,
				},
				Password: password,
			},
			InnerPath: innerPath,
		})
		if err != nil {
			common.ErrorResp(c, err, 500)
			return
		}
		down(c, link)
	}
}

func ArchiveProxy(c *gin.Context) {
	archiveRawPath := c.MustGet("path").(string)
	innerPath := utils.FixAndCleanPath(c.Query("inner"))
	password := c.Query("pass")
	filename := stdpath.Base(innerPath)
	storage, err := fs.GetStorage(archiveRawPath, &fs.GetStoragesArgs{})
	if err != nil {
		common.ErrorResp(c, err, 500)
		return
	}
	if canProxy(storage, filename) {
		// TODO: Support external download proxy URL
		link, file, err := fs.ArchiveDriverExtract(c, archiveRawPath, model.ArchiveInnerArgs{
			ArchiveArgs: model.ArchiveArgs{
				LinkArgs: model.LinkArgs{
					Header:  c.Request.Header,
					Type:    c.Query("type"),
					HttpReq: c.Request,
				},
				Password: password,
			},
			InnerPath: innerPath,
		})
		if err != nil {
			common.ErrorResp(c, err, 500)
			return
		}
		localProxy(c, link, file, storage.GetStorage().ProxyRange)
	} else {
		common.ErrorStrResp(c, "proxy not allowed", 403)
		return
	}
}

func ArchiveInternalExtract(c *gin.Context) {
	archiveRawPath := c.MustGet("path").(string)
	innerPath := utils.FixAndCleanPath(c.Query("inner"))
	password := c.Query("pass")
	rc, size, err := fs.ArchiveInternalExtract(c, archiveRawPath, model.ArchiveInnerArgs{
		ArchiveArgs: model.ArchiveArgs{
			LinkArgs: model.LinkArgs{
				Header:  c.Request.Header,
				Type:    c.Query("type"),
				HttpReq: c.Request,
			},
			Password: password,
		},
		InnerPath: innerPath,
	})
	if err != nil {
		common.ErrorResp(c, err, 500)
		return
	}
	defer func() {
		if err := rc.Close(); err != nil {
			log.Errorf("failed to close file streamer, %v", err)
		}
	}()
	headers := map[string]string{
		"Referrer-Policy": "no-referrer",
		"Cache-Control":   "max-age=0, no-cache, no-store, must-revalidate",
	}
	if c.Query("attachment") == "true" {
		filename := stdpath.Base(innerPath)
		headers["Content-Disposition"] = fmt.Sprintf("attachment; filename=\"%s\"", filename)
	}
	contentType := c.Request.Header.Get("Content-Type")
	if contentType == "" {
		fileExt := stdpath.Ext(innerPath)
		contentType = mime.TypeByExtension(fileExt)
	}
	c.DataFromReader(200, size, contentType, rc, headers)
}

func ArchiveExtensions(c *gin.Context) {
	var ext []string
	for key := range tool.Tools {
		ext = append(ext, strings.TrimPrefix(key, "."))
	}
	common.SuccessResp(c, ext)
}
@@ -40,28 +40,7 @@ func Down(c *gin.Context) {
			common.ErrorResp(c, err, 500)
			return
		}
-		if link.MFile != nil {
-			defer func(ReadSeekCloser io.ReadCloser) {
-				err := ReadSeekCloser.Close()
-				if err != nil {
-					log.Errorf("close data error: %s", err)
-				}
-			}(link.MFile)
-		}
-		c.Header("Referrer-Policy", "no-referrer")
-		c.Header("Cache-Control", "max-age=0, no-cache, no-store, must-revalidate")
-		if setting.GetBool(conf.ForwardDirectLinkParams) {
-			query := c.Request.URL.Query()
-			for _, v := range conf.SlicesMap[conf.IgnoreDirectLinkParams] {
-				query.Del(v)
-			}
-			link.URL, err = utils.InjectQuery(link.URL, query)
-			if err != nil {
-				common.ErrorResp(c, err, 500)
-				return
-			}
-		}
-		c.Redirect(302, link.URL)
+		down(c, link)
	}
}
@@ -95,31 +74,62 @@ func Proxy(c *gin.Context) {
			common.ErrorResp(c, err, 500)
			return
		}
-		if link.URL != "" && setting.GetBool(conf.ForwardDirectLinkParams) {
-			query := c.Request.URL.Query()
-			for _, v := range conf.SlicesMap[conf.IgnoreDirectLinkParams] {
-				query.Del(v)
-			}
-			link.URL, err = utils.InjectQuery(link.URL, query)
-			if err != nil {
-				common.ErrorResp(c, err, 500)
-				return
-			}
-		}
-		if storage.GetStorage().ProxyRange {
-			common.ProxyRange(link, file.GetSize())
-		}
-		err = common.Proxy(c.Writer, c.Request, link, file)
-		if err != nil {
-			common.ErrorResp(c, err, 500, true)
-			return
-		}
+		localProxy(c, link, file, storage.GetStorage().ProxyRange)
	} else {
		common.ErrorStrResp(c, "proxy not allowed", 403)
		return
	}
}

+func down(c *gin.Context, link *model.Link) {
+	var err error
+	if link.MFile != nil {
+		defer func(ReadSeekCloser io.ReadCloser) {
+			err := ReadSeekCloser.Close()
+			if err != nil {
+				log.Errorf("close data error: %s", err)
+			}
+		}(link.MFile)
+	}
+	c.Header("Referrer-Policy", "no-referrer")
+	c.Header("Cache-Control", "max-age=0, no-cache, no-store, must-revalidate")
+	if setting.GetBool(conf.ForwardDirectLinkParams) {
+		query := c.Request.URL.Query()
+		for _, v := range conf.SlicesMap[conf.IgnoreDirectLinkParams] {
+			query.Del(v)
+		}
+		link.URL, err = utils.InjectQuery(link.URL, query)
+		if err != nil {
+			common.ErrorResp(c, err, 500)
+			return
+		}
+	}
+	c.Redirect(302, link.URL)
+}
+
+func localProxy(c *gin.Context, link *model.Link, file model.Obj, proxyRange bool) {
+	var err error
+	if link.URL != "" && setting.GetBool(conf.ForwardDirectLinkParams) {
+		query := c.Request.URL.Query()
+		for _, v := range conf.SlicesMap[conf.IgnoreDirectLinkParams] {
+			query.Del(v)
+		}
+		link.URL, err = utils.InjectQuery(link.URL, query)
+		if err != nil {
+			common.ErrorResp(c, err, 500)
+			return
+		}
+	}
+	if proxyRange {
+		common.ProxyRange(link, file.GetSize())
+	}
+	err = common.Proxy(c.Writer, c.Request, link, file)
+	if err != nil {
+		common.ErrorResp(c, err, 500, true)
+		return
+	}
+}

// TODO need optimize
// when can be proxy?
// 1. text file
@@ -75,7 +75,7 @@ func getUserInfo(c *gin.Context) (bool, uint, bool) {
	}
}

-func getTargetedHandler[T task.TaskExtensionInfo](manager *tache.Manager[T], callback func(c *gin.Context, task T)) gin.HandlerFunc {
+func getTargetedHandler[T task.TaskExtensionInfo](manager task.Manager[T], callback func(c *gin.Context, task T)) gin.HandlerFunc {
	return func(c *gin.Context) {
		isAdmin, uid, ok := getUserInfo(c)
		if !ok {
@@ -97,7 +97,7 @@ func getTargetedHandler[T task.TaskExtensionInfo](manager *tache.Manager[T], cal
	}
}

-func getBatchHandler[T task.TaskExtensionInfo](manager *tache.Manager[T], callback func(task T)) gin.HandlerFunc {
+func getBatchHandler[T task.TaskExtensionInfo](manager task.Manager[T], callback func(task T)) gin.HandlerFunc {
	return func(c *gin.Context) {
		isAdmin, uid, ok := getUserInfo(c)
		if !ok {
@@ -122,7 +122,7 @@ func getBatchHandler[T task.TaskExtensionInfo](manager *tache.Manager[T], callba
	}
}

-func taskRoute[T task.TaskExtensionInfo](g *gin.RouterGroup, manager *tache.Manager[T]) {
+func taskRoute[T task.TaskExtensionInfo](g *gin.RouterGroup, manager task.Manager[T]) {
	g.GET("/undone", func(c *gin.Context) {
		isAdmin, uid, ok := getUserInfo(c)
		if !ok {
@@ -220,4 +220,6 @@ func SetupTaskRoute(g *gin.RouterGroup) {
	taskRoute(g.Group("/copy"), fs.CopyTaskManager)
	taskRoute(g.Group("/offline_download"), tool.DownloadTaskManager)
	taskRoute(g.Group("/offline_download_transfer"), tool.TransferTaskManager)
+	taskRoute(g.Group("/decompress"), fs.ArchiveDownloadTaskManager)
+	taskRoute(g.Group("/decompress_upload"), fs.ArchiveContentUploadTaskManager)
}
@@ -42,6 +42,12 @@ func Init(e *gin.Engine) {
	g.GET("/p/*path", middlewares.Down, handles.Proxy)
	g.HEAD("/d/*path", middlewares.Down, handles.Down)
	g.HEAD("/p/*path", middlewares.Down, handles.Proxy)
+	g.GET("/ad/*path", middlewares.Down, handles.ArchiveDown)
+	g.GET("/ap/*path", middlewares.Down, handles.ArchiveProxy)
+	g.GET("/ae/*path", middlewares.Down, handles.ArchiveInternalExtract)
+	g.HEAD("/ad/*path", middlewares.Down, handles.ArchiveDown)
+	g.HEAD("/ap/*path", middlewares.Down, handles.ArchiveProxy)
+	g.HEAD("/ae/*path", middlewares.Down, handles.ArchiveInternalExtract)

	api := g.Group("/api")
	auth := api.Group("", middlewares.Auth)
@@ -77,6 +83,7 @@ func Init(e *gin.Engine) {
	public := api.Group("/public")
	public.Any("/settings", handles.PublicSettings)
	public.Any("/offline_download_tools", handles.OfflineDownloadTools)
+	public.Any("/archive_extensions", handles.ArchiveExtensions)

	_fs(auth.Group("/fs"))
	_task(auth.Group("/task", middlewares.AuthNotGuest))
@@ -173,6 +180,10 @@ func _fs(g *gin.RouterGroup) {
	// g.POST("/add_qbit", handles.AddQbittorrent)
	// g.POST("/add_transmission", handles.SetTransmission)
	g.POST("/add_offline_download", handles.AddOfflineDownload)
+	a := g.Group("/archive")
+	a.Any("/meta", handles.FsArchiveMeta)
+	a.Any("/list", handles.FsArchiveList)
+	a.POST("/decompress", handles.FsArchiveDecompress)
}

func _task(g *gin.RouterGroup) {
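To tie the handler, task, and router hunks together, here is a hedged sketch of starting a decompression and then checking the new "decompress" task group. The routes and request fields follow FsArchiveDecompress, ArchiveDecompressReq, and SetupTaskRoute above; the base URL, token, sample paths, and the assumption that the task routes are mounted under /api/task are placeholders.

// Sketch only: triggers extraction via /api/fs/archive/decompress, then lists
// undone tasks in the new decompress group. Base URL, token, and paths are
// hypothetical; /api/task mounting is assumed from the router setup.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func call(method, url string, payload any) string {
	var body io.Reader
	if payload != nil {
		b, _ := json.Marshal(payload)
		body = bytes.NewReader(b)
	}
	req, _ := http.NewRequest(method, url, body)
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "<token>") // assumed auth scheme
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	data, _ := io.ReadAll(resp.Body)
	return string(data)
}

func main() {
	// Extract /local/books.zip into /local/extracted; the response carries task info.
	fmt.Println(call("POST", "http://localhost:5244/api/fs/archive/decompress", map[string]any{
		"src_dir":          "/local",
		"name":             "books.zip",
		"dst_dir":          "/local/extracted",
		"archive_pass":     "",
		"inner_path":       "/",
		"cache_full":       true,
		"put_into_new_dir": false,
	}))
	// Poll the new task group registered in SetupTaskRoute.
	fmt.Println(call("GET", "http://localhost:5244/api/task/decompress/undone", nil))
}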