feat(archive): support multipart archives (#8184 close #8015)

* feat(archive): multipart support & sevenzip tool

* feat(archive): rardecode tool

* feat(archive): support decompress multi-selected

* fix(archive): decompress response filter internal

* feat(archive): support multipart zip

* fix: more applicable AcceptedMultipartExtensions interface
KirCute 2025-03-27 23:20:44 +08:00 committed by GitHub
parent 704d3854df
commit 1335f80362
19 changed files with 1042 additions and 320 deletions

go.mod

@@ -85,7 +85,7 @@ require (
    github.com/blevesearch/go-faiss v1.0.20 // indirect
    github.com/blevesearch/zapx/v16 v16.1.5 // indirect
    github.com/bodgit/plumbing v1.3.0 // indirect
-   github.com/bodgit/sevenzip v1.6.0 // indirect
+   github.com/bodgit/sevenzip v1.6.0
    github.com/bodgit/windows v1.0.1 // indirect
    github.com/bytedance/sonic/loader v0.1.1 // indirect
    github.com/charmbracelet/x/ansi v0.2.3 // indirect
@@ -106,14 +106,14 @@ require (
    github.com/kr/text v0.2.0 // indirect
    github.com/matoous/go-nanoid/v2 v2.1.0 // indirect
    github.com/microcosm-cc/bluemonday v1.0.27
-   github.com/nwaples/rardecode/v2 v2.0.0-beta.4.0.20241112120701-034e449c6e78 // indirect
+   github.com/nwaples/rardecode/v2 v2.0.0-beta.4.0.20241112120701-034e449c6e78
    github.com/sorairolake/lzip-go v0.3.5 // indirect
    github.com/taruti/bytepool v0.0.0-20160310082835-5e3a9ea56543 // indirect
    github.com/therootcompany/xz v1.0.1 // indirect
    github.com/ulikunitz/xz v0.5.12 // indirect
    github.com/xhofe/115-sdk-go v0.1.4
    github.com/yuin/goldmark v1.7.8
-   go4.org v0.0.0-20230225012048-214862532bf5 // indirect
+   go4.org v0.0.0-20230225012048-214862532bf5
    resty.dev/v3 v3.0.0-beta.2 // indirect
)


@@ -3,5 +3,7 @@ package archive
import (
    _ "github.com/alist-org/alist/v3/internal/archive/archives"
    _ "github.com/alist-org/alist/v3/internal/archive/iso9660"
+   _ "github.com/alist-org/alist/v3/internal/archive/rardecode"
+   _ "github.com/alist-org/alist/v3/internal/archive/sevenzip"
    _ "github.com/alist-org/alist/v3/internal/archive/zip"
)


@@ -16,14 +16,18 @@ import (
type Archives struct {
}
-func (*Archives) AcceptedExtensions() []string {
+func (Archives) AcceptedExtensions() []string {
    return []string{
-       ".br", ".bz2", ".gz", ".lz4", ".lz", ".sz", ".s2", ".xz", ".zz", ".zst", ".tar", ".rar", ".7z",
+       ".br", ".bz2", ".gz", ".lz4", ".lz", ".sz", ".s2", ".xz", ".zz", ".zst", ".tar",
    }
}
-func (*Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
-   fsys, err := getFs(ss, args)
+func (Archives) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
+   return map[string]tool.MultipartExtension{}
+}
+func (Archives) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
+   fsys, err := getFs(ss[0], args)
    if err != nil {
        return nil, err
    }
@@ -47,8 +51,8 @@ func (*Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (mod
    }, nil
}
-func (*Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
-   fsys, err := getFs(ss, args.ArchiveArgs)
+func (Archives) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
+   fsys, err := getFs(ss[0], args.ArchiveArgs)
    if err != nil {
        return nil, err
    }
@@ -69,8 +73,8 @@ func (*Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([
    })
}
-func (*Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-   fsys, err := getFs(ss, args.ArchiveArgs)
+func (Archives) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
+   fsys, err := getFs(ss[0], args.ArchiveArgs)
    if err != nil {
        return nil, 0, err
    }
@@ -85,8 +89,8 @@ func (*Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs)
    return file, stat.Size(), nil
}
-func (*Archives) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
-   fsys, err := getFs(ss, args.ArchiveArgs)
+func (Archives) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
+   fsys, err := getFs(ss[0], args.ArchiveArgs)
    if err != nil {
        return err
    }
@@ -133,5 +137,5 @@ func (*Archives) Decompress(ss *stream.SeekableStream, outputPath string, args m
var _ tool.Tool = (*Archives)(nil)
func init() {
-   tool.RegisterTool(&Archives{})
+   tool.RegisterTool(Archives{})
}


@@ -14,19 +14,23 @@ import (
type ISO9660 struct {
}
-func (t *ISO9660) AcceptedExtensions() []string {
+func (ISO9660) AcceptedExtensions() []string {
    return []string{".iso"}
}
-func (t *ISO9660) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
+func (ISO9660) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
+   return map[string]tool.MultipartExtension{}
+}
+func (ISO9660) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
    return &model.ArchiveMetaInfo{
        Comment:   "",
        Encrypted: false,
    }, nil
}
-func (t *ISO9660) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
-   img, err := getImage(ss)
+func (ISO9660) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
+   img, err := getImage(ss[0])
    if err != nil {
        return nil, err
    }
@@ -48,8 +52,8 @@ func (t *ISO9660) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (
    return ret, nil
}
-func (t *ISO9660) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-   img, err := getImage(ss)
+func (ISO9660) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
+   img, err := getImage(ss[0])
    if err != nil {
        return nil, 0, err
    }
@@ -63,8 +67,8 @@ func (t *ISO9660) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs
    return io.NopCloser(obj.Reader()), obj.Size(), nil
}
-func (t *ISO9660) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
-   img, err := getImage(ss)
+func (ISO9660) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
+   img, err := getImage(ss[0])
    if err != nil {
        return err
    }
@@ -92,5 +96,5 @@ func (t *ISO9660) Decompress(ss *stream.SeekableStream, outputPath string, args
var _ tool.Tool = (*ISO9660)(nil)
func init() {
-   tool.RegisterTool(&ISO9660{})
+   tool.RegisterTool(ISO9660{})
}


@@ -0,0 +1,140 @@
package rardecode
import (
"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/nwaples/rardecode/v2"
"io"
"os"
stdpath "path"
"strings"
)
type RarDecoder struct{}
func (RarDecoder) AcceptedExtensions() []string {
return []string{".rar"}
}
func (RarDecoder) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
return map[string]tool.MultipartExtension{
".part1.rar": {".part%d.rar", 2},
}
}
func (RarDecoder) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
l, err := list(ss, args.Password)
if err != nil {
return nil, err
}
_, tree := tool.GenerateMetaTreeFromFolderTraversal(l)
return &model.ArchiveMetaInfo{
Comment: "",
Encrypted: false,
Tree: tree,
}, nil
}
func (RarDecoder) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
return nil, errs.NotSupport
}
func (RarDecoder) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
reader, err := getReader(ss, args.Password)
if err != nil {
return nil, 0, err
}
innerPath := strings.TrimPrefix(args.InnerPath, "/")
for {
var header *rardecode.FileHeader
header, err = reader.Next()
if err == io.EOF {
break
}
if err != nil {
return nil, 0, err
}
if header.Name == innerPath {
if header.IsDir {
break
}
return io.NopCloser(reader), header.UnPackedSize, nil
}
}
return nil, 0, errs.ObjectNotFound
}
func (RarDecoder) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
reader, err := getReader(ss, args.Password)
if err != nil {
return err
}
if args.InnerPath == "/" {
for {
var header *rardecode.FileHeader
header, err = reader.Next()
if err == io.EOF {
break
}
if err != nil {
return err
}
name := header.Name
if header.IsDir {
name = name + "/"
}
err = decompress(reader, header, name, outputPath)
if err != nil {
return err
}
}
} else {
innerPath := strings.TrimPrefix(args.InnerPath, "/")
innerBase := stdpath.Base(innerPath)
createdBaseDir := false
for {
var header *rardecode.FileHeader
header, err = reader.Next()
if err == io.EOF {
break
}
if err != nil {
return err
}
name := header.Name
if header.IsDir {
name = name + "/"
}
if name == innerPath {
err = _decompress(reader, header, outputPath, up)
if err != nil {
return err
}
break
} else if strings.HasPrefix(name, innerPath+"/") {
targetPath := stdpath.Join(outputPath, innerBase)
if !createdBaseDir {
err = os.Mkdir(targetPath, 0700)
if err != nil {
return err
}
createdBaseDir = true
}
restPath := strings.TrimPrefix(name, innerPath+"/")
err = decompress(reader, header, restPath, targetPath)
if err != nil {
return err
}
}
}
}
return nil
}
var _ tool.Tool = (*RarDecoder)(nil)
func init() {
tool.RegisterTool(RarDecoder{})
}


@@ -0,0 +1,225 @@
package rardecode
import (
"fmt"
"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/nwaples/rardecode/v2"
"io"
"io/fs"
"os"
stdpath "path"
"sort"
"strings"
"time"
)
type VolumeFile struct {
stream.SStreamReadAtSeeker
name string
}
func (v *VolumeFile) Name() string {
return v.name
}
func (v *VolumeFile) Size() int64 {
return v.SStreamReadAtSeeker.GetRawStream().GetSize()
}
func (v *VolumeFile) Mode() fs.FileMode {
return 0644
}
func (v *VolumeFile) ModTime() time.Time {
return v.SStreamReadAtSeeker.GetRawStream().ModTime()
}
func (v *VolumeFile) IsDir() bool {
return false
}
func (v *VolumeFile) Sys() any {
return nil
}
func (v *VolumeFile) Stat() (fs.FileInfo, error) {
return v, nil
}
func (v *VolumeFile) Close() error {
return nil
}
type VolumeFs struct {
parts map[string]*VolumeFile
}
func (v *VolumeFs) Open(name string) (fs.File, error) {
file, ok := v.parts[name]
if !ok {
return nil, fs.ErrNotExist
}
return file, nil
}
func makeOpts(ss []*stream.SeekableStream) (string, rardecode.Option, error) {
if len(ss) == 1 {
reader, err := stream.NewReadAtSeeker(ss[0], 0)
if err != nil {
return "", nil, err
}
fileName := "file.rar"
fsys := &VolumeFs{parts: map[string]*VolumeFile{
fileName: {SStreamReadAtSeeker: reader, name: fileName},
}}
return fileName, rardecode.FileSystem(fsys), nil
} else {
parts := make(map[string]*VolumeFile, len(ss))
for i, s := range ss {
reader, err := stream.NewReadAtSeeker(s, 0)
if err != nil {
return "", nil, err
}
fileName := fmt.Sprintf("file.part%d.rar", i+1)
parts[fileName] = &VolumeFile{SStreamReadAtSeeker: reader, name: fileName}
}
return "file.part1.rar", rardecode.FileSystem(&VolumeFs{parts: parts}), nil
}
}
type WrapReader struct {
files []*rardecode.File
}
func (r *WrapReader) Files() []tool.SubFile {
ret := make([]tool.SubFile, 0, len(r.files))
for _, f := range r.files {
ret = append(ret, &WrapFile{File: f})
}
return ret
}
type WrapFile struct {
*rardecode.File
}
func (f *WrapFile) Name() string {
if f.File.IsDir {
return f.File.Name + "/"
}
return f.File.Name
}
func (f *WrapFile) FileInfo() fs.FileInfo {
return &WrapFileInfo{File: f.File}
}
type WrapFileInfo struct {
*rardecode.File
}
func (f *WrapFileInfo) Name() string {
return stdpath.Base(f.File.Name)
}
func (f *WrapFileInfo) Size() int64 {
return f.File.UnPackedSize
}
func (f *WrapFileInfo) ModTime() time.Time {
return f.File.ModificationTime
}
func (f *WrapFileInfo) IsDir() bool {
return f.File.IsDir
}
func (f *WrapFileInfo) Sys() any {
return nil
}
func list(ss []*stream.SeekableStream, password string) (*WrapReader, error) {
fileName, fsOpt, err := makeOpts(ss)
if err != nil {
return nil, err
}
opts := []rardecode.Option{fsOpt}
if password != "" {
opts = append(opts, rardecode.Password(password))
}
files, err := rardecode.List(fileName, opts...)
if err != nil {
return nil, filterPassword(err)
}
// rardecode does not guarantee that parent directories appear before their
// children in the listing. A parent path is always shorter than any of its
// child paths, so sorting by name length puts parents first.
sort.Slice(files, func(i, j int) bool {
return len(files[i].Name) < len(files[j].Name)
})
return &WrapReader{files: files}, nil
}
func getReader(ss []*stream.SeekableStream, password string) (*rardecode.Reader, error) {
fileName, fsOpt, err := makeOpts(ss)
if err != nil {
return nil, err
}
opts := []rardecode.Option{fsOpt}
if password != "" {
opts = append(opts, rardecode.Password(password))
}
rc, err := rardecode.OpenReader(fileName, opts...)
if err != nil {
return nil, filterPassword(err)
}
ss[0].Closers.Add(rc)
return &rc.Reader, nil
}
func decompress(reader *rardecode.Reader, header *rardecode.FileHeader, filePath, outputPath string) error {
targetPath := outputPath
dir, base := stdpath.Split(filePath)
if dir != "" {
targetPath = stdpath.Join(targetPath, dir)
err := os.MkdirAll(targetPath, 0700)
if err != nil {
return err
}
}
if base != "" {
err := _decompress(reader, header, targetPath, func(_ float64) {})
if err != nil {
return err
}
}
return nil
}
func _decompress(reader *rardecode.Reader, header *rardecode.FileHeader, targetPath string, up model.UpdateProgress) error {
f, err := os.OpenFile(stdpath.Join(targetPath, stdpath.Base(header.Name)), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err != nil {
return err
}
defer func() { _ = f.Close() }()
_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
Reader: &stream.SimpleReaderWithSize{
Reader: reader,
Size: header.UnPackedSize,
},
UpdateProgress: up,
})
if err != nil {
return err
}
return nil
}
func filterPassword(err error) error {
if err != nil && strings.Contains(err.Error(), "password") {
return errs.WrongArchivePassword
}
return err
}


@@ -0,0 +1,72 @@
package sevenzip
import (
"io"
"strings"
"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
)
type SevenZip struct{}
func (SevenZip) AcceptedExtensions() []string {
return []string{".7z"}
}
func (SevenZip) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
return map[string]tool.MultipartExtension{
".7z.001": {".7z.%.3d", 2},
}
}
func (SevenZip) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
reader, err := getReader(ss, args.Password)
if err != nil {
return nil, err
}
_, tree := tool.GenerateMetaTreeFromFolderTraversal(&WrapReader{Reader: reader})
return &model.ArchiveMetaInfo{
Comment: "",
Encrypted: args.Password != "",
Tree: tree,
}, nil
}
func (SevenZip) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
return nil, errs.NotSupport
}
func (SevenZip) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
reader, err := getReader(ss, args.Password)
if err != nil {
return nil, 0, err
}
innerPath := strings.TrimPrefix(args.InnerPath, "/")
for _, file := range reader.File {
if file.Name == innerPath {
r, e := file.Open()
if e != nil {
return nil, 0, e
}
return r, file.FileInfo().Size(), nil
}
}
return nil, 0, errs.ObjectNotFound
}
func (SevenZip) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
reader, err := getReader(ss, args.Password)
if err != nil {
return err
}
return tool.DecompressFromFolderTraversal(&WrapReader{Reader: reader}, outputPath, args, up)
}
var _ tool.Tool = (*SevenZip)(nil)
func init() {
tool.RegisterTool(SevenZip{})
}


@@ -0,0 +1,61 @@
package sevenzip
import (
"errors"
"github.com/alist-org/alist/v3/internal/archive/tool"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/bodgit/sevenzip"
"io"
"io/fs"
)
type WrapReader struct {
Reader *sevenzip.Reader
}
func (r *WrapReader) Files() []tool.SubFile {
ret := make([]tool.SubFile, 0, len(r.Reader.File))
for _, f := range r.Reader.File {
ret = append(ret, &WrapFile{f: f})
}
return ret
}
type WrapFile struct {
f *sevenzip.File
}
func (f *WrapFile) Name() string {
return f.f.Name
}
func (f *WrapFile) FileInfo() fs.FileInfo {
return f.f.FileInfo()
}
func (f *WrapFile) Open() (io.ReadCloser, error) {
return f.f.Open()
}
func getReader(ss []*stream.SeekableStream, password string) (*sevenzip.Reader, error) {
readerAt, err := stream.NewMultiReaderAt(ss)
if err != nil {
return nil, err
}
sr, err := sevenzip.NewReaderWithPassword(readerAt, readerAt.Size(), password)
if err != nil {
return nil, filterPassword(err)
}
return sr, nil
}
func filterPassword(err error) error {
if err != nil {
var e *sevenzip.ReadError
if errors.As(err, &e) && e.Encrypted {
return errs.WrongArchivePassword
}
}
return err
}


@@ -6,10 +6,16 @@ import (
    "io"
)
+type MultipartExtension struct {
+   PartFileFormat  string
+   SecondPartIndex int
+}
type Tool interface {
    AcceptedExtensions() []string
-   GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error)
-   List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error)
-   Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error)
-   Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error
+   AcceptedMultipartExtensions() map[string]MultipartExtension
+   GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error)
+   List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error)
+   Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error)
+   Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error
}
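Editorial aside, not part of the commit: a tool that supports split archives maps its first-part suffix (e.g. ".part1.rar", ".7z.001") to a MultipartExtension, and later part names come from formatting the part index with PartFileFormat. A minimal, self-contained sketch; the nextPartName helper is hypothetical:

package main

import "fmt"

// MultipartExtension mirrors the struct introduced above.
type MultipartExtension struct {
    PartFileFormat  string // fmt verb for a part suffix, e.g. ".part%d.rar" or ".7z.%.3d"
    SecondPartIndex int    // index of the first part to probe after the one the user opened
}

// nextPartName derives the file name of part `index` from the base name
// (the object name with its first-part suffix removed).
func nextPartName(base string, ext MultipartExtension, index int) string {
    return base + fmt.Sprintf(ext.PartFileFormat, index)
}

func main() {
    rar := MultipartExtension{PartFileFormat: ".part%d.rar", SecondPartIndex: 2}
    sz := MultipartExtension{PartFileFormat: ".7z.%.3d", SecondPartIndex: 2}
    fmt.Println(nextPartName("file", rar, rar.SecondPartIndex)) // file.part2.rar
    fmt.Println(nextPartName("file", sz, sz.SecondPartIndex))   // file.7z.002
}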


@@ -0,0 +1,201 @@
package tool
import (
"io"
"io/fs"
"os"
stdpath "path"
"strings"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/stream"
)
type SubFile interface {
Name() string
FileInfo() fs.FileInfo
Open() (io.ReadCloser, error)
}
type CanEncryptSubFile interface {
IsEncrypted() bool
SetPassword(password string)
}
type ArchiveReader interface {
Files() []SubFile
}
func GenerateMetaTreeFromFolderTraversal(r ArchiveReader) (bool, []model.ObjTree) {
encrypted := false
dirMap := make(map[string]*model.ObjectTree)
dirMap["."] = &model.ObjectTree{}
for _, file := range r.Files() {
if encrypt, ok := file.(CanEncryptSubFile); ok && encrypt.IsEncrypted() {
encrypted = true
}
name := strings.TrimPrefix(file.Name(), "/")
var dir string
var dirObj *model.ObjectTree
isNewFolder := false
if !file.FileInfo().IsDir() {
// first, attach the file to the folder that contains it
dir = stdpath.Dir(name)
dirObj = dirMap[dir]
if dirObj == nil {
isNewFolder = true
dirObj = &model.ObjectTree{}
dirObj.IsFolder = true
dirObj.Name = stdpath.Base(dir)
dirObj.Modified = file.FileInfo().ModTime()
dirMap[dir] = dirObj
}
dirObj.Children = append(
dirObj.Children, &model.ObjectTree{
Object: *MakeModelObj(file.FileInfo()),
},
)
} else {
dir = strings.TrimSuffix(name, "/")
dirObj = dirMap[dir]
if dirObj == nil {
isNewFolder = true
dirObj = &model.ObjectTree{}
dirMap[dir] = dirObj
}
dirObj.IsFolder = true
dirObj.Name = stdpath.Base(dir)
dirObj.Modified = file.FileInfo().ModTime()
dirObj.Children = make([]model.ObjTree, 0)
}
if isNewFolder {
// attach the folder to its parent folder
dir = stdpath.Dir(dir)
pDirObj := dirMap[dir]
if pDirObj != nil {
pDirObj.Children = append(pDirObj.Children, dirObj)
continue
}
for {
// handle archives that record only file paths, with no explicit folder entries
pDirObj = &model.ObjectTree{}
pDirObj.IsFolder = true
pDirObj.Name = stdpath.Base(dir)
pDirObj.Modified = file.FileInfo().ModTime()
dirMap[dir] = pDirObj
pDirObj.Children = append(pDirObj.Children, dirObj)
dir = stdpath.Dir(dir)
if dirMap[dir] != nil {
// attach the topmost newly created folder to its existing ancestor;
// without this the whole synthesized subtree would be orphaned
dirMap[dir].Children = append(dirMap[dir].Children, pDirObj)
break
}
dirObj = pDirObj
}
}
}
return encrypted, dirMap["."].GetChildren()
}
func MakeModelObj(file os.FileInfo) *model.Object {
return &model.Object{
Name: file.Name(),
Size: file.Size(),
Modified: file.ModTime(),
IsFolder: file.IsDir(),
}
}
type WrapFileInfo struct {
model.Obj
}
func DecompressFromFolderTraversal(r ArchiveReader, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
var err error
files := r.Files()
if args.InnerPath == "/" {
for i, file := range files {
name := file.Name()
err = decompress(file, name, outputPath, args.Password)
if err != nil {
return err
}
up(float64(i+1) * 100.0 / float64(len(files)))
}
} else {
innerPath := strings.TrimPrefix(args.InnerPath, "/")
innerBase := stdpath.Base(innerPath)
createdBaseDir := false
for _, file := range files {
name := file.Name()
if name == innerPath {
err = _decompress(file, outputPath, args.Password, up)
if err != nil {
return err
}
break
} else if strings.HasPrefix(name, innerPath+"/") {
targetPath := stdpath.Join(outputPath, innerBase)
if !createdBaseDir {
err = os.Mkdir(targetPath, 0700)
if err != nil {
return err
}
createdBaseDir = true
}
restPath := strings.TrimPrefix(name, innerPath+"/")
err = decompress(file, restPath, targetPath, args.Password)
if err != nil {
return err
}
}
}
}
return nil
}
func decompress(file SubFile, filePath, outputPath, password string) error {
targetPath := outputPath
dir, base := stdpath.Split(filePath)
if dir != "" {
targetPath = stdpath.Join(targetPath, dir)
err := os.MkdirAll(targetPath, 0700)
if err != nil {
return err
}
}
if base != "" {
err := _decompress(file, targetPath, password, func(_ float64) {})
if err != nil {
return err
}
}
return nil
}
func _decompress(file SubFile, targetPath, password string, up model.UpdateProgress) error {
if encrypt, ok := file.(CanEncryptSubFile); ok && encrypt.IsEncrypted() {
encrypt.SetPassword(password)
}
rc, err := file.Open()
if err != nil {
return err
}
defer func() { _ = rc.Close() }()
f, err := os.OpenFile(stdpath.Join(targetPath, file.FileInfo().Name()), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
if err != nil {
return err
}
defer func() { _ = f.Close() }()
_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
Reader: &stream.SimpleReaderWithSize{
Reader: rc,
Size: file.FileInfo().Size(),
},
UpdateProgress: up,
})
if err != nil {
return err
}
return nil
}
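A hedged illustration of the traversal above (the fakeFile/fakeInfo types are hypothetical, and it assumes model.ObjTree exposes GetName/IsDir via model.Obj): a lone entry "a/b.txt" is enough for the tree builder to synthesize the intermediate folder "a".

package main

import (
    "fmt"
    "io"
    "io/fs"
    stdpath "path"
    "time"

    "github.com/alist-org/alist/v3/internal/archive/tool"
)

// fakeInfo is a minimal fs.FileInfo for illustration only.
type fakeInfo struct{ name string }

func (i fakeInfo) Name() string       { return i.name }
func (i fakeInfo) Size() int64        { return 42 }
func (i fakeInfo) Mode() fs.FileMode  { return 0644 }
func (i fakeInfo) ModTime() time.Time { return time.Now() }
func (i fakeInfo) IsDir() bool        { return false }
func (i fakeInfo) Sys() any           { return nil }

// fakeFile implements tool.SubFile over a bare path; its content is never read.
type fakeFile struct{ path string }

func (f fakeFile) Name() string                 { return f.path }
func (f fakeFile) FileInfo() fs.FileInfo        { return fakeInfo{name: stdpath.Base(f.path)} }
func (f fakeFile) Open() (io.ReadCloser, error) { return io.NopCloser(nil), nil }

type fakeReader struct{ files []tool.SubFile }

func (r fakeReader) Files() []tool.SubFile { return r.files }

func main() {
    // no explicit entry for folder "a"; the traversal creates it on the fly
    r := fakeReader{files: []tool.SubFile{fakeFile{path: "a/b.txt"}}}
    encrypted, roots := tool.GenerateMetaTreeFromFolderTraversal(r)
    fmt.Println(encrypted) // false: fakeFile does not implement CanEncryptSubFile
    for _, obj := range roots {
        fmt.Println(obj.GetName(), obj.IsDir()) // a true
    }
}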


@@ -5,19 +5,28 @@ import (
)
var (
    Tools = make(map[string]Tool)
+   MultipartExtensions = make(map[string]MultipartExtension)
)
func RegisterTool(tool Tool) {
    for _, ext := range tool.AcceptedExtensions() {
        Tools[ext] = tool
    }
+   for mainFile, ext := range tool.AcceptedMultipartExtensions() {
+       MultipartExtensions[mainFile] = ext
+       Tools[mainFile] = tool
+   }
}
-func GetArchiveTool(ext string) (Tool, error) {
+func GetArchiveTool(ext string) (*MultipartExtension, Tool, error) {
    t, ok := Tools[ext]
    if !ok {
-       return nil, errs.UnknownArchiveFormat
+       return nil, nil, errs.UnknownArchiveFormat
    }
-   return t, nil
+   partExt, ok := MultipartExtensions[ext]
+   if !ok {
+       return nil, t, nil
+   }
+   return &partExt, t, nil
}
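A hedged usage sketch (not commit code) of the two-value lookup: for single-file formats the returned *MultipartExtension is nil, while a registered first-part suffix also carries the naming rule for the remaining parts. The describe helper is hypothetical:

package example

import "github.com/alist-org/alist/v3/internal/archive/tool"

// describe shows how callers branch on the result of the registry lookup.
func describe(ext string) (string, error) {
    partExt, t, err := tool.GetArchiveTool(ext)
    if err != nil {
        return "", err // errs.UnknownArchiveFormat for unregistered suffixes
    }
    _ = t // t serves GetMeta/List/Extract/Decompress over []*stream.SeekableStream
    if partExt == nil {
        return "single-file archive", nil
    }
    // e.g. ".part1.rar" -> {".part%d.rar", 2}: probe part2, part3, ... until one is missing
    return "multipart archive", nil
}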


@@ -2,8 +2,13 @@ package zip
import (
    "bytes"
+   "io"
+   "io/fs"
+   stdpath "path"
+   "strings"
+   "github.com/alist-org/alist/v3/internal/archive/tool"
    "github.com/alist-org/alist/v3/internal/errs"
+   "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/stream"
    "github.com/saintfish/chardet"
    "github.com/yeka/zip"
@@ -16,65 +21,62 @@ import (
    "golang.org/x/text/encoding/unicode"
    "golang.org/x/text/encoding/unicode/utf32"
    "golang.org/x/text/transform"
-   "io"
-   "os"
-   stdpath "path"
-   "strings"
)
-func toModelObj(file os.FileInfo) *model.Object {
-   return &model.Object{
-       Name:     decodeName(file.Name()),
-       Size:     file.Size(),
-       Modified: file.ModTime(),
-       IsFolder: file.IsDir(),
-   }
-}
-func decompress(file *zip.File, filePath, outputPath, password string) error {
-   targetPath := outputPath
-   dir, base := stdpath.Split(filePath)
-   if dir != "" {
-       targetPath = stdpath.Join(targetPath, dir)
-       err := os.MkdirAll(targetPath, 0700)
-       if err != nil {
-           return err
-       }
-   }
-   if base != "" {
-       err := _decompress(file, targetPath, password, func(_ float64) {})
-       if err != nil {
-           return err
-       }
-   }
-   return nil
-}
-func _decompress(file *zip.File, targetPath, password string, up model.UpdateProgress) error {
-   if file.IsEncrypted() {
-       file.SetPassword(password)
-   }
-   rc, err := file.Open()
-   if err != nil {
-       return err
-   }
-   defer rc.Close()
-   f, err := os.OpenFile(stdpath.Join(targetPath, decodeName(file.FileInfo().Name())), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
-   if err != nil {
-       return err
-   }
-   defer f.Close()
-   _, err = io.Copy(f, &stream.ReaderUpdatingProgress{
-       Reader: &stream.SimpleReaderWithSize{
-           Reader: rc,
-           Size:   file.FileInfo().Size(),
-       },
-       UpdateProgress: up,
-   })
-   if err != nil {
-       return err
-   }
-   return nil
-}
+type WrapReader struct {
+   Reader *zip.Reader
+}
+func (r *WrapReader) Files() []tool.SubFile {
+   ret := make([]tool.SubFile, 0, len(r.Reader.File))
+   for _, f := range r.Reader.File {
+       ret = append(ret, &WrapFile{f: f})
+   }
+   return ret
+}
+type WrapFileInfo struct {
+   fs.FileInfo
+}
+func (f *WrapFileInfo) Name() string {
+   return decodeName(f.FileInfo.Name())
+}
+type WrapFile struct {
+   f *zip.File
+}
+func (f *WrapFile) Name() string {
+   return decodeName(f.f.Name)
+}
+func (f *WrapFile) FileInfo() fs.FileInfo {
+   return &WrapFileInfo{FileInfo: f.f.FileInfo()}
+}
+func (f *WrapFile) Open() (io.ReadCloser, error) {
+   return f.f.Open()
+}
+func (f *WrapFile) IsEncrypted() bool {
+   return f.f.IsEncrypted()
+}
+func (f *WrapFile) SetPassword(password string) {
+   f.f.SetPassword(password)
+}
+func getReader(ss []*stream.SeekableStream) (*zip.Reader, error) {
+   if len(ss) > 1 && stdpath.Ext(ss[1].GetName()) == ".z01" {
+       // FIXME: Incorrect parsing method for standard multipart zip format
+       ss = append(ss[1:], ss[0])
+   }
+   reader, err := stream.NewMultiReaderAt(ss)
+   if err != nil {
+       return nil, err
+   }
+   return zip.NewReader(reader, reader.Size())
+}
func filterPassword(err error) error {
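An aside on the .z01 rotation in getReader above (a hedged sketch, not commit code): in a spanned zip the user-visible "file.zip" is the final volume holding the central directory, so the stream list arrives with it first and must be rotated to the end before concatenation.

package main

import "fmt"

func main() {
    // parts as collected by the caller: the opened .zip first, then .z01, .z02, ...
    parts := []string{"file.zip", "file.z01", "file.z02"}
    // same rotation as getReader: data volumes first, central directory last
    parts = append(parts[1:], parts[0])
    fmt.Println(parts) // [file.z01 file.z02 file.zip]
}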


@@ -2,7 +2,6 @@ package zip
import (
    "io"
-   "os"
    stdpath "path"
    "strings"
@@ -10,106 +9,37 @@ import (
    "github.com/alist-org/alist/v3/internal/errs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/stream"
-   "github.com/yeka/zip"
)
type Zip struct {
}
-func (*Zip) AcceptedExtensions() []string {
-   return []string{".zip"}
+func (Zip) AcceptedExtensions() []string {
+   return []string{}
}
-func (*Zip) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
-   reader, err := stream.NewReadAtSeeker(ss, 0)
+func (Zip) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
+   return map[string]tool.MultipartExtension{
+       ".zip":     {".z%.2d", 1},
+       ".zip.001": {".zip.%.3d", 2},
+   }
+}
+func (Zip) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
+   zipReader, err := getReader(ss)
    if err != nil {
        return nil, err
    }
-   zipReader, err := zip.NewReader(reader, ss.GetSize())
-   if err != nil {
-       return nil, err
-   }
-   encrypted := false
-   dirMap := make(map[string]*model.ObjectTree)
-   dirMap["."] = &model.ObjectTree{}
-   for _, file := range zipReader.File {
-       if file.IsEncrypted() {
-           encrypted = true
-       }
-       name := strings.TrimPrefix(decodeName(file.Name), "/")
-       var dir string
-       var dirObj *model.ObjectTree
-       isNewFolder := false
-       if !file.FileInfo().IsDir() {
-           // first, attach the file to the folder that contains it
-           dir = stdpath.Dir(name)
-           dirObj = dirMap[dir]
-           if dirObj == nil {
-               isNewFolder = true
-               dirObj = &model.ObjectTree{}
-               dirObj.IsFolder = true
-               dirObj.Name = stdpath.Base(dir)
-               dirObj.Modified = file.ModTime()
-               dirMap[dir] = dirObj
-           }
-           dirObj.Children = append(
-               dirObj.Children, &model.ObjectTree{
-                   Object: *toModelObj(file.FileInfo()),
-               },
-           )
-       } else {
-           dir = strings.TrimSuffix(name, "/")
-           dirObj = dirMap[dir]
-           if dirObj == nil {
-               isNewFolder = true
-               dirObj = &model.ObjectTree{}
-               dirMap[dir] = dirObj
-           }
-           dirObj.IsFolder = true
-           dirObj.Name = stdpath.Base(dir)
-           dirObj.Modified = file.ModTime()
-           dirObj.Children = make([]model.ObjTree, 0)
-       }
-       if isNewFolder {
-           // attach the folder to its parent folder
-           dir = stdpath.Dir(dir)
-           pDirObj := dirMap[dir]
-           if pDirObj != nil {
-               pDirObj.Children = append(pDirObj.Children, dirObj)
-               continue
-           }
-           for {
-               // handle archives that record only file paths, with no explicit folder entries
-               pDirObj = &model.ObjectTree{}
-               pDirObj.IsFolder = true
-               pDirObj.Name = stdpath.Base(dir)
-               pDirObj.Modified = file.ModTime()
-               dirMap[dir] = pDirObj
-               pDirObj.Children = append(pDirObj.Children, dirObj)
-               dir = stdpath.Dir(dir)
-               if dirMap[dir] != nil {
-                   break
-               }
-               dirObj = pDirObj
-           }
-       }
-   }
+   encrypted, tree := tool.GenerateMetaTreeFromFolderTraversal(&WrapReader{Reader: zipReader})
    return &model.ArchiveMetaInfo{
        Comment:   zipReader.Comment,
        Encrypted: encrypted,
-       Tree:      dirMap["."].GetChildren(),
+       Tree:      tree,
    }, nil
}
-func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
-   reader, err := stream.NewReadAtSeeker(ss, 0)
-   if err != nil {
-       return nil, err
-   }
-   zipReader, err := zip.NewReader(reader, ss.GetSize())
+func (Zip) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
+   zipReader, err := getReader(ss)
    if err != nil {
        return nil, err
    }
@@ -134,13 +64,13 @@ func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mode
    if dir == nil && len(strs) == 2 {
        dir = &model.Object{
            Name:     strs[0],
-           Modified: ss.ModTime(),
+           Modified: ss[0].ModTime(),
            IsFolder: true,
        }
    }
    continue
}
-ret = append(ret, toModelObj(file.FileInfo()))
+ret = append(ret, tool.MakeModelObj(&WrapFileInfo{FileInfo: file.FileInfo()}))
}
if len(ret) == 0 && dir != nil {
    ret = append(ret, dir)
@@ -157,7 +87,7 @@ func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mode
    continue
}
exist = true
-ret = append(ret, toModelObj(file.FileInfo()))
+ret = append(ret, tool.MakeModelObj(&WrapFileInfo{file.FileInfo()}))
}
if !exist {
    return nil, errs.ObjectNotFound
@@ -166,12 +96,8 @@ func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mode
    }
}
-func (*Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-   reader, err := stream.NewReadAtSeeker(ss, 0)
-   if err != nil {
-       return nil, 0, err
-   }
-   zipReader, err := zip.NewReader(reader, ss.GetSize())
+func (Zip) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
+   zipReader, err := getReader(ss)
    if err != nil {
        return nil, 0, err
    }
@@ -191,58 +117,16 @@ func (*Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.
    return nil, 0, errs.ObjectNotFound
}
-func (*Zip) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
-   reader, err := stream.NewReadAtSeeker(ss, 0)
-   if err != nil {
-       return err
-   }
-   zipReader, err := zip.NewReader(reader, ss.GetSize())
-   if err != nil {
-       return err
-   }
-   if args.InnerPath == "/" {
-       for i, file := range zipReader.File {
-           name := decodeName(file.Name)
-           err = decompress(file, name, outputPath, args.Password)
-           if err != nil {
-               return err
-           }
-           up(float64(i+1) * 100.0 / float64(len(zipReader.File)))
-       }
-   } else {
-       innerPath := strings.TrimPrefix(args.InnerPath, "/")
-       innerBase := stdpath.Base(innerPath)
-       createdBaseDir := false
-       for _, file := range zipReader.File {
-           name := decodeName(file.Name)
-           if name == innerPath {
-               err = _decompress(file, outputPath, args.Password, up)
-               if err != nil {
-                   return err
-               }
-               break
-           } else if strings.HasPrefix(name, innerPath+"/") {
-               targetPath := stdpath.Join(outputPath, innerBase)
-               if !createdBaseDir {
-                   err = os.Mkdir(targetPath, 0700)
-                   if err != nil {
-                       return err
-                   }
-                   createdBaseDir = true
-               }
-               restPath := strings.TrimPrefix(name, innerPath+"/")
-               err = decompress(file, restPath, targetPath, args.Password)
-               if err != nil {
-                   return err
-               }
-           }
-       }
-   }
-   return nil
+func (Zip) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
+   zipReader, err := getReader(ss)
+   if err != nil {
+       return err
+   }
+   return tool.DecompressFromFolderTraversal(&WrapReader{Reader: zipReader}, outputPath, args, up)
}
var _ tool.Tool = (*Zip)(nil)
func init() {
-   tool.RegisterTool(&Zip{})
+   tool.RegisterTool(Zip{})
}


@@ -79,13 +79,13 @@ type Remove interface {
type Put interface {
    // Put a file (provided as a FileStreamer) into the driver
    // Besides the most basic upload functionality, the following features also need to be implemented:
-   // 1. Canceling (when `<-ctx.Done()` returns), by the following methods:
+   // 1. Canceling (when `<-ctx.Done()` returns), which can be supported by the following methods:
    //    (1) Use request methods that carry context, such as the following:
    //        a. http.NewRequestWithContext
    //        b. resty.Request.SetContext
    //        c. s3manager.Uploader.UploadWithContext
    //        d. utils.CopyWithCtx
-   //    (2) Use a `driver.ReaderWithCtx` or a `driver.NewLimitedUploadStream`
+   //    (2) Use a `driver.ReaderWithCtx` or `driver.NewLimitedUploadStream`
    //    (3) Use `utils.IsCanceled` to check if the upload has been canceled during the upload process,
    //        this is typically applicable to chunked uploads.
    // 2. Submit upload progress (via `up`) in real-time. There are three recommended ways as follows:


@@ -4,17 +4,6 @@ import (
    "context"
    stderrors "errors"
    "fmt"
-   "github.com/alist-org/alist/v3/internal/archive/tool"
-   "github.com/alist-org/alist/v3/internal/conf"
-   "github.com/alist-org/alist/v3/internal/driver"
-   "github.com/alist-org/alist/v3/internal/errs"
-   "github.com/alist-org/alist/v3/internal/model"
-   "github.com/alist-org/alist/v3/internal/op"
-   "github.com/alist-org/alist/v3/internal/stream"
-   "github.com/alist-org/alist/v3/internal/task"
-   "github.com/pkg/errors"
-   log "github.com/sirupsen/logrus"
-   "github.com/xhofe/tache"
    "io"
    "math/rand"
    "mime"
@@ -25,6 +14,17 @@ import (
    "strconv"
    "strings"
    "time"
+   "github.com/alist-org/alist/v3/internal/conf"
+   "github.com/alist-org/alist/v3/internal/driver"
+   "github.com/alist-org/alist/v3/internal/errs"
+   "github.com/alist-org/alist/v3/internal/model"
+   "github.com/alist-org/alist/v3/internal/op"
+   "github.com/alist-org/alist/v3/internal/stream"
+   "github.com/alist-org/alist/v3/internal/task"
+   "github.com/pkg/errors"
+   log "github.com/sirupsen/logrus"
+   "github.com/xhofe/tache"
)
type ArchiveDownloadTask struct {
@@ -37,7 +37,6 @@ type ArchiveDownloadTask struct {
    dstStorage   driver.Driver
    SrcStorageMp string
    DstStorageMp string
-   Tool         tool.Tool
}
func (t *ArchiveDownloadTask) GetName() string {
@@ -67,33 +66,39 @@ func (t *ArchiveDownloadTask) RunWithoutPushUploadTask() (*ArchiveContentUploadT
    if t.srcStorage == nil {
        t.srcStorage, err = op.GetStorageByMountPath(t.SrcStorageMp)
    }
-   l, srcObj, err := op.Link(t.Ctx(), t.srcStorage, t.SrcObjPath, model.LinkArgs{
+   srcObj, tool, ss, err := op.GetArchiveToolAndStream(t.Ctx(), t.srcStorage, t.SrcObjPath, model.LinkArgs{
        Header: http.Header{},
    })
    if err != nil {
        return nil, err
    }
-   fs := stream.FileStream{
-       Obj: srcObj,
-       Ctx: t.Ctx(),
-   }
-   ss, err := stream.NewSeekableStream(fs, l)
-   if err != nil {
-       return nil, err
-   }
    defer func() {
-       if err := ss.Close(); err != nil {
-           log.Errorf("failed to close file streamer, %v", err)
+       var e error
+       for _, s := range ss {
+           e = stderrors.Join(e, s.Close())
+       }
+       if e != nil {
+           log.Errorf("failed to close file streamer, %v", e)
        }
    }()
    var decompressUp model.UpdateProgress
    if t.CacheFull {
-       t.SetTotalBytes(srcObj.GetSize())
-       t.status = "getting src object"
-       _, err = ss.CacheFullInTempFileAndUpdateProgress(t.SetProgress)
-       if err != nil {
-           return nil, err
-       }
+       var total, cur int64 = 0, 0
+       for _, s := range ss {
+           total += s.GetSize()
+       }
+       t.SetTotalBytes(total)
+       t.status = "getting src object"
+       for _, s := range ss {
+           _, err = s.CacheFullInTempFileAndUpdateProgress(func(p float64) {
+               // weight each part's percentage by its size; x100 keeps the overall value in percent
+               t.SetProgress((float64(cur) + float64(s.GetSize())*p/100.0) * 100.0 / float64(total))
+           })
+           cur += s.GetSize()
+           if err != nil {
+               return nil, err
+           }
+       }
+       t.SetProgress(100.0)
        decompressUp = func(_ float64) {}
    } else {
        decompressUp = t.SetProgress
@@ -103,7 +108,7 @@ func (t *ArchiveDownloadTask) RunWithoutPushUploadTask() (*ArchiveContentUploadT
    if err != nil {
        return nil, err
    }
-   err = t.Tool.Decompress(ss, dir, t.ArchiveInnerArgs, decompressUp)
+   err = tool.Decompress(ss, dir, t.ArchiveInnerArgs, decompressUp)
    if err != nil {
        return nil, err
    }
@@ -344,11 +349,6 @@ func archiveDecompress(ctx context.Context, srcObjPath, dstDirPath string, args
        return nil, err
    }
}
-   ext := stdpath.Ext(srcObjActualPath)
-   t, err := tool.GetArchiveTool(ext)
-   if err != nil {
-       return nil, errors.WithMessagef(err, "failed get [%s] archive tool", ext)
-   }
    taskCreator, _ := ctx.Value("user").(*model.User)
    tsk := &ArchiveDownloadTask{
        TaskExtension: task.TaskExtension{
@@ -361,7 +361,6 @@ func archiveDecompress(ctx context.Context, srcObjPath, dstDirPath string, args
        DstDirPath:   dstDirActualPath,
        SrcStorageMp: srcStorage.GetStorage().MountPath,
        DstStorageMp: dstStorage.GetStorage().MountPath,
-       Tool:         t,
    }
    if ctx.Value(conf.NoTaskKey) != nil {
        uploadTask, err := tsk.RunWithoutPushUploadTask()


@@ -3,6 +3,7 @@ package op
import (
    "context"
    stderrors "errors"
+   "fmt"
    "io"
    stdpath "path"
    "strings"
@@ -54,21 +55,76 @@ func GetArchiveMeta(ctx context.Context, storage driver.Driver, path string, arg
    return meta, err
}
-func getArchiveToolAndStream(ctx context.Context, storage driver.Driver, path string, args model.LinkArgs) (model.Obj, tool.Tool, *stream.SeekableStream, error) {
+func GetArchiveToolAndStream(ctx context.Context, storage driver.Driver, path string, args model.LinkArgs) (model.Obj, tool.Tool, []*stream.SeekableStream, error) {
    l, obj, err := Link(ctx, storage, path, args)
    if err != nil {
        return nil, nil, nil, errors.WithMessagef(err, "failed get [%s] link", path)
    }
-   ext := stdpath.Ext(obj.GetName())
-   t, err := tool.GetArchiveTool(ext)
+   baseName, ext, found := strings.Cut(obj.GetName(), ".")
+   if !found {
+       if l.MFile != nil {
+           _ = l.MFile.Close()
+       }
+       if l.RangeReadCloser != nil {
+           _ = l.RangeReadCloser.Close()
+       }
+       return nil, nil, nil, errors.Errorf("failed get archive tool: the obj does not have an extension.")
+   }
+   partExt, t, err := tool.GetArchiveTool("." + ext)
    if err != nil {
-       return nil, nil, nil, errors.WithMessagef(err, "failed get [%s] archive tool", ext)
+       var e error
+       partExt, t, e = tool.GetArchiveTool(stdpath.Ext(obj.GetName()))
+       if e != nil {
+           if l.MFile != nil {
+               _ = l.MFile.Close()
+           }
+           if l.RangeReadCloser != nil {
+               _ = l.RangeReadCloser.Close()
+           }
+           return nil, nil, nil, errors.WithMessagef(stderrors.Join(err, e), "failed get archive tool: %s", ext)
+       }
    }
    ss, err := stream.NewSeekableStream(stream.FileStream{Ctx: ctx, Obj: obj}, l)
    if err != nil {
+       if l.MFile != nil {
+           _ = l.MFile.Close()
+       }
+       if l.RangeReadCloser != nil {
+           _ = l.RangeReadCloser.Close()
+       }
        return nil, nil, nil, errors.WithMessagef(err, "failed get [%s] stream", path)
    }
-   return obj, t, ss, nil
+   ret := []*stream.SeekableStream{ss}
+   if partExt == nil {
+       return obj, t, ret, nil
+   } else {
+       index := partExt.SecondPartIndex
+       dir := stdpath.Dir(path)
+       for {
+           p := stdpath.Join(dir, baseName+fmt.Sprintf(partExt.PartFileFormat, index))
+           var o model.Obj
+           l, o, err = Link(ctx, storage, p, args)
+           if err != nil {
+               break
+           }
+           ss, err = stream.NewSeekableStream(stream.FileStream{Ctx: ctx, Obj: o}, l)
+           if err != nil {
+               if l.MFile != nil {
+                   _ = l.MFile.Close()
+               }
+               if l.RangeReadCloser != nil {
+                   _ = l.RangeReadCloser.Close()
+               }
+               for _, s := range ret {
+                   _ = s.Close()
+               }
+               return nil, nil, nil, errors.WithMessagef(err, "failed get [%s] stream", path)
+           }
+           ret = append(ret, ss)
+           index++
+       }
+       return obj, t, ret, nil
+   }
}
func getArchiveMeta(ctx context.Context, storage driver.Driver, path string, args model.ArchiveMetaArgs) (model.Obj, *model.ArchiveMetaProvider, error) {
@@ -94,13 +150,17 @@ func getArchiveMeta(ctx context.Context, storage driver.Driver, path string, arg
        return obj, archiveMetaProvider, err
    }
}
-   obj, t, ss, err := getArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
+   obj, t, ss, err := GetArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
    if err != nil {
        return nil, nil, err
    }
    defer func() {
-       if err := ss.Close(); err != nil {
-           log.Errorf("failed to close file streamer, %v", err)
+       var e error
+       for _, s := range ss {
+           e = stderrors.Join(e, s.Close())
+       }
+       if e != nil {
+           log.Errorf("failed to close file streamer, %v", e)
        }
    }()
    meta, err := t.GetMeta(ss, args.ArchiveArgs)
@@ -114,9 +174,9 @@ func getArchiveMeta(ctx context.Context, storage driver.Driver, path string, arg
    if !storage.Config().NoCache {
        Expiration := time.Minute * time.Duration(storage.GetStorage().CacheExpiration)
        archiveMetaProvider.Expiration = &Expiration
-   } else if ss.Link.MFile == nil {
+   } else if ss[0].Link.MFile == nil {
        // alias and crypt drivers
-       archiveMetaProvider.Expiration = ss.Link.Expiration
+       archiveMetaProvider.Expiration = ss[0].Link.Expiration
    }
    return obj, archiveMetaProvider, err
}
@@ -188,13 +248,17 @@ func _listArchive(ctx context.Context, storage driver.Driver, path string, args
        return obj, files, err
    }
}
-   obj, t, ss, err := getArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
+   obj, t, ss, err := GetArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
    if err != nil {
        return nil, nil, err
    }
    defer func() {
-       if err := ss.Close(); err != nil {
-           log.Errorf("failed to close file streamer, %v", err)
+       var e error
+       for _, s := range ss {
+           e = stderrors.Join(e, s.Close())
+       }
+       if e != nil {
+           log.Errorf("failed to close file streamer, %v", e)
        }
    }()
    files, err := t.List(ss, args.ArchiveInnerArgs)
@@ -378,8 +442,8 @@ func driverExtract(ctx context.Context, storage driver.Driver, path string, args
}
type streamWithParent struct {
    rc      io.ReadCloser
-   parent  *stream.SeekableStream
+   parents []*stream.SeekableStream
}
func (s *streamWithParent) Read(p []byte) (int, error) {
@@ -387,24 +451,31 @@ func (s *streamWithParent) Read(p []byte) (int, error) {
}
func (s *streamWithParent) Close() error {
-   err1 := s.rc.Close()
-   err2 := s.parent.Close()
-   return stderrors.Join(err1, err2)
+   err := s.rc.Close()
+   for _, ss := range s.parents {
+       err = stderrors.Join(err, ss.Close())
+   }
+   return err
}
func InternalExtract(ctx context.Context, storage driver.Driver, path string, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-   _, t, ss, err := getArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
+   _, t, ss, err := GetArchiveToolAndStream(ctx, storage, path, args.LinkArgs)
    if err != nil {
        return nil, 0, err
    }
    rc, size, err := t.Extract(ss, args)
    if err != nil {
-       if e := ss.Close(); e != nil {
+       var e error
+       for _, s := range ss {
+           e = stderrors.Join(e, s.Close())
+       }
+       if e != nil {
            log.Errorf("failed to close file streamer, %v", e)
+           err = stderrors.Join(err, e)
        }
        return nil, 0, err
    }
-   return &streamWithParent{rc: rc, parent: ss}, size, nil
+   return &streamWithParent{rc: rc, parents: ss}, size, nil
}
func ArchiveDecompress(ctx context.Context, storage driver.Driver, srcPath, dstDirPath string, args model.ArchiveDecompressArgs, lazyCache ...bool) error {


@@ -139,7 +139,7 @@ type RateLimitRangeReadCloser struct {
    Limiter Limiter
}
-func (rrc RateLimitRangeReadCloser) RangeRead(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
+func (rrc *RateLimitRangeReadCloser) RangeRead(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
    rc, err := rrc.RangeReadCloserIF.RangeRead(ctx, httpRange)
    if err != nil {
        return nil, err


@@ -14,6 +14,7 @@ import (
    "github.com/alist-org/alist/v3/pkg/http_range"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/sirupsen/logrus"
+   "go4.org/readerutil"
)
type FileStream struct {
@@ -159,6 +160,10 @@ var _ model.FileStreamer = (*FileStream)(nil)
//var _ seekableStream = (*FileStream)(nil)
// for most internal stream, which is either RangeReadCloser or MFile
+// Any functionality implemented based on SeekableStream should implement a Close method,
+// whose only purpose is to close the SeekableStream object. If such functionality has
+// additional resources that need to be closed, they should be added to the Closer property of
+// the SeekableStream object and be closed together when the SeekableStream object is closed.
type SeekableStream struct {
    FileStream
    Link *model.Link
@@ -196,7 +201,7 @@ func NewSeekableStream(fs FileStream, link *model.Link) (*SeekableStream, error)
        return &ss, nil
    }
    if ss.Link.RangeReadCloser != nil {
-       ss.rangeReadCloser = RateLimitRangeReadCloser{
+       ss.rangeReadCloser = &RateLimitRangeReadCloser{
            RangeReadCloserIF: ss.Link.RangeReadCloser,
            Limiter:           ServerDownloadLimit,
        }
@@ -208,7 +213,7 @@ func NewSeekableStream(fs FileStream, link *model.Link) (*SeekableStream, error)
        if err != nil {
            return nil, err
        }
-       rrc = RateLimitRangeReadCloser{
+       rrc = &RateLimitRangeReadCloser{
            RangeReadCloserIF: rrc,
            Limiter:           ServerDownloadLimit,
        }
@@ -364,7 +369,7 @@ type RangeReadReadAtSeeker struct {
    ss        *SeekableStream
    masterOff int64
    readers   []*readerCur
-   *headCache
+   headCache *headCache
}
type headCache struct {
@@ -406,7 +411,7 @@ func (c *headCache) read(p []byte) (n int, err error) {
    }
    return
}
-func (r *headCache) close() error {
+func (r *headCache) Close() error {
    for i := range r.bufs {
        r.bufs[i] = nil
    }
@@ -419,6 +424,7 @@ func (r *RangeReadReadAtSeeker) InitHeadCache() {
        reader := r.readers[0]
        r.readers = r.readers[1:]
        r.headCache = &headCache{readerCur: reader}
+       r.ss.Closers.Add(r.headCache)
    }
}
@@ -449,6 +455,18 @@ func NewReadAtSeeker(ss *SeekableStream, offset int64, forceRange ...bool) (SStr
    return r, nil
}
+func NewMultiReaderAt(ss []*SeekableStream) (readerutil.SizeReaderAt, error) {
+   readers := make([]readerutil.SizeReaderAt, 0, len(ss))
+   for _, s := range ss {
+       ra, err := NewReadAtSeeker(s, 0)
+       if err != nil {
+           return nil, err
+       }
+       readers = append(readers, io.NewSectionReader(ra, 0, s.GetSize()))
+   }
+   return readerutil.NewMultiReaderAt(readers...), nil
+}
func (r *RangeReadReadAtSeeker) GetRawStream() *SeekableStream {
    return r.ss
}
@@ -559,9 +577,6 @@ func (r *RangeReadReadAtSeeker) Read(p []byte) (n int, err error) {
}
func (r *RangeReadReadAtSeeker) Close() error {
-   if r.headCache != nil {
-       _ = r.headCache.close()
-   }
    return r.ss.Close()
}
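A hedged sketch of how NewMultiReaderAt is meant to be consumed (the sevenzip tool above does exactly this): each part stream becomes a section reader, and go4.org's readerutil stitches them into one random-access view, so the archive reader sees a single contiguous file however it was split. The openSplit7z helper is hypothetical:

package example

import (
    "github.com/alist-org/alist/v3/internal/stream"
    "github.com/bodgit/sevenzip"
)

// openSplit7z concatenates the parts into one SizeReaderAt and hands the
// combined view to the 7z reader, as the sevenzip tool's getReader does.
func openSplit7z(parts []*stream.SeekableStream, password string) (*sevenzip.Reader, error) {
    ra, err := stream.NewMultiReaderAt(parts)
    if err != nil {
        return nil, err
    }
    return sevenzip.NewReaderWithPassword(ra, ra.Size(), password)
}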


@@ -1,10 +1,11 @@
package handles
import (
+   "encoding/json"
    "fmt"
+   "github.com/alist-org/alist/v3/internal/task"
    "net/url"
    stdpath "path"
-   "strings"
    "github.com/alist-org/alist/v3/internal/archive/tool"
    "github.com/alist-org/alist/v3/internal/conf"
@@ -208,14 +209,30 @@ func FsArchiveList(c *gin.Context) {
    })
}
+type StringOrArray []string
+func (s *StringOrArray) UnmarshalJSON(data []byte) error {
+   var value string
+   if err := json.Unmarshal(data, &value); err == nil {
+       *s = []string{value}
+       return nil
+   }
+   var sliceValue []string
+   if err := json.Unmarshal(data, &sliceValue); err != nil {
+       return err
+   }
+   *s = sliceValue
+   return nil
+}
type ArchiveDecompressReq struct {
    SrcDir        string        `json:"src_dir" form:"src_dir"`
    DstDir        string        `json:"dst_dir" form:"dst_dir"`
-   Name          string        `json:"name" form:"name"`
+   Name          StringOrArray `json:"name" form:"name"`
    ArchivePass   string        `json:"archive_pass" form:"archive_pass"`
    InnerPath     string        `json:"inner_path" form:"inner_path"`
    CacheFull     bool          `json:"cache_full" form:"cache_full"`
    PutIntoNewDir bool          `json:"put_into_new_dir" form:"put_into_new_dir"`
}
func FsArchiveDecompress(c *gin.Context) {
@@ -229,41 +246,51 @@ func FsArchiveDecompress(c *gin.Context) {
        common.ErrorResp(c, errs.PermissionDenied, 403)
        return
    }
-   srcPath, err := user.JoinPath(stdpath.Join(req.SrcDir, req.Name))
-   if err != nil {
-       common.ErrorResp(c, err, 403)
-       return
+   srcPaths := make([]string, 0, len(req.Name))
+   for _, name := range req.Name {
+       srcPath, err := user.JoinPath(stdpath.Join(req.SrcDir, name))
+       if err != nil {
+           common.ErrorResp(c, err, 403)
+           return
+       }
+       srcPaths = append(srcPaths, srcPath)
    }
    dstDir, err := user.JoinPath(req.DstDir)
    if err != nil {
        common.ErrorResp(c, err, 403)
        return
    }
-   t, err := fs.ArchiveDecompress(c, srcPath, dstDir, model.ArchiveDecompressArgs{
-       ArchiveInnerArgs: model.ArchiveInnerArgs{
-           ArchiveArgs: model.ArchiveArgs{
-               LinkArgs: model.LinkArgs{
-                   Header:  c.Request.Header,
-                   Type:    c.Query("type"),
-                   HttpReq: c.Request,
-               },
-               Password: req.ArchivePass,
-           },
-           InnerPath: utils.FixAndCleanPath(req.InnerPath),
-       },
-       CacheFull:     req.CacheFull,
-       PutIntoNewDir: req.PutIntoNewDir,
-   })
-   if err != nil {
-       if errors.Is(err, errs.WrongArchivePassword) {
-           common.ErrorResp(c, err, 202)
-       } else {
-           common.ErrorResp(c, err, 500)
-       }
-       return
-   }
+   tasks := make([]task.TaskExtensionInfo, 0, len(srcPaths))
+   for _, srcPath := range srcPaths {
+       t, e := fs.ArchiveDecompress(c, srcPath, dstDir, model.ArchiveDecompressArgs{
+           ArchiveInnerArgs: model.ArchiveInnerArgs{
+               ArchiveArgs: model.ArchiveArgs{
+                   LinkArgs: model.LinkArgs{
+                       Header:  c.Request.Header,
+                       Type:    c.Query("type"),
+                       HttpReq: c.Request,
+                   },
+                   Password: req.ArchivePass,
+               },
+               InnerPath: utils.FixAndCleanPath(req.InnerPath),
+           },
+           CacheFull:     req.CacheFull,
+           PutIntoNewDir: req.PutIntoNewDir,
+       })
+       if e != nil {
+           if errors.Is(e, errs.WrongArchivePassword) {
+               common.ErrorResp(c, e, 202)
+           } else {
+               common.ErrorResp(c, e, 500)
+           }
+           return
+       }
+       if t != nil {
+           tasks = append(tasks, t)
+       }
+   }
    common.SuccessResp(c, gin.H{
-       "task": getTaskInfo(t),
+       "task": getTaskInfos(tasks),
    })
}
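A hedged, self-contained demo of the StringOrArray decoding above (not commit code): old clients keep sending "name" as a plain string, multi-select clients send an array, and both unmarshal into the same slice type.

package main

import (
    "encoding/json"
    "fmt"
)

// StringOrArray is copied from the handler above to keep the demo standalone.
type StringOrArray []string

func (s *StringOrArray) UnmarshalJSON(data []byte) error {
    var value string
    if err := json.Unmarshal(data, &value); err == nil {
        *s = []string{value}
        return nil
    }
    var sliceValue []string
    if err := json.Unmarshal(data, &sliceValue); err != nil {
        return err
    }
    *s = sliceValue
    return nil
}

func main() {
    var one, many StringOrArray
    _ = json.Unmarshal([]byte(`"a.part1.rar"`), &one)          // one == ["a.part1.rar"]
    _ = json.Unmarshal([]byte(`["a.zip","b.7z.001"]`), &many)  // many == ["a.zip", "b.7z.001"]
    fmt.Println(one, many)
}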
@@ -376,7 +403,7 @@ func ArchiveInternalExtract(c *gin.Context) {
func ArchiveExtensions(c *gin.Context) {
    var ext []string
    for key := range tool.Tools {
-       ext = append(ext, strings.TrimPrefix(key, "."))
+       ext = append(ext, key)
    }
    common.SuccessResp(c, ext)
}