* feat(archive): multipart support & sevenzip tool
* feat(archive): rardecode tool
* feat(archive): support decompress multi-selected
* fix(archive): decompress response filter internal
* feat(archive): support multipart zip
* fix: more applicable AcceptedMultipartExtensions interface
@@ -3,5 +3,7 @@ package archive
 import (
 	_ "github.com/alist-org/alist/v3/internal/archive/archives"
 	_ "github.com/alist-org/alist/v3/internal/archive/iso9660"
+	_ "github.com/alist-org/alist/v3/internal/archive/rardecode"
+	_ "github.com/alist-org/alist/v3/internal/archive/sevenzip"
 	_ "github.com/alist-org/alist/v3/internal/archive/zip"
 )
@@ -16,14 +16,18 @@ import (
 type Archives struct {
 }
 
-func (*Archives) AcceptedExtensions() []string {
+func (Archives) AcceptedExtensions() []string {
 	return []string{
-		".br", ".bz2", ".gz", ".lz4", ".lz", ".sz", ".s2", ".xz", ".zz", ".zst", ".tar", ".rar", ".7z",
+		".br", ".bz2", ".gz", ".lz4", ".lz", ".sz", ".s2", ".xz", ".zz", ".zst", ".tar",
 	}
 }
 
-func (*Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
-	fsys, err := getFs(ss, args)
+func (Archives) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
+	return map[string]tool.MultipartExtension{}
+}
+
+func (Archives) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
+	fsys, err := getFs(ss[0], args)
 	if err != nil {
 		return nil, err
 	}
@@ -47,8 +51,8 @@ func (*Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (mod
 	}, nil
 }
 
-func (*Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
-	fsys, err := getFs(ss, args.ArchiveArgs)
+func (Archives) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
+	fsys, err := getFs(ss[0], args.ArchiveArgs)
 	if err != nil {
 		return nil, err
 	}
@@ -69,8 +73,8 @@ func (*Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([
 	})
 }
 
-func (*Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-	fsys, err := getFs(ss, args.ArchiveArgs)
+func (Archives) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
+	fsys, err := getFs(ss[0], args.ArchiveArgs)
 	if err != nil {
 		return nil, 0, err
 	}
@@ -85,8 +89,8 @@ func (*Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs)
 	return file, stat.Size(), nil
 }
 
-func (*Archives) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
-	fsys, err := getFs(ss, args.ArchiveArgs)
+func (Archives) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
+	fsys, err := getFs(ss[0], args.ArchiveArgs)
 	if err != nil {
 		return err
 	}
@@ -133,5 +137,5 @@ func (*Archives) Decompress(ss *stream.SeekableStream, outputPath string, args m
 var _ tool.Tool = (*Archives)(nil)
 
 func init() {
-	tool.RegisterTool(&Archives{})
+	tool.RegisterTool(Archives{})
 }
@@ -14,19 +14,23 @@ import (
 type ISO9660 struct {
 }
 
-func (t *ISO9660) AcceptedExtensions() []string {
+func (ISO9660) AcceptedExtensions() []string {
 	return []string{".iso"}
 }
 
-func (t *ISO9660) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
+func (ISO9660) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
+	return map[string]tool.MultipartExtension{}
+}
+
+func (ISO9660) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
 	return &model.ArchiveMetaInfo{
 		Comment: "",
 		Encrypted: false,
 	}, nil
 }
 
-func (t *ISO9660) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
-	img, err := getImage(ss)
+func (ISO9660) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
+	img, err := getImage(ss[0])
 	if err != nil {
 		return nil, err
 	}
@@ -48,8 +52,8 @@ func (t *ISO9660) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (
 	return ret, nil
 }
 
-func (t *ISO9660) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-	img, err := getImage(ss)
+func (ISO9660) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
+	img, err := getImage(ss[0])
 	if err != nil {
 		return nil, 0, err
 	}
@@ -63,8 +67,8 @@ func (t *ISO9660) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs
 	return io.NopCloser(obj.Reader()), obj.Size(), nil
 }
 
-func (t *ISO9660) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
-	img, err := getImage(ss)
+func (ISO9660) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
+	img, err := getImage(ss[0])
 	if err != nil {
 		return err
 	}
@@ -92,5 +96,5 @@ func (t *ISO9660) Decompress(ss *stream.SeekableStream, outputPath string, args
 var _ tool.Tool = (*ISO9660)(nil)
 
 func init() {
-	tool.RegisterTool(&ISO9660{})
+	tool.RegisterTool(ISO9660{})
 }
internal/archive/rardecode/rardecode.go (new file, 140 lines)
@@ -0,0 +1,140 @@
package rardecode

import (
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/nwaples/rardecode/v2"
	"io"
	"os"
	stdpath "path"
	"strings"
)

type RarDecoder struct{}

func (RarDecoder) AcceptedExtensions() []string {
	return []string{".rar"}
}

func (RarDecoder) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
	return map[string]tool.MultipartExtension{
		".part1.rar": {".part%d.rar", 2},
	}
}

func (RarDecoder) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
	l, err := list(ss, args.Password)
	if err != nil {
		return nil, err
	}
	_, tree := tool.GenerateMetaTreeFromFolderTraversal(l)
	return &model.ArchiveMetaInfo{
		Comment: "",
		Encrypted: false,
		Tree: tree,
	}, nil
}

func (RarDecoder) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
	return nil, errs.NotSupport
}

func (RarDecoder) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
	reader, err := getReader(ss, args.Password)
	if err != nil {
		return nil, 0, err
	}
	innerPath := strings.TrimPrefix(args.InnerPath, "/")
	for {
		var header *rardecode.FileHeader
		header, err = reader.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, 0, err
		}
		if header.Name == innerPath {
			if header.IsDir {
				break
			}
			return io.NopCloser(reader), header.UnPackedSize, nil
		}
	}
	return nil, 0, errs.ObjectNotFound
}

func (RarDecoder) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	reader, err := getReader(ss, args.Password)
	if err != nil {
		return err
	}
	if args.InnerPath == "/" {
		for {
			var header *rardecode.FileHeader
			header, err = reader.Next()
			if err == io.EOF {
				break
			}
			if err != nil {
				return err
			}
			name := header.Name
			if header.IsDir {
				name = name + "/"
			}
			err = decompress(reader, header, name, outputPath)
			if err != nil {
				return err
			}
		}
	} else {
		innerPath := strings.TrimPrefix(args.InnerPath, "/")
		innerBase := stdpath.Base(innerPath)
		createdBaseDir := false
		for {
			var header *rardecode.FileHeader
			header, err = reader.Next()
			if err == io.EOF {
				break
			}
			if err != nil {
				return err
			}
			name := header.Name
			if header.IsDir {
				name = name + "/"
			}
			if name == innerPath {
				err = _decompress(reader, header, outputPath, up)
				if err != nil {
					return err
				}
				break
			} else if strings.HasPrefix(name, innerPath+"/") {
				targetPath := stdpath.Join(outputPath, innerBase)
				if !createdBaseDir {
					err = os.Mkdir(targetPath, 0700)
					if err != nil {
						return err
					}
					createdBaseDir = true
				}
				restPath := strings.TrimPrefix(name, innerPath+"/")
				err = decompress(reader, header, restPath, targetPath)
				if err != nil {
					return err
				}
			}
		}
	}
	return nil
}

var _ tool.Tool = (*RarDecoder)(nil)

func init() {
	tool.RegisterTool(RarDecoder{})
}
internal/archive/rardecode/utils.go (new file, 225 lines)
@@ -0,0 +1,225 @@
package rardecode

import (
	"fmt"
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/nwaples/rardecode/v2"
	"io"
	"io/fs"
	"os"
	stdpath "path"
	"sort"
	"strings"
	"time"
)

type VolumeFile struct {
	stream.SStreamReadAtSeeker
	name string
}

func (v *VolumeFile) Name() string {
	return v.name
}

func (v *VolumeFile) Size() int64 {
	return v.SStreamReadAtSeeker.GetRawStream().GetSize()
}

func (v *VolumeFile) Mode() fs.FileMode {
	return 0644
}

func (v *VolumeFile) ModTime() time.Time {
	return v.SStreamReadAtSeeker.GetRawStream().ModTime()
}

func (v *VolumeFile) IsDir() bool {
	return false
}

func (v *VolumeFile) Sys() any {
	return nil
}

func (v *VolumeFile) Stat() (fs.FileInfo, error) {
	return v, nil
}

func (v *VolumeFile) Close() error {
	return nil
}

type VolumeFs struct {
	parts map[string]*VolumeFile
}

func (v *VolumeFs) Open(name string) (fs.File, error) {
	file, ok := v.parts[name]
	if !ok {
		return nil, fs.ErrNotExist
	}
	return file, nil
}

func makeOpts(ss []*stream.SeekableStream) (string, rardecode.Option, error) {
	if len(ss) == 1 {
		reader, err := stream.NewReadAtSeeker(ss[0], 0)
		if err != nil {
			return "", nil, err
		}
		fileName := "file.rar"
		fsys := &VolumeFs{parts: map[string]*VolumeFile{
			fileName: {SStreamReadAtSeeker: reader, name: fileName},
		}}
		return fileName, rardecode.FileSystem(fsys), nil
	} else {
		parts := make(map[string]*VolumeFile, len(ss))
		for i, s := range ss {
			reader, err := stream.NewReadAtSeeker(s, 0)
			if err != nil {
				return "", nil, err
			}
			fileName := fmt.Sprintf("file.part%d.rar", i+1)
			parts[fileName] = &VolumeFile{SStreamReadAtSeeker: reader, name: fileName}
		}
		return "file.part1.rar", rardecode.FileSystem(&VolumeFs{parts: parts}), nil
	}
}

type WrapReader struct {
	files []*rardecode.File
}

func (r *WrapReader) Files() []tool.SubFile {
	ret := make([]tool.SubFile, 0, len(r.files))
	for _, f := range r.files {
		ret = append(ret, &WrapFile{File: f})
	}
	return ret
}

type WrapFile struct {
	*rardecode.File
}

func (f *WrapFile) Name() string {
	if f.File.IsDir {
		return f.File.Name + "/"
	}
	return f.File.Name
}

func (f *WrapFile) FileInfo() fs.FileInfo {
	return &WrapFileInfo{File: f.File}
}

type WrapFileInfo struct {
	*rardecode.File
}

func (f *WrapFileInfo) Name() string {
	return stdpath.Base(f.File.Name)
}

func (f *WrapFileInfo) Size() int64 {
	return f.File.UnPackedSize
}

func (f *WrapFileInfo) ModTime() time.Time {
	return f.File.ModificationTime
}

func (f *WrapFileInfo) IsDir() bool {
	return f.File.IsDir
}

func (f *WrapFileInfo) Sys() any {
	return nil
}

func list(ss []*stream.SeekableStream, password string) (*WrapReader, error) {
	fileName, fsOpt, err := makeOpts(ss)
	if err != nil {
		return nil, err
	}
	opts := []rardecode.Option{fsOpt}
	if password != "" {
		opts = append(opts, rardecode.Password(password))
	}
	files, err := rardecode.List(fileName, opts...)
	// rardecode does not guarantee that a parent directory is listed before its children.
	// A parent path is always shorter than its child paths, so sorting by name length
	// ensures parents come first.
	sort.Slice(files, func(i, j int) bool {
		return len(files[i].Name) < len(files[j].Name)
	})
	if err != nil {
		return nil, filterPassword(err)
	}
	return &WrapReader{files: files}, nil
}

func getReader(ss []*stream.SeekableStream, password string) (*rardecode.Reader, error) {
	fileName, fsOpt, err := makeOpts(ss)
	if err != nil {
		return nil, err
	}
	opts := []rardecode.Option{fsOpt}
	if password != "" {
		opts = append(opts, rardecode.Password(password))
	}
	rc, err := rardecode.OpenReader(fileName, opts...)
	if err != nil {
		return nil, filterPassword(err)
	}
	ss[0].Closers.Add(rc)
	return &rc.Reader, nil
}

func decompress(reader *rardecode.Reader, header *rardecode.FileHeader, filePath, outputPath string) error {
	targetPath := outputPath
	dir, base := stdpath.Split(filePath)
	if dir != "" {
		targetPath = stdpath.Join(targetPath, dir)
		err := os.MkdirAll(targetPath, 0700)
		if err != nil {
			return err
		}
	}
	if base != "" {
		err := _decompress(reader, header, targetPath, func(_ float64) {})
		if err != nil {
			return err
		}
	}
	return nil
}

func _decompress(reader *rardecode.Reader, header *rardecode.FileHeader, targetPath string, up model.UpdateProgress) error {
	f, err := os.OpenFile(stdpath.Join(targetPath, stdpath.Base(header.Name)), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
	if err != nil {
		return err
	}
	defer func() { _ = f.Close() }()
	_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
		Reader: &stream.SimpleReaderWithSize{
			Reader: reader,
			Size: header.UnPackedSize,
		},
		UpdateProgress: up,
	})
	if err != nil {
		return err
	}
	return nil
}

func filterPassword(err error) error {
	if err != nil && strings.Contains(err.Error(), "password") {
		return errs.WrongArchivePassword
	}
	return err
}
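A tiny self-contained illustration of the sorting argument in the comment above (explanatory only, not part of the driver): because a parent path is a strict prefix of its children, sorting by name length is enough to put parents first.

package main

import (
	"fmt"
	"sort"
)

func main() {
	// Raw archive listings may emit children before their parent directory.
	names := []string{"a/b/c.txt", "a", "a/b"}
	sort.Slice(names, func(i, j int) bool { return len(names[i]) < len(names[j]) })
	fmt.Println(names) // [a a/b a/b/c.txt]: every parent now precedes its children
}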
internal/archive/sevenzip/sevenzip.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package sevenzip

import (
	"io"
	"strings"

	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
)

type SevenZip struct{}

func (SevenZip) AcceptedExtensions() []string {
	return []string{".7z"}
}

func (SevenZip) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
	return map[string]tool.MultipartExtension{
		".7z.001": {".7z.%.3d", 2},
	}
}

func (SevenZip) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
	reader, err := getReader(ss, args.Password)
	if err != nil {
		return nil, err
	}
	_, tree := tool.GenerateMetaTreeFromFolderTraversal(&WrapReader{Reader: reader})
	return &model.ArchiveMetaInfo{
		Comment: "",
		Encrypted: args.Password != "",
		Tree: tree,
	}, nil
}

func (SevenZip) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
	return nil, errs.NotSupport
}

func (SevenZip) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
	reader, err := getReader(ss, args.Password)
	if err != nil {
		return nil, 0, err
	}
	innerPath := strings.TrimPrefix(args.InnerPath, "/")
	for _, file := range reader.File {
		if file.Name == innerPath {
			r, e := file.Open()
			if e != nil {
				return nil, 0, e
			}
			return r, file.FileInfo().Size(), nil
		}
	}
	return nil, 0, errs.ObjectNotFound
}

func (SevenZip) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	reader, err := getReader(ss, args.Password)
	if err != nil {
		return err
	}
	return tool.DecompressFromFolderTraversal(&WrapReader{Reader: reader}, outputPath, args, up)
}

var _ tool.Tool = (*SevenZip)(nil)

func init() {
	tool.RegisterTool(SevenZip{})
}
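As a quick check of the part-name pattern registered above (an explanatory sketch, not driver code), Go's %.3d verb zero-pads the index, so the volumes following name.7z.001 come out as:

package main

import "fmt"

func main() {
	// PartFileFormat ".7z.%.3d" with indices starting at SecondPartIndex = 2.
	for i := 2; i <= 3; i++ {
		fmt.Printf("archive.7z.%.3d\n", i) // archive.7z.002, archive.7z.003
	}
}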
internal/archive/sevenzip/utils.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package sevenzip

import (
	"errors"
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/bodgit/sevenzip"
	"io"
	"io/fs"
)

type WrapReader struct {
	Reader *sevenzip.Reader
}

func (r *WrapReader) Files() []tool.SubFile {
	ret := make([]tool.SubFile, 0, len(r.Reader.File))
	for _, f := range r.Reader.File {
		ret = append(ret, &WrapFile{f: f})
	}
	return ret
}

type WrapFile struct {
	f *sevenzip.File
}

func (f *WrapFile) Name() string {
	return f.f.Name
}

func (f *WrapFile) FileInfo() fs.FileInfo {
	return f.f.FileInfo()
}

func (f *WrapFile) Open() (io.ReadCloser, error) {
	return f.f.Open()
}

func getReader(ss []*stream.SeekableStream, password string) (*sevenzip.Reader, error) {
	readerAt, err := stream.NewMultiReaderAt(ss)
	if err != nil {
		return nil, err
	}
	sr, err := sevenzip.NewReaderWithPassword(readerAt, readerAt.Size(), password)
	if err != nil {
		return nil, filterPassword(err)
	}
	return sr, nil
}

func filterPassword(err error) error {
	if err != nil {
		var e *sevenzip.ReadError
		if errors.As(err, &e) && e.Encrypted {
			return errs.WrongArchivePassword
		}
	}
	return err
}
@@ -6,10 +6,16 @@ import (
 	"io"
 )
 
+type MultipartExtension struct {
+	PartFileFormat  string
+	SecondPartIndex int
+}
+
 type Tool interface {
 	AcceptedExtensions() []string
-	GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error)
-	List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error)
-	Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error)
-	Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error
+	AcceptedMultipartExtensions() map[string]MultipartExtension
+	GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error)
+	List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error)
+	Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error)
+	Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error
 }
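For context, a self-contained sketch of how a caller could expand a MultipartExtension into the remaining volume names; deriveParts and its arguments are illustrative only, the real lookup logic lives outside this diff.

package main

import (
	"fmt"
	"strings"
)

// MultipartExtension mirrors the struct introduced above.
type MultipartExtension struct {
	PartFileFormat  string
	SecondPartIndex int
}

// deriveParts builds the next n volume names from the first part's name and
// the extension key it matched (hypothetical helper).
func deriveParts(firstPart, matchedExt string, ext MultipartExtension, n int) []string {
	base := strings.TrimSuffix(firstPart, matchedExt)
	parts := make([]string, 0, n)
	for i := 0; i < n; i++ {
		parts = append(parts, base+fmt.Sprintf(ext.PartFileFormat, ext.SecondPartIndex+i))
	}
	return parts
}

func main() {
	ext := MultipartExtension{PartFileFormat: ".part%d.rar", SecondPartIndex: 2}
	fmt.Println(deriveParts("movie.part1.rar", ".part1.rar", ext, 3))
	// [movie.part2.rar movie.part3.rar movie.part4.rar]
}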
internal/archive/tool/helper.go (new file, 201 lines)
@@ -0,0 +1,201 @@
package tool

import (
	"io"
	"io/fs"
	"os"
	stdpath "path"
	"strings"

	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
)

type SubFile interface {
	Name() string
	FileInfo() fs.FileInfo
	Open() (io.ReadCloser, error)
}

type CanEncryptSubFile interface {
	IsEncrypted() bool
	SetPassword(password string)
}

type ArchiveReader interface {
	Files() []SubFile
}

func GenerateMetaTreeFromFolderTraversal(r ArchiveReader) (bool, []model.ObjTree) {
	encrypted := false
	dirMap := make(map[string]*model.ObjectTree)
	dirMap["."] = &model.ObjectTree{}
	for _, file := range r.Files() {
		if encrypt, ok := file.(CanEncryptSubFile); ok && encrypt.IsEncrypted() {
			encrypted = true
		}

		name := strings.TrimPrefix(file.Name(), "/")
		var dir string
		var dirObj *model.ObjectTree
		isNewFolder := false
		if !file.FileInfo().IsDir() {
			// First add the file to the folder it belongs to.
			dir = stdpath.Dir(name)
			dirObj = dirMap[dir]
			if dirObj == nil {
				isNewFolder = true
				dirObj = &model.ObjectTree{}
				dirObj.IsFolder = true
				dirObj.Name = stdpath.Base(dir)
				dirObj.Modified = file.FileInfo().ModTime()
				dirMap[dir] = dirObj
			}
			dirObj.Children = append(
				dirObj.Children, &model.ObjectTree{
					Object: *MakeModelObj(file.FileInfo()),
				},
			)
		} else {
			dir = strings.TrimSuffix(name, "/")
			dirObj = dirMap[dir]
			if dirObj == nil {
				isNewFolder = true
				dirObj = &model.ObjectTree{}
				dirMap[dir] = dirObj
			}
			dirObj.IsFolder = true
			dirObj.Name = stdpath.Base(dir)
			dirObj.Modified = file.FileInfo().ModTime()
			dirObj.Children = make([]model.ObjTree, 0)
		}
		if isNewFolder {
			// Attach the new folder to its parent folder.
			dir = stdpath.Dir(dir)
			pDirObj := dirMap[dir]
			if pDirObj != nil {
				pDirObj.Children = append(pDirObj.Children, dirObj)
				continue
			}

			for {
				// Some archives only record file paths, not folder entries,
				// so missing parent folders are created on the fly.
				pDirObj = &model.ObjectTree{}
				pDirObj.IsFolder = true
				pDirObj.Name = stdpath.Base(dir)
				pDirObj.Modified = file.FileInfo().ModTime()
				dirMap[dir] = pDirObj
				pDirObj.Children = append(pDirObj.Children, dirObj)
				dir = stdpath.Dir(dir)
				if dirMap[dir] != nil {
					break
				}
				dirObj = pDirObj
			}
		}
	}
	return encrypted, dirMap["."].GetChildren()
}

func MakeModelObj(file os.FileInfo) *model.Object {
	return &model.Object{
		Name: file.Name(),
		Size: file.Size(),
		Modified: file.ModTime(),
		IsFolder: file.IsDir(),
	}
}

type WrapFileInfo struct {
	model.Obj
}

func DecompressFromFolderTraversal(r ArchiveReader, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	var err error
	files := r.Files()
	if args.InnerPath == "/" {
		for i, file := range files {
			name := file.Name()
			err = decompress(file, name, outputPath, args.Password)
			if err != nil {
				return err
			}
			up(float64(i+1) * 100.0 / float64(len(files)))
		}
	} else {
		innerPath := strings.TrimPrefix(args.InnerPath, "/")
		innerBase := stdpath.Base(innerPath)
		createdBaseDir := false
		for _, file := range files {
			name := file.Name()
			if name == innerPath {
				err = _decompress(file, outputPath, args.Password, up)
				if err != nil {
					return err
				}
				break
			} else if strings.HasPrefix(name, innerPath+"/") {
				targetPath := stdpath.Join(outputPath, innerBase)
				if !createdBaseDir {
					err = os.Mkdir(targetPath, 0700)
					if err != nil {
						return err
					}
					createdBaseDir = true
				}
				restPath := strings.TrimPrefix(name, innerPath+"/")
				err = decompress(file, restPath, targetPath, args.Password)
				if err != nil {
					return err
				}
			}
		}
	}
	return nil
}

func decompress(file SubFile, filePath, outputPath, password string) error {
	targetPath := outputPath
	dir, base := stdpath.Split(filePath)
	if dir != "" {
		targetPath = stdpath.Join(targetPath, dir)
		err := os.MkdirAll(targetPath, 0700)
		if err != nil {
			return err
		}
	}
	if base != "" {
		err := _decompress(file, targetPath, password, func(_ float64) {})
		if err != nil {
			return err
		}
	}
	return nil
}

func _decompress(file SubFile, targetPath, password string, up model.UpdateProgress) error {
	if encrypt, ok := file.(CanEncryptSubFile); ok && encrypt.IsEncrypted() {
		encrypt.SetPassword(password)
	}
	rc, err := file.Open()
	if err != nil {
		return err
	}
	defer func() { _ = rc.Close() }()
	f, err := os.OpenFile(stdpath.Join(targetPath, file.FileInfo().Name()), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
	if err != nil {
		return err
	}
	defer func() { _ = f.Close() }()
	_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
		Reader: &stream.SimpleReaderWithSize{
			Reader: rc,
			Size: file.FileInfo().Size(),
		},
		UpdateProgress: up,
	})
	if err != nil {
		return err
	}
	return nil
}
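To make the traversal above easier to follow, here is a self-contained toy version of the same idea, using a plain Node type instead of model.ObjectTree: files are attached to their directory, and directories the archive never lists explicitly are synthesized while walking up toward the root.

package main

import (
	"fmt"
	stdpath "path"
	"strings"
)

type Node struct {
	Name     string
	IsDir    bool
	Children []*Node
}

// buildTree mimics GenerateMetaTreeFromFolderTraversal for plain path strings:
// entries ending in "/" are directories; missing parents are created on the fly.
func buildTree(entries []string) []*Node {
	dirMap := map[string]*Node{".": {Name: ".", IsDir: true}}
	ensureDir := func(dir string) *Node {
		if n, ok := dirMap[dir]; ok {
			return n
		}
		n := &Node{Name: stdpath.Base(dir), IsDir: true}
		dirMap[dir] = n
		// Walk upward, creating every missing ancestor and linking child to parent.
		child, cur := n, stdpath.Dir(dir)
		for dirMap[cur] == nil {
			p := &Node{Name: stdpath.Base(cur), IsDir: true, Children: []*Node{child}}
			dirMap[cur] = p
			child, cur = p, stdpath.Dir(cur)
		}
		dirMap[cur].Children = append(dirMap[cur].Children, child)
		return n
	}
	for _, e := range entries {
		if strings.HasSuffix(e, "/") {
			ensureDir(strings.TrimSuffix(e, "/"))
			continue
		}
		d := ensureDir(stdpath.Dir(e))
		d.Children = append(d.Children, &Node{Name: stdpath.Base(e)})
	}
	return dirMap["."].Children
}

func main() {
	// "docs" is never listed on its own, yet it shows up as a folder in the tree.
	for _, n := range buildTree([]string{"docs/readme.md", "src/", "src/main.go"}) {
		fmt.Println(n.Name, n.IsDir, len(n.Children))
	}
}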
@@ -5,19 +5,28 @@ import (
 )
 
 var (
-	Tools = make(map[string]Tool)
+	Tools               = make(map[string]Tool)
+	MultipartExtensions = make(map[string]MultipartExtension)
 )
 
 func RegisterTool(tool Tool) {
 	for _, ext := range tool.AcceptedExtensions() {
 		Tools[ext] = tool
 	}
+	for mainFile, ext := range tool.AcceptedMultipartExtensions() {
+		MultipartExtensions[mainFile] = ext
+		Tools[mainFile] = tool
+	}
 }
 
-func GetArchiveTool(ext string) (Tool, error) {
+func GetArchiveTool(ext string) (*MultipartExtension, Tool, error) {
 	t, ok := Tools[ext]
 	if !ok {
-		return nil, errs.UnknownArchiveFormat
+		return nil, nil, errs.UnknownArchiveFormat
 	}
-	return t, nil
+	partExt, ok := MultipartExtensions[ext]
+	if !ok {
+		return nil, t, nil
+	}
+	return &partExt, t, nil
 }
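A brief sketch of the intended call pattern for the new two-value lookup; openToolFor is an illustrative helper, not something added by this commit.

package archive // hypothetical call site elsewhere in alist

import "github.com/alist-org/alist/v3/internal/archive/tool"

// openToolFor resolves the registered driver for an extension and reports
// whether the archive is expected to span multiple volume files.
func openToolFor(ext string) (tool.Tool, bool, error) {
	partExt, t, err := tool.GetArchiveTool(ext)
	if err != nil {
		return nil, false, err // errs.UnknownArchiveFormat
	}
	// partExt is non-nil only for keys registered via AcceptedMultipartExtensions,
	// e.g. ".part1.rar", ".7z.001", ".zip", ".zip.001".
	return t, partExt != nil, nil
}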
@@ -2,8 +2,13 @@ package zip
 
 import (
+	"bytes"
+	"io"
+	"io/fs"
+	stdpath "path"
+	"strings"
+
 	"github.com/alist-org/alist/v3/internal/archive/tool"
 	"github.com/alist-org/alist/v3/internal/errs"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/stream"
 	"github.com/saintfish/chardet"
 	"github.com/yeka/zip"
@@ -16,65 +21,62 @@ import (
 	"golang.org/x/text/encoding/unicode"
 	"golang.org/x/text/encoding/unicode/utf32"
 	"golang.org/x/text/transform"
-	"io"
-	"os"
-	stdpath "path"
-	"strings"
 )
 
-func toModelObj(file os.FileInfo) *model.Object {
-	return &model.Object{
-		Name: decodeName(file.Name()),
-		Size: file.Size(),
-		Modified: file.ModTime(),
-		IsFolder: file.IsDir(),
-	}
+type WrapReader struct {
+	Reader *zip.Reader
 }
 
-func decompress(file *zip.File, filePath, outputPath, password string) error {
-	targetPath := outputPath
-	dir, base := stdpath.Split(filePath)
-	if dir != "" {
-		targetPath = stdpath.Join(targetPath, dir)
-		err := os.MkdirAll(targetPath, 0700)
-		if err != nil {
-			return err
-		}
+func (r *WrapReader) Files() []tool.SubFile {
+	ret := make([]tool.SubFile, 0, len(r.Reader.File))
+	for _, f := range r.Reader.File {
+		ret = append(ret, &WrapFile{f: f})
 	}
-	if base != "" {
-		err := _decompress(file, targetPath, password, func(_ float64) {})
-		if err != nil {
-			return err
-		}
-	}
-	return nil
+	return ret
 }
 
-func _decompress(file *zip.File, targetPath, password string, up model.UpdateProgress) error {
-	if file.IsEncrypted() {
-		file.SetPassword(password)
+type WrapFileInfo struct {
+	fs.FileInfo
 }
+
+func (f *WrapFileInfo) Name() string {
+	return decodeName(f.FileInfo.Name())
+}
+
+type WrapFile struct {
+	f *zip.File
+}
+
+func (f *WrapFile) Name() string {
+	return decodeName(f.f.Name)
+}
+
+func (f *WrapFile) FileInfo() fs.FileInfo {
+	return &WrapFileInfo{FileInfo: f.f.FileInfo()}
+}
+
+func (f *WrapFile) Open() (io.ReadCloser, error) {
+	return f.f.Open()
+}
+
+func (f *WrapFile) IsEncrypted() bool {
+	return f.f.IsEncrypted()
+}
+
+func (f *WrapFile) SetPassword(password string) {
+	f.f.SetPassword(password)
+}
+
+func getReader(ss []*stream.SeekableStream) (*zip.Reader, error) {
+	if len(ss) > 1 && stdpath.Ext(ss[1].GetName()) == ".z01" {
+		// FIXME: Incorrect parsing method for standard multipart zip format
+		ss = append(ss[1:], ss[0])
+	}
-	rc, err := file.Open()
+	reader, err := stream.NewMultiReaderAt(ss)
 	if err != nil {
-		return err
+		return nil, err
 	}
-	defer rc.Close()
-	f, err := os.OpenFile(stdpath.Join(targetPath, decodeName(file.FileInfo().Name())), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
-	if err != nil {
-		return err
-	}
-	defer f.Close()
-	_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
-		Reader: &stream.SimpleReaderWithSize{
-			Reader: rc,
-			Size: file.FileInfo().Size(),
-		},
-		UpdateProgress: up,
-	})
-	if err != nil {
-		return err
-	}
-	return nil
+	return zip.NewReader(reader, reader.Size())
 }
 
 func filterPassword(err error) error {
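The .z01 branch above reorders the selected streams so that the split volumes come first and the trailing .zip (which carries the central directory) comes last before everything is concatenated; a minimal sketch of that reordering, assuming the caller passes the .zip stream first:

package main

import "fmt"

func main() {
	// Mirrors `ss = append(ss[1:], ss[0])` from getReader, using names instead of streams.
	parts := []string{"a.zip", "a.z01", "a.z02"}
	parts = append(parts[1:], parts[0])
	fmt.Println(parts) // [a.z01 a.z02 a.zip]
}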
@@ -2,7 +2,6 @@ package zip
 
 import (
 	"io"
-	"os"
 	stdpath "path"
 	"strings"
 
@@ -10,106 +9,37 @@ import (
 	"github.com/alist-org/alist/v3/internal/errs"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/internal/stream"
 	"github.com/yeka/zip"
 )
 
 type Zip struct {
 }
 
-func (*Zip) AcceptedExtensions() []string {
-	return []string{".zip"}
+func (Zip) AcceptedExtensions() []string {
+	return []string{}
 }
 
-func (*Zip) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
-	reader, err := stream.NewReadAtSeeker(ss, 0)
+func (Zip) AcceptedMultipartExtensions() map[string]tool.MultipartExtension {
+	return map[string]tool.MultipartExtension{
+		".zip": {".z%.2d", 1},
+		".zip.001": {".zip.%.3d", 2},
+	}
+}
+
+func (Zip) GetMeta(ss []*stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
+	zipReader, err := getReader(ss)
 	if err != nil {
 		return nil, err
 	}
-	zipReader, err := zip.NewReader(reader, ss.GetSize())
-	if err != nil {
-		return nil, err
-	}
-	encrypted := false
-	dirMap := make(map[string]*model.ObjectTree)
-	dirMap["."] = &model.ObjectTree{}
-	for _, file := range zipReader.File {
-		if file.IsEncrypted() {
-			encrypted = true
-		}
-
-		name := strings.TrimPrefix(decodeName(file.Name), "/")
-		var dir string
-		var dirObj *model.ObjectTree
-		isNewFolder := false
-		if !file.FileInfo().IsDir() {
-			// First add the file to the folder it belongs to.
-			dir = stdpath.Dir(name)
-			dirObj = dirMap[dir]
-			if dirObj == nil {
-				isNewFolder = true
-				dirObj = &model.ObjectTree{}
-				dirObj.IsFolder = true
-				dirObj.Name = stdpath.Base(dir)
-				dirObj.Modified = file.ModTime()
-				dirMap[dir] = dirObj
-			}
-			dirObj.Children = append(
-				dirObj.Children, &model.ObjectTree{
-					Object: *toModelObj(file.FileInfo()),
-				},
-			)
-		} else {
-			dir = strings.TrimSuffix(name, "/")
-			dirObj = dirMap[dir]
-			if dirObj == nil {
-				isNewFolder = true
-				dirObj = &model.ObjectTree{}
-				dirMap[dir] = dirObj
-			}
-			dirObj.IsFolder = true
-			dirObj.Name = stdpath.Base(dir)
-			dirObj.Modified = file.ModTime()
-			dirObj.Children = make([]model.ObjTree, 0)
-		}
-		if isNewFolder {
-			// Attach the new folder to its parent folder.
-			dir = stdpath.Dir(dir)
-			pDirObj := dirMap[dir]
-			if pDirObj != nil {
-				pDirObj.Children = append(pDirObj.Children, dirObj)
-				continue
-			}
-
-			for {
-				// Some archives only record file paths, not folder entries.
-				pDirObj = &model.ObjectTree{}
-				pDirObj.IsFolder = true
-				pDirObj.Name = stdpath.Base(dir)
-				pDirObj.Modified = file.ModTime()
-				dirMap[dir] = pDirObj
-				pDirObj.Children = append(pDirObj.Children, dirObj)
-				dir = stdpath.Dir(dir)
-				if dirMap[dir] != nil {
-					break
-				}
-				dirObj = pDirObj
-			}
-		}
-	}
-
+	encrypted, tree := tool.GenerateMetaTreeFromFolderTraversal(&WrapReader{Reader: zipReader})
 	return &model.ArchiveMetaInfo{
 		Comment: zipReader.Comment,
 		Encrypted: encrypted,
-		Tree: dirMap["."].GetChildren(),
+		Tree: tree,
 	}, nil
 }
 
-func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
-	reader, err := stream.NewReadAtSeeker(ss, 0)
-	if err != nil {
-		return nil, err
-	}
-	zipReader, err := zip.NewReader(reader, ss.GetSize())
+func (Zip) List(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
+	zipReader, err := getReader(ss)
 	if err != nil {
 		return nil, err
 	}
@@ -134,13 +64,13 @@ func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mode
 				if dir == nil && len(strs) == 2 {
 					dir = &model.Object{
 						Name: strs[0],
-						Modified: ss.ModTime(),
+						Modified: ss[0].ModTime(),
 						IsFolder: true,
 					}
 				}
 				continue
 			}
-			ret = append(ret, toModelObj(file.FileInfo()))
+			ret = append(ret, tool.MakeModelObj(&WrapFileInfo{FileInfo: file.FileInfo()}))
 		}
 		if len(ret) == 0 && dir != nil {
 			ret = append(ret, dir)
@@ -157,7 +87,7 @@ func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mode
 				continue
 			}
 			exist = true
-			ret = append(ret, toModelObj(file.FileInfo()))
+			ret = append(ret, tool.MakeModelObj(&WrapFileInfo{file.FileInfo()}))
 		}
 		if !exist {
 			return nil, errs.ObjectNotFound
@@ -166,12 +96,8 @@ func (*Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]mode
 	}
 }
 
-func (*Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
-	reader, err := stream.NewReadAtSeeker(ss, 0)
-	if err != nil {
-		return nil, 0, err
-	}
-	zipReader, err := zip.NewReader(reader, ss.GetSize())
+func (Zip) Extract(ss []*stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
+	zipReader, err := getReader(ss)
 	if err != nil {
 		return nil, 0, err
 	}
@@ -191,58 +117,16 @@ func (*Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.
 	return nil, 0, errs.ObjectNotFound
 }
 
-func (*Zip) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
-	reader, err := stream.NewReadAtSeeker(ss, 0)
+func (Zip) Decompress(ss []*stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
+	zipReader, err := getReader(ss)
 	if err != nil {
 		return err
 	}
-	zipReader, err := zip.NewReader(reader, ss.GetSize())
-	if err != nil {
-		return err
-	}
-	if args.InnerPath == "/" {
-		for i, file := range zipReader.File {
-			name := decodeName(file.Name)
-			err = decompress(file, name, outputPath, args.Password)
-			if err != nil {
-				return err
-			}
-			up(float64(i+1) * 100.0 / float64(len(zipReader.File)))
-		}
-	} else {
-		innerPath := strings.TrimPrefix(args.InnerPath, "/")
-		innerBase := stdpath.Base(innerPath)
-		createdBaseDir := false
-		for _, file := range zipReader.File {
-			name := decodeName(file.Name)
-			if name == innerPath {
-				err = _decompress(file, outputPath, args.Password, up)
-				if err != nil {
-					return err
-				}
-				break
-			} else if strings.HasPrefix(name, innerPath+"/") {
-				targetPath := stdpath.Join(outputPath, innerBase)
-				if !createdBaseDir {
-					err = os.Mkdir(targetPath, 0700)
-					if err != nil {
-						return err
-					}
-					createdBaseDir = true
-				}
-				restPath := strings.TrimPrefix(name, innerPath+"/")
-				err = decompress(file, restPath, targetPath, args.Password)
-				if err != nil {
-					return err
-				}
-			}
-		}
-	}
-	return nil
+	return tool.DecompressFromFolderTraversal(&WrapReader{Reader: zipReader}, outputPath, args, up)
 }
 
 var _ tool.Tool = (*Zip)(nil)
 
 func init() {
-	tool.RegisterTool(&Zip{})
+	tool.RegisterTool(Zip{})
 }