feat(archive): archive management (#7817)
* feat(archive): archive management
* fix(ftp-server): remove duplicate ReadAtSeeker implementation
* fix(archive): bad seeking of SeekableStream
* fix(archive): split internal and driver extraction API
* feat(archive): patch
* fix(shutdown): clear decompress upload tasks
* chore
* feat(archive): support .iso format
* chore
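As an illustration of the driver-facing flow this internal API enables, here is a minimal sketch (not part of this commit): it resolves the registered tool by file extension and lists the archive root. The listArchiveRoot helper is hypothetical, it assumes a *stream.SeekableStream is already available, and the exact field layout of model.ArchiveArgs/ArchiveInnerArgs is taken from how the tools below use them.

package example

import (
	"fmt"
	stdpath "path"
	"strings"

	_ "github.com/alist-org/alist/v3/internal/archive" // blank import registers all tools (see all.go below)
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
)

// listArchiveRoot is a hypothetical helper: it looks up the tool registered
// for the stream's file extension and lists the top level of the archive.
func listArchiveRoot(ss *stream.SeekableStream, password string) ([]model.Obj, error) {
	ext := strings.ToLower(stdpath.Ext(ss.GetName())) // registry keys include the dot, e.g. ".zip", ".iso"
	t, err := tool.GetArchiveTool(ext)
	if err != nil {
		return nil, err // errs.UnknownArchiveFormat for unsupported extensions
	}
	objs, err := t.List(ss, model.ArchiveInnerArgs{
		ArchiveArgs: model.ArchiveArgs{Password: password}, // assumed field layout
		InnerPath:   "/",
	})
	if err != nil {
		return nil, err
	}
	for _, o := range objs {
		fmt.Println(o.GetName(), o.GetSize(), o.IsDir())
	}
	return objs, nil
}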
internal/archive/all.go | 7 (new file)
@@ -0,0 +1,7 @@
package archive

import (
	_ "github.com/alist-org/alist/v3/internal/archive/archives"
	_ "github.com/alist-org/alist/v3/internal/archive/iso9660"
	_ "github.com/alist-org/alist/v3/internal/archive/zip"
)
internal/archive/archives/archives.go | 126 (new file)
@@ -0,0 +1,126 @@
package archives

import (
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/alist-org/alist/v3/pkg/utils"
	"io"
	"io/fs"
	"os"
	stdpath "path"
	"strings"
)

type Archives struct {
}

func (_ *Archives) AcceptedExtensions() []string {
	return []string{
		".br", ".bz2", ".gz", ".lz4", ".lz", ".sz", ".s2", ".xz", ".zz", ".zst", ".tar", ".rar", ".7z",
	}
}

func (_ *Archives) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
	fsys, err := getFs(ss, args)
	if err != nil {
		return nil, err
	}
	_, err = fsys.ReadDir(".")
	if err != nil {
		return nil, filterPassword(err)
	}
	return &model.ArchiveMetaInfo{
		Comment:   "",
		Encrypted: false,
	}, nil
}

func (_ *Archives) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
	fsys, err := getFs(ss, args.ArchiveArgs)
	if err != nil {
		return nil, err
	}
	innerPath := strings.TrimPrefix(args.InnerPath, "/")
	if innerPath == "" {
		innerPath = "."
	}
	obj, err := fsys.ReadDir(innerPath)
	if err != nil {
		return nil, filterPassword(err)
	}
	return utils.SliceConvert(obj, func(src os.DirEntry) (model.Obj, error) {
		info, err := src.Info()
		if err != nil {
			return nil, err
		}
		return toModelObj(info), nil
	})
}

func (_ *Archives) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
	fsys, err := getFs(ss, args.ArchiveArgs)
	if err != nil {
		return nil, 0, err
	}
	file, err := fsys.Open(strings.TrimPrefix(args.InnerPath, "/"))
	if err != nil {
		return nil, 0, filterPassword(err)
	}
	stat, err := file.Stat()
	if err != nil {
		return nil, 0, filterPassword(err)
	}
	return file, stat.Size(), nil
}

func (_ *Archives) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	fsys, err := getFs(ss, args.ArchiveArgs)
	if err != nil {
		return err
	}
	isDir := false
	path := strings.TrimPrefix(args.InnerPath, "/")
	if path == "" {
		isDir = true
		path = "."
	} else {
		stat, err := fsys.Stat(path)
		if err != nil {
			return filterPassword(err)
		}
		if stat.IsDir() {
			isDir = true
			outputPath = stdpath.Join(outputPath, stat.Name())
			err = os.Mkdir(outputPath, 0700)
			if err != nil {
				return filterPassword(err)
			}
		}
	}
	if isDir {
		err = fs.WalkDir(fsys, path, func(p string, d fs.DirEntry, err error) error {
			if err != nil {
				return err
			}
			relPath := strings.TrimPrefix(p, path+"/")
			dstPath := stdpath.Join(outputPath, relPath)
			if d.IsDir() {
				err = os.MkdirAll(dstPath, 0700)
			} else {
				dir := stdpath.Dir(dstPath)
				err = decompress(fsys, p, dir, func(_ float64) {})
			}
			return err
		})
	} else {
		err = decompress(fsys, path, outputPath, up)
	}
	return filterPassword(err)
}

var _ tool.Tool = (*Archives)(nil)

func init() {
	tool.RegisterTool(&Archives{})
}
internal/archive/archives/utils.go | 80 (new file)
@@ -0,0 +1,80 @@
package archives

import (
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/mholt/archives"
	"io"
	fs2 "io/fs"
	"os"
	stdpath "path"
	"strings"
)

func getFs(ss *stream.SeekableStream, args model.ArchiveArgs) (*archives.ArchiveFS, error) {
	reader, err := stream.NewReadAtSeeker(ss, 0)
	if err != nil {
		return nil, err
	}
	format, _, err := archives.Identify(ss.Ctx, ss.GetName(), reader)
	if err != nil {
		return nil, errs.UnknownArchiveFormat
	}
	extractor, ok := format.(archives.Extractor)
	if !ok {
		return nil, errs.UnknownArchiveFormat
	}
	switch f := format.(type) {
	case archives.SevenZip:
		f.Password = args.Password
	case archives.Rar:
		f.Password = args.Password
	}
	return &archives.ArchiveFS{
		Stream:  io.NewSectionReader(reader, 0, ss.GetSize()),
		Format:  extractor,
		Context: ss.Ctx,
	}, nil
}

func toModelObj(file os.FileInfo) *model.Object {
	return &model.Object{
		Name:     file.Name(),
		Size:     file.Size(),
		Modified: file.ModTime(),
		IsFolder: file.IsDir(),
	}
}

func filterPassword(err error) error {
	if err != nil && strings.Contains(err.Error(), "password") {
		return errs.WrongArchivePassword
	}
	return err
}

func decompress(fsys fs2.FS, filePath, targetPath string, up model.UpdateProgress) error {
	rc, err := fsys.Open(filePath)
	if err != nil {
		return err
	}
	defer rc.Close()
	stat, err := rc.Stat()
	if err != nil {
		return err
	}
	f, err := os.OpenFile(stdpath.Join(targetPath, stat.Name()), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
		Reader: &stream.SimpleReaderWithSize{
			Reader: rc,
			Size:   stat.Size(),
		},
		UpdateProgress: up,
	})
	return err
}
internal/archive/iso9660/iso9660.go | 96 (new file)
@@ -0,0 +1,96 @@
package iso9660

import (
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/kdomanski/iso9660"
	"io"
	"os"
	stdpath "path"
)

type ISO9660 struct {
}

func (t *ISO9660) AcceptedExtensions() []string {
	return []string{".iso"}
}

func (t *ISO9660) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
	return &model.ArchiveMetaInfo{
		Comment:   "",
		Encrypted: false,
	}, nil
}

func (t *ISO9660) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
	img, err := getImage(ss)
	if err != nil {
		return nil, err
	}
	dir, err := getObj(img, args.InnerPath)
	if err != nil {
		return nil, err
	}
	if !dir.IsDir() {
		return nil, errs.NotFolder
	}
	children, err := dir.GetChildren()
	if err != nil {
		return nil, err
	}
	ret := make([]model.Obj, 0, len(children))
	for _, child := range children {
		ret = append(ret, toModelObj(child))
	}
	return ret, nil
}

func (t *ISO9660) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
	img, err := getImage(ss)
	if err != nil {
		return nil, 0, err
	}
	obj, err := getObj(img, args.InnerPath)
	if err != nil {
		return nil, 0, err
	}
	if obj.IsDir() {
		return nil, 0, errs.NotFile
	}
	return io.NopCloser(obj.Reader()), obj.Size(), nil
}

func (t *ISO9660) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	img, err := getImage(ss)
	if err != nil {
		return err
	}
	obj, err := getObj(img, args.InnerPath)
	if err != nil {
		return err
	}
	if obj.IsDir() {
		if args.InnerPath != "/" {
			outputPath = stdpath.Join(outputPath, obj.Name())
			if err = os.MkdirAll(outputPath, 0700); err != nil {
				return err
			}
		}
		var children []*iso9660.File
		if children, err = obj.GetChildren(); err == nil {
			err = decompressAll(children, outputPath)
		}
	} else {
		err = decompress(obj, outputPath, up)
	}
	return err
}

var _ tool.Tool = (*ISO9660)(nil)

func init() {
	tool.RegisterTool(&ISO9660{})
}
internal/archive/iso9660/utils.go | 100 (new file)
@@ -0,0 +1,100 @@
package iso9660

import (
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/kdomanski/iso9660"
	"io"
	"os"
	stdpath "path"
	"strings"
)

func getImage(ss *stream.SeekableStream) (*iso9660.Image, error) {
	reader, err := stream.NewReadAtSeeker(ss, 0)
	if err != nil {
		return nil, err
	}
	return iso9660.OpenImage(reader)
}

func getObj(img *iso9660.Image, path string) (*iso9660.File, error) {
	obj, err := img.RootDir()
	if err != nil {
		return nil, err
	}
	if path == "/" {
		return obj, nil
	}
	paths := strings.Split(strings.TrimPrefix(path, "/"), "/")
	for _, p := range paths {
		if !obj.IsDir() {
			return nil, errs.ObjectNotFound
		}
		children, err := obj.GetChildren()
		if err != nil {
			return nil, err
		}
		exist := false
		for _, child := range children {
			if child.Name() == p {
				obj = child
				exist = true
				break
			}
		}
		if !exist {
			return nil, errs.ObjectNotFound
		}
	}
	return obj, nil
}

func toModelObj(file *iso9660.File) model.Obj {
	return &model.Object{
		Name:     file.Name(),
		Size:     file.Size(),
		Modified: file.ModTime(),
		IsFolder: file.IsDir(),
	}
}

func decompress(f *iso9660.File, path string, up model.UpdateProgress) error {
	file, err := os.OpenFile(stdpath.Join(path, f.Name()), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = io.Copy(file, &stream.ReaderUpdatingProgress{
		Reader: &stream.SimpleReaderWithSize{
			Reader: f.Reader(),
			Size:   f.Size(),
		},
		UpdateProgress: up,
	})
	return err
}

func decompressAll(children []*iso9660.File, path string) error {
	for _, child := range children {
		if child.IsDir() {
			nextChildren, err := child.GetChildren()
			if err != nil {
				return err
			}
			nextPath := stdpath.Join(path, child.Name())
			if err = os.MkdirAll(nextPath, 0700); err != nil {
				return err
			}
			if err = decompressAll(nextChildren, nextPath); err != nil {
				return err
			}
		} else {
			if err := decompress(child, path, func(_ float64) {}); err != nil {
				return err
			}
		}
	}
	return nil
}
internal/archive/tool/base.go | 15 (new file)
@@ -0,0 +1,15 @@
package tool

import (
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"io"
)

type Tool interface {
	AcceptedExtensions() []string
	GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error)
	List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error)
	Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error)
	Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error
}
internal/archive/tool/utils.go | 23 (new file)
@@ -0,0 +1,23 @@
package tool

import (
	"github.com/alist-org/alist/v3/internal/errs"
)

var (
	Tools = make(map[string]Tool)
)

func RegisterTool(tool Tool) {
	for _, ext := range tool.AcceptedExtensions() {
		Tools[ext] = tool
	}
}

func GetArchiveTool(ext string) (Tool, error) {
	t, ok := Tools[ext]
	if !ok {
		return nil, errs.UnknownArchiveFormat
	}
	return t, nil
}
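For reference, a hypothetical skeleton (not part of this commit) of what a new format handler needs in order to plug into this registry. The ExampleTool type, its ".example" extension, and the stubbed method bodies are made up for illustration; only the interface and registration calls come from the files above.

package exampletool

import (
	"errors"
	"io"

	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
)

// ExampleTool is a hypothetical, do-nothing handler showing the Tool contract.
type ExampleTool struct{}

func (*ExampleTool) AcceptedExtensions() []string {
	// These exact strings become the registry keys used by GetArchiveTool.
	return []string{".example"}
}

func (*ExampleTool) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
	return &model.ArchiveMetaInfo{}, nil
}

func (*ExampleTool) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
	return nil, errors.New("not implemented")
}

func (*ExampleTool) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
	return nil, 0, errors.New("not implemented")
}

func (*ExampleTool) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	return errors.New("not implemented")
}

var _ tool.Tool = (*ExampleTool)(nil)

func init() {
	// Registration happens as a side effect of importing the package,
	// which is why internal/archive/all.go blank-imports each tool package.
	tool.RegisterTool(&ExampleTool{})
}

A blank import of such a package from internal/archive/all.go would then be enough to make the format reachable through GetArchiveTool.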
internal/archive/zip/utils.go | 156 (new file)
@@ -0,0 +1,156 @@
package zip

import (
	"bytes"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/saintfish/chardet"
	"github.com/yeka/zip"
	"golang.org/x/text/encoding"
	"golang.org/x/text/encoding/charmap"
	"golang.org/x/text/encoding/japanese"
	"golang.org/x/text/encoding/korean"
	"golang.org/x/text/encoding/simplifiedchinese"
	"golang.org/x/text/encoding/traditionalchinese"
	"golang.org/x/text/encoding/unicode"
	"golang.org/x/text/encoding/unicode/utf32"
	"golang.org/x/text/transform"
	"io"
	"os"
	stdpath "path"
	"strings"
)

func toModelObj(file os.FileInfo) *model.Object {
	return &model.Object{
		Name:     decodeName(file.Name()),
		Size:     file.Size(),
		Modified: file.ModTime(),
		IsFolder: file.IsDir(),
	}
}

func decompress(file *zip.File, filePath, outputPath, password string) error {
	targetPath := outputPath
	dir, base := stdpath.Split(filePath)
	if dir != "" {
		targetPath = stdpath.Join(targetPath, dir)
		err := os.MkdirAll(targetPath, 0700)
		if err != nil {
			return err
		}
	}
	if base != "" {
		err := _decompress(file, targetPath, password, func(_ float64) {})
		if err != nil {
			return err
		}
	}
	return nil
}

func _decompress(file *zip.File, targetPath, password string, up model.UpdateProgress) error {
	if file.IsEncrypted() {
		file.SetPassword(password)
	}
	rc, err := file.Open()
	if err != nil {
		return err
	}
	defer rc.Close()
	f, err := os.OpenFile(stdpath.Join(targetPath, file.FileInfo().Name()), os.O_WRONLY|os.O_CREATE|os.O_EXCL, 0600)
	if err != nil {
		return err
	}
	defer f.Close()
	_, err = io.Copy(f, &stream.ReaderUpdatingProgress{
		Reader: &stream.SimpleReaderWithSize{
			Reader: rc,
			Size:   file.FileInfo().Size(),
		},
		UpdateProgress: up,
	})
	if err != nil {
		return err
	}
	return nil
}

func filterPassword(err error) error {
	if err != nil && strings.Contains(err.Error(), "password") {
		return errs.WrongArchivePassword
	}
	return err
}

func decodeName(name string) string {
	b := []byte(name)
	detector := chardet.NewTextDetector()
	result, err := detector.DetectBest(b)
	if err != nil {
		return name
	}
	enc := getEncoding(result.Charset)
	if enc == nil {
		return name
	}
	i := bytes.NewReader(b)
	decoder := transform.NewReader(i, enc.NewDecoder())
	content, _ := io.ReadAll(decoder)
	return string(content)
}

func getEncoding(name string) (enc encoding.Encoding) {
	switch name {
	case "UTF-16BE":
		enc = unicode.UTF16(unicode.BigEndian, unicode.IgnoreBOM)
	case "UTF-16LE":
		enc = unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM)
	case "UTF-32BE":
		enc = utf32.UTF32(utf32.BigEndian, utf32.IgnoreBOM)
	case "UTF-32LE":
		enc = utf32.UTF32(utf32.LittleEndian, utf32.IgnoreBOM)
	case "ISO-8859-1":
		enc = charmap.ISO8859_1
	case "ISO-8859-2":
		enc = charmap.ISO8859_2
	case "ISO-8859-3":
		enc = charmap.ISO8859_3
	case "ISO-8859-4":
		enc = charmap.ISO8859_4
	case "ISO-8859-5":
		enc = charmap.ISO8859_5
	case "ISO-8859-6":
		enc = charmap.ISO8859_6
	case "ISO-8859-7":
		enc = charmap.ISO8859_7
	case "ISO-8859-8":
		enc = charmap.ISO8859_8
	case "ISO-8859-8-I":
		enc = charmap.ISO8859_8I
	case "ISO-8859-9":
		enc = charmap.ISO8859_9
	case "windows-1251":
		enc = charmap.Windows1251
	case "windows-1256":
		enc = charmap.Windows1256
	case "KOI8-R":
		enc = charmap.KOI8R
	case "Shift_JIS":
		enc = japanese.ShiftJIS
	case "GB-18030":
		enc = simplifiedchinese.GB18030
	case "EUC-JP":
		enc = japanese.EUCJP
	case "EUC-KR":
		enc = korean.EUCKR
	case "Big5":
		enc = traditionalchinese.Big5
	case "ISO-2022-JP":
		enc = japanese.ISO2022JP
	default:
		enc = nil
	}
	return
}
internal/archive/zip/zip.go | 174 (new file)
@@ -0,0 +1,174 @@
package zip

import (
	"github.com/alist-org/alist/v3/internal/archive/tool"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/stream"
	"github.com/yeka/zip"
	"io"
	"os"
	stdpath "path"
	"strings"
)

type Zip struct {
}

func (_ *Zip) AcceptedExtensions() []string {
	return []string{".zip"}
}

func (_ *Zip) GetMeta(ss *stream.SeekableStream, args model.ArchiveArgs) (model.ArchiveMeta, error) {
	reader, err := stream.NewReadAtSeeker(ss, 0)
	if err != nil {
		return nil, err
	}
	zipReader, err := zip.NewReader(reader, ss.GetSize())
	if err != nil {
		return nil, err
	}
	encrypted := false
	for _, file := range zipReader.File {
		if file.IsEncrypted() {
			encrypted = true
			break
		}
	}
	return &model.ArchiveMetaInfo{
		Comment:   zipReader.Comment,
		Encrypted: encrypted,
	}, nil
}

func (_ *Zip) List(ss *stream.SeekableStream, args model.ArchiveInnerArgs) ([]model.Obj, error) {
	reader, err := stream.NewReadAtSeeker(ss, 0)
	if err != nil {
		return nil, err
	}
	zipReader, err := zip.NewReader(reader, ss.GetSize())
	if err != nil {
		return nil, err
	}
	if args.InnerPath == "/" {
		ret := make([]model.Obj, 0)
		passVerified := false
		for _, file := range zipReader.File {
			if !passVerified && file.IsEncrypted() {
				file.SetPassword(args.Password)
				rc, e := file.Open()
				if e != nil {
					return nil, filterPassword(e)
				}
				_ = rc.Close()
				passVerified = true
			}
			name := decodeName(file.Name)
			if strings.Contains(strings.TrimSuffix(name, "/"), "/") {
				continue
			}
			ret = append(ret, toModelObj(file.FileInfo()))
		}
		return ret, nil
	} else {
		innerPath := strings.TrimPrefix(args.InnerPath, "/") + "/"
		ret := make([]model.Obj, 0)
		exist := false
		for _, file := range zipReader.File {
			name := decodeName(file.Name)
			if name == innerPath {
				exist = true
			}
			dir := stdpath.Dir(strings.TrimSuffix(name, "/")) + "/"
			if dir != innerPath {
				continue
			}
			ret = append(ret, toModelObj(file.FileInfo()))
		}
		if !exist {
			return nil, errs.ObjectNotFound
		}
		return ret, nil
	}
}

func (_ *Zip) Extract(ss *stream.SeekableStream, args model.ArchiveInnerArgs) (io.ReadCloser, int64, error) {
	reader, err := stream.NewReadAtSeeker(ss, 0)
	if err != nil {
		return nil, 0, err
	}
	zipReader, err := zip.NewReader(reader, ss.GetSize())
	if err != nil {
		return nil, 0, err
	}
	innerPath := strings.TrimPrefix(args.InnerPath, "/")
	for _, file := range zipReader.File {
		if decodeName(file.Name) == innerPath {
			if file.IsEncrypted() {
				file.SetPassword(args.Password)
			}
			r, e := file.Open()
			if e != nil {
				return nil, 0, e
			}
			return r, file.FileInfo().Size(), nil
		}
	}
	return nil, 0, errs.ObjectNotFound
}

func (_ *Zip) Decompress(ss *stream.SeekableStream, outputPath string, args model.ArchiveInnerArgs, up model.UpdateProgress) error {
	reader, err := stream.NewReadAtSeeker(ss, 0)
	if err != nil {
		return err
	}
	zipReader, err := zip.NewReader(reader, ss.GetSize())
	if err != nil {
		return err
	}
	if args.InnerPath == "/" {
		for i, file := range zipReader.File {
			name := decodeName(file.Name)
			err = decompress(file, name, outputPath, args.Password)
			if err != nil {
				return err
			}
			up(float64(i+1) * 100.0 / float64(len(zipReader.File)))
		}
	} else {
		innerPath := strings.TrimPrefix(args.InnerPath, "/")
		innerBase := stdpath.Base(innerPath)
		createdBaseDir := false
		for _, file := range zipReader.File {
			name := decodeName(file.Name)
			if name == innerPath {
				err = _decompress(file, outputPath, args.Password, up)
				if err != nil {
					return err
				}
				break
			} else if strings.HasPrefix(name, innerPath+"/") {
				targetPath := stdpath.Join(outputPath, innerBase)
				if !createdBaseDir {
					err = os.Mkdir(targetPath, 0700)
					if err != nil {
						return err
					}
					createdBaseDir = true
				}
				restPath := strings.TrimPrefix(name, innerPath+"/")
				err = decompress(file, restPath, targetPath, args.Password)
				if err != nil {
					return err
				}
			}
		}
	}
	return nil
}

var _ tool.Tool = (*Zip)(nil)

func init() {
	tool.RegisterTool(&Zip{})
}