feat(archive): archive manage (#7817)

* feat(archive): archive management

* fix(ftp-server): remove duplicate ReadAtSeeker realization

* fix(archive): bad seeking of SeekableStream

* fix(archive): split internal and driver extraction api

* feat(archive): patch

* fix(shutdown): clear decompress upload tasks

* chore

* feat(archive): support .iso format

* chore
KirCute_ECT authored 2025-01-18 23:28:12 +08:00, committed by GitHub
parent ab22cf8233
commit bb40e2e2cd
36 changed files with 2854 additions and 127 deletions
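The ftp-server hunks below replace the proxy's hand-rolled ranged seeking with a single stream.SStreamReadAtSeeker created in OpenDownload; the proxy then just delegates Read, Seek and Close to it. A minimal usage sketch of the reworked proxy follows (OpenDownload and the proxy methods are the ones shown in the diff; the helper name, package placement and io.Copy usage are illustrative assumptions, not part of this commit):

package ftp

import (
	"context"
	"io"
)

// copyRangeTo is an illustrative helper, not part of this commit: it shows how
// the reworked FileDownloadProxy is meant to be driven after this change.
func copyRangeTo(ctx context.Context, reqPath string, offset int64, dst io.Writer) error {
	// Open once; the proxy now owns a stream.SStreamReadAtSeeker internally.
	proxy, err := OpenDownload(ctx, reqPath, 0)
	if err != nil {
		return err
	}
	defer proxy.Close()

	// Repositioning (e.g. for an FTP REST command) is now a plain Seek on the
	// wrapped reader instead of re-opening a ranged reader on every call.
	if _, err := proxy.Seek(offset, io.SeekStart); err != nil {
		return err
	}
	_, err = io.Copy(dst, proxy)
	return err
}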


@@ -8,10 +8,8 @@ import (
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/alist-org/alist/v3/internal/stream"
-	"github.com/alist-org/alist/v3/pkg/http_range"
	"github.com/alist-org/alist/v3/server/common"
-	"github.com/pkg/errors"
	"io"
	fs2 "io/fs"
	"net/http"
	"os"
@@ -20,9 +18,7 @@ import (
type FileDownloadProxy struct {
	ftpserver.FileTransfer
-	ss *stream.SeekableStream
-	reader io.Reader
-	cur int64
+	reader stream.SStreamReadAtSeeker
}

func OpenDownload(ctx context.Context, reqPath string, offset int64) (*FileDownloadProxy, error) {
@@ -55,22 +51,16 @@ func OpenDownload(ctx context.Context, reqPath string, offset int64) (*FileDownl
	if err != nil {
		return nil, err
	}
-	var reader io.Reader
-	if offset != 0 {
-		reader, err = ss.RangeRead(http_range.Range{Start: offset, Length: -1})
-		if err != nil {
-			return nil, err
-		}
-	} else {
-		reader = ss
-	}
-	return &FileDownloadProxy{ss: ss, reader: reader}, nil
+	reader, err := stream.NewReadAtSeeker(ss, offset)
+	if err != nil {
+		_ = ss.Close()
+		return nil, err
+	}
+	return &FileDownloadProxy{reader: reader}, nil
}

func (f *FileDownloadProxy) Read(p []byte) (n int, err error) {
-	n, err = f.reader.Read(p)
-	f.cur += int64(n)
-	return n, err
+	return f.reader.Read(p)
}

func (f *FileDownloadProxy) Write(p []byte) (n int, err error) {
@@ -78,32 +68,11 @@ func (f *FileDownloadProxy) Write(p []byte) (n int, err error) {
}

func (f *FileDownloadProxy) Seek(offset int64, whence int) (int64, error) {
-	switch whence {
-	case io.SeekStart:
-		break
-	case io.SeekCurrent:
-		offset += f.cur
-		break
-	case io.SeekEnd:
-		offset += f.ss.GetSize()
-		break
-	default:
-		return 0, errs.NotSupport
-	}
-	if offset < 0 {
-		return 0, errors.New("Seek: negative position")
-	}
-	reader, err := f.ss.RangeRead(http_range.Range{Start: offset, Length: -1})
-	if err != nil {
-		return f.cur, err
-	}
-	f.cur = offset
-	f.reader = reader
-	return offset, nil
+	return f.reader.Seek(offset, whence)
}

func (f *FileDownloadProxy) Close() error {
-	return f.ss.Close()
+	return f.reader.Close()
}

type OsFileInfoAdapter struct {
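
For context on where the deleted Seek logic went: the whence arithmetic and ranged re-reading that used to live in FileDownloadProxy presumably moved behind stream.NewReadAtSeeker (the commit message's "remove duplicate ReadAtSeeker realization"). Below is a minimal sketch of that idea, reconstructed from the deleted lines; the rangedReadAtSeeker type, its field layout and its placement in the stream package are hypothetical, not the actual implementation in internal/stream:

package stream

import (
	"errors"
	"io"

	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/pkg/http_range"
)

// rangedReadAtSeeker is a hypothetical seekable reader over a range-readable
// source, mirroring the logic removed from FileDownloadProxy in this commit.
type rangedReadAtSeeker struct {
	ss     *SeekableStream // range-readable source with a known size
	reader io.Reader       // current ranged reader positioned at cur
	cur    int64           // absolute read offset
}

func (r *rangedReadAtSeeker) Read(p []byte) (int, error) {
	n, err := r.reader.Read(p)
	r.cur += int64(n) // keep the absolute position in sync with what was consumed
	return n, err
}

func (r *rangedReadAtSeeker) Seek(offset int64, whence int) (int64, error) {
	switch whence {
	case io.SeekStart:
	case io.SeekCurrent:
		offset += r.cur
	case io.SeekEnd:
		offset += r.ss.GetSize()
	default:
		return 0, errs.NotSupport
	}
	if offset < 0 {
		return 0, errors.New("Seek: negative position")
	}
	// Re-open a ranged reader at the new absolute offset; Length -1 means "to EOF".
	reader, err := r.ss.RangeRead(http_range.Range{Start: offset, Length: -1})
	if err != nil {
		return r.cur, err
	}
	r.cur = offset
	r.reader = reader
	return offset, nil
}

func (r *rangedReadAtSeeker) Close() error {
	return r.ss.Close()
}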