Compare commits

12 Commits

- afcfbf02ea
- cad04e07dd
- 30f732138c
- 04034bd03b
- 6ec9a8d4c7
- 3f7882b467
- a4511c1963
- 9d1f122717
- 5dd73d80d8
- fce872bc1b
- df6c4c80c2
- d2ff040cf8
@@ -1,11 +1,9 @@
package _123

import (
    "bytes"
    "context"
    "crypto/md5"
    "encoding/base64"
    "encoding/binary"
    "encoding/hex"
    "fmt"
    "io"

@@ -45,6 +43,9 @@ func (d *Pan123) Init(ctx context.Context) error {
}

func (d *Pan123) Drop(ctx context.Context) error {
    _, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{})
    }, nil)
    return nil
}

@@ -180,40 +181,23 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
}

func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    const DEFAULT int64 = 10485760
    var uploadFile io.Reader
    // const DEFAULT int64 = 10485760
    h := md5.New()
    if d.StreamUpload && stream.GetSize() > DEFAULT {
        // only hash the first 10 MiB
        buf := bytes.NewBuffer(make([]byte, 0, DEFAULT))
        if n, err := io.CopyN(io.MultiWriter(buf, h), stream, DEFAULT); err != io.EOF && n == 0 {
            return err
        }
        // add extra parameters to prevent MD5 collisions
        h.Write([]byte(stream.GetName()))
        num := make([]byte, 8)
        binary.BigEndian.PutUint64(num, uint64(stream.GetSize()))
        h.Write(num)
        // stitch the buffered head and the rest of the stream back together
        uploadFile = io.MultiReader(buf, stream)
    } else {
        // calculate the MD5 of the whole file
        tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
        if err != nil {
            return err
        }
        defer func() {
            _ = tempFile.Close()
            _ = os.Remove(tempFile.Name())
        }()
        if _, err = io.Copy(h, tempFile); err != nil {
            return err
        }
        _, err = tempFile.Seek(0, io.SeekStart)
        if err != nil {
            return err
        }
        uploadFile = tempFile
    // need to calculate md5 of the full content
    tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
    if err != nil {
        return err
    }
    defer func() {
        _ = tempFile.Close()
        _ = os.Remove(tempFile.Name())
    }()
    if _, err = io.Copy(h, tempFile); err != nil {
        return err
    }
    _, err = tempFile.Seek(0, io.SeekStart)
    if err != nil {
        return err
    }
    etag := hex.EncodeToString(h.Sum(nil))
    data := base.Json{

@@ -237,7 +221,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
        return nil
    }
    if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
        err = d.newUpload(ctx, &resp, stream, uploadFile, up)
        err = d.newUpload(ctx, &resp, stream, tempFile, up)
        return err
    } else {
        cfg := &aws.Config{

@@ -254,7 +238,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
        input := &s3manager.UploadInput{
            Bucket: &resp.Data.Bucket,
            Key: &resp.Data.Key,
            Body: uploadFile,
            Body: tempFile,
        }
        _, err = uploader.UploadWithContext(ctx, input)
    }
@ -11,7 +11,6 @@ type Addition struct {
|
||||
driver.RootID
|
||||
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
|
||||
StreamUpload bool `json:"stream_upload"`
|
||||
AccessToken string
|
||||
}
|
||||
|
||||
|
@@ -1,7 +1,10 @@
package _123

import (
    "net/url"
    "path"
    "strconv"
    "strings"
    "time"

    "github.com/alist-org/alist/v3/internal/model"

@@ -42,7 +45,30 @@ func (f File) GetID() string {
    return strconv.FormatInt(f.FileId, 10)
}

func (f File) Thumb() string {
    if f.DownloadUrl == "" {
        return ""
    }
    du, err := url.Parse(f.DownloadUrl)
    if err != nil {
        return ""
    }
    du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
    query := du.Query()
    query.Set("w", "70")
    query.Set("h", "70")
    if !query.Has("type") {
        query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
    }
    if !query.Has("trade_key") {
        query.Set("trade_key", "123pan-thumbnail")
    }
    du.RawQuery = query.Encode()
    return du.String()
}

var _ model.Obj = (*File)(nil)
var _ model.Thumb = (*File)(nil)

//func (f File) Thumb() string {
//
@@ -19,6 +19,7 @@ const (
    BApi = "https://www.123pan.com/b/api"
    MainApi = AApi
    SignIn = MainApi + "/user/sign_in"
    Logout = MainApi + "/user/logout"
    UserInfo = MainApi + "/user/info"
    FileList = MainApi + "/file/list/new"
    DownloadInfo = MainApi + "/file/download_info"

@@ -50,6 +51,13 @@ func (d *Pan123) login() error {
    }
    }
    res, err := base.RestyClient.R().
        SetHeaders(map[string]string{
            "origin": "https://www.123pan.com",
            "referer": "https://www.123pan.com/",
            "platform": "web",
            "app-version": "3",
            "user-agent": base.UserAgent,
        }).
        SetBody(body).Post(SignIn)
    if err != nil {
        return err

@@ -69,7 +77,8 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
        "referer": "https://www.123pan.com/",
        "authorization": "Bearer " + d.AccessToken,
        "platform": "web",
        "app-version": "1.2",
        "app-version": "3",
        "user-agent": base.UserAgent,
    })
    if callback != nil {
        callback(req)
@@ -42,8 +42,8 @@ func calSign(body, ts, randStr string) string {
    sort.Strings(strs)
    body = strings.Join(strs, "")
    body = base64.StdEncoding.EncodeToString([]byte(body))
    res := utils.GetMD5Encode(body) + utils.GetMD5Encode(ts+":"+randStr)
    res = strings.ToUpper(utils.GetMD5Encode(res))
    res := utils.GetMD5EncodeStr(body) + utils.GetMD5EncodeStr(ts+":"+randStr)
    res = strings.ToUpper(utils.GetMD5EncodeStr(res))
    return res
}
@@ -385,7 +385,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
    fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
    sliceMd5 := fileMd5
    if file.GetSize() > DEFAULT {
        sliceMd5 = utils.GetMD5Encode(strings.Join(md5s, "\n"))
        sliceMd5 = utils.GetMD5EncodeStr(strings.Join(md5s, "\n"))
    }
    res, err = d.uploadRequest("/person/commitMultiUploadFile", map[string]string{
        "uploadFileId": uploadFileId,
@@ -51,7 +51,7 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
    }

    // avoid logging in repeatedly
    identity := utils.GetMD5Encode(y.Username + y.Password)
    identity := utils.GetMD5EncodeStr(y.Username + y.Password)
    if !y.isLogin() || y.identity != identity {
        y.identity = identity
        if err = y.login(); err != nil {

@@ -519,7 +519,7 @@ func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mo
    fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
    sliceMd5Hex := fileMd5Hex
    if file.GetSize() > DEFAULT {
        sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
        sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
    }

    // commit the upload

@@ -577,7 +577,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
    fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
    sliceMd5Hex := fileMd5Hex
    if file.GetSize() > DEFAULT {
        sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
        sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
    }

    // check whether rapid upload is supported
@@ -67,7 +67,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
        return nil
    }
    // init deviceID
    deviceID := utils.GetSHA256Encode(d.UserID)
    deviceID := utils.GetSHA256Encode([]byte(d.UserID))
    // init privateKey
    privateKey, _ := NewPrivateKeyFromHex(deviceID)
    state := State{

@@ -193,7 +193,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
    if d.RapidUpload {
        buf := bytes.NewBuffer(make([]byte, 0, 1024))
        io.CopyN(buf, file, 1024)
        reqBody["pre_hash"] = utils.GetSHA1Encode(buf.String())
        reqBody["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
        if localFile != nil {
            if _, err := localFile.Seek(0, io.SeekStart); err != nil {
                return err

@@ -259,7 +259,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
    (t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
    */
    buf := make([]byte, 8)
    r, _ := new(big.Int).SetString(utils.GetMD5Encode(d.AccessToken)[:16], 16)
    r, _ := new(big.Int).SetString(utils.GetMD5EncodeStr(d.AccessToken)[:16], 16)
    i := new(big.Int).SetInt64(file.GetSize())
    o := new(big.Int).SetInt64(0)
    if file.GetSize() > 0 {
@@ -3,8 +3,6 @@ package aliyundrive_open

import (
    "context"
    "fmt"
    "io"
    "math"
    "net/http"
    "time"

@@ -153,74 +151,7 @@ func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
}

func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    // rapid_upload is not currently supported
    // 1. create
    // Part Size Unit: Bytes, Default: 20MB,
    // Maximum number of slices 10,000, ≈195.3125GB
    var partSize int64 = 20 * 1024 * 1024
    createData := base.Json{
        "drive_id": d.DriveId,
        "parent_file_id": dstDir.GetID(),
        "name": stream.GetName(),
        "type": "file",
        "check_name_mode": "ignore",
    }
    count := 1
    if stream.GetSize() > partSize {
        if stream.GetSize() > 1*1024*1024*1024*1024 { // file Size over 1TB
            partSize = 5 * 1024 * 1024 * 1024 // file part size 5GB
        } else if stream.GetSize() > 768*1024*1024*1024 { // over 768GB
            partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 part
        } else if stream.GetSize() > 512*1024*1024*1024 { // over 512GB
            partSize = 82463373 // ≈ 78.6432MB
        } else if stream.GetSize() > 384*1024*1024*1024 { // over 384GB
            partSize = 54975582 // ≈ 52.4288MB
        } else if stream.GetSize() > 256*1024*1024*1024 { // over 256GB
            partSize = 41231687 // ≈ 39.3216MB
        } else if stream.GetSize() > 128*1024*1024*1024 { // over 128GB
            partSize = 27487791 // ≈ 26.2144MB
        }
        count = int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
        createData["part_info_list"] = makePartInfos(count)
    }
    var createResp CreateResp
    _, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
        req.SetBody(createData).SetResult(&createResp)
    })
    if err != nil {
        return err
    }
    // 2. upload
    preTime := time.Now()
    for i := 1; i <= len(createResp.PartInfoList); i++ {
        if utils.IsCanceled(ctx) {
            return ctx.Err()
        }
        err = d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, partSize)), &createResp, true)
        if err != nil {
            return err
        }
        if count > 0 {
            up(i * 100 / count)
        }
        // refresh upload url if 50 minutes passed
        if time.Since(preTime) > 50*time.Minute {
            createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
            if err != nil {
                return err
            }
            preTime = time.Now()
        }
    }
    // 3. complete
    _, err = d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "drive_id": d.DriveId,
            "file_id": createResp.FileId,
            "upload_id": createResp.UploadId,
        })
    })
    return err
    return d.upload(ctx, dstDir, stream, up)
}

func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
@@ -14,6 +14,7 @@ type Addition struct {
    ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
    ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
    RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
    RapidUpload bool `json:"rapid_upload" help:"If you enable this option, the file will be uploaded to the server first, so the progress will be incorrect"`
    InternalUpload bool `json:"internal_upload" help:"If you are using Aliyun ECS is located in Beijing, you can turn it on to boost the upload speed"`
    AccessToken string
}
drivers/aliyundrive_open/upload.go (new file, 268 lines)
@@ -0,0 +1,268 @@
package aliyundrive_open

import (
    "bytes"
    "context"
    "crypto/sha1"
    "encoding/base64"
    "encoding/hex"
    "fmt"
    "io"
    "math"
    "net/http"
    "os"
    "strconv"
    "strings"
    "time"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/go-resty/resty/v2"
    log "github.com/sirupsen/logrus"
)

func makePartInfos(size int) []base.Json {
    partInfoList := make([]base.Json, size)
    for i := 0; i < size; i++ {
        partInfoList[i] = base.Json{"part_number": 1 + i}
    }
    return partInfoList
}

func calPartSize(fileSize int64) int64 {
    var partSize int64 = 20 * 1024 * 1024
    if fileSize > partSize {
        if fileSize > 1*1024*1024*1024*1024 { // file Size over 1TB
            partSize = 5 * 1024 * 1024 * 1024 // file part size 5GB
        } else if fileSize > 768*1024*1024*1024 { // over 768GB
            partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 part
        } else if fileSize > 512*1024*1024*1024 { // over 512GB
            partSize = 82463373 // ≈ 78.6432MB
        } else if fileSize > 384*1024*1024*1024 { // over 384GB
            partSize = 54975582 // ≈ 52.4288MB
        } else if fileSize > 256*1024*1024*1024 { // over 256GB
            partSize = 41231687 // ≈ 39.3216MB
        } else if fileSize > 128*1024*1024*1024 { // over 128GB
            partSize = 27487791 // ≈ 26.2144MB
        }
    }
    return partSize
}

func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
    partInfoList := makePartInfos(count)
    var resp CreateResp
    _, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "drive_id": d.DriveId,
            "file_id": fileId,
            "part_info_list": partInfoList,
            "upload_id": uploadId,
        }).SetResult(&resp)
    })
    return resp.PartInfoList, err
}

func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
    partInfo := resp.PartInfoList[i-1]
    uploadUrl := partInfo.UploadUrl
    if d.InternalUpload {
        uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
    }
    req, err := http.NewRequest("PUT", uploadUrl, reader)
    if err != nil {
        return err
    }
    req = req.WithContext(ctx)
    res, err := base.HttpClient.Do(req)
    if err != nil {
        if retry {
            reader.Reset()
            return d.uploadPart(ctx, i, count, reader, resp, false)
        }
        return err
    }
    res.Body.Close()
    if retry && res.StatusCode == http.StatusForbidden {
        resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
        if err != nil {
            return err
        }
        reader.Reset()
        return d.uploadPart(ctx, i, count, reader, resp, false)
    }
    if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
        return fmt.Errorf("upload status: %d", res.StatusCode)
    }
    return nil
}

func (d *AliyundriveOpen) normalUpload(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress, createResp CreateResp, count int, partSize int64) error {
    log.Debugf("[aliyundive_open] normal upload")
    // 2. upload
    preTime := time.Now()
    for i := 1; i <= len(createResp.PartInfoList); i++ {
        if utils.IsCanceled(ctx) {
            return ctx.Err()
        }
        err := d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, partSize)), &createResp, true)
        if err != nil {
            return err
        }
        if count > 0 {
            up(i * 100 / count)
        }
        // refresh upload url if 50 minutes passed
        if time.Since(preTime) > 50*time.Minute {
            createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
            if err != nil {
                return err
            }
            preTime = time.Now()
        }
    }
    // 3. complete
    _, err := d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "drive_id": d.DriveId,
            "file_id": createResp.FileId,
            "upload_id": createResp.UploadId,
        })
    })
    return err
}

type ProofRange struct {
    Start int64
    End int64
}

func getProofRange(input string, size int64) (*ProofRange, error) {
    if size == 0 {
        return &ProofRange{}, nil
    }
    tmpStr := utils.GetMD5EncodeStr(input)[0:16]
    tmpInt, err := strconv.ParseUint(tmpStr, 16, 64)
    if err != nil {
        return nil, err
    }
    index := tmpInt % uint64(size)
    pr := &ProofRange{
        Start: int64(index),
        End: int64(index) + 8,
    }
    if pr.End >= size {
        pr.End = size
    }
    return pr, nil
}

func (d *AliyundriveOpen) calProofCode(file *os.File, fileSize int64) (string, error) {
    proofRange, err := getProofRange(d.AccessToken, fileSize)
    if err != nil {
        return "", err
    }
    buf := make([]byte, proofRange.End-proofRange.Start)
    _, err = file.ReadAt(buf, proofRange.Start)
    if err != nil {
        return "", err
    }
    return base64.StdEncoding.EncodeToString(buf), nil
}

func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    // 1. create
    // Part Size Unit: Bytes, Default: 20MB,
    // Maximum number of slices 10,000, ≈195.3125GB
    var partSize = calPartSize(stream.GetSize())
    createData := base.Json{
        "drive_id": d.DriveId,
        "parent_file_id": dstDir.GetID(),
        "name": stream.GetName(),
        "type": "file",
        "check_name_mode": "ignore",
    }
    count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
    createData["part_info_list"] = makePartInfos(count)
    // rapid upload
    rapidUpload := stream.GetSize() > 100*1024 && d.RapidUpload
    if rapidUpload {
        log.Debugf("[aliyundrive_open] start cal pre_hash")
        // read 1024 bytes to calculate pre hash
        buf := bytes.NewBuffer(make([]byte, 0, 1024))
        _, err := io.CopyN(buf, stream, 1024)
        if err != nil {
            return err
        }
        createData["size"] = stream.GetSize()
        createData["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
        // if support seek, seek to start
        if localFile, ok := stream.(io.Seeker); ok {
            if _, err := localFile.Seek(0, io.SeekStart); err != nil {
                return err
            }
        } else {
            // Put spliced head back to stream
            stream.SetReadCloser(struct {
                io.Reader
                io.Closer
            }{
                Reader: io.MultiReader(buf, stream.GetReadCloser()),
                Closer: stream.GetReadCloser(),
            })
        }
    }
    var createResp CreateResp
    _, err, e := d.requestReturnErrResp("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
        req.SetBody(createData).SetResult(&createResp)
    })
    if err != nil {
        if e.Code != "PreHashMatched" || !rapidUpload {
            return err
        }
        log.Debugf("[aliyundrive_open] pre_hash matched, start rapid upload")
        // convert to local file
        file, err := utils.CreateTempFile(stream)
        if err != nil {
            return err
        }
        _ = stream.GetReadCloser().Close()
        stream.SetReadCloser(file)
        // calculate full hash
        h := sha1.New()
        _, err = io.Copy(h, file)
        if err != nil {
            return err
        }
        delete(createData, "pre_hash")
        createData["proof_version"] = "v1"
        createData["content_hash_name"] = "sha1"
        createData["content_hash"] = hex.EncodeToString(h.Sum(nil))
        // seek to start
        if _, err = file.Seek(0, io.SeekStart); err != nil {
            return err
        }
        createData["proof_code"], err = d.calProofCode(file, stream.GetSize())
        if err != nil {
            return fmt.Errorf("cal proof code error: %s", err.Error())
        }
        _, err = d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
            req.SetBody(createData).SetResult(&createResp)
        })
        if err != nil {
            return err
        }
        if createResp.RapidUpload {
            log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
            return nil
        }
        // failed to rapid upload, try normal upload
        if _, err = file.Seek(0, io.SeekStart); err != nil {
            return err
        }
    }
    log.Debugf("[aliyundrive_open] create file success, resp: %+v", createResp)
    return d.normalUpload(ctx, stream, up, createResp, count, partSize)
}
@@ -5,7 +5,6 @@ import (
    "errors"
    "fmt"
    "net/http"
    "strings"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/op"

@@ -48,6 +47,11 @@ func (d *AliyundriveOpen) refreshToken() error {
}

func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
    b, err, _ := d.requestReturnErrResp(uri, method, callback, retry...)
    return b, err
}

func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
    req := base.RestyClient.R()
    // TODO check whether access_token is expired
    req.SetHeader("Authorization", "Bearer "+d.AccessToken)

@@ -61,20 +65,20 @@ func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback,
    req.SetError(&e)
    res, err := req.Execute(method, d.base+uri)
    if err != nil {
        return nil, err
        return nil, err, nil
    }
    isRetry := len(retry) > 0 && retry[0]
    if e.Code != "" {
        if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
            err = d.refreshToken()
            if err != nil {
                return nil, err
                return nil, err, nil
            }
            return d.request(uri, method, callback, true)
            return d.requestReturnErrResp(uri, method, callback, true)
        }
        return nil, fmt.Errorf("%s:%s", e.Code, e.Message)
        return nil, fmt.Errorf("%s:%s", e.Code, e.Message), &e
    }
    return res.Body(), nil
    return res.Body(), nil, nil
}

func (d *AliyundriveOpen) list(ctx context.Context, data base.Json) (*Files, error) {

@@ -117,59 +121,3 @@ func (d *AliyundriveOpen) getFiles(ctx context.Context, fileId string) ([]File,
    }
    return res, nil
}

func makePartInfos(size int) []base.Json {
    partInfoList := make([]base.Json, size)
    for i := 0; i < size; i++ {
        partInfoList[i] = base.Json{"part_number": 1 + i}
    }
    return partInfoList
}

func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
    partInfoList := makePartInfos(count)
    var resp CreateResp
    _, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
        req.SetBody(base.Json{
            "drive_id": d.DriveId,
            "file_id": fileId,
            "part_info_list": partInfoList,
            "upload_id": uploadId,
        }).SetResult(&resp)
    })
    return resp.PartInfoList, err
}

func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
    partInfo := resp.PartInfoList[i-1]
    uploadUrl := partInfo.UploadUrl
    if d.InternalUpload {
        uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
    }
    req, err := http.NewRequest("PUT", uploadUrl, reader)
    if err != nil {
        return err
    }
    req = req.WithContext(ctx)
    res, err := base.HttpClient.Do(req)
    if err != nil {
        if retry {
            reader.Reset()
            return d.uploadPart(ctx, i, count, reader, resp, false)
        }
        return err
    }
    res.Body.Close()
    if retry && res.StatusCode == http.StatusForbidden {
        resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
        if err != nil {
            return err
        }
        reader.Reset()
        return d.uploadPart(ctx, i, count, reader, resp, false)
    }
    if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
        return fmt.Errorf("upload status: %d", res.StatusCode)
    }
    return nil
}
@@ -64,7 +64,7 @@ func readDir(dirname string) ([]fs.FileInfo, error) {
func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
    fullPath := file.GetPath()
    thumbPrefix := "alist_thumb_"
    thumbName := thumbPrefix + utils.GetMD5Encode(fullPath) + ".png"
    thumbName := thumbPrefix + utils.GetMD5EncodeStr(fullPath) + ".png"
    if d.ThumbCacheFolder != "" {
        // skip if the file is a thumbnail
        if strings.HasPrefix(file.GetName(), thumbPrefix) {

@@ -91,7 +91,7 @@ func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
        srcBuf = imgBuf
    }

    image, err := imaging.Decode(srcBuf)
    image, err := imaging.Decode(srcBuf, imaging.AutoOrientation(true))
    if err != nil {
        return nil, nil, err
    }
@@ -12,6 +12,7 @@ type Addition struct {
    Region string `json:"region"`
    AccessKeyID string `json:"access_key_id" required:"true"`
    SecretAccessKey string `json:"secret_access_key" required:"true"`
    SessionToken string `json:"session_token"`
    CustomHost string `json:"custom_host"`
    SignURLExpire int `json:"sign_url_expire" type:"number" default:"4"`
    Placeholder string `json:"placeholder"`
@@ -22,7 +22,7 @@ import (

func (d *S3) initSession() error {
    cfg := &aws.Config{
        Credentials: credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, ""),
        Credentials: credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, d.SessionToken),
        Region: &d.Region,
        Endpoint: &d.Endpoint,
        S3ForcePathStyle: aws.Bool(d.ForcePathStyle),
@@ -56,7 +56,7 @@ func (x *Thunder) Init(ctx context.Context) (err error) {
            "j",
            "4scKJNdd7F27Hv7tbt",
        },
        DeviceID: utils.GetMD5Encode(x.Username + x.Password),
        DeviceID: utils.GetMD5EncodeStr(x.Username + x.Password),
        ClientID: "Xp6vsxz_7IYVw2BB",
        ClientSecret: "Xp6vsy4tN9toTVdMSpomVdXpRmES",
        ClientVersion: "7.51.0.8196",

@@ -137,7 +137,7 @@ func (x *ThunderExpert) Init(ctx context.Context) (err error) {

        DeviceID: func() string {
            if len(x.DeviceID) != 32 {
                return utils.GetMD5Encode(x.DeviceID)
                return utils.GetMD5EncodeStr(x.DeviceID)
            }
            return x.DeviceID
        }(),

@@ -78,7 +78,7 @@ type Addition struct {

// login signature, used to decide whether to log in again
func (i *Addition) GetIdentity() string {
    return utils.GetMD5Encode(i.Username + i.Password)
    return utils.GetMD5EncodeStr(i.Username + i.Password)
}

var config = driver.Config{

@@ -100,7 +100,7 @@ func (c *Common) GetCaptchaSign() (timestamp, sign string) {
    timestamp = fmt.Sprint(time.Now().UnixMilli())
    str := fmt.Sprint(c.ClientID, c.ClientVersion, c.PackageName, c.DeviceID, timestamp)
    for _, algorithm := range c.Algorithms {
        str = utils.GetMD5Encode(str + algorithm)
        str = utils.GetMD5EncodeStr(str + algorithm)
    }
    sign = "1." + str
    return
@@ -81,7 +81,7 @@ func (d *USS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
    expireAt := time.Now().Add(downExp).Unix()
    upd := url.QueryEscape(path.Base(file.GetPath()))
    signStr := strings.Join([]string{d.OperatorPassword, fmt.Sprint(expireAt), fmt.Sprintf("/%s", key)}, "&")
    upt := utils.GetMD5Encode(signStr)[12:20] + fmt.Sprint(expireAt)
    upt := utils.GetMD5EncodeStr(signStr)[12:20] + fmt.Sprint(expireAt)
    link := fmt.Sprintf("%s?_upd=%s&_upt=%s", u, upd, upt)
    return &model.Link{URL: link}, nil
}
go.mod (24 lines changed)
@@ -8,7 +8,7 @@ require (
    github.com/Xhofe/wopan-sdk-go v0.1.1
    github.com/avast/retry-go v3.0.0+incompatible
    github.com/aws/aws-sdk-go v1.44.262
    github.com/blevesearch/bleve/v2 v2.3.8
    github.com/blevesearch/bleve/v2 v2.3.9
    github.com/caarlos0/env/v9 v9.0.0
    github.com/coreos/go-oidc v2.2.1+incompatible
    github.com/deckarep/golang-set/v2 v2.3.0

@@ -23,14 +23,14 @@ require (
    github.com/gorilla/websocket v1.5.0
    github.com/hirochachacha/go-smb2 v1.1.0
    github.com/ipfs/go-ipfs-api v0.6.0
    github.com/jlaffaye/ftp v0.1.0
    github.com/jlaffaye/ftp v0.2.0
    github.com/json-iterator/go v1.1.12
    github.com/maruel/natural v1.1.0
    github.com/natefinch/lumberjack v2.0.0+incompatible
    github.com/pkg/errors v0.9.1
    github.com/pkg/sftp v1.13.5
    github.com/pquerna/otp v1.4.0
    github.com/sirupsen/logrus v1.9.2
    github.com/sirupsen/logrus v1.9.3
    github.com/spf13/cobra v1.7.0
    github.com/t3rm1n4l/go-mega v0.0.0-20230228171823-a01a2cda13ca
    github.com/u2takey/ffmpeg-go v0.4.1

@@ -48,7 +48,7 @@ require (

require (
    github.com/BurntSushi/toml v0.3.1 // indirect
    github.com/RoaringBitmap/roaring v0.9.4 // indirect
    github.com/RoaringBitmap/roaring v1.2.3 // indirect
    github.com/aead/ecdh v0.2.0 // indirect
    github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible // indirect
    github.com/andreburgaud/crypt2go v1.1.0 // indirect

@@ -59,16 +59,16 @@ require (
    github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
    github.com/blevesearch/gtreap v0.1.1 // indirect
    github.com/blevesearch/mmap-go v1.0.4 // indirect
    github.com/blevesearch/scorch_segment_api/v2 v2.1.4 // indirect
    github.com/blevesearch/scorch_segment_api/v2 v2.1.5 // indirect
    github.com/blevesearch/segment v0.9.1 // indirect
    github.com/blevesearch/snowballstem v0.9.0 // indirect
    github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
    github.com/blevesearch/vellum v1.0.9 // indirect
    github.com/blevesearch/zapx/v11 v11.3.7 // indirect
    github.com/blevesearch/zapx/v12 v12.3.7 // indirect
    github.com/blevesearch/zapx/v13 v13.3.7 // indirect
    github.com/blevesearch/zapx/v14 v14.3.7 // indirect
    github.com/blevesearch/zapx/v15 v15.3.10 // indirect
    github.com/blevesearch/vellum v1.0.10 // indirect
    github.com/blevesearch/zapx/v11 v11.3.9 // indirect
    github.com/blevesearch/zapx/v12 v12.3.9 // indirect
    github.com/blevesearch/zapx/v13 v13.3.9 // indirect
    github.com/blevesearch/zapx/v14 v14.3.9 // indirect
    github.com/blevesearch/zapx/v15 v15.3.12 // indirect
    github.com/bluele/gcache v0.0.2 // indirect
    github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect
    github.com/bytedance/sonic v1.9.1 // indirect

@@ -131,7 +131,7 @@ require (
    github.com/u2takey/go-utils v0.3.1 // indirect
    github.com/ugorji/go/codec v1.2.11 // indirect
    github.com/whyrusleeping/tar-utils v0.0.0-20180509141711-8c6c8ba81d5c // indirect
    go.etcd.io/bbolt v1.3.5 // indirect
    go.etcd.io/bbolt v1.3.7 // indirect
    golang.org/x/arch v0.3.0 // indirect
    golang.org/x/sys v0.10.0 // indirect
    golang.org/x/text v0.11.0 // indirect
go.sum (49 lines changed)
@@ -1,7 +1,7 @@
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/RoaringBitmap/roaring v0.9.4 h1:ckvZSX5gwCRaJYBNe7syNawCU5oruY9gQmjXlp4riwo=
github.com/RoaringBitmap/roaring v0.9.4/go.mod h1:icnadbWcNyfEHlYdr+tDlOTih1Bf/h+rzPpv4sbomAA=
github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
github.com/SheltonZhu/115driver v1.0.14 h1:uW3dl8J9KDMw+3gPxQdhTysoGhw0/uI1484GT9xhfU4=
github.com/SheltonZhu/115driver v1.0.14/go.mod h1:00ixivHH5HqDj4S7kAWbkuUrjtsJTxc7cGv5RMw3RVs=
github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a h1:RenIAa2q4H8UcS/cqmwdT1WCWIAH5aumP8m8RpbqVsE=

@@ -23,8 +23,8 @@ github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT
github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjLyS07ChA=
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/blevesearch/bleve/v2 v2.3.8 h1:IqFyMJ73n4gY8AmVqM8Sa6EtAZ5beE8yramVqCvs2kQ=
github.com/blevesearch/bleve/v2 v2.3.8/go.mod h1:Lh9aZEHrLKxwPnW4z4lsBEGnflZQ1V/aWP/t+htsiDw=
github.com/blevesearch/bleve/v2 v2.3.9 h1:pUMvK0mxAexqasZcVj8lazmWnEW5XiV0tASIqANiNTQ=
github.com/blevesearch/bleve/v2 v2.3.9/go.mod h1:1PibElcjlQMQHF9uS9mRv58ODQgj4pCWHA1Wfd+qagU=
github.com/blevesearch/bleve_index_api v1.0.5 h1:Lc986kpC4Z0/n1g3gg8ul7H+lxgOQPcXb9SxvQGu+tw=
github.com/blevesearch/bleve_index_api v1.0.5/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
github.com/blevesearch/geo v0.1.17 h1:AguzI6/5mHXapzB0gE9IKWo+wWPHZmXZoscHcjFgAFA=

@@ -35,26 +35,26 @@ github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZG
github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
github.com/blevesearch/scorch_segment_api/v2 v2.1.4 h1:LmGmo5twU3gV+natJbKmOktS9eMhokPGKWuR+jX84vk=
github.com/blevesearch/scorch_segment_api/v2 v2.1.4/go.mod h1:PgVnbbg/t1UkgezPDu8EHLi1BHQ17xUwsFdU6NnOYS0=
github.com/blevesearch/scorch_segment_api/v2 v2.1.5 h1:1g713kpCQZ8u4a3stRGBfrwVOuGRnmxOVU5MQkUPrHU=
github.com/blevesearch/scorch_segment_api/v2 v2.1.5/go.mod h1:f2nOkKS1HcjgIWZgDAErgBdxmr2eyt0Kn7IY+FU1Xe4=
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A=
github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
github.com/blevesearch/vellum v1.0.9 h1:PL+NWVk3dDGPCV0hoDu9XLLJgqU4E5s/dOeEJByQ2uQ=
github.com/blevesearch/vellum v1.0.9/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
github.com/blevesearch/zapx/v11 v11.3.7 h1:Y6yIAF/DVPiqZUA/jNgSLXmqewfzwHzuwfKyfdG+Xaw=
github.com/blevesearch/zapx/v11 v11.3.7/go.mod h1:Xk9Z69AoAWIOvWudNDMlxJDqSYGf90LS0EfnaAIvXCA=
github.com/blevesearch/zapx/v12 v12.3.7 h1:DfQ6rsmZfEK4PzzJJRXjiM6AObG02+HWvprlXQ1Y7eI=
github.com/blevesearch/zapx/v12 v12.3.7/go.mod h1:SgEtYIBGvM0mgIBn2/tQE/5SdrPXaJUaT/kVqpAPxm0=
github.com/blevesearch/zapx/v13 v13.3.7 h1:igIQg5eKmjw168I7av0Vtwedf7kHnQro/M+ubM4d2l8=
github.com/blevesearch/zapx/v13 v13.3.7/go.mod h1:yyrB4kJ0OT75UPZwT/zS+Ru0/jYKorCOOSY5dBzAy+s=
github.com/blevesearch/zapx/v14 v14.3.7 h1:gfe+fbWslDWP/evHLtp/GOvmNM3sw1BbqD7LhycBX20=
github.com/blevesearch/zapx/v14 v14.3.7/go.mod h1:9J/RbOkqZ1KSjmkOes03AkETX7hrXT0sFMpWH4ewC4w=
github.com/blevesearch/zapx/v15 v15.3.10 h1:bQ9ZxJCj6rKp873EuVJu2JPxQ+EWQZI1cjJGeroovaQ=
github.com/blevesearch/zapx/v15 v15.3.10/go.mod h1:m7Y6m8soYUvS7MjN9eKlz1xrLCcmqfFadmu7GhWIrLY=
github.com/blevesearch/vellum v1.0.10 h1:HGPJDT2bTva12hrHepVT3rOyIKFFF4t7Gf6yMxyMIPI=
github.com/blevesearch/vellum v1.0.10/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
github.com/blevesearch/zapx/v11 v11.3.9 h1:y3ijS4h4MJdmQ07MHASxat4owAixreK2xdo76w9ncrw=
github.com/blevesearch/zapx/v11 v11.3.9/go.mod h1:jcAYnQwlr+LqD2vLjDWjWiZDXDXGFqPbpPDRTd3XmS4=
github.com/blevesearch/zapx/v12 v12.3.9 h1:MXGLlZ03oxXH3DMJTZaBaRj2xb6t4wQVZeZK/wu1M6w=
github.com/blevesearch/zapx/v12 v12.3.9/go.mod h1:QXCMwmOkdLnMDgTN1P4CcuX5F851iUOtOwXbw0HMBYs=
github.com/blevesearch/zapx/v13 v13.3.9 h1:+VAz9V0VmllHXlZV4DCvfYj0nqaZHgF3MeEHwOyRBwQ=
github.com/blevesearch/zapx/v13 v13.3.9/go.mod h1:s+WjNp4WSDtrBVBpa37DUOd7S/Gr/jTZ7ST/MbCVj/0=
github.com/blevesearch/zapx/v14 v14.3.9 h1:wuqxATgsTCNHM9xsOFOeFp8H2heZ/gMX/tsl9lRK8U4=
github.com/blevesearch/zapx/v14 v14.3.9/go.mod h1:MWZ4v8AzFBRurhDzkLvokFW8ljcq9Evm27mkWe8OGbM=
github.com/blevesearch/zapx/v15 v15.3.12 h1:w/kU9aHyfMDEdwHGZzCiakC3HZ9z5gYlXaALDC4Dct8=
github.com/blevesearch/zapx/v15 v15.3.12/go.mod h1:tx53gDJS/7Oa3Je820cmVurqCuJ4dqdAy1kiDMV/IUo=
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=

@@ -169,8 +169,8 @@ github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkr
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
github.com/jlaffaye/ftp v0.1.0 h1:DLGExl5nBoSFoNshAUHwXAezXwXBvFdx7/qwhucWNSE=
github.com/jlaffaye/ftp v0.1.0/go.mod h1:hhq4G4crv+nW2qXtNYcuzLeOudG92Ps37HEKeg2e3lE=
github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg=
github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI=
github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=

@@ -268,8 +268,8 @@ github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUA
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/sirupsen/logrus v1.9.2 h1:oxx1eChJGI6Uks2ZC4W1zpLlVgqB8ner4EuQwV4Ik1Y=
github.com/sirupsen/logrus v1.9.2/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0=
github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=

@@ -315,8 +315,8 @@ github.com/whyrusleeping/tar-utils v0.0.0-20180509141711-8c6c8ba81d5c/go.mod h1:
github.com/winfsp/cgofuse v1.5.0 h1:MsBP7Mi/LiJf/7/F3O/7HjjR009ds6KCdqXzKpZSWxI=
github.com/winfsp/cgofuse v1.5.0/go.mod h1:h3awhoUOcn2VYVKCwDaYxSLlZwnyK+A8KaDoLUp2lbU=
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
gocv.io/x/gocv v0.25.0/go.mod h1:Rar2PS6DV+T4FL+PM535EImD/h13hGVaHhnCu1xarBs=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=

@@ -358,7 +358,6 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@@ -9,24 +9,28 @@ import (
    "strings"
)

func GetSHA1Encode(data string) string {
func GetSHA1Encode(data []byte) string {
    h := sha1.New()
    h.Write([]byte(data))
    h.Write(data)
    return hex.EncodeToString(h.Sum(nil))
}

func GetSHA256Encode(data string) string {
func GetSHA256Encode(data []byte) string {
    h := sha256.New()
    h.Write([]byte(data))
    h.Write(data)
    return hex.EncodeToString(h.Sum(nil))
}

func GetMD5Encode(data string) string {
func GetMD5Encode(data []byte) string {
    h := md5.New()
    h.Write([]byte(data))
    h.Write(data)
    return hex.EncodeToString(h.Sum(nil))
}

func GetMD5EncodeStr(data string) string {
    return GetMD5Encode([]byte(data))
}

var DEC = map[string]string{
    "-": "+",
    "_": "/",