Compare commits

93 Commits

579f0c06af  b12d92acc9  e700ce15e5  7dbef7d559  7e9cdd8b07  cee6bc6b5d  cfd23c05b4  0c1acd72ca
e2ca06dcca  0828fd787d  2e23ea68d4  4afa822bec  f2ca9b40db  4c2535cb22  d4ea8787c9  a4de04528a
f60aae7499  de8f9e9eee  cace9db12f  ec2fb82836  afcfbf02ea  cad04e07dd  30f732138c  04034bd03b
6ec9a8d4c7  3f7882b467  a4511c1963  9d1f122717  5dd73d80d8  fce872bc1b  df6c4c80c2  d2ff040cf8
a31af209cc  3f8b3da52b  6887f14ec6  3e0de5eaac  61101a60f4  3529023bf9  d1d1a089a4  fa66358b1e
2b533e4b91  d3530a8d80  6052eb3512  d17f7f7cad  8bdc67ec3d  4fabc27366  e4c7b0f17c  5e8bfb017e
7d20a01dba  59dbf4496f  12f40608e6  89832c296f  f09bb88846  c518f59528  e9c74f9959  21b8e7f6e5
2ae9cd8634  cfee536b96  1c8fe3b24c  84e23c397d  f7baec2e65  378bab32f1  6cd8151cad  541449e10f
ca5a53fc24  f646d2a699  363e036bf0  e23f00f349  9600267bda  a66b0e0151  3bfa00d5d2  6cbd2532cc
47976af0d3  4dca52be85  62bb09300d  f9e067abec  1e62666406  0e0cdf15ef  b124fdc092  5141b3c165
881d6e271e  bd2418c438  8421c72c5c  a80e21997c  4369cbbac3  89f76d7899  ef68f84787  2c1f70fbe9
b2f5757f8d  6b97b4eb20  645c10c11f  571bcf07b0  63de65be45
.github/ISSUE_TEMPLATE/bug_report.yml (2 changes)

@@ -21,7 +21,7 @@ body:
       attributes:
         label: Alist Version / Alist 版本
         description: What version of our software are you running?
-        placeholder: v2.0.0
+        placeholder: v3.xx.xx
     validations:
       required: true
   - type: input
.github/stale.yml (2 changes)

@@ -1,7 +1,7 @@
 # Number of days of inactivity before an issue becomes stale
 daysUntilStale: 44
 # Number of days of inactivity before a stale issue is closed
-daysUntilClose: 10
+daysUntilClose: 20
 # Issues with these labels will never be considered stale
 exemptLabels:
   - accepted
.github/workflows/issue_close_stale.yml (2 changes)

@@ -18,4 +18,4 @@ jobs:
         inactive-day: 8
         close-reason: 'not_planned'
         body: |
-          Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 52 days. You can reopen or recreate it if you think it should continue.
+          Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 52 days. You can reopen or recreate it if you think it should continue. Thank you for your contributions again.
.github/workflows/issue_question.yml (2 changes)

@@ -10,7 +10,7 @@ jobs:
     if: github.event.label.name == 'question'
     steps:
       - name: Create comment
-        uses: actions-cool/issues-helper@v3.4.0
+        uses: actions-cool/issues-helper@v3.5.1
        with:
          actions: 'create-comment'
          token: ${{ secrets.GITHUB_TOKEN }}
(file path not shown)

@@ -7,7 +7,7 @@
 Prerequisites:

 - [git](https://git-scm.com)
-- [Go 1.19+](https://golang.org/doc/install)
+- [Go 1.20+](https://golang.org/doc/install)
 - [gcc](https://gcc.gnu.org/)
 - [nodejs](https://nodejs.org/)

(file path not shown)

@@ -1,11 +1,11 @@
-FROM alpine:3.17 as builder
+FROM alpine:3.18 as builder
 LABEL stage=go-builder
 WORKDIR /app/
 COPY ./ ./
 RUN apk add --no-cache bash curl gcc git go musl-dev; \
     bash build.sh release docker

-FROM alpine:3.17
+FROM alpine:3.18
 LABEL MAINTAINER="i@nn.ci"
 VOLUME /opt/alist/data/
 WORKDIR /opt/alist/

@@ -14,5 +14,5 @@ COPY entrypoint.sh /entrypoint.sh
 RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
     chmod +x /entrypoint.sh
 ENV PUID=0 PGID=0 UMASK=022
-EXPOSE 5244
+EXPOSE 5244 5245
 CMD [ "/entrypoint.sh" ]
README.md (10 changes)

@@ -1,6 +1,6 @@
 <div align="center">
   <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
-  <p><em>🗂️A file list program that supports multiple storage, powered by Gin and Solidjs.</em></p>
+  <p><em>🗂️A file list program that supports multiple storages, powered by Gin and Solidjs.</em></p>
   <div>
     <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
       <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />

@@ -39,7 +39,7 @@

 ---

-English | [中文](./README_cn.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
+English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)

 ## Features

@@ -62,6 +62,7 @@ English | [中文](./README_cn.md) | [Contributing](./CONTRIBUTING.md) | [CODE_O
 - [x] [YandexDisk](https://disk.yandex.com/)
 - [x] [BaiduNetdisk](http://pan.baidu.com/)
 - [x] [Terabox](https://www.terabox.com/main)
+- [x] [UC](https://drive.uc.cn)
 - [x] [Quark](https://pan.quark.cn)
 - [x] [Thunder](https://pan.xunlei.com)
 - [x] [Lanzou](https://www.lanzou.com/)

@@ -72,6 +73,7 @@ English | [中文](./README_cn.md) | [Contributing](./CONTRIBUTING.md) | [CODE_O
 - [x] SMB
 - [x] [115](https://115.com/)
 - [X] Cloudreve
+- [x] [Dropbox](https://www.dropbox.com/)
 - [x] Easy to deploy and out-of-the-box
 - [x] File preview (PDF, markdown, code, plain text, ...)
 - [x] Image preview in gallery mode

@@ -109,8 +111,8 @@ https://alist.nn.ci/guide/sponsor.html

 ### Special sponsors

-- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
-- [KinhDown 百度云盘不限速下载!永久免费!已稳定运行3年!非常可靠!Q群 -> 786799372](https://kinhdown.com)
+- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
+- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
 - [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)

 ## Contributors
(file path not shown)

@@ -39,7 +39,7 @@

 ---

-[English](./README.md) | 中文 | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
+[English](./README.md) | 中文 | [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)

 ## 功能

@@ -61,6 +61,7 @@
 - [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)
 - [x] [Yandex.Disk](https://disk.yandex.com/)
 - [x] [百度网盘](http://pan.baidu.com/)
+- [x] [UC网盘](https://drive.uc.cn)
 - [x] [夸克网盘](https://pan.quark.cn)
 - [x] [迅雷网盘](https://pan.xunlei.com)
 - [x] [蓝奏云](https://www.lanzou.com/)

@@ -71,6 +72,7 @@
 - [x] SMB
 - [x] [115](https://115.com/)
 - [X] Cloudreve
+- [x] [Dropbox](https://www.dropbox.com/)
 - [x] 部署方便,开箱即用
 - [x] 文件预览(PDF、markdown、代码、纯文本……)
 - [x] 画廊模式下的图像预览

@@ -107,8 +109,8 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我

 ### 特别赞助

-- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
-- [KinhDown 百度云盘不限速下载!永久免费!已稳定运行3年!非常可靠!Q群 -> 786799372](https://kinhdown.com)
+- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
+- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
 - [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)

 ## 贡献者
README_ja.md (new file, 137 lines)

@@ -0,0 +1,137 @@
+<div align="center">
+  <a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
+  <p><em>🗂️Gin と Solidjs による、複数のストレージをサポートするファイルリストプログラム。</em></p>
+  <div>
+    <a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
+      <img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
+    </a>
+    <a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
+      <img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
+    </a>
+    <a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
+      <img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
+    </a>
+    <a href="https://github.com/Xhofe/alist/releases">
+      <img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
+    </a>
+    <a title="Crowdin" target="_blank" href="https://crwd.in/alist">
+      <img src="https://badges.crowdin.net/alist/localized.svg">
+    </a>
+  </div>
+  <div>
+    <a href="https://github.com/Xhofe/alist/discussions">
+      <img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
+    </a>
+    <a href="https://discord.gg/F4ymsH4xv2">
+      <img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
+    </a>
+    <a href="https://github.com/Xhofe/alist/releases">
+      <img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
+    </a>
+    <a href="https://hub.docker.com/r/xhofe/alist">
+      <img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
+    </a>
+    <a href="https://alist.nn.ci/guide/sponsor.html">
+      <img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
+    </a>
+  </div>
+</div>
+
+---
+
+[English](./README.md) | [中文](./README_cn.md) | 日本語 | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
+
+## 特徴
+
+- [x] マルチストレージ
+- [x] ローカルストレージ
+- [x] [Aliyundrive](https://www.aliyundrive.com/)
+- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
+- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
+- [x] [GoogleDrive](https://drive.google.com/)
+- [x] [123pan](https://www.123pan.com/)
+- [x] FTP / SFTP
+- [x] [PikPak](https://www.mypikpak.com/)
+- [x] [S3](https://aws.amazon.com/s3/)
+- [x] [Seafile](https://seafile.com/)
+- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
+- [x] WebDav(Support OneDrive/SharePoint without API)
+- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
+- [x] [Mediatrack](https://www.mediatrack.cn/)
+- [x] [139yun](https://yun.139.com/) (Personal, Family)
+- [x] [YandexDisk](https://disk.yandex.com/)
+- [x] [BaiduNetdisk](http://pan.baidu.com/)
+- [x] [Terabox](https://www.terabox.com/main)
+- [x] [UC](https://drive.uc.cn)
+- [x] [Quark](https://pan.quark.cn)
+- [x] [Thunder](https://pan.xunlei.com)
+- [x] [Lanzou](https://www.lanzou.com/)
+- [x] [Aliyundrive share](https://www.aliyundrive.com/)
+- [x] [Google photo](https://photos.google.com/)
+- [x] [Mega.nz](https://mega.nz)
+- [x] [Baidu photo](https://photo.baidu.com/)
+- [x] SMB
+- [x] [115](https://115.com/)
+- [X] Cloudreve
+- [x] [Dropbox](https://www.dropbox.com/)
+- [x] デプロイが簡単で、すぐに使える
+- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
+- [x] ギャラリーモードでの画像プレビュー
+- [x] ビデオとオーディオのプレビュー、歌詞と字幕のサポート
+- [x] Office ドキュメントのプレビュー (docx, pptx, xlsx, ...)
+- [x] `README.md` のプレビューレンダリング
+- [x] ファイルのパーマリンクコピーと直接ダウンロード
+- [x] ダークモード
+- [x] 国際化
+- [x] 保護されたルート (パスワード保護と認証)
+- [x] WebDav (詳細は https://alist.nn.ci/guide/webdav.html を参照)
+- [x] [Docker デプロイ](https://hub.docker.com/r/xhofe/alist)
+- [x] Cloudflare ワーカープロキシ
+- [x] ファイル/フォルダパッケージのダウンロード
+- [x] ウェブアップロード(訪問者にアップロードを許可できる), 削除, mkdir, 名前変更, 移動, コピー
+- [x] オフラインダウンロード
+- [x] 二つのストレージ間でファイルをコピー
+
+## ドキュメント
+
+<https://alist.nn.ci/>
+
+## デモ
+
+<https://al.nn.ci>
+
+## ディスカッション
+
+一般的なご質問は[ディスカッションフォーラム](https://github.com/Xhofe/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
+
+## スポンサー
+
+AList はオープンソースのソフトウェアです。もしあなたがこのプロジェクトを気に入ってくださり、続けて欲しいと思ってくださるなら、ぜひスポンサーになってくださるか、1口でも寄付をしてくださるようご検討ください!すべての愛とサポートに感謝します:
+https://alist.nn.ci/guide/sponsor.html
+
+### スペシャルスポンサー
+
+- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
+- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
+- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
+
+## コントリビューター
+
+これらの素晴らしい人々に感謝します:
+
+[](https://github.com/alist-org/alist/graphs/contributors)
+
+## ライセンス
+
+`AList` は AGPL-3.0 ライセンスの下でライセンスされたオープンソースソフトウェアです。
+
+## 免責事項
+- このプログラムはフリーでオープンソースのプロジェクトです。ネットワークディスク上でファイルを共有するように設計されており、golang のダウンロードや学習に便利です。利用にあたっては関連法規を遵守し、悪用しないようお願いします;
+- このプログラムは、公式インターフェースの動作を破壊することなく、公式 sdk/インターフェースを呼び出すことで実装されています;
+- このプログラムは、302リダイレクト/トラフィック転送のみを行い、いかなるユーザーデータも傍受、保存、改ざんしません;
+- このプログラムを使用する前に、アカウントの禁止、ダウンロード速度の制限など、対応するリスクを理解し、負担する必要があります;
+- もし侵害があれば、[メール](mailto:i@nn.ci)で私に連絡してください。
+
+---
+
+> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
build.sh (24 changes)

@@ -54,11 +54,30 @@ BuildWinArm64() {

BuildDev() {
  rm -rf .git/
-  xgo -targets=linux/amd64,windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
  mkdir -p "dist"
+  muslflags="--extldflags '-static -fpic' $ldflags"
+  BASE="https://musl.nn.ci/"
+  FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross)
+  for i in "${FILES[@]}"; do
+    url="${BASE}${i}.tgz"
+    curl -L -o "${i}.tgz" "${url}"
+    sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
+  done
+  OS_ARCHES=(linux-musl-amd64 linux-musl-arm64)
+  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc)
+  for i in "${!OS_ARCHES[@]}"; do
+    os_arch=${OS_ARCHES[$i]}
+    cgo_cc=${CGO_ARGS[$i]}
+    echo building for ${os_arch}
+    export GOOS=${os_arch%%-*}
+    export GOARCH=${os_arch##*-}
+    export CC=${cgo_cc}
+    export CGO_ENABLED=1
+    go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
+  done
+  xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
  mv alist-* dist
  cd dist
-  upx -9 ./alist-linux*
  cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
  upx -9 ./alist-windows-amd64-upx.exe
  find . -type f -print0 | xargs -0 md5sum >md5.txt

@@ -79,6 +98,7 @@ BuildRelease() {
    url="${BASE}${i}.tgz"
    curl -L -o "${i}.tgz" "${url}"
    sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
+    rm -f "${i}.tgz"
  done
  OS_ARCHES=(linux-musl-amd64 linux-musl-arm64 linux-musl-arm linux-musl-mips linux-musl-mips64 linux-musl-mips64le linux-musl-mipsle linux-musl-ppc64le linux-musl-s390x)
  CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc arm-linux-musleabihf-gcc mips-linux-musl-gcc mips64-linux-musl-gcc mips64el-linux-musl-gcc mipsel-linux-musl-gcc powerpc64le-linux-musl-gcc s390x-linux-musl-gcc)
(file path not shown)

@@ -24,7 +24,7 @@ func Execute() {
 }

 func init() {
-    RootCmd.PersistentFlags().StringVar(&flags.DataDir, "data", "data", "config file")
+    RootCmd.PersistentFlags().StringVar(&flags.DataDir, "data", "data", "data folder")
     RootCmd.PersistentFlags().BoolVar(&flags.Debug, "debug", false, "start with debug mode")
     RootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
     RootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
cmd/server.go (110 changes)

@@ -3,9 +3,12 @@ package cmd
 import (
     "context"
     "fmt"
+    "net"
     "net/http"
     "os"
     "os/signal"
+    "strconv"
+    "sync"
     "syscall"
     "time"

@@ -28,6 +31,10 @@ var ServerCmd = &cobra.Command{
 the address is defined in config file`,
     Run: func(cmd *cobra.Command, args []string) {
         Init()
+        if conf.Conf.DelayedStart != 0 {
+            utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
+            time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
+        }
         bootstrap.InitAria2()
         bootstrap.InitQbittorrent()
         bootstrap.LoadStorages()

@@ -37,42 +44,95 @@ the address is defined in config file`,
         r := gin.New()
         r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
         server.Init(r)
-        base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
-        utils.Log.Infof("start server @ %s", base)
-        srv := &http.Server{Addr: base, Handler: r}
-        go func() {
-            var err error
-            if conf.Conf.Scheme.Https {
-                //err = r.RunTLS(base, conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
-                err = srv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
-            } else {
-                err = srv.ListenAndServe()
-            }
-            if err != nil && err != http.ErrServerClosed {
-                utils.Log.Fatalf("failed to start: %s", err.Error())
-            }
-        }()
+        var httpSrv, httpsSrv, unixSrv *http.Server
+        if conf.Conf.Scheme.HttpPort != -1 {
+            httpBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpPort)
+            utils.Log.Infof("start HTTP server @ %s", httpBase)
+            httpSrv = &http.Server{Addr: httpBase, Handler: r}
+            go func() {
+                err := httpSrv.ListenAndServe()
+                if err != nil && err != http.ErrServerClosed {
+                    utils.Log.Fatalf("failed to start http: %s", err.Error())
+                }
+            }()
+        }
+        if conf.Conf.Scheme.HttpsPort != -1 {
+            httpsBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpsPort)
+            utils.Log.Infof("start HTTPS server @ %s", httpsBase)
+            httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
+            go func() {
+                err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
+                if err != nil && err != http.ErrServerClosed {
+                    utils.Log.Fatalf("failed to start https: %s", err.Error())
+                }
+            }()
+        }
+        if conf.Conf.Scheme.UnixFile != "" {
+            utils.Log.Infof("start unix server @ %s", conf.Conf.Scheme.UnixFile)
+            unixSrv = &http.Server{Handler: r}
+            go func() {
+                listener, err := net.Listen("unix", conf.Conf.Scheme.UnixFile)
+                if err != nil {
+                    utils.Log.Fatalf("failed to listen unix: %+v", err)
+                }
+                // set socket file permission
+                mode, err := strconv.ParseUint(conf.Conf.Scheme.UnixFilePerm, 8, 32)
+                if err != nil {
+                    utils.Log.Errorf("failed to parse socket file permission: %+v", err)
+                } else {
+                    err = os.Chmod(conf.Conf.Scheme.UnixFile, os.FileMode(mode))
+                    if err != nil {
+                        utils.Log.Errorf("failed to chmod socket file: %+v", err)
+                    }
+                }
+                err = unixSrv.Serve(listener)
+                if err != nil && err != http.ErrServerClosed {
+                    utils.Log.Fatalf("failed to start unix: %s", err.Error())
+                }
+            }()
+        }
         // Wait for interrupt signal to gracefully shutdown the server with
-        // a timeout of 5 seconds.
-        quit := make(chan os.Signal)
+        // a timeout of 1 second.
+        quit := make(chan os.Signal, 1)
         // kill (no param) default send syscanll.SIGTERM
         // kill -2 is syscall.SIGINT
         // kill -9 is syscall. SIGKILL but can"t be catch, so don't need add it
         signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
         <-quit
-        utils.Log.Println("Shutdown Server ...")
+        utils.Log.Println("Shutdown server...")

         ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
         defer cancel()
-        if err := srv.Shutdown(ctx); err != nil {
-            utils.Log.Fatal("Server Shutdown:", err)
-        }
-        // catching ctx.Done(). timeout of 3 seconds.
-        select {
-        case <-ctx.Done():
-            utils.Log.Println("timeout of 1 seconds.")
-        }
-        utils.Log.Println("Server exiting")
+        var wg sync.WaitGroup
+        if conf.Conf.Scheme.HttpPort != -1 {
+            wg.Add(1)
+            go func() {
+                defer wg.Done()
+                if err := httpSrv.Shutdown(ctx); err != nil {
+                    utils.Log.Fatal("HTTP server shutdown err: ", err)
+                }
+            }()
+        }
+        if conf.Conf.Scheme.HttpsPort != -1 {
+            wg.Add(1)
+            go func() {
+                defer wg.Done()
+                if err := httpsSrv.Shutdown(ctx); err != nil {
+                    utils.Log.Fatal("HTTPS server shutdown err: ", err)
+                }
+            }()
+        }
+        if conf.Conf.Scheme.UnixFile != "" {
+            wg.Add(1)
+            go func() {
+                defer wg.Done()
+                if err := unixSrv.Shutdown(ctx); err != nil {
+                    utils.Log.Fatal("Unix server shutdown err: ", err)
+                }
+            }()
+        }
+        wg.Wait()
+        utils.Log.Println("Server exit")
     },
 }

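The new startup path reads several scheme-related settings from the config package. Their definition is not part of this diff; the sketch below only infers field names and types from the accesses above (conf.Conf.DelayedStart, conf.Conf.Scheme.Address, HttpPort, HttpsPort, CertFile, KeyFile, UnixFile, UnixFilePerm), so treat it as an assumption rather than the actual internal/conf declaration.

```go
// Hypothetical sketch of the config fields cmd/server.go now relies on.
// Names come from the accesses in the diff; types and tags are inferred
// (ports are compared against -1, UnixFilePerm is parsed as an octal string).
package conf

type Scheme struct {
	Address      string // listen address shared by HTTP and HTTPS
	HttpPort     int    // -1 disables the HTTP listener
	HttpsPort    int    // -1 disables the HTTPS listener
	CertFile     string
	KeyFile      string
	UnixFile     string // empty disables the unix-socket listener
	UnixFilePerm string // e.g. "0777", parsed with strconv.ParseUint(perm, 8, 32)
}

type Config struct {
	DelayedStart int // seconds to sleep before bootstrapping
	Scheme       Scheme
}
```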
(file path not shown)

@@ -6,6 +6,7 @@ services:
       - '/etc/alist:/opt/alist/data'
     ports:
       - '5244:5244'
+      - '5245:5245'
     environment:
       - PUID=0
       - PGID=0
(file path not shown)

@@ -1,10 +1,11 @@
 package _115

 import (
+    "crypto/tls"
     "fmt"

     "github.com/SheltonZhu/115driver/pkg/driver"
-    "github.com/alist-org/alist/v3/drivers/base"
+    "github.com/alist-org/alist/v3/internal/conf"
     "github.com/pkg/errors"
 )

@@ -14,9 +15,11 @@ func (d *Pan115) login() error {
     var err error
     opts := []driver.Option{
         driver.UA(UserAgent),
+        func(c *driver.Pan115Client) {
+            c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
+        },
     }
     d.client = driver.New(opts...)
-    d.client.SetHttpClient(base.HttpClient)
     cr := &driver.Credential{}
     if d.Addition.QRCodeToken != "" {
         s := &driver.QRCodeSession{
(file path not shown)

@@ -1,11 +1,9 @@
 package _123

 import (
-    "bytes"
     "context"
     "crypto/md5"
     "encoding/base64"
-    "encoding/binary"
     "encoding/hex"
     "fmt"
     "io"

@@ -45,6 +43,9 @@ func (d *Pan123) Init(ctx context.Context) error {
 }

 func (d *Pan123) Drop(ctx context.Context) error {
+    _, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
+        req.SetBody(base.Json{})
+    }, nil)
     return nil
 }

@@ -98,7 +99,7 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
     }
     u_ := u.String()
     log.Debug("download url: ", u_)
-    res, err := base.NoRedirectClient.R().Get(u_)
+    res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
     if err != nil {
         return nil, err
     }

@@ -109,9 +110,12 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
     log.Debugln("res code: ", res.StatusCode())
     if res.StatusCode() == 302 {
         link.URL = res.Header().Get("location")
-    } else if res.StatusCode() == 200 {
+    } else if res.StatusCode() < 300 {
         link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
     }
+    link.Header = http.Header{
+        "Referer": []string{"https://www.123pan.com/"},
+    }
     return &link, nil
 } else {
     return nil, fmt.Errorf("can't convert obj")

@@ -177,40 +181,23 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
 }

 func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-    const DEFAULT int64 = 10485760
-    var uploadFile io.Reader
+    // const DEFAULT int64 = 10485760
     h := md5.New()
-    if d.StreamUpload && stream.GetSize() > DEFAULT {
-        // 只计算前10MIB
-        buf := bytes.NewBuffer(make([]byte, 0, DEFAULT))
-        if n, err := io.CopyN(io.MultiWriter(buf, h), stream, DEFAULT); err != io.EOF && n == 0 {
-            return err
-        }
-        // 增加额外参数防止MD5碰撞
-        h.Write([]byte(stream.GetName()))
-        num := make([]byte, 8)
-        binary.BigEndian.PutUint64(num, uint64(stream.GetSize()))
-        h.Write(num)
-        // 拼装
-        uploadFile = io.MultiReader(buf, stream)
-    } else {
-        // 计算完整文件MD5
-        tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
-        if err != nil {
-            return err
-        }
-        defer func() {
-            _ = tempFile.Close()
-            _ = os.Remove(tempFile.Name())
-        }()
-        if _, err = io.Copy(h, tempFile); err != nil {
-            return err
-        }
-        _, err = tempFile.Seek(0, io.SeekStart)
-        if err != nil {
-            return err
-        }
-        uploadFile = tempFile
-    }
+    // need to calculate md5 of the full content
+    tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
+    if err != nil {
+        return err
+    }
+    defer func() {
+        _ = tempFile.Close()
+        _ = os.Remove(tempFile.Name())
+    }()
+    if _, err = io.Copy(h, tempFile); err != nil {
+        return err
+    }
+    _, err = tempFile.Seek(0, io.SeekStart)
+    if err != nil {
+        return err
+    }
     etag := hex.EncodeToString(h.Sum(nil))
     data := base.Json{

@@ -234,7 +221,8 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         return nil
     }
     if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
-        err = d.newUpload(ctx, &resp, stream, uploadFile, up)
+        err = d.newUpload(ctx, &resp, stream, tempFile, up)
+        return err
     } else {
         cfg := &aws.Config{
             Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),

@@ -250,7 +238,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         input := &s3manager.UploadInput{
             Bucket: &resp.Data.Bucket,
             Key:    &resp.Data.Key,
-            Body:   uploadFile,
+            Body:   tempFile,
         }
         _, err = uploader.UploadWithContext(ctx, input)
     }
(file path not shown)

@@ -11,7 +11,6 @@ type Addition struct {
     driver.RootID
     OrderBy        string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
     OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
-    StreamUpload   bool   `json:"stream_upload"`
     AccessToken    string
 }

(file path not shown)

@@ -1,7 +1,10 @@
 package _123

 import (
+    "net/url"
+    "path"
     "strconv"
+    "strings"
     "time"

     "github.com/alist-org/alist/v3/internal/model"

@@ -42,7 +45,30 @@ func (f File) GetID() string {
     return strconv.FormatInt(f.FileId, 10)
 }

+func (f File) Thumb() string {
+    if f.DownloadUrl == "" {
+        return ""
+    }
+    du, err := url.Parse(f.DownloadUrl)
+    if err != nil {
+        return ""
+    }
+    du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
+    query := du.Query()
+    query.Set("w", "70")
+    query.Set("h", "70")
+    if !query.Has("type") {
+        query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
+    }
+    if !query.Has("trade_key") {
+        query.Set("trade_key", "123pan-thumbnail")
+    }
+    du.RawQuery = query.Encode()
+    return du.String()
+}
+
 var _ model.Obj = (*File)(nil)
+var _ model.Thumb = (*File)(nil)

 //func (f File) Thumb() string {
 //
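A minimal, self-contained illustration of the URL rewrite that the new Thumb() performs. The input URL below is a placeholder invented for the example, not real 123pan data; the transformation itself mirrors the hunk above.

```go
package main

import (
	"fmt"
	"net/url"
	"path"
	"strings"
)

// thumbFromDownloadURL mirrors File.Thumb() above: swap the "_24_24" path
// suffix for "_70_70" and force a 70x70 thumbnail query.
func thumbFromDownloadURL(downloadUrl, fileName string) string {
	if downloadUrl == "" {
		return ""
	}
	du, err := url.Parse(downloadUrl)
	if err != nil {
		return ""
	}
	du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
	query := du.Query()
	query.Set("w", "70")
	query.Set("h", "70")
	if !query.Has("type") {
		query.Set("type", strings.TrimPrefix(path.Base(fileName), "."))
	}
	if !query.Has("trade_key") {
		query.Set("trade_key", "123pan-thumbnail")
	}
	du.RawQuery = query.Encode()
	return du.String()
}

func main() {
	// Placeholder URL, only for demonstrating the rewrite: the path suffix
	// becomes "_70_70" and w, h, type and trade_key are appended to the query.
	in := "https://example.com/thumbnail/photo.jpg_24_24?v=1"
	fmt.Println(thumbFromDownloadURL(in, "photo.jpg"))
}
```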
(file path not shown)

@@ -34,25 +34,53 @@ func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, start
     return &s3PreSignedUrls, nil
 }

-func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp) error {
+func (d *Pan123) getS3Auth(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
     data := base.Json{
+        "StorageNode":     upReq.Data.StorageNode,
+        "bucket":          upReq.Data.Bucket,
+        "key":             upReq.Data.Key,
+        "partNumberEnd":   end,
+        "partNumberStart": start,
+        "uploadId":        upReq.Data.UploadId,
+    }
+    var s3PreSignedUrls S3PreSignedURLs
+    _, err := d.request(S3Auth, http.MethodPost, func(req *resty.Request) {
+        req.SetBody(data).SetContext(ctx)
+    }, &s3PreSignedUrls)
+    if err != nil {
+        return nil, err
+    }
+    return &s3PreSignedUrls, nil
+}
+
+func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.FileStreamer, isMultipart bool) error {
+    data := base.Json{
+        "StorageNode": upReq.Data.StorageNode,
         "bucket":      upReq.Data.Bucket,
+        "fileId":      upReq.Data.FileId,
+        "fileSize":    file.GetSize(),
+        "isMultipart": isMultipart,
         "key":         upReq.Data.Key,
         "uploadId":    upReq.Data.UploadId,
-        "StorageNode": upReq.Data.StorageNode,
     }
-    _, err := d.request(S3Complete, http.MethodPost, func(req *resty.Request) {
+    _, err := d.request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
         req.SetBody(data).SetContext(ctx)
     }, nil)
     return err
 }

 func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.FileStreamer, reader io.Reader, up driver.UpdateProgress) error {
-    chunkSize := int64(1024 * 1024 * 5)
+    chunkSize := int64(1024 * 1024 * 16)
     // fetch s3 pre signed urls
     chunkCount := int(math.Ceil(float64(file.GetSize()) / float64(chunkSize)))
-    // upload 10 chunks each batch
-    batchSize := 10
+    // only 1 batch is allowed
+    isMultipart := chunkCount > 1
+    batchSize := 1
+    getS3UploadUrl := d.getS3Auth
+    if isMultipart {
+        batchSize = 10
+        getS3UploadUrl = d.getS3PreSignedUrls
+    }
     for i := 1; i <= chunkCount; i += batchSize {
         if utils.IsCanceled(ctx) {
             return ctx.Err()

@@ -62,7 +90,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
         if end > chunkCount+1 {
             end = chunkCount + 1
         }
-        s3PreSignedUrls, err := d.getS3PreSignedUrls(ctx, upReq, start, end)
+        s3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, start, end)
         if err != nil {
             return err
         }

@@ -75,7 +103,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
             if j == chunkCount {
                 curSize = file.GetSize() - (int64(chunkCount)-1)*chunkSize
             }
-            err = d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, j, end, io.LimitReader(reader, chunkSize), curSize, false)
+            err = d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, j, end, io.LimitReader(reader, chunkSize), curSize, false, getS3UploadUrl)
             if err != nil {
                 return err
             }

@@ -83,10 +111,10 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
         }
     }
     // complete s3 upload
-    return d.completeS3(ctx, upReq)
+    return d.completeS3(ctx, upReq, file, chunkCount > 1)
 }

-func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader io.Reader, curSize int64, retry bool) error {
+func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader io.Reader, curSize int64, retry bool, getS3UploadUrl func(ctx context.Context, upReq *UploadResp, start int, end int) (*S3PreSignedURLs, error)) error {
     uploadUrl := s3PreSignedUrls.Data.PreSignedUrls[strconv.Itoa(cur)]
     if uploadUrl == "" {
         return fmt.Errorf("upload url is empty, s3PreSignedUrls: %+v", s3PreSignedUrls)

@@ -108,13 +136,13 @@ func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSign
         return fmt.Errorf("upload s3 chunk %d failed, status code: %d", cur, res.StatusCode)
     }
     // refresh s3 pre signed urls
-    newS3PreSignedUrls, err := d.getS3PreSignedUrls(ctx, upReq, cur, end)
+    newS3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, cur, end)
     if err != nil {
         return err
     }
     s3PreSignedUrls.Data.PreSignedUrls = newS3PreSignedUrls.Data.PreSignedUrls
     // retry
-    return d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, cur, end, reader, curSize, true)
+    return d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, cur, end, reader, curSize, true, getS3UploadUrl)
 }
 if res.StatusCode != http.StatusOK {
     body, err := io.ReadAll(res.Body)
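To make the new batching behaviour concrete, here is a small worked example that only applies the arithmetic visible in newUpload above; the file sizes are arbitrary illustrations.

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// Chunk/batch arithmetic from Pan123.newUpload after this change:
	// 16 MiB chunks; single-chunk files take the getS3Auth path with one
	// batch, multi-chunk files take getS3PreSignedUrls in batches of 10.
	chunkSize := int64(1024 * 1024 * 16)
	for _, size := range []int64{10 << 20, 100 << 20, 1 << 30} {
		chunkCount := int(math.Ceil(float64(size) / float64(chunkSize)))
		isMultipart := chunkCount > 1
		batchSize := 1
		if isMultipart {
			batchSize = 10
		}
		fmt.Printf("size=%4d MiB  chunks=%2d  multipart=%v  batchSize=%d\n",
			size>>20, chunkCount, isMultipart, batchSize)
	}
	// 10 MiB  -> 1 chunk,  single batch via getS3Auth
	// 100 MiB -> 7 chunks, pre-signed URLs fetched 10 at a time
	// 1 GiB   -> 64 chunks, pre-signed URLs fetched 10 at a time
}
```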
(file path not shown)

@@ -15,19 +15,26 @@ import (
 // do others that not defined in Driver interface

 const (
-    API             = "https://www.123pan.com/b/api"
-    SignIn          = API + "/user/sign_in"
-    UserInfo        = API + "/user/info"
-    FileList        = API + "/file/list/new"
-    DownloadInfo    = "https://www.123pan.com/a/api/file/download_info"
-    Mkdir           = API + "/file/upload_request"
-    Move            = API + "/file/mod_pid"
-    Rename          = API + "/file/rename"
-    Trash           = API + "/file/trash"
-    UploadRequest   = API + "/file/upload_request"
-    UploadComplete  = API + "/file/upload_complete"
-    S3PreSignedUrls = API + "/file/s3_repare_upload_parts_batch"
-    S3Complete      = API + "/file/s3_complete_multipart_upload"
+    Api              = "https://www.123pan.com/api"
+    AApi             = "https://www.123pan.com/a/api"
+    BApi             = "https://www.123pan.com/b/api"
+    MainApi          = Api
+    SignIn           = MainApi + "/user/sign_in"
+    Logout           = MainApi + "/user/logout"
+    UserInfo         = MainApi + "/user/info"
+    FileList         = MainApi + "/file/list/new"
+    DownloadInfo     = MainApi + "/file/download_info"
+    Mkdir            = MainApi + "/file/upload_request"
+    Move             = MainApi + "/file/mod_pid"
+    Rename           = MainApi + "/file/rename"
+    Trash            = MainApi + "/file/trash"
+    UploadRequest    = MainApi + "/file/upload_request"
+    UploadComplete   = MainApi + "/file/upload_complete"
+    S3PreSignedUrls  = MainApi + "/file/s3_repare_upload_parts_batch"
+    S3Auth           = MainApi + "/file/s3_upload_object/auth"
+    UploadCompleteV2 = MainApi + "/file/upload_complete/v2"
+    S3Complete       = MainApi + "/file/s3_complete_multipart_upload"
+    //AuthKeySalt      = "8-8D$sL8gPjom7bk#cY"
 )

 func (d *Pan123) login() error {

@@ -42,9 +49,18 @@ func (d *Pan123) login() error {
         body = base.Json{
             "passport": d.Username,
             "password": d.Password,
+            "remember": true,
         }
     }
     res, err := base.RestyClient.R().
+        SetHeaders(map[string]string{
+            "origin":      "https://www.123pan.com",
+            "referer":     "https://www.123pan.com/",
+            "user-agent":  "Dart/2.19(dart:io)",
+            "platform":    "android",
+            "app-version": "36",
+            //"user-agent":  base.UserAgent,
+        }).
         SetBody(body).Post(SignIn)
     if err != nil {
         return err

@@ -57,13 +73,30 @@ func (d *Pan123) login() error {
         return err
     }

+//func authKey(reqUrl string) (*string, error) {
+//    reqURL, err := url.Parse(reqUrl)
+//    if err != nil {
+//        return nil, err
+//    }
+//
+//    nowUnix := time.Now().Unix()
+//    random := rand.Intn(0x989680)
+//
+//    p4 := fmt.Sprintf("%d|%d|%s|%s|%s|%s", nowUnix, random, reqURL.Path, "web", "3", AuthKeySalt)
+//    authKey := fmt.Sprintf("%d-%d-%x", nowUnix, random, md5.Sum([]byte(p4)))
+//    return &authKey, nil
+//}
+
 func (d *Pan123) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
     req := base.RestyClient.R()
     req.SetHeaders(map[string]string{
         "origin":        "https://www.123pan.com",
+        "referer":       "https://www.123pan.com/",
         "authorization": "Bearer " + d.AccessToken,
-        "platform":      "web",
-        "app-version":   "1.2",
+        "user-agent":    "Dart/2.19(dart:io)",
+        "platform":      "android",
+        "app-version":   "36",
+        //"user-agent":  base.UserAgent,
     })
     if callback != nil {
         callback(req)

@@ -71,6 +104,11 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
     if resp != nil {
         req.SetResult(resp)
     }
+    //authKey, err := authKey(url)
+    //if err != nil {
+    //    return nil, err
+    //}
+    //req.SetQueryParam("auth-key", *authKey)
     res, err := req.Execute(method, url)
     if err != nil {
         return nil, err
(file path not shown)

@@ -2,10 +2,12 @@ package _139

 import (
     "context"
+    "encoding/base64"
     "fmt"
     "io"
     "net/http"
     "strconv"
+    "strings"

     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/driver"

@@ -18,6 +20,7 @@ import (
 type Yun139 struct {
     model.Storage
     Addition
+    Account string
 }

 func (d *Yun139) Config() driver.Config {

@@ -29,7 +32,20 @@ func (d *Yun139) GetAddition() driver.Additional {
 }

 func (d *Yun139) Init(ctx context.Context) error {
-    _, err := d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
+    if d.Authorization == "" {
+        return fmt.Errorf("authorization is empty")
+    }
+    decode, err := base64.StdEncoding.DecodeString(d.Authorization)
+    if err != nil {
+        return err
+    }
+    decodeStr := string(decode)
+    splits := strings.Split(decodeStr, ":")
+    if len(splits) < 2 {
+        return fmt.Errorf("authorization is invalid, splits < 2")
+    }
+    d.Account = splits[1]
+    _, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
         "qryUserExternInfoReq": base.Json{
             "commonAccountInfo": base.Json{
                 "account": d.Account,

@@ -284,6 +300,9 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr

     var partSize = getPartSize(stream.GetSize())
     part := (stream.GetSize() + partSize - 1) / partSize
+    if part == 0 {
+        part = 1
+    }
     for i := int64(0); i < part; i++ {
         if utils.IsCanceled(ctx) {
             return ctx.Err()

@@ -315,13 +334,11 @@ func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
         if err != nil {
             return err
         }
+        _ = res.Body.Close()
         log.Debugf("%+v", res)

         if res.StatusCode != http.StatusOK {
             return fmt.Errorf("unexpected status code: %d", res.StatusCode)
         }
-        res.Body.Close()
     }

     return nil
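The new Init() derives the account from a base64-encoded Authorization value instead of a separate Account field. Below is a minimal sketch of just that decode step, using an obviously fake token; the only thing the diff establishes about the token format is that the second colon-separated field is the account.

```go
package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

func main() {
	// Fake value for illustration only: base64("mobile:13800000000:secret").
	authorization := base64.StdEncoding.EncodeToString([]byte("mobile:13800000000:secret"))

	decode, err := base64.StdEncoding.DecodeString(authorization)
	if err != nil {
		panic(err)
	}
	splits := strings.Split(string(decode), ":")
	if len(splits) < 2 {
		panic("authorization is invalid, splits < 2")
	}
	account := splits[1] // the Yun139 driver stores this in d.Account
	fmt.Println(account) // 13800000000
}
```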
(file path not shown)

@@ -6,8 +6,8 @@
 )

 type Addition struct {
-    Account string `json:"account" required:"true"`
-    Cookie  string `json:"cookie" type:"text" required:"true"`
+    //Account       string `json:"account" required:"true"`
+    Authorization string `json:"authorization" type:"text" required:"true"`
     driver.RootID
     Type    string `json:"type" type:"select" options:"personal,family" default:"personal"`
     CloudID string `json:"cloud_id"`
(file path not shown)

@@ -42,8 +42,8 @@ func calSign(body, ts, randStr string) string {
     sort.Strings(strs)
     body = strings.Join(strs, "")
     body = base64.StdEncoding.EncodeToString([]byte(body))
-    res := utils.GetMD5Encode(body) + utils.GetMD5Encode(ts+":"+randStr)
-    res = strings.ToUpper(utils.GetMD5Encode(res))
+    res := utils.GetMD5EncodeStr(body) + utils.GetMD5EncodeStr(ts+":"+randStr)
+    res = strings.ToUpper(utils.GetMD5EncodeStr(res))
     return res
 }

@@ -72,7 +72,7 @@ func (d *Yun139) request(pathname string, method string, callback base.ReqCallba
     req.SetHeaders(map[string]string{
         "Accept":         "application/json, text/plain, */*",
         "CMS-DEVICE":     "default",
-        "Cookie":         d.Cookie,
+        "Authorization":  "Basic " + d.Authorization,
         "mcloud-channel": "1000101",
         "mcloud-client":  "10701",
         //"mcloud-route": "001",
(file path not shown)

@@ -385,7 +385,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
     fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
     sliceMd5 := fileMd5
     if file.GetSize() > DEFAULT {
-        sliceMd5 = utils.GetMD5Encode(strings.Join(md5s, "\n"))
+        sliceMd5 = utils.GetMD5EncodeStr(strings.Join(md5s, "\n"))
     }
     res, err = d.uploadRequest("/person/commitMultiUploadFile", map[string]string{
         "uploadFileId": uploadFileId,
(file path not shown)

@@ -4,7 +4,6 @@ import (
     "context"
     "net/http"
     "strings"
-    "time"

     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/driver"

@@ -19,8 +18,7 @@ type Cloud189PC struct {

     identity string

     client *resty.Client
-    putClient *resty.Client

     loginParam *LoginParam
     tokenInfo  *AppSessionResp

@@ -51,12 +49,9 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
             "Referer": WEB_URL,
         })
     }
-    if y.putClient == nil {
-        y.putClient = base.NewRestyClient().SetTimeout(120 * time.Second)
-    }

     // 避免重复登陆
-    identity := utils.GetMD5Encode(y.Username + y.Password)
+    identity := utils.GetMD5EncodeStr(y.Username + y.Password)
     if !y.isLogin() || y.identity != identity {
         y.identity = identity
         if err = y.login(); err != nil {

@@ -266,8 +261,14 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
 }

 func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-    if y.RapidUpload {
+    switch y.UploadMethod {
+    case "stream":
+        return y.CommonUpload(ctx, dstDir, stream, up)
+    case "old":
+        return y.OldUpload(ctx, dstDir, stream, up)
+    case "rapid":
         return y.FastUpload(ctx, dstDir, stream, up)
+    default:
+        return y.CommonUpload(ctx, dstDir, stream, up)
     }
-    return y.CommonUpload(ctx, dstDir, stream, up)
 }
@ -11,6 +11,7 @@ import (
|
|||||||
"encoding/hex"
|
"encoding/hex"
|
||||||
"encoding/pem"
|
"encoding/pem"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"math"
|
||||||
"net/http"
|
"net/http"
|
||||||
"regexp"
|
"regexp"
|
||||||
"strings"
|
"strings"
|
||||||
@ -131,3 +132,18 @@ func BoolToNumber(b bool) int {
|
|||||||
}
|
}
|
||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// 计算分片大小
|
||||||
|
// 对分片数量有限制
|
||||||
|
// 10MIB 20 MIB 999片
|
||||||
|
// 50MIB 60MIB 70MIB 80MIB ∞MIB 1999片
|
||||||
|
func partSize(size int64) int64 {
|
||||||
|
const DEFAULT = 1024 * 1024 * 10 // 10MIB
|
||||||
|
if size > DEFAULT*2*999 {
|
||||||
|
return int64(math.Max(math.Ceil((float64(size)/1999) /*=单个切片大小*/ /float64(DEFAULT)) /*=倍率*/, 5) * DEFAULT)
|
||||||
|
}
|
||||||
|
if size > DEFAULT*999 {
|
||||||
|
return DEFAULT * 2 // 20MIB
|
||||||
|
}
|
||||||
|
return DEFAULT
|
||||||
|
}
|
||||||
|
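The new partSize helper keeps 189PC multipart uploads under the provider's slice-count limits: 10 MiB parts for up to 999 parts, 20 MiB parts for up to another 999, and above that the part size grows (never below 50 MiB) so a file always fits in at most 1999 parts. Below is a minimal sketch that re-evaluates the same formula for a few sample sizes; the numbers are worked out by hand for illustration and are not taken from the driver's tests.

```go
package main

import (
	"fmt"
	"math"
)

// partSize mirrors the helper added above: 10 MiB parts up to 999 parts,
// 20 MiB parts up to another 999 parts, then grow the part size so the
// file always fits in at most 1999 parts (never below 50 MiB per part).
func partSize(size int64) int64 {
	const DEFAULT = 1024 * 1024 * 10 // 10 MiB
	if size > DEFAULT*2*999 {
		return int64(math.Max(math.Ceil(float64(size)/1999/float64(DEFAULT)), 5) * DEFAULT)
	}
	if size > DEFAULT*999 {
		return DEFAULT * 2 // 20 MiB
	}
	return DEFAULT
}

func main() {
	for _, gib := range []int64{1, 15, 60} {
		size := gib * 1024 * 1024 * 1024
		ps := partSize(size)
		count := int64(math.Ceil(float64(size) / float64(ps)))
		fmt.Printf("%3d GiB -> %d-byte parts, %d parts\n", gib, ps, count)
		// 1 GiB  -> 10 MiB parts, 103 parts
		// 15 GiB -> 20 MiB parts, 768 parts
		// 60 GiB -> 50 MiB parts, 1229 parts
	}
}
```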
@@ -14,13 +14,14 @@ type Addition struct {
 	OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
 	Type           string `json:"type" type:"select" options:"personal,family" default:"personal"`
 	FamilyID       string `json:"family_id"`
-	RapidUpload    bool   `json:"rapid_upload"`
+	UploadMethod   string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
 	NoUseOcr       bool   `json:"no_use_ocr"`
 }
 
 var config = driver.Config{
 	Name:        "189CloudPC",
 	DefaultRoot: "-11",
+	CheckStatus: true,
 }
 
 func init() {
@@ -10,20 +10,62 @@ import (
 
 // 居然有四种返回方式
 type RespErr struct {
-	ResCode    string `json:"res_code"`
+	ResCode    any    `json:"res_code"` // int or string
 	ResMessage string `json:"res_message"`
 
+	Error_ string `json:"error"`
 
 	XMLName xml.Name `xml:"error"`
 	Code    string   `json:"code" xml:"code"`
 	Message string   `json:"message" xml:"message"`
+	Msg     string   `json:"msg"`
-	// Code    string `json:"code"`
-	Msg        string `json:"msg"`
 
 	ErrorCode string `json:"errorCode"`
 	ErrorMsg  string `json:"errorMsg"`
 }
 
+func (e *RespErr) HasError() bool {
+	switch v := e.ResCode.(type) {
+	case int, int64, int32:
+		return v != 0
+	case string:
+		return e.ResCode != ""
+	}
+	return (e.Code != "" && e.Code != "SUCCESS") || e.ErrorCode != "" || e.Error_ != ""
+}
+
+func (e *RespErr) Error() string {
+	switch v := e.ResCode.(type) {
+	case int, int64, int32:
+		if v != 0 {
+			return fmt.Sprintf("res_code: %d ,res_msg: %s", v, e.ResMessage)
+		}
+	case string:
+		if e.ResCode != "" {
+			return fmt.Sprintf("res_code: %s ,res_msg: %s", e.ResCode, e.ResMessage)
+		}
+	}
+
+	if e.Code != "" && e.Code != "SUCCESS" {
+		if e.Msg != "" {
+			return fmt.Sprintf("code: %s ,msg: %s", e.Code, e.Msg)
+		}
+		if e.Message != "" {
+			return fmt.Sprintf("code: %s ,msg: %s", e.Code, e.Message)
+		}
+		return "code: " + e.Code
+	}
+
+	if e.ErrorCode != "" {
+		return fmt.Sprintf("err_code: %s ,err_msg: %s", e.ErrorCode, e.ErrorMsg)
+	}
+
+	if e.Error_ != "" {
+		return fmt.Sprintf("error: %s ,message: %s", e.ErrorCode, e.Message)
+	}
+	return ""
+}
 
 // 登陆需要的参数
 type LoginParam struct {
 	// 加密后的用户名和密码
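Because the 189 endpoints return res_code either as a number or as a string, the struct above switches that field to `any` and lets the new HasError/Error methods branch on the dynamic type. The sketch below is a minimal illustration of that type-switch pattern; the structs are built directly rather than decoded from real responses, so the field values are made up.

```go
package main

import "fmt"

// respErr is a trimmed-down stand-in for the RespErr type above:
// ResCode may arrive as a number or as a string depending on the endpoint.
type respErr struct {
	ResCode    any
	ResMessage string
}

func (e *respErr) hasError() bool {
	switch v := e.ResCode.(type) {
	case int, int64, int32:
		return v != 0 // numeric res_code: zero means success
	case string:
		return v != "" // string res_code: non-empty means an error code was returned
	}
	return false
}

func main() {
	ok := respErr{ResCode: 0}
	numeric := respErr{ResCode: 10002, ResMessage: "InvalidArgument"}
	textual := respErr{ResCode: "UserInvalidOpenToken", ResMessage: "token expired"}

	fmt.Println(ok.hasError())      // false
	fmt.Println(numeric.hasError()) // true
	fmt.Println(textual.hasError()) // true
}
```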
@@ -218,6 +260,42 @@ type Part struct {
 	RequestHeader string `json:"requestHeader"`
 }
 
+/* 第二种上传方式 */
+type CreateUploadFileResp struct {
+	// 上传文件请求ID
+	UploadFileId int64 `json:"uploadFileId"`
+	// 上传文件数据的URL路径
+	FileUploadUrl string `json:"fileUploadUrl"`
+	// 上传文件完成后确认路径
+	FileCommitUrl string `json:"fileCommitUrl"`
+	// 文件是否已存在云盘中,0-未存在,1-已存在
+	FileDataExists int `json:"fileDataExists"`
+}
+
+type GetUploadFileStatusResp struct {
+	CreateUploadFileResp
+
+	// 已上传的大小
+	DataSize int64 `json:"dataSize"`
+	Size     int64 `json:"size"`
+}
+
+func (r *GetUploadFileStatusResp) GetSize() int64 {
+	return r.DataSize + r.Size
+}
+
+type CommitUploadFileResp struct {
+	XMLName    xml.Name `xml:"file"`
+	Id         string   `xml:"id"`
+	Name       string   `xml:"name"`
+	Size       string   `xml:"size"`
+	Md5        string   `xml:"md5"`
+	CreateDate string   `xml:"createDate"`
+	Rev        string   `xml:"rev"`
+	UserId     string   `xml:"userId"`
+}
+
+/* query 加密参数*/
 type Params map[string]string
 
 func (p Params) Set(k, v string) {
@@ -6,6 +6,7 @@ import (
 	"crypto/md5"
 	"encoding/base64"
 	"encoding/hex"
+	"encoding/xml"
 	"fmt"
 	"io"
 	"math"
@@ -15,6 +16,7 @@ import (
 	"os"
 	"regexp"
 	"strings"
+	"time"
 
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/conf"
@@ -23,9 +25,12 @@ import (
 	"github.com/alist-org/alist/v3/internal/op"
 	"github.com/alist-org/alist/v3/internal/setting"
 	"github.com/alist-org/alist/v3/pkg/utils"
 
+	"github.com/avast/retry-go"
 	"github.com/go-resty/resty/v2"
 	"github.com/google/uuid"
 	jsoniter "github.com/json-iterator/go"
+	"github.com/pkg/errors"
 )
 
 const (
@@ -47,7 +52,7 @@ const (
 	CHANNEL_ID = "web_cloud.189.cn"
 )
 
-func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
+func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]string {
 	dateOfGmt := getHttpDateStr()
 	sessionKey := y.tokenInfo.SessionKey
 	sessionSecret := y.tokenInfo.SessionSecret
@@ -56,19 +61,40 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
 		sessionSecret = y.tokenInfo.FamilySessionSecret
 	}
 
-	req := y.client.R().SetQueryParams(clientSuffix()).SetHeaders(map[string]string{
+	header := map[string]string{
 		"Date":         dateOfGmt,
 		"SessionKey":   sessionKey,
 		"X-Request-ID": uuid.NewString(),
-	})
+		"Signature":    signatureOfHmac(sessionSecret, sessionKey, method, url, dateOfGmt, params),
+	}
+	return header
+}
+
+func (y *Cloud189PC) EncryptParams(params Params) string {
+	sessionSecret := y.tokenInfo.SessionSecret
+	if y.isFamily() {
+		sessionSecret = y.tokenInfo.FamilySessionSecret
+	}
+	if params != nil {
+		return AesECBEncrypt(params.Encode(), sessionSecret[:16])
+	}
+	return ""
+}
+
+func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
+	req := y.client.R().SetQueryParams(clientSuffix())
+
 	// 设置params
-	var paramsData string
-	if params != nil {
-		paramsData = AesECBEncrypt(params.Encode(), sessionSecret[:16])
+	paramsData := y.EncryptParams(params)
+	if paramsData != "" {
 		req.SetQueryParam("params", paramsData)
 	}
-	req.SetHeader("Signature", signatureOfHmac(sessionSecret, sessionKey, method, url, dateOfGmt, paramsData))
+
+	// Signature
+	req.SetHeaders(y.SignatureHeader(url, method, paramsData))
+
+	var erron RespErr
+	req.SetError(&erron)
+
 	if callback != nil {
 		callback(req)
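The refactor above pulls the Date/SessionKey/Signature headers into SignatureHeader and the AES parameter encryption into EncryptParams, so the raw put helper added later in this diff can sign plain net/http requests without going through resty. signatureOfHmac itself is not shown in this diff; purely as an illustration of the general idea, a request signature of this shape is typically an HMAC over the method, URI and date keyed by the session secret. The canonical-string layout and the hash choice below are assumptions, not the 189 implementation.

```go
package main

import (
	"crypto/hmac"
	"crypto/sha1"
	"encoding/hex"
	"fmt"
	"strings"
)

// sign is a generic HMAC-SHA1 request signature, shown only to illustrate the
// idea behind a SignatureHeader-style helper. The canonical string layout and
// the hash are assumptions; the real signatureOfHmac may differ.
func sign(secret, sessionKey, method, uri, date string) string {
	canonical := fmt.Sprintf("SessionKey=%s&Operate=%s&RequestURI=%s&Date=%s",
		sessionKey, method, uri, date)
	mac := hmac.New(sha1.New, []byte(secret))
	mac.Write([]byte(canonical))
	return strings.ToUpper(hex.EncodeToString(mac.Sum(nil)))
}

func main() {
	// All values here are placeholders.
	date := "Mon, 02 Jan 2006 15:04:05 GMT"
	header := map[string]string{
		"Date":       date,
		"SessionKey": "example-session-key",
		"Signature":  sign("example-secret", "example-session-key", "GET", "/listFiles.action", date),
	}
	fmt.Println(header)
}
```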
@@ -80,32 +106,6 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
 	if err != nil {
 		return nil, err
 	}
-	var erron RespErr
-	utils.Json.Unmarshal(res.Body(), &erron)
-
-	if erron.ResCode != "" {
-		return nil, fmt.Errorf("res_code: %s ,res_msg: %s", erron.ResCode, erron.ResMessage)
-	}
-	if erron.Code != "" && erron.Code != "SUCCESS" {
-		if erron.Msg != "" {
-			return nil, fmt.Errorf("code: %s ,msg: %s", erron.Code, erron.Msg)
-		}
-		if erron.Message != "" {
-			return nil, fmt.Errorf("code: %s ,msg: %s", erron.Code, erron.Message)
-		}
-		return nil, fmt.Errorf(res.String())
-	}
-	switch erron.ErrorCode {
-	case "":
-		break
-	case "InvalidSessionKey":
-		if err = y.refreshSession(); err != nil {
-			return nil, err
-		}
-		return y.request(url, method, callback, params, resp)
-	default:
-		return nil, fmt.Errorf("err_code: %s ,err_msg: %s", erron.ErrorCode, erron.ErrorMsg)
-	}
 
 	if strings.Contains(res.String(), "userSessionBO is null") {
 		if err = y.refreshSession(); err != nil {
@@ -114,14 +114,17 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
 		return y.request(url, method, callback, params, resp)
 	}
 
-	resCode := utils.Json.Get(res.Body(), "res_code").ToInt64()
-	message := utils.Json.Get(res.Body(), "res_message").ToString()
-	switch resCode {
-	case 0:
-		return res.Body(), nil
-	default:
-		return nil, fmt.Errorf("res_code: %d ,res_msg: %s", resCode, message)
+	// 处理错误
+	if erron.HasError() {
+		if erron.ErrorCode == "InvalidSessionKey" {
+			if err = y.refreshSession(); err != nil {
+				return nil, err
+			}
+			return y.request(url, method, callback, params, resp)
+		}
+		return nil, &erron
 	}
+	return res.Body(), nil
 }
 
 func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
@@ -132,6 +135,50 @@ func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{
 	return y.request(url, http.MethodPost, callback, nil, resp)
 }
 
+func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader) ([]byte, error) {
+	req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
+	if err != nil {
+		return nil, err
+	}
+
+	query := req.URL.Query()
+	for key, value := range clientSuffix() {
+		query.Add(key, value)
+	}
+	req.URL.RawQuery = query.Encode()
+
+	for key, value := range headers {
+		req.Header.Add(key, value)
+	}
+
+	if sign {
+		for key, value := range y.SignatureHeader(url, http.MethodPut, "") {
+			req.Header.Add(key, value)
+		}
+	}
+
+	resp, err := base.HttpClient.Do(req)
+	if err != nil {
+		return nil, err
+	}
+	defer resp.Body.Close()
+
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, err
+	}
+
+	var erron RespErr
+	jsoniter.Unmarshal(body, &erron)
+	xml.Unmarshal(body, &erron)
+	if erron.HasError() {
+		return nil, &erron
+	}
+	if resp.StatusCode != http.StatusOK {
+		return nil, errors.Errorf("put fail,err:%s", string(body))
+	}
+	return body, nil
+}
 func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
 	fullUrl := API_URL
 	if y.isFamily() {
@@ -186,7 +233,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj,
 
 func (y *Cloud189PC) login() (err error) {
 	// 初始化登陆所需参数
-	if y.loginParam == nil || !y.NoUseOcr {
+	if y.loginParam == nil {
 		if err = y.initLoginParam(); err != nil {
 			// 验证码也通过错误返回
 			return err
@@ -197,7 +244,7 @@ func (y *Cloud189PC) login() (err error) {
 		y.VCode = ""
 		// 销毁登陆参数
 		y.loginParam = nil
-		// 遇到错误,重新加载登陆参数
+		// 遇到错误,重新加载登陆参数(刷新验证码)
 		if err != nil && y.NoUseOcr {
 			if err1 := y.initLoginParam(); err1 != nil {
 				err = fmt.Errorf("err1: %s \nerr2: %s", err, err1)
@@ -249,9 +296,8 @@ func (y *Cloud189PC) login() (err error) {
 		return
 	}
 
-	if erron.ResCode != "" {
-		err = fmt.Errorf(erron.ResMessage)
-		return
+	if erron.HasError() {
+		return &erron
 	}
 	if tokenInfo.ResCode != 0 {
 		err = fmt.Errorf(tokenInfo.ResMessage)
@@ -304,6 +350,22 @@ func (y *Cloud189PC) initLoginParam() error {
 	param.RsaPassword = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Password)
 	y.loginParam = &param
 
+	// 判断是否需要验证码
+	resp, err := y.client.R().
+		SetHeader("REQID", param.ReqId).
+		SetFormData(map[string]string{
+			"appKey":      APP_ID,
+			"accountType": ACCOUNT_TYPE,
+			"userName":    param.RsaUsername,
+		}).Post(AUTH_URL + "/api/logbox/oauth2/needcaptcha.do")
+	if err != nil {
+		return err
+	}
+	if resp.String() == "0" {
+		return nil
+	}
+
+	// 拉取验证码
 	imgRes, err := y.client.R().
 		SetQueryParams(map[string]string{
 			"token": param.CaptchaToken,
@@ -359,33 +421,23 @@ func (y *Cloud189PC) refreshSession() (err error) {
 		}
 	}()
 
-	switch erron.ResCode {
-	case "":
-		break
-	case "UserInvalidOpenToken":
-		if err = y.login(); err != nil {
-			return err
+	if erron.HasError() {
+		if erron.ResCode == "UserInvalidOpenToken" {
+			if err = y.login(); err != nil {
+				return err
+			}
 		}
-	default:
-		err = fmt.Errorf("res_code: %s ,res_msg: %s", erron.ResCode, erron.ResMessage)
-		return
-	}
-
-	switch userSessionResp.ResCode {
-	case 0:
-		y.tokenInfo.UserSessionResp = userSessionResp
-	default:
-		err = fmt.Errorf("code: %d , msg: %s", userSessionResp.ResCode, userSessionResp.ResMessage)
+		return &erron
 	}
+	y.tokenInfo.UserSessionResp = userSessionResp
 	return
 }
 
 // 普通上传
 func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
-	const DEFAULT int64 = 10485760
-	var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
+	var DEFAULT = partSize(file.GetSize())
+	var count = int(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
 
-	requestID := uuid.NewString()
 	params := Params{
 		"parentFolderId": dstDir.GetID(),
 		"fileName":       url.QueryEscape(file.GetName()),
@@ -407,7 +459,6 @@ func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mo
 	var initMultiUpload InitMultiUploadResp
 	_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
 		req.SetContext(ctx)
-		req.SetHeader("X-Request-ID", requestID)
 	}, params, &initMultiUpload)
 	if err != nil {
 		return err
@@ -417,7 +468,7 @@ func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mo
 	silceMd5 := md5.New()
 	silceMd5Hexs := make([]string, 0, count)
 	byteData := bytes.NewBuffer(make([]byte, DEFAULT))
-	for i := int64(1); i <= count; i++ {
+	for i := 1; i <= count; i++ {
 		if utils.IsCanceled(ctx) {
 			return ctx.Err()
 		}
@@ -440,7 +491,6 @@ func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mo
 		_, err = y.request(fullUrl+"/getMultiUploadUrls", http.MethodGet,
 			func(req *resty.Request) {
 				req.SetContext(ctx)
-				req.SetHeader("X-Request-ID", requestID)
 			}, Params{
 				"partInfo":     fmt.Sprintf("%d-%s", i, silceMd5Base64),
 				"uploadFileId": initMultiUpload.Data.UploadFileID,
@@ -451,32 +501,31 @@ func (y *Cloud189PC) CommonUpload(ctx context.Context, dstDir model.Obj, file mo
 
 		// 开始上传
 		uploadData := uploadUrl.UploadUrls[fmt.Sprint("partNumber_", i)]
-		res, err := y.putClient.R().
-			SetContext(ctx).
-			SetQueryParams(clientSuffix()).
-			SetHeaders(ParseHttpHeader(uploadData.RequestHeader)).
-			SetBody(byteData).
-			Put(uploadData.RequestURL)
+		err = retry.Do(func() error {
+			_, err := y.put(ctx, uploadData.RequestURL, ParseHttpHeader(uploadData.RequestHeader), false, bytes.NewReader(byteData.Bytes()))
+			return err
+		},
+			retry.Context(ctx),
+			retry.Attempts(3),
+			retry.Delay(time.Second),
+			retry.MaxDelay(5*time.Second))
 		if err != nil {
 			return err
 		}
-		if res.StatusCode() != http.StatusOK {
-			return fmt.Errorf("updload fail,msg: %s", res.String())
-		}
 		up(int(i * 100 / count))
 	}
 
 	fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
 	sliceMd5Hex := fileMd5Hex
 	if file.GetSize() > DEFAULT {
-		sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
+		sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
 	}
 
 	// 提交上传
 	_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
 		func(req *resty.Request) {
 			req.SetContext(ctx)
-			req.SetHeader("X-Request-ID", requestID)
 		}, Params{
 			"uploadFileId": initMultiUpload.Data.UploadFileID,
 			"fileMd5":      fileMd5Hex,
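Each slice PUT now goes through the new raw put helper wrapped in avast/retry-go instead of a dedicated resty client, so transient failures on a single slice are retried a bounded number of times before the upload fails. The snippet below is a self-contained sketch of that retry pattern using the same options the driver passes; the flaky operation is a stub standing in for the real y.put call.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"time"

	"github.com/avast/retry-go"
)

func main() {
	ctx := context.Background()

	attempt := 0
	// uploadSlice stands in for the real y.put call; it fails twice, then succeeds.
	uploadSlice := func() error {
		attempt++
		if attempt < 3 {
			return errors.New("temporary network error")
		}
		return nil
	}

	err := retry.Do(uploadSlice,
		retry.Context(ctx),            // stop retrying once the upload context is canceled
		retry.Attempts(3),             // same per-slice bound the driver uses
		retry.Delay(time.Second),      // base delay between attempts
		retry.MaxDelay(5*time.Second)) // cap on the delay between attempts
	fmt.Println("attempts:", attempt, "err:", err)
}
```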
@@ -500,7 +549,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 		_ = os.Remove(tempFile.Name())
 	}()
 
-	const DEFAULT int64 = 10485760
+	var DEFAULT = partSize(file.GetSize())
 	count := int(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
 
 	// 优先计算所需信息
@@ -528,10 +577,9 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 	fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
 	sliceMd5Hex := fileMd5Hex
 	if file.GetSize() > DEFAULT {
-		sliceMd5Hex = strings.ToUpper(utils.GetMD5Encode(strings.Join(silceMd5Hexs, "\n")))
+		sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
 	}
 
-	requestID := uuid.NewString()
 	// 检测是否支持快传
 	params := Params{
 		"parentFolderId": dstDir.GetID(),
@@ -554,7 +602,6 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 	var uploadInfo InitMultiUploadResp
 	_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
 		req.SetContext(ctx)
-		req.SetHeader("X-Request-ID", requestID)
 	}, params, &uploadInfo)
 	if err != nil {
 		return err
@@ -566,7 +613,6 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 		_, err = y.request(fullUrl+"/getMultiUploadUrls", http.MethodGet,
 			func(req *resty.Request) {
 				req.SetContext(ctx)
-				req.SetHeader("X-Request-ID", requestID)
 			}, Params{
 				"uploadFileId": uploadInfo.Data.UploadFileID,
 				"partInfo":     strings.Join(silceMd5Base64s, ","),
@@ -575,26 +621,29 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 			return err
 		}
 
+		buf := make([]byte, DEFAULT)
 		for i := 1; i <= count; i++ {
-			select {
-			case <-ctx.Done():
+			if utils.IsCanceled(ctx) {
 				return ctx.Err()
-			default:
 			}
+
+			n, err := io.ReadFull(tempFile, buf)
+			if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
+				return err
+			}
 			uploadData := uploadUrls.UploadUrls[fmt.Sprint("partNumber_", i)]
-			res, err := y.putClient.R().
-				SetContext(ctx).
-				SetQueryParams(clientSuffix()).
-				SetHeaders(ParseHttpHeader(uploadData.RequestHeader)).
-				SetBody(io.LimitReader(tempFile, DEFAULT)).
-				Put(uploadData.RequestURL)
+			err = retry.Do(func() error {
+				_, err := y.put(ctx, uploadData.RequestURL, ParseHttpHeader(uploadData.RequestHeader), false, bytes.NewReader(buf[:n]))
+				return err
+			},
+				retry.Context(ctx),
+				retry.Attempts(3),
+				retry.Delay(time.Second),
+				retry.MaxDelay(5*time.Second))
 			if err != nil {
 				return err
 			}
-			if res.StatusCode() != http.StatusOK {
-				return fmt.Errorf("updload fail,msg: %s", res.String())
-			}
 			up(int(i * 100 / count))
 		}
 	}
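FastUpload now reads each slice into a reusable buffer with io.ReadFull instead of handing resty an io.LimitReader over the temp file. That makes the short final slice explicit (io.ErrUnexpectedEOF) and, importantly, lets the retry closure re-send exactly the same bytes from bytes.NewReader(buf[:n]). A small sketch of that chunk-reading pattern over an in-memory reader:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"
)

func main() {
	// 26 bytes split into 10-byte slices: two full chunks and one short one.
	src := strings.NewReader("abcdefghijklmnopqrstuvwxyz")
	buf := make([]byte, 10)

	for part := 1; ; part++ {
		n, err := io.ReadFull(src, buf)
		if err == io.EOF {
			break // nothing left to read
		}
		if err != nil && err != io.ErrUnexpectedEOF {
			panic(err) // a real I/O error; the driver would return it to the caller
		}
		// bytes.NewReader(buf[:n]) is what would be handed to the PUT request;
		// it can be re-created cheaply if that slice has to be retried.
		chunk := bytes.NewReader(buf[:n])
		fmt.Printf("part %d: %d bytes\n", part, chunk.Len())
		if err == io.ErrUnexpectedEOF {
			break // short final chunk
		}
	}
}
```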
@@ -603,7 +652,6 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 	_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
 		func(req *resty.Request) {
 			req.SetContext(ctx)
-			req.SetHeader("X-Request-ID", requestID)
 		}, Params{
 			"uploadFileId": uploadInfo.Data.UploadFileID,
 			"isLog":        "0",
@@ -612,6 +660,137 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
 	return err
 }
 
+func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (err error) {
+	// 需要获取完整文件md5,必须支持 io.Seek
+	tempFile, err := utils.CreateTempFile(file.GetReadCloser())
+	if err != nil {
+		return err
+	}
+	defer func() {
+		_ = tempFile.Close()
+		_ = os.Remove(tempFile.Name())
+	}()
+
+	// 计算md5
+	fileMd5 := md5.New()
+	if _, err := io.Copy(fileMd5, tempFile); err != nil {
+		return err
+	}
+	if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
+		return err
+	}
+	fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
+
+	// 创建上传会话
+	var uploadInfo CreateUploadFileResp
+
+	fullUrl := API_URL + "/createUploadFile.action"
+	if y.isFamily() {
+		fullUrl = API_URL + "/family/file/createFamilyFile.action"
+	}
+	_, err = y.post(fullUrl, func(req *resty.Request) {
+		req.SetContext(ctx)
+		if y.isFamily() {
+			req.SetQueryParams(map[string]string{
+				"familyId":     y.FamilyID,
+				"fileMd5":      fileMd5Hex,
+				"fileName":     file.GetName(),
+				"fileSize":     fmt.Sprint(file.GetSize()),
+				"parentId":     dstDir.GetID(),
+				"resumePolicy": "1",
+			})
+		} else {
+			req.SetFormData(map[string]string{
+				"parentFolderId": dstDir.GetID(),
+				"fileName":       file.GetName(),
+				"size":           fmt.Sprint(file.GetSize()),
+				"md5":            fileMd5Hex,
+				"opertype":       "3",
+				"flag":           "1",
+				"resumePolicy":   "1",
+				"isLog":          "0",
+				// "baseFileId": "",
+				// "lastWrite":"",
+				// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
+				// "fileExt": "",
+			})
+		}
+	}, &uploadInfo)
+
+	if err != nil {
+		return err
+	}
+
+	// 网盘中不存在该文件,开始上传
+	status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
+	for status.Size < file.GetSize() && status.FileDataExists != 1 {
+		if utils.IsCanceled(ctx) {
+			return ctx.Err()
+		}
+
+		header := map[string]string{
+			"ResumePolicy": "1",
+			"Expect":       "100-continue",
+		}
+
+		if y.isFamily() {
+			header["FamilyId"] = fmt.Sprint(y.FamilyID)
+			header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
+		} else {
+			header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
+		}
+
+		_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile))
+		if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
+			return err
+		}
+
+		// 获取断点状态
+		fullUrl := API_URL + "/getUploadFileStatus.action"
+		if y.isFamily() {
+			fullUrl = API_URL + "/family/file/getFamilyFileStatus.action"
+		}
+		_, err = y.get(fullUrl, func(req *resty.Request) {
+			req.SetContext(ctx).SetQueryParams(map[string]string{
+				"uploadFileId": fmt.Sprint(status.UploadFileId),
+				"resumePolicy": "1",
+			})
+			if y.isFamily() {
+				req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
+			}
+		}, &status)
+		if err != nil {
+			return err
+		}
+
+		if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
+			return err
+		}
+		up(int(status.Size / file.GetSize()))
+	}
+
+	// 提交
+	var resp CommitUploadFileResp
+	_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
+		req.SetContext(ctx)
+		if y.isFamily() {
+			req.SetHeaders(map[string]string{
+				"ResumePolicy": "1",
+				"UploadFileId": fmt.Sprint(status.UploadFileId),
+				"FamilyId":     fmt.Sprint(y.FamilyID),
+			})
+		} else {
+			req.SetFormData(map[string]string{
+				"opertype":     "3",
+				"resumePolicy": "1",
+				"uploadFileId": fmt.Sprint(status.UploadFileId),
+				"isLog":        "0",
+			})
+		}
+	}, &resp)
+	return err
+}
+
 func (y *Cloud189PC) isFamily() bool {
 	return y.Type == "family"
 }
@@ -103,7 +103,8 @@ func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs)
 	}
 	if common.ShouldProxy(storage, stdpath.Base(sub)) {
 		return &model.Link{
-			URL: fmt.Sprintf("/p%s?sign=%s",
+			URL: fmt.Sprintf("%s/p%s?sign=%s",
+				common.GetApiUrl(args.HttpReq),
 				utils.EncodePath(reqPath, true),
 				sign.Sign(reqPath)),
 		}, nil
@@ -175,6 +175,7 @@ func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
 		req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
 			SetHeader("Password", d.MetaPassword).
 			SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
+			SetContentLength(true).
 			SetBody(stream.GetReadCloser())
 	})
 	return err
@@ -9,6 +9,7 @@ import (
 	"github.com/alist-org/alist/v3/pkg/utils"
 	"github.com/alist-org/alist/v3/server/common"
 	"github.com/go-resty/resty/v2"
+	log "github.com/sirupsen/logrus"
 )
 
 func (d *AListV3) login() error {
@@ -38,6 +39,7 @@ func (d *AListV3) request(api, method string, callback base.ReqCallback, retry .
 	if err != nil {
 		return nil, err
 	}
+	log.Debugf("[alist_v3] response body: %s", res.String())
 	if res.StatusCode() >= 400 {
 		return nil, fmt.Errorf("request failed, status: %s", res.Status())
 	}
@@ -67,7 +67,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
 		return nil
 	}
 	// init deviceID
-	deviceID := utils.GetSHA256Encode(d.UserID)
+	deviceID := utils.GetSHA256Encode([]byte(d.UserID))
 	// init privateKey
 	privateKey, _ := NewPrivateKeyFromHex(deviceID)
 	state := State{
@@ -193,7 +193,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
 	if d.RapidUpload {
 		buf := bytes.NewBuffer(make([]byte, 0, 1024))
 		io.CopyN(buf, file, 1024)
-		reqBody["pre_hash"] = utils.GetSHA1Encode(buf.String())
+		reqBody["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
 		if localFile != nil {
 			if _, err := localFile.Seek(0, io.SeekStart); err != nil {
 				return err
@@ -259,7 +259,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
 		(t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
 		*/
 		buf := make([]byte, 8)
-		r, _ := new(big.Int).SetString(utils.GetMD5Encode(d.AccessToken)[:16], 16)
+		r, _ := new(big.Int).SetString(utils.GetMD5EncodeStr(d.AccessToken)[:16], 16)
 		i := new(big.Int).SetInt64(file.GetSize())
 		o := new(big.Int).SetInt64(0)
 		if file.GetSize() > 0 {
@@ -2,11 +2,11 @@ package aliyundrive_open
 
 import (
 	"context"
-	"io"
-	"math"
+	"fmt"
 	"net/http"
 	"time"
 
+	"github.com/Xhofe/rateg"
 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/errs"
@@ -22,7 +22,7 @@ type AliyundriveOpen struct {
 
 	DriveId string
 
-	limitList func(ctx context.Context, dir model.Obj) ([]model.Obj, error)
+	limitList func(ctx context.Context, data base.Json) (*Files, error)
 	limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
 }
 
@@ -40,8 +40,14 @@ func (d *AliyundriveOpen) Init(ctx context.Context) error {
 		return err
 	}
 	d.DriveId = utils.Json.Get(res, "default_drive_id").ToString()
-	d.limitList = utils.LimitRateCtx(d.list, time.Second/4)
-	d.limitLink = utils.LimitRateCtx(d.link, time.Second)
+	d.limitList = rateg.LimitFnCtx(d.list, rateg.LimitFnOption{
+		Limit:  4,
+		Bucket: 1,
+	})
+	d.limitLink = rateg.LimitFnCtx(d.link, rateg.LimitFnOption{
+		Limit:  1,
+		Bucket: 1,
+	})
 	return nil
 }
 
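Init now wraps list and link in rateg.LimitFnCtx instead of utils.LimitRateCtx, so every call first waits on a per-function rate limit that also respects the caller's context. The exact semantics of rateg's Limit and Bucket options are not shown in this diff; the sketch below illustrates the same wrap-a-function pattern with the standard golang.org/x/time/rate limiter, purely as an analogy rather than the rateg implementation.

```go
package main

import (
	"context"
	"fmt"
	"time"

	"golang.org/x/time/rate"
)

// limitFnCtx wraps fn so that every invocation first waits for a token from
// the limiter (or fails when ctx is canceled). This mirrors the idea behind
// rateg.LimitFnCtx; it is not the rateg implementation.
func limitFnCtx[A, R any](fn func(context.Context, A) (R, error), l *rate.Limiter) func(context.Context, A) (R, error) {
	return func(ctx context.Context, arg A) (R, error) {
		if err := l.Wait(ctx); err != nil {
			var zero R
			return zero, err
		}
		return fn(ctx, arg)
	}
}

func main() {
	list := func(ctx context.Context, dir string) ([]string, error) {
		return []string{dir + "/a", dir + "/b"}, nil
	}
	// At most 4 calls per second with a burst of 1 - loosely mirrors Limit: 4, Bucket: 1 above.
	limitedList := limitFnCtx(list, rate.NewLimiter(rate.Limit(4), 1))

	ctx := context.Background()
	start := time.Now()
	for i := 0; i < 3; i++ {
		if _, err := limitedList(ctx, "/root"); err != nil {
			panic(err)
		}
	}
	fmt.Println("3 rate-limited calls took", time.Since(start))
}
```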
@@ -49,8 +55,11 @@ func (d *AliyundriveOpen) Drop(ctx context.Context) error {
 	return nil
 }
 
-func (d *AliyundriveOpen) list(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
-	files, err := d.getFiles(dir.GetID())
+func (d *AliyundriveOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+	if d.limitList == nil {
+		return nil, fmt.Errorf("driver not init")
+	}
+	files, err := d.getFiles(ctx, dir.GetID())
 	if err != nil {
 		return nil, err
 	}
@@ -59,10 +68,6 @@ func (d *AliyundriveOpen) list(ctx context.Context, dir model.Obj) ([]model.Obj,
 	})
 }
 
-func (d *AliyundriveOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
-	return d.limitList(ctx, dir)
-}
-
 func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link, error) {
 	res, err := d.request("/adrive/v1.0/openFile/getDownloadUrl", http.MethodPost, func(req *resty.Request) {
 		req.SetBody(base.Json{
@@ -83,6 +88,9 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
 }
 
 func (d *AliyundriveOpen) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+	if d.limitLink == nil {
+		return nil, fmt.Errorf("driver not init")
+	}
 	return d.limitLink(ctx, file)
 }
 
@@ -150,59 +158,7 @@ func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
 }
 
 func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-	// rapid_upload is not currently supported
-	// 1. create
-	const DEFAULT int64 = 20971520
-	createData := base.Json{
-		"drive_id":        d.DriveId,
-		"parent_file_id":  dstDir.GetID(),
-		"name":            stream.GetName(),
-		"type":            "file",
-		"check_name_mode": "ignore",
-	}
-	count := 1
-	if stream.GetSize() > DEFAULT {
-		count = int(math.Ceil(float64(stream.GetSize()) / float64(DEFAULT)))
-		createData["part_info_list"] = makePartInfos(count)
-	}
-	var createResp CreateResp
-	_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
-		req.SetBody(createData).SetResult(&createResp)
-	})
-	if err != nil {
-		return err
-	}
-	// 2. upload
-	preTime := time.Now()
-	for i := 1; i <= len(createResp.PartInfoList); i++ {
-		if utils.IsCanceled(ctx) {
-			return ctx.Err()
-		}
-		err = d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, DEFAULT)), &createResp, true)
-		if err != nil {
-			return err
-		}
-		if count > 0 {
-			up(i * 100 / count)
-		}
-		// refresh upload url if 50 minutes passed
-		if time.Since(preTime) > 50*time.Minute {
-			createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
-			if err != nil {
-				return err
-			}
-			preTime = time.Now()
-		}
-	}
-	// 3. complete
-	_, err = d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
-		req.SetBody(base.Json{
-			"drive_id":  d.DriveId,
-			"file_id":   createResp.FileId,
-			"upload_id": createResp.UploadId,
-		})
-	})
-	return err
+	return d.upload(ctx, dstDir, stream, up)
 }
 
 func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
@@ -10,10 +10,11 @@ type Addition struct {
 	RefreshToken   string `json:"refresh_token" required:"true"`
 	OrderBy        string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
 	OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
-	OauthTokenURL  string `json:"oauth_token_url" default:"https://api.nn.ci/alist/ali_open/token"`
+	OauthTokenURL  string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/ali_open/token"`
 	ClientID       string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
 	ClientSecret   string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
 	RemoveWay      string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
+	RapidUpload    bool   `json:"rapid_upload" help:"If you enable this option, the file will be uploaded to the server first, so the progress will be incorrect"`
 	InternalUpload bool   `json:"internal_upload" help:"If you are using Aliyun ECS is located in Beijing, you can turn it on to boost the upload speed"`
 	AccessToken    string
 }

268 drivers/aliyundrive_open/upload.go Normal file
@@ -0,0 +1,268 @@
+package aliyundrive_open
+
+import (
+	"bytes"
+	"context"
+	"crypto/sha1"
+	"encoding/base64"
+	"encoding/hex"
+	"fmt"
+	"io"
+	"math"
+	"net/http"
+	"os"
+	"strconv"
+	"strings"
+	"time"
+
+	"github.com/alist-org/alist/v3/drivers/base"
+	"github.com/alist-org/alist/v3/internal/driver"
+	"github.com/alist-org/alist/v3/internal/model"
+	"github.com/alist-org/alist/v3/pkg/utils"
+	"github.com/go-resty/resty/v2"
+	log "github.com/sirupsen/logrus"
+)
+
+func makePartInfos(size int) []base.Json {
+	partInfoList := make([]base.Json, size)
+	for i := 0; i < size; i++ {
+		partInfoList[i] = base.Json{"part_number": 1 + i}
+	}
+	return partInfoList
+}
+
+func calPartSize(fileSize int64) int64 {
+	var partSize int64 = 20 * 1024 * 1024
+	if fileSize > partSize {
+		if fileSize > 1*1024*1024*1024*1024 { // file Size over 1TB
+			partSize = 5 * 1024 * 1024 * 1024 // file part size 5GB
+		} else if fileSize > 768*1024*1024*1024 { // over 768GB
+			partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 part
+		} else if fileSize > 512*1024*1024*1024 { // over 512GB
+			partSize = 82463373 // ≈ 78.6432MB
+		} else if fileSize > 384*1024*1024*1024 { // over 384GB
+			partSize = 54975582 // ≈ 52.4288MB
+		} else if fileSize > 256*1024*1024*1024 { // over 256GB
+			partSize = 41231687 // ≈ 39.3216MB
+		} else if fileSize > 128*1024*1024*1024 { // over 128GB
+			partSize = 27487791 // ≈ 26.2144MB
+		}
+	}
+	return partSize
+}
+
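calPartSize keeps aliyundrive_open uploads under the 10,000-part ceiling by stepping the part size up with the file size. As a quick hand-computed sanity check (not taken from the driver's tests): a 300 GiB file lands in the ">256 GiB" bracket, so parts are 41,231,687 bytes and the upload needs ceil(300 GiB / 41,231,687) = 7,813 parts, comfortably below the limit.

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	// 300 GiB falls into the ">256 GiB" bracket of calPartSize above.
	fileSize := int64(300) * 1024 * 1024 * 1024
	partSize := int64(41231687) // the ≈39.3216 MB bracket chosen by calPartSize
	count := int64(math.Ceil(float64(fileSize) / float64(partSize)))
	fmt.Printf("%d bytes / %d-byte parts = %d parts (limit is 10000)\n", fileSize, partSize, count)
	// Output: 322122547200 bytes / 41231687-byte parts = 7813 parts (limit is 10000)
}
```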
+func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
+	partInfoList := makePartInfos(count)
+	var resp CreateResp
+	_, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
+		req.SetBody(base.Json{
+			"drive_id":       d.DriveId,
+			"file_id":        fileId,
+			"part_info_list": partInfoList,
+			"upload_id":      uploadId,
+		}).SetResult(&resp)
+	})
+	return resp.PartInfoList, err
+}
+
+func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
+	partInfo := resp.PartInfoList[i-1]
+	uploadUrl := partInfo.UploadUrl
+	if d.InternalUpload {
+		uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
+	}
+	req, err := http.NewRequest("PUT", uploadUrl, reader)
+	if err != nil {
+		return err
+	}
+	req = req.WithContext(ctx)
+	res, err := base.HttpClient.Do(req)
+	if err != nil {
+		if retry {
+			reader.Reset()
+			return d.uploadPart(ctx, i, count, reader, resp, false)
+		}
+		return err
+	}
+	res.Body.Close()
+	if retry && res.StatusCode == http.StatusForbidden {
+		resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
+		if err != nil {
+			return err
+		}
+		reader.Reset()
+		return d.uploadPart(ctx, i, count, reader, resp, false)
+	}
+	if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
+		return fmt.Errorf("upload status: %d", res.StatusCode)
+	}
+	return nil
+}
+
+func (d *AliyundriveOpen) normalUpload(ctx context.Context, stream model.FileStreamer, up driver.UpdateProgress, createResp CreateResp, count int, partSize int64) error {
+	log.Debugf("[aliyundive_open] normal upload")
+	// 2. upload
+	preTime := time.Now()
+	for i := 1; i <= len(createResp.PartInfoList); i++ {
+		if utils.IsCanceled(ctx) {
+			return ctx.Err()
+		}
+		err := d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, partSize)), &createResp, true)
+		if err != nil {
+			return err
+		}
+		if count > 0 {
+			up(i * 100 / count)
+		}
+		// refresh upload url if 50 minutes passed
+		if time.Since(preTime) > 50*time.Minute {
+			createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
+			if err != nil {
+				return err
+			}
+			preTime = time.Now()
+		}
+	}
+	// 3. complete
+	_, err := d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
+		req.SetBody(base.Json{
+			"drive_id":  d.DriveId,
+			"file_id":   createResp.FileId,
+			"upload_id": createResp.UploadId,
+		})
+	})
+	return err
+}
+
+type ProofRange struct {
+	Start int64
+	End   int64
+}
+
+func getProofRange(input string, size int64) (*ProofRange, error) {
+	if size == 0 {
+		return &ProofRange{}, nil
+	}
+	tmpStr := utils.GetMD5EncodeStr(input)[0:16]
+	tmpInt, err := strconv.ParseUint(tmpStr, 16, 64)
+	if err != nil {
+		return nil, err
+	}
+	index := tmpInt % uint64(size)
+	pr := &ProofRange{
+		Start: int64(index),
+		End:   int64(index) + 8,
+	}
+	if pr.End >= size {
+		pr.End = size
+	}
+	return pr, nil
+}
+
+func (d *AliyundriveOpen) calProofCode(file *os.File, fileSize int64) (string, error) {
+	proofRange, err := getProofRange(d.AccessToken, fileSize)
+	if err != nil {
+		return "", err
+	}
+	buf := make([]byte, proofRange.End-proofRange.Start)
+	_, err = file.ReadAt(buf, proofRange.Start)
+	if err != nil {
+		return "", err
+	}
+	return base64.StdEncoding.EncodeToString(buf), nil
+}
+
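For rapid upload, getProofRange and calProofCode above derive an 8-byte window into the file from the MD5 of the access token and send that window base64-encoded as proof_code. The sketch below walks through the same arithmetic on an in-memory buffer with a made-up token, assuming GetMD5EncodeStr is the lowercase hex MD5 of its input; the real driver reads the window from the temp file with ReadAt.

```go
package main

import (
	"crypto/md5"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"strconv"
)

func main() {
	token := "example-access-token" // made-up value, only to exercise the arithmetic
	data := []byte("0123456789abcdefghijklmnopqrstuvwxyz")
	size := int64(len(data))

	// First 16 hex chars of md5(token), parsed as an unsigned integer...
	sum := md5.Sum([]byte(token))
	tmpStr := hex.EncodeToString(sum[:])[0:16]
	tmpInt, err := strconv.ParseUint(tmpStr, 16, 64)
	if err != nil {
		panic(err)
	}

	// ...mod the file size gives the window start; the window is 8 bytes,
	// clamped to the end of the file.
	start := int64(tmpInt % uint64(size))
	end := start + 8
	if end >= size {
		end = size
	}

	proof := base64.StdEncoding.EncodeToString(data[start:end])
	fmt.Printf("window [%d,%d) -> proof_code %q\n", start, end, proof)
}
```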
|
func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
|
// 1. create
|
||||||
|
// Part Size Unit: Bytes, Default: 20MB,
|
||||||
|
// Maximum number of slices 10,000, ≈195.3125GB
|
||||||
|
var partSize = calPartSize(stream.GetSize())
|
||||||
|
createData := base.Json{
|
||||||
|
"drive_id": d.DriveId,
|
||||||
|
"parent_file_id": dstDir.GetID(),
|
||||||
|
"name": stream.GetName(),
|
||||||
|
"type": "file",
|
||||||
|
"check_name_mode": "ignore",
|
||||||
|
}
|
||||||
|
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
|
||||||
|
createData["part_info_list"] = makePartInfos(count)
|
||||||
|
// rapid upload
|
||||||
|
rapidUpload := stream.GetSize() > 100*1024 && d.RapidUpload
|
||||||
|
if rapidUpload {
|
||||||
|
log.Debugf("[aliyundrive_open] start cal pre_hash")
|
||||||
|
// read 1024 bytes to calculate pre hash
|
||||||
|
buf := bytes.NewBuffer(make([]byte, 0, 1024))
|
||||||
|
_, err := io.CopyN(buf, stream, 1024)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
createData["size"] = stream.GetSize()
|
||||||
|
createData["pre_hash"] = utils.GetSHA1Encode(buf.Bytes())
|
||||||
|
// if support seek, seek to start
|
||||||
|
if localFile, ok := stream.(io.Seeker); ok {
|
||||||
|
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Put spliced head back to stream
|
||||||
|
stream.SetReadCloser(struct {
|
||||||
|
io.Reader
|
||||||
|
io.Closer
|
||||||
|
}{
|
||||||
|
Reader: io.MultiReader(buf, stream.GetReadCloser()),
|
||||||
|
Closer: stream.GetReadCloser(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var createResp CreateResp
|
||||||
|
_, err, e := d.requestReturnErrResp("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(createData).SetResult(&createResp)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
if e.Code != "PreHashMatched" || !rapidUpload {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
log.Debugf("[aliyundrive_open] pre_hash matched, start rapid upload")
|
||||||
|
// convert to local file
|
||||||
|
file, err := utils.CreateTempFile(stream)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_ = stream.GetReadCloser().Close()
|
||||||
|
stream.SetReadCloser(file)
|
||||||
|
// calculate full hash
|
||||||
|
h := sha1.New()
|
||||||
|
_, err = io.Copy(h, file)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
delete(createData, "pre_hash")
|
||||||
|
createData["proof_version"] = "v1"
|
||||||
|
createData["content_hash_name"] = "sha1"
|
||||||
|
createData["content_hash"] = hex.EncodeToString(h.Sum(nil))
|
||||||
|
// seek to start
|
||||||
|
if _, err = file.Seek(0, io.SeekStart); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
createData["proof_code"], err = d.calProofCode(file, stream.GetSize())
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("cal proof code error: %s", err.Error())
|
||||||
|
}
|
||||||
|
_, err = d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(createData).SetResult(&createResp)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if createResp.RapidUpload {
|
||||||
|
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
// failed to rapid upload, try normal upload
|
||||||
|
if _, err = file.Seek(0, io.SeekStart); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log.Debugf("[aliyundrive_open] create file success, resp: %+v", createResp)
|
||||||
|
return d.normalUpload(ctx, stream, up, createResp, count, partSize)
|
||||||
|
}
|
@ -5,12 +5,12 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/alist-org/alist/v3/drivers/base"
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
"github.com/alist-org/alist/v3/internal/op"
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
"github.com/alist-org/alist/v3/pkg/utils"
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
"github.com/go-resty/resty/v2"
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
)
|
)
|
||||||
|
|
||||||
// do others that not defined in Driver interface
|
// do others that not defined in Driver interface
|
||||||
@@ -20,9 +20,9 @@ func (d *AliyundriveOpen) refreshToken() error {
     if d.OauthTokenURL != "" && d.ClientID == "" {
         url = d.OauthTokenURL
     }
-    var resp base.TokenResp
+    //var resp base.TokenResp
     var e ErrResp
-    _, err := base.RestyClient.R().
+    res, err := base.RestyClient.R().
         ForceContentType("application/json").
         SetBody(base.Json{
             "client_id":     d.ClientID,
@@ -30,24 +30,31 @@ func (d *AliyundriveOpen) refreshToken() error {
             "grant_type":    "refresh_token",
             "refresh_token": d.RefreshToken,
         }).
-        SetResult(&resp).
+        //SetResult(&resp).
         SetError(&e).
         Post(url)
     if err != nil {
         return err
     }
+    log.Debugf("[ali_open] refresh token response: %s", res.String())
     if e.Code != "" {
         return fmt.Errorf("failed to refresh token: %s", e.Message)
     }
-    if resp.RefreshToken == "" {
+    refresh, access := utils.Json.Get(res.Body(), "refresh_token").ToString(), utils.Json.Get(res.Body(), "access_token").ToString()
+    if refresh == "" {
         return errors.New("failed to refresh token: refresh token is empty")
     }
-    d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
+    d.RefreshToken, d.AccessToken = refresh, access
     op.MustSaveDriverStorage(d)
     return nil
 }
 
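The rewritten refreshToken no longer unmarshals into base.TokenResp; it logs the raw body and pulls refresh_token and access_token out of it with utils.Json.Get, which tolerates responses whose shape differs from the struct. utils.Json appears to be a jsoniter instance, so a minimal standalone equivalent looks like the sketch below (field names come from the diff, everything else is illustrative).

// Minimal standalone sketch of extracting token fields from a raw JSON body
// with jsoniter, mirroring the utils.Json.Get(...) style in the diff.
package main

import (
    "fmt"

    jsoniter "github.com/json-iterator/go"
)

func main() {
    body := []byte(`{"access_token":"at-123","refresh_token":"rt-456"}`) // simulated response
    refresh := jsoniter.Get(body, "refresh_token").ToString()
    access := jsoniter.Get(body, "access_token").ToString()
    if refresh == "" {
        fmt.Println("failed to refresh token: refresh token is empty")
        return
    }
    fmt.Println("access:", access, "refresh:", refresh)
}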
 func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
+    b, err, _ := d.requestReturnErrResp(uri, method, callback, retry...)
+    return b, err
+}
+
+func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
     req := base.RestyClient.R()
     // TODO check whether access_token is expired
     req.SetHeader("Authorization", "Bearer "+d.AccessToken)
@@ -61,30 +68,43 @@ func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback,
     req.SetError(&e)
     res, err := req.Execute(method, d.base+uri)
     if err != nil {
-        return nil, err
+        if res != nil {
+            log.Errorf("[aliyundrive_open] request error: %s", res.String())
+        }
+        return nil, err, nil
     }
     isRetry := len(retry) > 0 && retry[0]
     if e.Code != "" {
         if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
             err = d.refreshToken()
             if err != nil {
-                return nil, err
+                return nil, err, nil
             }
-            return d.request(uri, method, callback, true)
+            return d.requestReturnErrResp(uri, method, callback, true)
         }
-        return nil, fmt.Errorf("%s:%s", e.Code, e.Message)
+        return nil, fmt.Errorf("%s:%s", e.Code, e.Message), &e
     }
-    return res.Body(), nil
+    return res.Body(), nil, nil
 }
 
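request is now a thin wrapper over requestReturnErrResp, which also hands back the parsed *ErrResp so callers can branch on an API code instead of matching error strings. The standalone sketch below simulates that calling pattern; the code value "NotFound.File" is a placeholder, not taken from the repository.

// Standalone sketch of consuming a (body, err, *ErrResp) triple. The error
// code and the simulated response are placeholders.
package main

import "fmt"

// ErrResp mirrors the shape suggested by the diff: an API error with Code/Message.
type ErrResp struct {
    Code    string
    Message string
}

// requestReturnErrResp is a stand-in that always reports a typed API error.
func requestReturnErrResp(uri string) ([]byte, error, *ErrResp) {
    e := &ErrResp{Code: "NotFound.File", Message: "file not found"} // simulated
    return nil, fmt.Errorf("%s:%s", e.Code, e.Message), e
}

func main() {
    body, err, errResp := requestReturnErrResp("/adrive/v1.0/openFile/get")
    if err != nil {
        // The typed error lets callers branch on a code instead of parsing strings.
        if errResp != nil && errResp.Code == "NotFound.File" {
            fmt.Println("treat as missing file:", errResp.Message)
            return
        }
        panic(err)
    }
    _ = body
}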
-func (d *AliyundriveOpen) getFiles(fileId string) ([]File, error) {
+func (d *AliyundriveOpen) list(ctx context.Context, data base.Json) (*Files, error) {
+    var resp Files
+    _, err := d.request("/adrive/v1.0/openFile/list", http.MethodPost, func(req *resty.Request) {
+        req.SetBody(data).SetResult(&resp)
+    })
+    if err != nil {
+        return nil, err
+    }
+    return &resp, nil
+}
+
+func (d *AliyundriveOpen) getFiles(ctx context.Context, fileId string) ([]File, error) {
     marker := "first"
     res := make([]File, 0)
     for marker != "" {
         if marker == "first" {
             marker = ""
         }
-        var resp Files
         data := base.Json{
             "drive_id": d.DriveId,
             "limit":    200,
@@ -98,9 +118,7 @@ func (d *AliyundriveOpen) getFiles(fileId string) ([]File, error) {
             //"video_thumbnail_width": 480,
             //"image_thumbnail_width": 480,
         }
-        _, err := d.request("/adrive/v1.0/openFile/list", http.MethodPost, func(req *resty.Request) {
-            req.SetBody(data).SetResult(&resp)
-        })
+        resp, err := d.limitList(ctx, data)
         if err != nil {
             return nil, err
         }
@@ -109,59 +127,3 @@ func (d *AliyundriveOpen) getFiles(fileId string) ([]File, error) {
     }
     return res, nil
 }
-
-func makePartInfos(size int) []base.Json {
-    partInfoList := make([]base.Json, size)
-    for i := 0; i < size; i++ {
-        partInfoList[i] = base.Json{"part_number": 1 + i}
-    }
-    return partInfoList
-}
-
-func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
-    partInfoList := makePartInfos(count)
-    var resp CreateResp
-    _, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
-        req.SetBody(base.Json{
-            "drive_id":       d.DriveId,
-            "file_id":        fileId,
-            "part_info_list": partInfoList,
-            "upload_id":      uploadId,
-        }).SetResult(&resp)
-    })
-    return resp.PartInfoList, err
-}
-
-func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
-    partInfo := resp.PartInfoList[i-1]
-    uploadUrl := partInfo.UploadUrl
-    if d.InternalUpload {
-        uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
-    }
-    req, err := http.NewRequest("PUT", uploadUrl, reader)
-    if err != nil {
-        return err
-    }
-    req = req.WithContext(ctx)
-    res, err := base.HttpClient.Do(req)
-    if err != nil {
-        if retry {
-            reader.Reset()
-            return d.uploadPart(ctx, i, count, reader, resp, false)
-        }
-        return err
-    }
-    res.Body.Close()
-    if retry && res.StatusCode == http.StatusForbidden {
-        resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
-        if err != nil {
-            return err
-        }
-        reader.Reset()
-        return d.uploadPart(ctx, i, count, reader, resp, false)
-    }
-    if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
-        return fmt.Errorf("upload status: %d", res.StatusCode)
-    }
-    return nil
-}
@@ -2,9 +2,11 @@ package aliyundrive_share
 
 import (
     "context"
+    "fmt"
     "net/http"
     "time"
 
+    "github.com/Xhofe/rateg"
     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/driver"
     "github.com/alist-org/alist/v3/internal/errs"
@@ -22,6 +24,9 @@ type AliyundriveShare struct {
     ShareToken string
     DriveId    string
     cron       *cron.Cron
+
+    limitList func(ctx context.Context, dir model.Obj) ([]model.Obj, error)
+    limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
 }
 
 func (d *AliyundriveShare) Config() driver.Config {
@@ -48,6 +53,14 @@ func (d *AliyundriveShare) Init(ctx context.Context) error {
             log.Errorf("%+v", err)
         }
     })
+    d.limitList = rateg.LimitFnCtx(d.list, rateg.LimitFnOption{
+        Limit:  4,
+        Bucket: 1,
+    })
+    d.limitLink = rateg.LimitFnCtx(d.link, rateg.LimitFnOption{
+        Limit:  1,
+        Bucket: 1,
+    })
     return nil
 }
 
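Init wraps the raw list and link calls with rateg.LimitFnCtx so listing is capped at 4 calls per second and link resolution at 1, each with a bucket of 1. For readers who do not know rateg, the sketch below reproduces the same shape with golang.org/x/time/rate as a stand-in; it is not the rateg implementation, just the same idea of wrapping a function so every call waits for a token first.

// Stand-in sketch of a rate-limited function wrapper using golang.org/x/time/rate.
package main

import (
    "context"
    "fmt"

    "golang.org/x/time/rate"
)

// limitFnCtx throttles fn to `limit` calls per second with the given burst,
// mirroring the shape of rateg.LimitFnCtx(fn, rateg.LimitFnOption{...}).
func limitFnCtx[A, R any](fn func(context.Context, A) (R, error), limit float64, burst int) func(context.Context, A) (R, error) {
    l := rate.NewLimiter(rate.Limit(limit), burst)
    return func(ctx context.Context, arg A) (R, error) {
        // Block until a token is available or the context is cancelled.
        if err := l.Wait(ctx); err != nil {
            var zero R
            return zero, err
        }
        return fn(ctx, arg)
    }
}

func main() {
    list := func(ctx context.Context, dir string) ([]string, error) {
        return []string{dir + "/a", dir + "/b"}, nil
    }
    limited := limitFnCtx(list, 4, 1) // at most 4 calls per second, burst 1
    files, err := limited(context.Background(), "/root")
    if err != nil {
        panic(err)
    }
    fmt.Println(files)
}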
@@ -60,6 +73,13 @@ func (d *AliyundriveShare) Drop(ctx context.Context) error {
 }
 
 func (d *AliyundriveShare) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+    if d.limitList == nil {
+        return nil, fmt.Errorf("driver not init")
+    }
+    return d.limitList(ctx, dir)
+}
+
+func (d *AliyundriveShare) list(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
     files, err := d.getFiles(dir.GetID())
     if err != nil {
         return nil, err
@@ -70,6 +90,13 @@ func (d *AliyundriveShare) List(ctx context.Context, dir model.Obj, args model.L
 }
 
 func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+    if d.limitLink == nil {
+        return nil, fmt.Errorf("driver not init")
+    }
+    return d.limitLink(ctx, file)
+}
+
+func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Link, error) {
     data := base.Json{
         "drive_id": d.DriveId,
         "file_id":  file.GetID(),
@@ -79,7 +106,7 @@ func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.
     }
     var resp ShareLinkResp
     _, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
-        req.SetBody(data).SetResult(&resp)
+        req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
     })
     if err != nil {
         return nil, err
@@ -9,6 +9,12 @@ import (
     log "github.com/sirupsen/logrus"
 )
 
+const (
+    // CanaryHeaderKey CanaryHeaderValue for lifting rate limit restrictions
+    CanaryHeaderKey   = "X-Canary"
+    CanaryHeaderValue = "client=web,app=share,version=v2.3.1"
+)
+
 func (d *AliyundriveShare) refreshToken() error {
     url := "https://auth.aliyundrive.com/v2/account/token"
     var resp base.TokenResp
@@ -58,6 +64,7 @@ func (d *AliyundriveShare) request(url, method string, callback base.ReqCallback
         SetError(&e).
         SetHeader("content-type", "application/json").
         SetHeader("Authorization", "Bearer\t"+d.AccessToken).
+        SetHeader(CanaryHeaderKey, CanaryHeaderValue).
         SetHeader("x-share-token", d.ShareToken)
     if callback != nil {
         callback(req)
@@ -91,7 +98,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
     data := base.Json{
         "image_thumbnail_process": "image/resize,w_160/format,jpeg",
         "image_url_process":       "image/resize,w_1920/format,jpeg",
-        "limit":                   100,
+        "limit":                   200,
         "order_by":                d.OrderBy,
         "order_direction":         d.OrderDirection,
         "parent_file_id":          fileId,
@@ -107,6 +114,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
     var resp ListResp
     res, err := base.RestyClient.R().
         SetHeader("x-share-token", d.ShareToken).
+        SetHeader(CanaryHeaderKey, CanaryHeaderValue).
         SetResult(&resp).SetError(&e).SetBody(data).
         Post("https://api.aliyundrive.com/adrive/v3/file/list")
     if err != nil {
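The X-Canary header added above mimics the official web client and, per the comment, lifts rate-limit restrictions on the share endpoints. A minimal standalone request carrying both the canary and share-token headers would look like the sketch below; the URL comes from the diff, the token is a placeholder.

// Tiny sketch: attaching the canary and share-token headers with plain net/http.
package main

import (
    "fmt"
    "net/http"
)

func newShareListRequest(shareToken string) (*http.Request, error) {
    req, err := http.NewRequest(http.MethodPost, "https://api.aliyundrive.com/adrive/v3/file/list", nil)
    if err != nil {
        return nil, err
    }
    req.Header.Set("X-Canary", "client=web,app=share,version=v2.3.1")
    req.Header.Set("x-share-token", shareToken)
    return req, nil
}

func main() {
    req, err := newShareListRequest("share-token-placeholder")
    if err != nil {
        panic(err)
    }
    fmt.Println(req.Header.Get("X-Canary"))
}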
@@ -16,6 +16,7 @@ import (
     _ "github.com/alist-org/alist/v3/drivers/baidu_photo"
     _ "github.com/alist-org/alist/v3/drivers/baidu_share"
     _ "github.com/alist-org/alist/v3/drivers/cloudreve"
+    _ "github.com/alist-org/alist/v3/drivers/dropbox"
     _ "github.com/alist-org/alist/v3/drivers/ftp"
     _ "github.com/alist-org/alist/v3/drivers/google_drive"
     _ "github.com/alist-org/alist/v3/drivers/google_photo"
@@ -24,11 +25,12 @@ import (
     _ "github.com/alist-org/alist/v3/drivers/local"
     _ "github.com/alist-org/alist/v3/drivers/mediatrack"
     _ "github.com/alist-org/alist/v3/drivers/mega"
+    _ "github.com/alist-org/alist/v3/drivers/mopan"
     _ "github.com/alist-org/alist/v3/drivers/onedrive"
     _ "github.com/alist-org/alist/v3/drivers/onedrive_app"
     _ "github.com/alist-org/alist/v3/drivers/pikpak"
     _ "github.com/alist-org/alist/v3/drivers/pikpak_share"
-    _ "github.com/alist-org/alist/v3/drivers/quark"
+    _ "github.com/alist-org/alist/v3/drivers/quark_uc"
     _ "github.com/alist-org/alist/v3/drivers/s3"
     _ "github.com/alist-org/alist/v3/drivers/seafile"
     _ "github.com/alist-org/alist/v3/drivers/sftp"
@@ -41,6 +43,7 @@ import (
     _ "github.com/alist-org/alist/v3/drivers/uss"
     _ "github.com/alist-org/alist/v3/drivers/virtual"
     _ "github.com/alist-org/alist/v3/drivers/webdav"
+    _ "github.com/alist-org/alist/v3/drivers/wopan"
     _ "github.com/alist-org/alist/v3/drivers/yandex_disk"
 )
 
@@ -1,7 +1,6 @@
 package baidu_netdisk
 
 import (
-    "bytes"
     "context"
     "crypto/md5"
     "encoding/hex"
@@ -23,7 +22,6 @@ import (
 type BaiduNetdisk struct {
     model.Storage
     Addition
-    AccessToken string
 }
 
 func (d *BaiduNetdisk) Config() driver.Config {
@@ -35,7 +33,11 @@ func (d *BaiduNetdisk) GetAddition() driver.Additional {
 }
 
 func (d *BaiduNetdisk) Init(ctx context.Context) error {
-    return d.refreshToken()
+    res, err := d.get("/xpan/nas", map[string]string{
+        "method": "uinfo",
+    }, nil)
+    log.Debugf("[baidu] get uinfo: %s", string(res))
+    return err
 }
 
 func (d *BaiduNetdisk) Drop(ctx context.Context) error {
@@ -115,7 +117,6 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
         _ = os.Remove(tempFile.Name())
     }()
     var Default int64 = 4 * 1024 * 1024
-    defaultByteData := make([]byte, Default)
     count := int(math.Ceil(float64(stream.GetSize()) / float64(Default)))
     var SliceSize int64 = 256 * 1024
     // cal md5
@@ -127,20 +128,14 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
     left := stream.GetSize()
     for i := 0; i < count; i++ {
         byteSize := Default
-        var byteData []byte
         if left < Default {
             byteSize = left
-            byteData = make([]byte, byteSize)
-        } else {
-            byteData = defaultByteData
         }
         left -= byteSize
-        _, err = io.ReadFull(tempFile, byteData)
+        _, err = io.Copy(io.MultiWriter(h1, h2), io.LimitReader(tempFile, byteSize))
         if err != nil {
             return err
         }
-        h1.Write(byteData)
-        h2.Write(byteData)
         block_list = append(block_list, fmt.Sprintf("\"%s\"", hex.EncodeToString(h2.Sum(nil))))
         h2.Reset()
     }
@@ -174,6 +169,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
     params := map[string]string{
         "method": "precreate",
     }
+    log.Debugf("[baidu_netdisk] precreate data: %s", data)
     var precreateResp PrecreateResp
     _, err = d.post("/xpan/file", params, data, &precreateResp)
     if err != nil {
@@ -196,24 +192,16 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
             return ctx.Err()
         }
         byteSize := Default
-        var byteData []byte
         if left < Default {
             byteSize = left
-            byteData = make([]byte, byteSize)
-        } else {
-            byteData = defaultByteData
         }
         left -= byteSize
-        _, err = io.ReadFull(tempFile, byteData)
-        if err != nil {
-            return err
-        }
         u := "https://d.pcs.baidu.com/rest/2.0/pcs/superfile2"
         params["partseq"] = strconv.Itoa(partseq)
         res, err := base.RestyClient.R().
             SetContext(ctx).
             SetQueryParams(params).
-            SetFileReader("file", stream.GetName(), bytes.NewReader(byteData)).
+            SetFileReader("file", stream.GetName(), io.LimitReader(tempFile, byteSize)).
             Post(u)
         if err != nil {
             return err
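The Put rewrite drops the preallocated slice buffers: each 4 MiB slice is streamed through io.LimitReader into an io.MultiWriter feeding both the whole-file MD5 (h1) and the per-slice MD5 (h2), and the same LimitReader trick later feeds the superfile2 upload directly from the temp file. The standalone sketch below shows just the hashing pattern; the input file name is a placeholder.

// Standalone sketch of per-slice hashing with io.LimitReader + io.MultiWriter.
package main

import (
    "crypto/md5"
    "encoding/hex"
    "fmt"
    "io"
    "math"
    "os"
)

func main() {
    const sliceSize = 4 * 1024 * 1024 // matches the 4 MiB slice in the diff
    f, err := os.Open("example.bin")  // placeholder input file
    if err != nil {
        panic(err)
    }
    defer f.Close()
    st, err := f.Stat()
    if err != nil {
        panic(err)
    }
    count := int(math.Ceil(float64(st.Size()) / float64(sliceSize)))

    h1, h2 := md5.New(), md5.New() // whole-file and per-slice digests
    blockList := make([]string, 0, count)
    left := st.Size()
    for i := 0; i < count; i++ {
        byteSize := int64(sliceSize)
        if left < byteSize {
            byteSize = left
        }
        left -= byteSize
        // Stream one slice into both hashes without an intermediate buffer.
        if _, err := io.Copy(io.MultiWriter(h1, h2), io.LimitReader(f, byteSize)); err != nil {
            panic(err)
        }
        blockList = append(blockList, hex.EncodeToString(h2.Sum(nil)))
        h2.Reset()
    }
    fmt.Println("content-md5:", hex.EncodeToString(h1.Sum(nil)))
    fmt.Println("block_list:", blockList)
}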
@@ -13,6 +13,8 @@ type Addition struct {
     DownloadAPI  string `json:"download_api" type:"select" options:"official,crack" default:"official"`
     ClientID     string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
     ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
+    CustomCrackUA string `json:"custom_crack_ua" required:"true" default:"netdisk"`
+    AccessToken   string
 }
 
 var config = driver.Config{
@@ -13,6 +13,7 @@ import (
     "github.com/alist-org/alist/v3/internal/op"
     "github.com/alist-org/alist/v3/pkg/utils"
     "github.com/go-resty/resty/v2"
+    log "github.com/sirupsen/logrus"
 )
 
 // do others that not defined in Driver interface
@@ -62,16 +63,17 @@ func (d *BaiduNetdisk) request(furl string, method string, callback base.ReqCall
     if err != nil {
         return nil, err
     }
+    log.Debugf("[baidu_netdisk] req: %s, resp: %s", furl, res.String())
     errno := utils.Json.Get(res.Body(), "errno").ToInt()
     if errno != 0 {
-        if errno == -6 {
+        if utils.SliceContains([]int{111, -6}, errno) {
             err = d.refreshToken()
             if err != nil {
                 return nil, err
             }
             return d.request(furl, method, callback, resp)
         }
-        return nil, fmt.Errorf("errno: %d, refer to https://pan.baidu.com/union/doc/", errno)
+        return nil, fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
     }
     return res.Body(), nil
 }
@@ -154,7 +156,7 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
         "target": fmt.Sprintf("[\"%s\"]", file.GetPath()),
         "dlink":  "1",
         "web":    "5",
-        //"origin": "dlna",
+        "origin": "dlna",
     }
     _, err := d.request("https://pan.baidu.com/api/filemetas", http.MethodGet, func(req *resty.Request) {
         req.SetQueryParams(param)
@@ -165,7 +167,7 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
     return &model.Link{
         URL: resp.Info[0].Dlink,
         Header: http.Header{
-            "User-Agent": []string{"netdisk"},
+            "User-Agent": []string{d.CustomCrackUA},
         },
     }, nil
 }
@@ -4,7 +4,6 @@ import (
     "fmt"
     "math"
     "math/rand"
-    "regexp"
     "strings"
     "time"
 
@@ -16,11 +15,6 @@ func getTid() string {
     return fmt.Sprintf("3%d%.0f", time.Now().Unix(), math.Floor(9000000*rand.Float64()+1000000))
 }
-
-// 检查名称 (check the name)
-func checkName(name string) bool {
-    return len(name) <= 50 && regexp.MustCompile("[\u4e00-\u9fa5A-Za-z0-9_-]").MatchString(name)
-}
 
 func toTime(t int64) *time.Time {
     tm := time.Unix(t, 0)
     return &tm
@@ -2,7 +2,6 @@ package baiduphoto
 
 import (
     "context"
-    "errors"
     "fmt"
     "net/http"
 
@@ -22,10 +21,6 @@ const (
     FILE_API_URL_V2 = API_URL + "/file/v2"
 )
-
-var (
-    ErrNotSupportName = errors.New("only chinese and english, numbers and underscores are supported, and the length is no more than 50")
-)
 
 func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
     req := base.RestyClient.R().
         SetQueryParam("access_token", d.AccessToken)
@@ -48,6 +43,8 @@ func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallba
         return nil, fmt.Errorf("you have joined album")
     case 50820:
         return nil, fmt.Errorf("no shared albums found")
+    case 50100:
+        return nil, fmt.Errorf("illegal title, only supports 50 characters")
     case -6:
         if err = d.refreshToken(); err != nil {
             return nil, err
@@ -188,9 +185,6 @@ func (d *BaiduPhoto) GetAllAlbumFile(ctx context.Context, album *Album, passwd s
 
 // 创建相册 (create an album)
 func (d *BaiduPhoto) CreateAlbum(ctx context.Context, name string) (*Album, error) {
-    if !checkName(name) {
-        return nil, ErrNotSupportName
-    }
     var resp JoinOrCreateAlbumResp
     _, err := d.Post(ALBUM_API_URL+"/create", func(r *resty.Request) {
         r.SetContext(ctx).SetResult(&resp)
@@ -208,10 +202,6 @@ func (d *BaiduPhoto) CreateAlbum(ctx context.Context, name string) (*Album, erro
 
 // 相册改名 (rename an album)
 func (d *BaiduPhoto) SetAlbumName(ctx context.Context, album *Album, name string) (*Album, error) {
-    if !checkName(name) {
-        return nil, ErrNotSupportName
-    }
-
     _, err := d.Post(ALBUM_API_URL+"/settitle", func(r *resty.Request) {
         r.SetContext(ctx)
         r.SetFormData(map[string]string{
@@ -40,8 +40,9 @@ func NewRestyClient() *resty.Client {
 
 func NewHttpClient() *http.Client {
     return &http.Client{
-        Timeout: DefaultTimeout,
+        Timeout: time.Hour * 48,
         Transport: &http.Transport{
+            Proxy:           http.ProxyFromEnvironment,
             TLSClientConfig: &tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify},
         },
     }
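The shared HTTP client now honors proxy environment variables via http.ProxyFromEnvironment and stretches its overall timeout to 48 hours so long streaming transfers are not cut off. A minimal standalone version, with a plain bool standing in for the conf.Conf.TlsInsecureSkipVerify setting, is sketched below.

// Minimal sketch of the reworked HTTP client construction.
package main

import (
    "crypto/tls"
    "net/http"
    "time"
)

func newHTTPClient(insecureSkipVerify bool) *http.Client {
    return &http.Client{
        // Large uploads/downloads can run for hours, so the overall timeout is generous.
        Timeout: 48 * time.Hour,
        Transport: &http.Transport{
            // Respect HTTP_PROXY / HTTPS_PROXY / NO_PROXY.
            Proxy:           http.ProxyFromEnvironment,
            TLSClientConfig: &tls.Config{InsecureSkipVerify: insecureSkipVerify},
        },
    }
}

func main() {
    _ = newHTTPClient(false)
}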
@@ -5,6 +5,7 @@ import (
     "io"
     "net/http"
     "strconv"
+    "strings"
 
     "github.com/alist-org/alist/v3/drivers/base"
     "github.com/alist-org/alist/v3/internal/driver"
@@ -30,6 +31,8 @@ func (d *Cloudreve) Init(ctx context.Context) error {
     if d.Cookie != "" {
         return nil
     }
+    // removing trailing slash
+    d.Address = strings.TrimSuffix(d.Address, "/")
     return d.login()
 }
 
@@ -57,7 +57,7 @@ func (d *Cloudreve) request(method string, path string, callback base.ReqCallbac
             return d.request(method, path, callback, out)
         }
     }
 
     return errors.New(r.Msg)
 }
 sess := cookie.GetCookie(resp.Cookies(), "cloudreve-session")
drivers/dropbox/driver.go (new file, 222 lines)
@@ -0,0 +1,222 @@
|
|||||||
|
package dropbox
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"math"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Dropbox struct {
|
||||||
|
model.Storage
|
||||||
|
Addition
|
||||||
|
base string
|
||||||
|
contentBase string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Config() driver.Config {
|
||||||
|
return config
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) GetAddition() driver.Additional {
|
||||||
|
return &d.Addition
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Init(ctx context.Context) error {
|
||||||
|
query := "foo"
|
||||||
|
res, err := d.request("/2/check/user", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetBody(base.Json{
|
||||||
|
"query": query,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
result := utils.Json.Get(res, "result").ToString()
|
||||||
|
if result != query {
|
||||||
|
return fmt.Errorf("failed to check user: %s", string(res))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Drop(ctx context.Context) error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||||
|
files, err := d.getFiles(ctx, dir.GetPath())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||||
|
return fileToObj(src), nil
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||||
|
res, err := d.request("/2/files/get_temporary_link", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(base.Json{
|
||||||
|
"path": file.GetPath(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
url := utils.Json.Get(res, "link").ToString()
|
||||||
|
exp := time.Hour
|
||||||
|
return &model.Link{
|
||||||
|
URL: url,
|
||||||
|
Expiration: &exp,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||||
|
_, err := d.request("/2/files/create_folder_v2", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(base.Json{
|
||||||
|
"autorename": false,
|
||||||
|
"path": parentDir.GetPath() + "/" + dirName,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
toPath := dstDir.GetPath() + "/" + srcObj.GetName()
|
||||||
|
|
||||||
|
_, err := d.request("/2/files/move_v2", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(base.Json{
|
||||||
|
"allow_ownership_transfer": false,
|
||||||
|
"allow_shared_folder": false,
|
||||||
|
"autorename": false,
|
||||||
|
"from_path": srcObj.GetID(),
|
||||||
|
"to_path": toPath,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||||
|
path := srcObj.GetPath()
|
||||||
|
fileName := srcObj.GetName()
|
||||||
|
toPath := path[:len(path)-len(fileName)] + newName
|
||||||
|
|
||||||
|
_, err := d.request("/2/files/move_v2", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(base.Json{
|
||||||
|
"allow_ownership_transfer": false,
|
||||||
|
"allow_shared_folder": false,
|
||||||
|
"autorename": false,
|
||||||
|
"from_path": srcObj.GetID(),
|
||||||
|
"to_path": toPath,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||||
|
toPath := dstDir.GetPath() + "/" + srcObj.GetName()
|
||||||
|
_, err := d.request("/2/files/copy_v2", http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(base.Json{
|
||||||
|
"allow_ownership_transfer": false,
|
||||||
|
"allow_shared_folder": false,
|
||||||
|
"autorename": false,
|
||||||
|
"from_path": srcObj.GetID(),
|
||||||
|
"to_path": toPath,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Remove(ctx context.Context, obj model.Obj) error {
|
||||||
|
uri := "/2/files/delete_v2"
|
||||||
|
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(base.Json{
|
||||||
|
"path": obj.GetID(),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||||
|
// 1. start
|
||||||
|
sessionId, err := d.startUploadSession(ctx)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2.append
|
||||||
|
// A single request should not upload more than 150 MB, and each call must be multiple of 4MB (except for last call)
|
||||||
|
const PartSize = 20971520
|
||||||
|
count := 1
|
||||||
|
if stream.GetSize() > PartSize {
|
||||||
|
count = int(math.Ceil(float64(stream.GetSize()) / float64(PartSize)))
|
||||||
|
}
|
||||||
|
offset := int64(0)
|
||||||
|
|
||||||
|
for i := 0; i < count; i++ {
|
||||||
|
if utils.IsCanceled(ctx) {
|
||||||
|
return ctx.Err()
|
||||||
|
}
|
||||||
|
|
||||||
|
start := i * PartSize
|
||||||
|
byteSize := stream.GetSize() - int64(start)
|
||||||
|
if byteSize > PartSize {
|
||||||
|
byteSize = PartSize
|
||||||
|
}
|
||||||
|
|
||||||
|
url := d.contentBase + "/2/files/upload_session/append_v2"
|
||||||
|
reader := io.LimitReader(stream, PartSize)
|
||||||
|
req, err := http.NewRequest(http.MethodPost, url, reader)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("failed to update file when append to upload session, err: %+v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
req = req.WithContext(ctx)
|
||||||
|
req.Header.Set("Content-Type", "application/octet-stream")
|
||||||
|
req.Header.Set("Authorization", "Bearer "+d.AccessToken)
|
||||||
|
|
||||||
|
args := UploadAppendArgs{
|
||||||
|
Close: false,
|
||||||
|
Cursor: UploadCursor{
|
||||||
|
Offset: offset,
|
||||||
|
SessionID: sessionId,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
argsJson, err := utils.Json.MarshalToString(args)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
req.Header.Set("Dropbox-API-Arg", argsJson)
|
||||||
|
|
||||||
|
res, err := base.HttpClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_ = res.Body.Close()
|
||||||
|
|
||||||
|
if count > 0 {
|
||||||
|
up((i + 1) * 100 / count)
|
||||||
|
}
|
||||||
|
|
||||||
|
offset += byteSize
|
||||||
|
|
||||||
|
}
|
||||||
|
// 3.finish
|
||||||
|
toPath := dstDir.GetPath() + "/" + stream.GetName()
|
||||||
|
err2 := d.finishUploadSession(ctx, toPath, offset, sessionId)
|
||||||
|
if err2 != nil {
|
||||||
|
return err2
|
||||||
|
}
|
||||||
|
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ driver.Driver = (*Dropbox)(nil)
|
42
drivers/dropbox/meta.go
Normal file
42
drivers/dropbox/meta.go
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
package dropbox
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/driver"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
DefaultClientID = "76lrwrklhdn1icb"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Addition struct {
|
||||||
|
RefreshToken string `json:"refresh_token" required:"true"`
|
||||||
|
driver.RootPath
|
||||||
|
|
||||||
|
OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/dropbox/token"`
|
||||||
|
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
|
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
|
|
||||||
|
AccessToken string
|
||||||
|
}
|
||||||
|
|
||||||
|
var config = driver.Config{
|
||||||
|
Name: "Dropbox",
|
||||||
|
LocalSort: false,
|
||||||
|
OnlyLocal: false,
|
||||||
|
OnlyProxy: false,
|
||||||
|
NoCache: false,
|
||||||
|
NoUpload: false,
|
||||||
|
NeedMs: false,
|
||||||
|
DefaultRoot: "",
|
||||||
|
NoOverwriteUpload: true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
op.RegisterDriver(func() driver.Driver {
|
||||||
|
return &Dropbox{
|
||||||
|
base: "https://api.dropboxapi.com",
|
||||||
|
contentBase: "https://content.dropboxapi.com",
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
79
drivers/dropbox/types.go
Normal file
79
drivers/dropbox/types.go
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
package dropbox
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/alist-org/alist/v3/internal/model"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type TokenResp struct {
|
||||||
|
AccessToken string `json:"access_token"`
|
||||||
|
TokenType string `json:"token_type"`
|
||||||
|
ExpiresIn int `json:"expires_in"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ErrorResp struct {
|
||||||
|
Error struct {
|
||||||
|
Tag string `json:".tag"`
|
||||||
|
} `json:"error"`
|
||||||
|
ErrorSummary string `json:"error_summary"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type RefreshTokenErrorResp struct {
|
||||||
|
Error string `json:"error"`
|
||||||
|
ErrorDescription string `json:"error_description"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type File struct {
|
||||||
|
Tag string `json:".tag"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
PathLower string `json:"path_lower"`
|
||||||
|
PathDisplay string `json:"path_display"`
|
||||||
|
ID string `json:"id"`
|
||||||
|
ClientModified time.Time `json:"client_modified"`
|
||||||
|
ServerModified time.Time `json:"server_modified"`
|
||||||
|
Rev string `json:"rev"`
|
||||||
|
Size int `json:"size"`
|
||||||
|
IsDownloadable bool `json:"is_downloadable"`
|
||||||
|
ContentHash string `json:"content_hash"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type ListResp struct {
|
||||||
|
Entries []File `json:"entries"`
|
||||||
|
Cursor string `json:"cursor"`
|
||||||
|
HasMore bool `json:"has_more"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadCursor struct {
|
||||||
|
Offset int64 `json:"offset"`
|
||||||
|
SessionID string `json:"session_id"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadAppendArgs struct {
|
||||||
|
Close bool `json:"close"`
|
||||||
|
Cursor UploadCursor `json:"cursor"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type UploadFinishArgs struct {
|
||||||
|
Commit struct {
|
||||||
|
Autorename bool `json:"autorename"`
|
||||||
|
Mode string `json:"mode"`
|
||||||
|
Mute bool `json:"mute"`
|
||||||
|
Path string `json:"path"`
|
||||||
|
StrictConflict bool `json:"strict_conflict"`
|
||||||
|
} `json:"commit"`
|
||||||
|
Cursor UploadCursor `json:"cursor"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func fileToObj(f File) *model.ObjThumb {
|
||||||
|
return &model.ObjThumb{
|
||||||
|
Object: model.Object{
|
||||||
|
ID: f.ID,
|
||||||
|
Path: f.PathDisplay,
|
||||||
|
Name: f.Name,
|
||||||
|
Size: int64(f.Size),
|
||||||
|
Modified: f.ServerModified,
|
||||||
|
IsFolder: f.Tag == "folder",
|
||||||
|
},
|
||||||
|
Thumbnail: model.Thumbnail{},
|
||||||
|
}
|
||||||
|
}
|
199
drivers/dropbox/util.go
Normal file
199
drivers/dropbox/util.go
Normal file
@ -0,0 +1,199 @@
|
|||||||
|
package dropbox
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/alist-org/alist/v3/drivers/base"
|
||||||
|
"github.com/alist-org/alist/v3/internal/op"
|
||||||
|
"github.com/alist-org/alist/v3/pkg/utils"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (d *Dropbox) refreshToken() error {
|
||||||
|
url := d.base + "/oauth2/token"
|
||||||
|
if utils.SliceContains([]string{"", DefaultClientID}, d.ClientID) {
|
||||||
|
url = d.OauthTokenURL
|
||||||
|
}
|
||||||
|
var tokenResp TokenResp
|
||||||
|
resp, err := base.RestyClient.R().
|
||||||
|
//ForceContentType("application/x-www-form-urlencoded").
|
||||||
|
//SetBasicAuth(d.ClientID, d.ClientSecret).
|
||||||
|
SetFormData(map[string]string{
|
||||||
|
"grant_type": "refresh_token",
|
||||||
|
"refresh_token": d.RefreshToken,
|
||||||
|
"client_id": d.ClientID,
|
||||||
|
"client_secret": d.ClientSecret,
|
||||||
|
}).
|
||||||
|
Post(url)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
log.Debugf("[dropbox] refresh token response: %s", resp.String())
|
||||||
|
if resp.StatusCode() != 200 {
|
||||||
|
return fmt.Errorf("failed to refresh token: %s", resp.String())
|
||||||
|
}
|
||||||
|
_ = utils.Json.UnmarshalFromString(resp.String(), &tokenResp)
|
||||||
|
d.AccessToken = tokenResp.AccessToken
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||||
|
req := base.RestyClient.R()
|
||||||
|
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||||
|
if method == http.MethodPost {
|
||||||
|
req.SetHeader("Content-Type", "application/json")
|
||||||
|
}
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
|
var e ErrorResp
|
||||||
|
req.SetError(&e)
|
||||||
|
res, err := req.Execute(method, d.base+uri)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
log.Debugf("[dropbox] request (%s) response: %s", uri, res.String())
|
||||||
|
isRetry := len(retry) > 0 && retry[0]
|
||||||
|
if res.StatusCode() != 200 {
|
||||||
|
body := res.String()
|
||||||
|
if !isRetry && (utils.SliceMeet([]string{"expired_access_token", "invalid_access_token", "authorization"}, body,
|
||||||
|
func(item string, v string) bool {
|
||||||
|
return strings.Contains(v, item)
|
||||||
|
}) || d.AccessToken == "") {
|
||||||
|
err = d.refreshToken()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return d.request(uri, method, callback, true)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf("%s:%s", e.Error, e.ErrorSummary)
|
||||||
|
}
|
||||||
|
return res.Body(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) list(ctx context.Context, data base.Json, isContinue bool) (*ListResp, error) {
|
||||||
|
var resp ListResp
|
||||||
|
uri := "/2/files/list_folder"
|
||||||
|
if isContinue {
|
||||||
|
uri += "/continue"
|
||||||
|
}
|
||||||
|
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
|
||||||
|
req.SetContext(ctx).SetBody(data).SetResult(&resp)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &resp, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) getFiles(ctx context.Context, path string) ([]File, error) {
|
||||||
|
hasMore := true
|
||||||
|
var marker string
|
||||||
|
res := make([]File, 0)
|
||||||
|
|
||||||
|
data := base.Json{
|
||||||
|
"include_deleted": false,
|
||||||
|
"include_has_explicit_shared_members": false,
|
||||||
|
"include_mounted_folders": false,
|
||||||
|
"include_non_downloadable_files": false,
|
||||||
|
"limit": 2000,
|
||||||
|
"path": path,
|
||||||
|
"recursive": false,
|
||||||
|
}
|
||||||
|
resp, err := d.list(ctx, data, false)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
marker = resp.Cursor
|
||||||
|
hasMore = resp.HasMore
|
||||||
|
res = append(res, resp.Entries...)
|
||||||
|
|
||||||
|
for hasMore {
|
||||||
|
data := base.Json{
|
||||||
|
"cursor": marker,
|
||||||
|
}
|
||||||
|
resp, err := d.list(ctx, data, true)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
marker = resp.Cursor
|
||||||
|
hasMore = resp.HasMore
|
||||||
|
res = append(res, resp.Entries...)
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) finishUploadSession(ctx context.Context, toPath string, offset int64, sessionId string) error {
|
||||||
|
url := d.contentBase + "/2/files/upload_session/finish"
|
||||||
|
req, err := http.NewRequest(http.MethodPost, url, nil)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
req = req.WithContext(ctx)
|
||||||
|
req.Header.Set("Content-Type", "application/octet-stream")
|
||||||
|
req.Header.Set("Authorization", "Bearer "+d.AccessToken)
|
||||||
|
|
||||||
|
uploadFinishArgs := UploadFinishArgs{
|
||||||
|
Commit: struct {
|
||||||
|
Autorename bool `json:"autorename"`
|
||||||
|
Mode string `json:"mode"`
|
||||||
|
Mute bool `json:"mute"`
|
||||||
|
Path string `json:"path"`
|
||||||
|
StrictConflict bool `json:"strict_conflict"`
|
||||||
|
}{
|
||||||
|
Autorename: true,
|
||||||
|
Mode: "add",
|
||||||
|
Mute: false,
|
||||||
|
Path: toPath,
|
||||||
|
StrictConflict: false,
|
||||||
|
},
|
||||||
|
Cursor: UploadCursor{
|
||||||
|
Offset: offset,
|
||||||
|
SessionID: sessionId,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
argsJson, err := utils.Json.MarshalToString(uploadFinishArgs)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
req.Header.Set("Dropbox-API-Arg", argsJson)
|
||||||
|
|
||||||
|
res, err := base.HttpClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("failed to update file when finish session, err: %+v", err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
_ = res.Body.Close()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (d *Dropbox) startUploadSession(ctx context.Context) (string, error) {
|
||||||
|
url := d.contentBase + "/2/files/upload_session/start"
|
||||||
|
req, err := http.NewRequest(http.MethodPost, url, nil)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
req = req.WithContext(ctx)
|
||||||
|
req.Header.Set("Content-Type", "application/octet-stream")
|
||||||
|
req.Header.Set("Authorization", "Bearer "+d.AccessToken)
|
||||||
|
req.Header.Set("Dropbox-API-Arg", "{\"close\":false}")
|
||||||
|
|
||||||
|
res, err := base.HttpClient.Do(req)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("failed to update file when start session, err: %+v", err)
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
body, err := io.ReadAll(res.Body)
|
||||||
|
sessionId := utils.Json.Get(body, "session_id").ToString()
|
||||||
|
|
||||||
|
_ = res.Body.Close()
|
||||||
|
return sessionId, nil
|
||||||
|
}
|
@ -18,6 +18,7 @@ type LanZou struct {
|
|||||||
Addition
|
Addition
|
||||||
model.Storage
|
model.Storage
|
||||||
uid string
|
uid string
|
||||||
|
vei string
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *LanZou) Config() driver.Config {
|
func (d *LanZou) Config() driver.Config {
|
||||||
@ -28,7 +29,7 @@ func (d *LanZou) GetAddition() driver.Additional {
|
|||||||
return &d.Addition
|
return &d.Addition
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *LanZou) Init(ctx context.Context) error {
|
func (d *LanZou) Init(ctx context.Context) (err error) {
|
||||||
if d.IsCookie() {
|
if d.IsCookie() {
|
||||||
if d.RootFolderID == "" {
|
if d.RootFolderID == "" {
|
||||||
d.RootFolderID = "-1"
|
d.RootFolderID = "-1"
|
||||||
@ -38,8 +39,9 @@ func (d *LanZou) Init(ctx context.Context) error {
|
|||||||
return fmt.Errorf("cookie does not contain ylogin")
|
return fmt.Errorf("cookie does not contain ylogin")
|
||||||
}
|
}
|
||||||
d.uid = ylogin[1]
|
d.uid = ylogin[1]
|
||||||
|
d.vei, err = d.getVei()
|
||||||
}
|
}
|
||||||
return nil
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *LanZou) Drop(ctx context.Context) error {
|
func (d *LanZou) Drop(ctx context.Context) error {
|
||||||
|
@ -22,12 +22,17 @@ var once sync.Once
|
|||||||
|
|
||||||
func (d *LanZou) doupload(callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
func (d *LanZou) doupload(callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||||
return d.post(d.BaseUrl+"/doupload.php", func(req *resty.Request) {
|
return d.post(d.BaseUrl+"/doupload.php", func(req *resty.Request) {
|
||||||
req.SetQueryParam("uid", d.uid)
|
req.SetQueryParams(map[string]string{
|
||||||
callback(req)
|
"uid": d.uid,
|
||||||
|
"vei": d.vei,
|
||||||
|
})
|
||||||
|
if callback != nil {
|
||||||
|
callback(req)
|
||||||
|
}
|
||||||
}, resp)
|
}, resp)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (d *LanZou) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
func (d *LanZou) get(url string, callback base.ReqCallback) ([]byte, error) {
|
||||||
return d.request(url, http.MethodGet, callback, false)
|
return d.request(url, http.MethodGet, callback, false)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -224,7 +229,7 @@ func (d *LanZou) getShareUrlHtml(shareID string) (string, error) {
|
|||||||
Value: vs,
|
Value: vs,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}, nil)
|
})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
}
|
}
|
||||||
@ -315,7 +320,7 @@ func (d *LanZou) getFilesByShareUrl(shareID, pwd string, sharePageData string) (
|
|||||||
log.Errorf("lanzou: err => not find file page param ,data => %s\n", sharePageData)
|
log.Errorf("lanzou: err => not find file page param ,data => %s\n", sharePageData)
|
||||||
return nil, fmt.Errorf("not find file page param")
|
return nil, fmt.Errorf("not find file page param")
|
||||||
}
|
}
|
||||||
data, err := d.get(fmt.Sprint(d.ShareUrl, urlpaths[1]), nil, nil)
|
data, err := d.get(fmt.Sprint(d.ShareUrl, urlpaths[1]), nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -445,3 +450,22 @@ func (d *LanZou) getFileRealInfo(downURL string) (*int64, *time.Time) {
|
|||||||
size, _ := strconv.ParseInt(res.Header().Get("Content-Length"), 10, 64)
|
size, _ := strconv.ParseInt(res.Header().Get("Content-Length"), 10, 64)
|
||||||
return &size, &time
|
return &size, &time
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (d *LanZou) getVei() (string, error) {
|
||||||
|
resp, err := d.get("https://pc.woozooo.com/mydisk.php", func(req *resty.Request) {
|
||||||
|
req.SetQueryParams(map[string]string{
|
||||||
|
"item": "files",
|
||||||
|
"action": "index",
|
||||||
|
"u": d.uid,
|
||||||
|
})
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
html := RemoveNotes(string(resp))
|
||||||
|
data, err := htmlJsonToMap(html)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return data["vei"], nil
|
||||||
|
}
|
||||||
|
@ -64,7 +64,7 @@ func readDir(dirname string) ([]fs.FileInfo, error) {
|
|||||||
func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
|
func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
|
||||||
fullPath := file.GetPath()
|
fullPath := file.GetPath()
|
||||||
thumbPrefix := "alist_thumb_"
|
thumbPrefix := "alist_thumb_"
|
||||||
thumbName := thumbPrefix + utils.GetMD5Encode(fullPath) + ".png"
|
thumbName := thumbPrefix + utils.GetMD5EncodeStr(fullPath) + ".png"
|
||||||
if d.ThumbCacheFolder != "" {
|
if d.ThumbCacheFolder != "" {
|
||||||
// skip if the file is a thumbnail
|
// skip if the file is a thumbnail
|
||||||
if strings.HasPrefix(file.GetName(), thumbPrefix) {
|
if strings.HasPrefix(file.GetName(), thumbPrefix) {
|
||||||
@ -91,7 +91,7 @@ func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
|
|||||||
srcBuf = imgBuf
|
srcBuf = imgBuf
|
||||||
}
|
}
|
||||||
|
|
||||||
image, err := imaging.Decode(srcBuf)
|
image, err := imaging.Decode(srcBuf, imaging.AutoOrientation(true))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, nil, err
|
return nil, nil, err
|
||||||
}
|
}
|
||||||
drivers/mopan/driver.go (new file, 295 lines)
@@ -0,0 +1,295 @@
package mopan

import (
    "context"
    "errors"
    "fmt"
    "io"
    "net/http"
    "os"
    "time"

    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/avast/retry-go"
    "github.com/foxxorcat/mopan-sdk-go"
)

type MoPan struct {
    model.Storage
    Addition
    client *mopan.MoClient

    userID string
}

func (d *MoPan) Config() driver.Config {
    return config
}

func (d *MoPan) GetAddition() driver.Additional {
    return &d.Addition
}

func (d *MoPan) Init(ctx context.Context) error {
    login := func() error {
        data, err := d.client.Login(d.Phone, d.Password)
        if err != nil {
            return err
        }
        d.client.SetAuthorization(data.Token)

        info, err := d.client.GetUserInfo()
        if err != nil {
            return err
        }
        d.userID = info.UserID
        return nil
    }
    d.client = mopan.NewMoClient().
        SetRestyClient(base.RestyClient).
        SetOnAuthorizationExpired(func(_ error) error {
            err := login()
            if err != nil {
                d.Status = err.Error()
                op.MustSaveDriverStorage(d)
            }
            return err
        }).SetDeviceInfo(d.DeviceInfo)
    d.DeviceInfo = d.client.GetDeviceInfo()
    return login()
}

func (d *MoPan) Drop(ctx context.Context) error {
    d.client = nil
    d.userID = ""
    return nil
}

func (d *MoPan) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    var files []model.Obj
    for page := 1; ; page++ {
        data, err := d.client.QueryFiles(dir.GetID(), page, mopan.WarpParamOption(
            func(j mopan.Json) {
                j["orderBy"] = d.OrderBy
                j["descending"] = d.OrderDirection == "desc"
            },
            mopan.ParamOptionShareFile(d.CloudID),
        ))
        if err != nil {
            return nil, err
        }

        if len(data.FileListAO.FileList)+len(data.FileListAO.FolderList) == 0 {
            break
        }

        files = append(files, utils.MustSliceConvert(data.FileListAO.FolderList, folderToObj)...)
        files = append(files, utils.MustSliceConvert(data.FileListAO.FileList, fileToObj)...)
    }
    return files, nil
}

func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    data, err := d.client.GetFileDownloadUrl(file.GetID(), mopan.WarpParamOption(mopan.ParamOptionShareFile(d.CloudID)))
    if err != nil {
        return nil, err
    }

    return &model.Link{
        URL: data.DownloadUrl,
    }, nil
}

func (d *MoPan) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
    f, err := d.client.CreateFolder(dirName, parentDir.GetID(), mopan.WarpParamOption(
        mopan.ParamOptionShareFile(d.CloudID),
    ))
    if err != nil {
        return nil, err
    }
    return folderToObj(*f), nil
}

func (d *MoPan) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
    return d.newTask(srcObj, dstDir, mopan.TASK_MOVE)
}

func (d *MoPan) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
    if srcObj.IsDir() {
        _, err := d.client.RenameFolder(srcObj.GetID(), newName, mopan.WarpParamOption(
            mopan.ParamOptionShareFile(d.CloudID),
        ))
        if err != nil {
            return nil, err
        }
    } else {
        _, err := d.client.RenameFile(srcObj.GetID(), newName, mopan.WarpParamOption(
            mopan.ParamOptionShareFile(d.CloudID),
        ))
        if err != nil {
            return nil, err
        }
    }
    return CloneObj(srcObj, srcObj.GetID(), newName), nil
}

func (d *MoPan) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
    return d.newTask(srcObj, dstDir, mopan.TASK_COPY)
}

func (d *MoPan) newTask(srcObj, dstDir model.Obj, taskType mopan.TaskType) (model.Obj, error) {
    param := mopan.TaskParam{
        UserOrCloudID: d.userID,
        Source: 1,
        TaskType: taskType,
        TargetSource: 1,
        TargetUserOrCloudID: d.userID,
        TargetType: 1,
        TargetFolderID: dstDir.GetID(),
        TaskStatusDetailDTOList: []mopan.TaskFileParam{
            {
                FileID: srcObj.GetID(),
                IsFolder: srcObj.IsDir(),
                FileName: srcObj.GetName(),
            },
        },
    }
    if d.CloudID != "" {
        param.UserOrCloudID = d.CloudID
        param.Source = 2
        param.TargetSource = 2
        param.TargetUserOrCloudID = d.CloudID
    }

    task, err := d.client.AddBatchTask(param)
    if err != nil {
        return nil, err
    }

    for count := 0; count < 5; count++ {
        stat, err := d.client.CheckBatchTask(mopan.TaskCheckParam{
            TaskId: task.TaskIDList[0],
            TaskType: task.TaskType,
            TargetType: 1,
            TargetFolderID: task.TargetFolderID,
            TargetSource: param.TargetSource,
            TargetUserOrCloudID: param.TargetUserOrCloudID,
        })
        if err != nil {
            return nil, err
        }

        switch stat.TaskStatus {
        case 2:
            if err := d.client.CancelBatchTask(stat.TaskID, task.TaskType); err != nil {
                return nil, err
            }
            return nil, errors.New("file name conflict")
        case 4:
            if task.TaskType == mopan.TASK_MOVE {
                return CloneObj(srcObj, srcObj.GetID(), srcObj.GetName()), nil
            }
            return CloneObj(srcObj, stat.SuccessedFileIDList[0], srcObj.GetName()), nil
        }
        time.Sleep(time.Second)
    }
    return nil, nil
}

func (d *MoPan) Remove(ctx context.Context, obj model.Obj) error {
    _, err := d.client.DeleteToRecycle([]mopan.TaskFileParam{
        {
            FileID: obj.GetID(),
            IsFolder: obj.IsDir(),
            FileName: obj.GetName(),
        },
    }, mopan.WarpParamOption(mopan.ParamOptionShareFile(d.CloudID)))
    return err
}

func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
    file, err := utils.CreateTempFile(stream)
    if err != nil {
        return nil, err
    }
    defer func() {
        _ = file.Close()
        _ = os.Remove(file.Name())
    }()

    initUpdload, err := d.client.InitMultiUpload(ctx, mopan.UpdloadFileParam{
        ParentFolderId: dstDir.GetID(),
        FileName: stream.GetName(),
        FileSize: stream.GetSize(),
        File: file,
    }, mopan.WarpParamOption(
        mopan.ParamOptionShareFile(d.CloudID),
    ))
    if err != nil {
        return nil, err
    }

    if !initUpdload.FileDataExists {
        parts, err := d.client.GetAllMultiUploadUrls(initUpdload.UploadFileID, initUpdload.PartInfo)
        if err != nil {
            return nil, err
        }
        d.client.CloudDiskStartBusiness()
        for i, part := range parts {
            if utils.IsCanceled(ctx) {
                return nil, ctx.Err()
            }

            err := retry.Do(func() error {
                if _, err := file.Seek(int64(part.PartNumber-1)*int64(initUpdload.PartSize), io.SeekStart); err != nil {
                    return retry.Unrecoverable(err)
                }

                req, err := part.NewRequest(ctx, io.LimitReader(file, int64(initUpdload.PartSize)))
                if err != nil {
                    return err
                }

                resp, err := base.HttpClient.Do(req)
                if err != nil {
                    return err
                }

                if resp.StatusCode != http.StatusOK {
                    return fmt.Errorf("upload err,code=%d", resp.StatusCode)
                }
                return nil
            },
                retry.Context(ctx),
                retry.Attempts(3),
                retry.Delay(time.Second),
                retry.MaxDelay(5*time.Second))
            if err != nil {
                return nil, err
            }
            up(100 * (i + 1) / len(parts))
        }
    }
    uFile, err := d.client.CommitMultiUploadFile(initUpdload.UploadFileID, nil)
    if err != nil {
        return nil, err
    }
    return &model.Object{
        ID: uFile.UserFileID,
        Name: uFile.FileName,
        Size: int64(uFile.FileSize),
        Modified: time.Time(uFile.CreateDate),
    }, nil
}

var _ driver.Driver = (*MoPan)(nil)
var _ driver.MkdirResult = (*MoPan)(nil)
var _ driver.MoveResult = (*MoPan)(nil)
var _ driver.RenameResult = (*MoPan)(nil)
var _ driver.Remove = (*MoPan)(nil)
var _ driver.CopyResult = (*MoPan)(nil)
var _ driver.PutResult = (*MoPan)(nil)
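The Put method above wraps each part upload in retry.Do from github.com/avast/retry-go: three attempts, one second base delay capped at five seconds, cancellation via the request context, and retry.Unrecoverable to stop early on errors that cannot succeed on a second try. A standalone sketch of the same pattern, assuming only that library and the standard library; fetch and the example URL are illustrative, not part of the driver:

package main

import (
    "context"
    "fmt"
    "net/http"
    "time"

    "github.com/avast/retry-go"
)

// fetch retries a GET up to 3 times with a 1s base delay, capped at 5s,
// mirroring the retry options used in the driver's Put loop above.
func fetch(ctx context.Context, url string) error {
    return retry.Do(func() error {
        req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
        if err != nil {
            // a malformed request will never succeed, so stop retrying
            return retry.Unrecoverable(err)
        }
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            return err
        }
        defer resp.Body.Close()
        if resp.StatusCode != http.StatusOK {
            return fmt.Errorf("unexpected status: %d", resp.StatusCode)
        }
        return nil
    },
        retry.Context(ctx),
        retry.Attempts(3),
        retry.Delay(time.Second),
        retry.MaxDelay(5*time.Second))
}

func main() {
    if err := fetch(context.Background(), "https://example.com"); err != nil {
        fmt.Println("failed:", err)
    }
}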
drivers/mopan/meta.go (new file, 37 lines)
@@ -0,0 +1,37 @@
package mopan

import (
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
    Phone string `json:"phone" required:"true"`
    Password string `json:"password" required:"true"`

    RootFolderID string `json:"root_folder_id" default:"-11" required:"true" help:"be careful when using the -11 value, some operations may cause system errors"`

    CloudID string `json:"cloud_id"`

    OrderBy string `json:"order_by" type:"select" options:"filename,filesize,lastOpTime" default:"filename"`
    OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`

    DeviceInfo string `json:"device_info"`
}

func (a *Addition) GetRootId() string {
    return a.RootFolderID
}

var config = driver.Config{
    Name: "MoPan",
    // DefaultRoot: "root, / or other",
    CheckStatus: true,
    Alert: "warning|This network disk may store your password in clear text. Please set your password carefully",
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &MoPan{}
    })
}
drivers/mopan/types.go (new file, 1 line)
@@ -0,0 +1 @@
package mopan
drivers/mopan/util.go (new file, 58 lines)
@@ -0,0 +1,58 @@
package mopan

import (
    "time"

    "github.com/alist-org/alist/v3/internal/model"
    "github.com/foxxorcat/mopan-sdk-go"
)

func fileToObj(f mopan.File) model.Obj {
    return &model.ObjThumb{
        Object: model.Object{
            ID: string(f.ID),
            Name: f.Name,
            Size: int64(f.Size),
            Modified: time.Time(f.LastOpTime),
        },
        Thumbnail: model.Thumbnail{
            Thumbnail: f.Icon.SmallURL,
        },
    }
}

func folderToObj(f mopan.Folder) model.Obj {
    return &model.Object{
        ID: string(f.ID),
        Name: f.Name,
        Modified: time.Time(f.LastOpTime),
        IsFolder: true,
    }
}

func CloneObj(o model.Obj, newID, newName string) model.Obj {
    if o.IsDir() {
        return &model.Object{
            ID: newID,
            Name: newName,
            IsFolder: true,
            Modified: o.ModTime(),
        }
    }

    thumb := ""
    if o, ok := o.(model.Thumb); ok {
        thumb = o.Thumb()
    }
    return &model.ObjThumb{
        Object: model.Object{
            ID: newID,
            Name: newName,
            Size: o.GetSize(),
            Modified: o.ModTime(),
        },
        Thumbnail: model.Thumbnail{
            Thumbnail: thumb,
        },
    }
}
@@ -2,8 +2,6 @@ package pikpak

import (
    "context"
-   "crypto/sha1"
-   "encoding/hex"
    "fmt"
    "io"
    "net/http"
@@ -19,7 +17,6 @@ import (
    "github.com/aws/aws-sdk-go/aws/session"
    "github.com/aws/aws-sdk-go/service/s3/s3manager"
    "github.com/go-resty/resty/v2"
-   jsoniter "github.com/json-iterator/go"
    log "github.com/sirupsen/logrus"
)

@@ -66,7 +63,7 @@ func (d *PikPak) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
    link := model.Link{
        URL: resp.WebContentLink,
    }
-   if len(resp.Medias) > 0 && resp.Medias[0].Link.Url != "" {
+   if !d.DisableMediaLink && len(resp.Medias) > 0 && resp.Medias[0].Link.Url != "" {
        log.Debugln("use media link")
        link.URL = resp.Medias[0].Link.Url
    }
@@ -135,9 +132,8 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
        _ = tempFile.Close()
        _ = os.Remove(tempFile.Name())
    }()
-   // cal sha1
-   s := sha1.New()
-   _, err = io.Copy(s, tempFile)
+   // cal gcid
+   sha1Str, err := getGcid(tempFile, stream.GetSize())
    if err != nil {
        return err
    }
@@ -145,37 +141,33 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
    if err != nil {
        return err
    }
-   sha1Str := hex.EncodeToString(s.Sum(nil))
-   data := base.Json{
-       "kind": "drive#file",
-       "name": stream.GetName(),
-       "size": stream.GetSize(),
-       "hash": strings.ToUpper(sha1Str),
-       "upload_type": "UPLOAD_TYPE_RESUMABLE",
-       "objProvider": base.Json{"provider": "UPLOAD_TYPE_UNKNOWN"},
-       "parent_id": dstDir.GetID(),
-   }
+   var resp UploadTaskData
    res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
-       req.SetBody(data)
-   }, nil)
+       req.SetBody(base.Json{
+           "kind": "drive#file",
+           "name": stream.GetName(),
+           "size": stream.GetSize(),
+           "hash": strings.ToUpper(sha1Str),
+           "upload_type": "UPLOAD_TYPE_RESUMABLE",
+           "objProvider": base.Json{"provider": "UPLOAD_TYPE_UNKNOWN"},
+           "parent_id": dstDir.GetID(),
+           "folder_type": "NORMAL",
+       })
+   }, &resp)
    if err != nil {
        return err
    }
-   if stream.GetSize() == 0 {
+   // rapid upload (秒传) succeeded
+   if resp.Resumable == nil {
        log.Debugln(string(res))
        return nil
    }
-   params := jsoniter.Get(res, "resumable").Get("params")
-   endpoint := params.Get("endpoint").ToString()
-   endpointS := strings.Split(endpoint, ".")
-   endpoint = strings.Join(endpointS[1:], ".")
-   accessKeyId := params.Get("access_key_id").ToString()
-   accessKeySecret := params.Get("access_key_secret").ToString()
-   securityToken := params.Get("security_token").ToString()
-   key := params.Get("key").ToString()
-   bucket := params.Get("bucket").ToString()
+   params := resp.Resumable.Params
+   endpoint := strings.Join(strings.Split(params.Endpoint, ".")[1:], ".")
    cfg := &aws.Config{
-       Credentials: credentials.NewStaticCredentials(accessKeyId, accessKeySecret, securityToken),
+       Credentials: credentials.NewStaticCredentials(params.AccessKeyID, params.AccessKeySecret, params.SecurityToken),
        Region: aws.String("pikpak"),
        Endpoint: &endpoint,
    }
@@ -185,8 +177,8 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
    }
    uploader := s3manager.NewUploader(ss)
    input := &s3manager.UploadInput{
-       Bucket: &bucket,
-       Key: &key,
+       Bucket: &params.Bucket,
+       Key: &params.Key,
        Body: tempFile,
    }
    _, err = uploader.UploadWithContext(ctx, input)
@@ -7,8 +7,9 @@ import (

type Addition struct {
    driver.RootID
    Username string `json:"username" required:"true"`
    Password string `json:"password" required:"true"`
+   DisableMediaLink bool `json:"disable_media_link"`
}

var config = driver.Config{
@@ -73,3 +73,23 @@ type Media struct {
    IsVisible bool `json:"is_visible"`
    Category string `json:"category"`
}
+
+type UploadTaskData struct {
+   UploadType string `json:"upload_type"`
+   //UPLOAD_TYPE_RESUMABLE
+   Resumable *struct {
+       Kind string `json:"kind"`
+       Params struct {
+           AccessKeyID string `json:"access_key_id"`
+           AccessKeySecret string `json:"access_key_secret"`
+           Bucket string `json:"bucket"`
+           Endpoint string `json:"endpoint"`
+           Expiration time.Time `json:"expiration"`
+           Key string `json:"key"`
+           SecurityToken string `json:"security_token"`
+       } `json:"params"`
+       Provider string `json:"provider"`
+   } `json:"resumable"`
+
+   File File `json:"file"`
+}
@@ -1,7 +1,10 @@
package pikpak

import (
+   "crypto/sha1"
+   "encoding/hex"
    "errors"
+   "io"
    "net/http"

    "github.com/alist-org/alist/v3/drivers/base"
@@ -123,3 +126,28 @@ func (d *PikPak) getFiles(id string) ([]File, error) {
    }
    return res, nil
}
+
+func getGcid(r io.Reader, size int64) (string, error) {
+   calcBlockSize := func(j int64) int64 {
+       var psize int64 = 0x40000
+       for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
+           psize = psize << 1
+       }
+       return psize
+   }
+
+   hash1 := sha1.New()
+   hash2 := sha1.New()
+   readSize := calcBlockSize(size)
+   for {
+       hash2.Reset()
+       if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+           if err != io.EOF {
+               return "", err
+           }
+           break
+       }
+       hash1.Write(hash2.Sum(nil))
+   }
+   return hex.EncodeToString(hash1.Sum(nil)), nil
+}
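The new getGcid above computes the "gcid" block hash PikPak expects instead of a plain SHA-1 of the file: the file is read in power-of-two blocks (256 KiB, doubled up to 2 MiB so that there are at most about 512 blocks), each block is hashed with SHA-1, and the concatenation of the block digests is hashed again. A short usage sketch, assuming getGcid from the hunk above is in scope and that "os" is imported; gcidOfFile and the caller's file name are illustrative:

// gcidOfFile opens a local file and hashes it with getGcid (defined above).
// The size argument only selects the block size; the reader is consumed to EOF,
// so callers that reuse the handle (as the Put methods do) must Seek back to 0.
func gcidOfFile(name string) (string, error) {
    f, err := os.Open(name)
    if err != nil {
        return "", err
    }
    defer f.Close()
    fi, err := f.Stat()
    if err != nil {
        return "", err
    }
    return getGcid(f, fi.Size())
}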
(deleted file)
@@ -1,26 +0,0 @@
- package quark
-
- import (
-     "github.com/alist-org/alist/v3/internal/driver"
-     "github.com/alist-org/alist/v3/internal/op"
- )
-
- type Addition struct {
-     Cookie string `json:"cookie" required:"true"`
-     driver.RootID
-     OrderBy string `json:"order_by" type:"select" options:"none,file_type,file_name,updated_at" default:"none"`
-     OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
- }
-
- var config = driver.Config{
-     Name: "Quark",
-     OnlyLocal: true,
-     DefaultRoot: "0",
-     NoOverwriteUpload: true,
- }
-
- func init() {
-     op.RegisterDriver(func() driver.Driver {
-         return &Quark{}
-     })
- }
@@ -22,29 +22,31 @@ import (
    log "github.com/sirupsen/logrus"
)

- type Quark struct {
+ type QuarkOrUC struct {
    model.Storage
    Addition
+   config driver.Config
+   conf Conf
}

- func (d *Quark) Config() driver.Config {
-     return config
+ func (d *QuarkOrUC) Config() driver.Config {
+     return d.config
}

- func (d *Quark) GetAddition() driver.Additional {
+ func (d *QuarkOrUC) GetAddition() driver.Additional {
    return &d.Addition
}

- func (d *Quark) Init(ctx context.Context) error {
+ func (d *QuarkOrUC) Init(ctx context.Context) error {
    _, err := d.request("/config", http.MethodGet, nil, nil)
    return err
}

- func (d *Quark) Drop(ctx context.Context) error {
+ func (d *QuarkOrUC) Drop(ctx context.Context) error {
    return nil
}

- func (d *Quark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+ func (d *QuarkOrUC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    files, err := d.GetFiles(dir.GetID())
    if err != nil {
        return nil, err
@@ -54,12 +56,12 @@ func (d *Quark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
    })
}

- func (d *Quark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
+ func (d *QuarkOrUC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    data := base.Json{
        "fids": []string{file.GetID()},
    }
    var resp DownResp
-   ua := "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch"
+   ua := d.conf.ua
    _, err := d.request("/file/download", http.MethodPost, func(req *resty.Request) {
        req.SetHeader("User-Agent", ua).
            SetBody(data)
@@ -69,61 +71,63 @@ func (d *Quark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
    }
    u := resp.Data[0].DownloadUrl
    start, end := int64(0), file.GetSize()
-   return &model.Link{
-       Handle: func(w http.ResponseWriter, r *http.Request) error {
-           if rg := r.Header.Get("Range"); rg != "" {
-               parseRange, err := http_range.ParseRange(rg, file.GetSize())
-               if err != nil {
-                   return err
-               }
-               start, end = parseRange[0].Start, parseRange[0].Start+parseRange[0].Length
-               w.Header().Set("Content-Range", parseRange[0].ContentRange(file.GetSize()))
-               w.Header().Set("Content-Length", strconv.FormatInt(parseRange[0].Length, 10))
-               w.WriteHeader(http.StatusPartialContent)
-           } else {
-               w.Header().Set("Content-Length", strconv.FormatInt(file.GetSize(), 10))
-               w.WriteHeader(http.StatusOK)
-           }
-           // request 10 MB at a time
-           chunkSize := int64(10 * 1024 * 1024)
-           for start < end {
-               _end := start + chunkSize
-               if _end > end {
-                   _end = end
-               }
-               _range := "bytes=" + strconv.FormatInt(start, 10) + "-" + strconv.FormatInt(_end-1, 10)
-               start = _end
-               err = func() error {
-                   req, err := http.NewRequest(r.Method, u, nil)
-                   if err != nil {
-                       return err
-                   }
-                   req.Header.Set("Range", _range)
-                   req.Header.Set("User-Agent", ua)
-                   req.Header.Set("Cookie", d.Cookie)
-                   req.Header.Set("Referer", "https://pan.quark.cn")
-                   resp, err := base.HttpClient.Do(req)
-                   if err != nil {
-                       return err
-                   }
-                   defer resp.Body.Close()
-                   if resp.StatusCode != http.StatusPartialContent {
-                       return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
-                   }
-                   _, err = io.Copy(w, resp.Body)
-                   return err
-               }()
-               if err != nil {
-                   return err
-               }
-           }
-           return nil
-       },
-   }, nil
+   link := model.Link{
+       Header: http.Header{},
+   }
+   if rg := args.Header.Get("Range"); rg != "" {
+       parseRange, err := http_range.ParseRange(rg, file.GetSize())
+       if err != nil {
+           return nil, err
+       }
+       start, end = parseRange[0].Start, parseRange[0].Start+parseRange[0].Length
+       link.Header.Set("Content-Range", parseRange[0].ContentRange(file.GetSize()))
+       link.Header.Set("Content-Length", strconv.FormatInt(parseRange[0].Length, 10))
+       link.Status = http.StatusPartialContent
+   } else {
+       link.Header.Set("Content-Length", strconv.FormatInt(file.GetSize(), 10))
+       link.Status = http.StatusOK
+   }
+   link.Writer = func(w io.Writer) error {
+       // request 10 MB at a time
+       chunkSize := int64(10 * 1024 * 1024)
+       for start < end {
+           _end := start + chunkSize
+           if _end > end {
+               _end = end
+           }
+           _range := "bytes=" + strconv.FormatInt(start, 10) + "-" + strconv.FormatInt(_end-1, 10)
+           start = _end
+           err = func() error {
+               req, err := http.NewRequest(http.MethodGet, u, nil)
+               if err != nil {
+                   return err
+               }
+               req.Header.Set("Range", _range)
+               req.Header.Set("User-Agent", ua)
+               req.Header.Set("Cookie", d.Cookie)
+               req.Header.Set("Referer", d.conf.referer)
+               resp, err := base.HttpClient.Do(req)
+               if err != nil {
+                   return err
+               }
+               defer resp.Body.Close()
+               if resp.StatusCode != http.StatusPartialContent {
+                   return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
+               }
+               _, err = io.Copy(w, resp.Body)
+               return err
+           }()
+           if err != nil {
+               return err
+           }
+       }
+       return nil
+   }
+   return &link, nil
}

- func (d *Quark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
+ func (d *QuarkOrUC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
    data := base.Json{
        "dir_init_lock": false,
        "dir_path": "",
@@ -139,7 +143,7 @@ func (d *Quark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string
    return err
}

- func (d *Quark) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
+ func (d *QuarkOrUC) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
    data := base.Json{
        "action_type": 1,
        "exclude_fids": []string{},
@@ -152,7 +156,7 @@ func (d *Quark) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
    return err
}

- func (d *Quark) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
+ func (d *QuarkOrUC) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
    data := base.Json{
        "fid": srcObj.GetID(),
        "file_name": newName,
@@ -163,11 +167,11 @@ func (d *Quark) Rename(ctx context.Context, srcObj model.Obj, newName string) er
    return err
}

- func (d *Quark) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
+ func (d *QuarkOrUC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
    return errs.NotSupport
}

- func (d *Quark) Remove(ctx context.Context, obj model.Obj) error {
+ func (d *QuarkOrUC) Remove(ctx context.Context, obj model.Obj) error {
    data := base.Json{
        "action_type": 1,
        "exclude_fids": []string{},
@@ -179,7 +183,7 @@ func (d *Quark) Remove(ctx context.Context, obj model.Obj) error {
    return err
}

- func (d *Quark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+ func (d *QuarkOrUC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
    tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
    if err != nil {
        return err
@@ -264,4 +268,4 @@ func (d *Quark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
    return d.upFinish(pre)
}

- var _ driver.Driver = (*Quark)(nil)
+ var _ driver.Driver = (*QuarkOrUC)(nil)
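The rewritten Link handler above no longer writes to an http.ResponseWriter itself; it fills the link's Header and Status up front and exposes a Writer callback that streams the Quark/UC download in 10 MB Range requests. A minimal standalone sketch of that chunked-range pattern, assuming only the standard library; copyInChunks, the URL and the sizes are illustrative and omit the driver-specific Cookie/User-Agent/Referer headers:

package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strconv"
)

// copyInChunks downloads bytes [0, size) from url into w using fixed-size Range requests.
func copyInChunks(w io.Writer, url string, size, chunk int64) error {
    for start := int64(0); start < size; start += chunk {
        end := start + chunk
        if end > size {
            end = size
        }
        req, err := http.NewRequest(http.MethodGet, url, nil)
        if err != nil {
            return err
        }
        // Range header is inclusive on both ends, hence end-1.
        req.Header.Set("Range", "bytes="+strconv.FormatInt(start, 10)+"-"+strconv.FormatInt(end-1, 10))
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            return err
        }
        if resp.StatusCode != http.StatusPartialContent {
            resp.Body.Close()
            return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
        }
        _, err = io.Copy(w, resp.Body)
        resp.Body.Close()
        if err != nil {
            return err
        }
    }
    return nil
}

func main() {
    // hypothetical 30 MiB object served with Range support, fetched in 10 MiB chunks
    _ = copyInChunks(os.Stdout, "https://example.com/file.bin", 30<<20, 10<<20)
}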
drivers/quark_uc/meta.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package quark

import (
    "github.com/alist-org/alist/v3/internal/driver"
    "github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
    Cookie string `json:"cookie" required:"true"`
    driver.RootID
    OrderBy string `json:"order_by" type:"select" options:"none,file_type,file_name,updated_at" default:"none"`
    OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
}

type Conf struct {
    ua string
    referer string
    api string
    pr string
}

func init() {
    op.RegisterDriver(func() driver.Driver {
        return &QuarkOrUC{
            config: driver.Config{
                Name: "Quark",
                OnlyLocal: true,
                DefaultRoot: "0",
                NoOverwriteUpload: true,
            },
            conf: Conf{
                ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
                referer: "https://pan.quark.cn",
                api: "https://drive.quark.cn/1/clouddrive",
                pr: "ucpro",
            },
        }
    })
    op.RegisterDriver(func() driver.Driver {
        return &QuarkOrUC{
            config: driver.Config{
                Name: "UC",
                OnlyLocal: true,
                DefaultRoot: "0",
                NoOverwriteUpload: true,
            },
            conf: Conf{
                ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) uc-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
                referer: "https://drive.uc.cn",
                api: "https://pc-api.uc.cn/1/clouddrive",
                pr: "UCBrowser",
            },
        }
    })
}
@@ -22,15 +22,15 @@ import (

// do others that not defined in Driver interface

- func (d *Quark) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
-     u := "https://drive.quark.cn/1/clouddrive" + pathname
+ func (d *QuarkOrUC) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
+     u := d.conf.api + pathname
    req := base.RestyClient.R()
    req.SetHeaders(map[string]string{
        "Cookie": d.Cookie,
        "Accept": "application/json, text/plain, */*",
-       "Referer": "https://pan.quark.cn/",
+       "Referer": d.conf.referer,
    })
-   req.SetQueryParam("pr", "ucpro")
+   req.SetQueryParam("pr", d.conf.pr)
    req.SetQueryParam("fr", "pc")
    if callback != nil {
        callback(req)
@@ -55,7 +55,7 @@ func (d *Quark) request(pathname string, method string, callback base.ReqCallbac
    return res.Body(), nil
}

- func (d *Quark) GetFiles(parent string) ([]File, error) {
+ func (d *QuarkOrUC) GetFiles(parent string) ([]File, error) {
    files := make([]File, 0)
    page := 1
    size := 100
@@ -85,7 +85,7 @@ func (d *Quark) GetFiles(parent string) ([]File, error) {
    return files, nil
}

- func (d *Quark) upPre(file model.FileStreamer, parentId string) (UpPreResp, error) {
+ func (d *QuarkOrUC) upPre(file model.FileStreamer, parentId string) (UpPreResp, error) {
    now := time.Now()
    data := base.Json{
        "ccp_hash_update": true,
@@ -105,7 +105,7 @@ func (d *Quark) upPre(file model.FileStreamer, parentId string) (UpPreResp, erro
    return resp, err
}

- func (d *Quark) upHash(md5, sha1, taskId string) (bool, error) {
+ func (d *QuarkOrUC) upHash(md5, sha1, taskId string) (bool, error) {
    data := base.Json{
        "md5": md5,
        "sha1": sha1,
@@ -119,8 +119,8 @@ func (d *Quark) upHash(md5, sha1, taskId string) (bool, error) {
    return resp.Data.Finish, err
}

- func (d *Quark) upPart(ctx context.Context, pre UpPreResp, mineType string, partNumber int, bytes []byte) (string, error) {
-     //func (driver Quark) UpPart(pre UpPreResp, mineType string, partNumber int, bytes []byte, account *model.Account, md5Str, sha1Str string) (string, error) {
+ func (d *QuarkOrUC) upPart(ctx context.Context, pre UpPreResp, mineType string, partNumber int, bytes []byte) (string, error) {
+     //func (driver QuarkOrUC) UpPart(pre UpPreResp, mineType string, partNumber int, bytes []byte, account *model.Account, md5Str, sha1Str string) (string, error) {
    timeStr := time.Now().UTC().Format(http.TimeFormat)
    data := base.Json{
        "auth_info": pre.Data.AuthInfo,
@@ -169,7 +169,7 @@ x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
    return res.Header().Get("ETag"), nil
}

- func (d *Quark) upCommit(pre UpPreResp, md5s []string) error {
+ func (d *QuarkOrUC) upCommit(pre UpPreResp, md5s []string) error {
    timeStr := time.Now().UTC().Format(http.TimeFormat)
    log.Debugf("md5s: %+v", md5s)
    bodyBuilder := strings.Builder{}
@@ -236,7 +236,7 @@ x-oss-user-agent:aliyun-sdk-js/6.6.1 Chrome 98.0.4758.80 on Windows 10 64-bit
    return nil
}

- func (d *Quark) upFinish(pre UpPreResp) error {
+ func (d *QuarkOrUC) upFinish(pre UpPreResp) error {
    data := base.Json{
        "obj_key": pre.Data.ObjKey,
        "task_id": pre.Data.TaskId,
@@ -53,15 +53,18 @@ func (d *S3) Drop(ctx context.Context) error {

func (d *S3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
    if d.ListObjectVersion == "v2" {
-       return d.listV2(dir.GetPath())
+       return d.listV2(dir.GetPath(), args)
    }
-   return d.listV1(dir.GetPath())
+   return d.listV1(dir.GetPath(), args)
}

func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
    path := getKey(file.GetPath(), false)
    filename := stdpath.Base(path)
-   disposition := fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`, filename, url.PathEscape(filename))
+   disposition := fmt.Sprintf(`attachment; filename*=UTF-8''%s`, url.PathEscape(filename))
+   if d.AddFilenameToDisposition {
+       disposition = fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`, filename, url.PathEscape(filename))
+   }
    input := &s3.GetObjectInput{
        Bucket: &d.Bucket,
        Key: &path,
@@ -136,11 +139,13 @@ func (d *S3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreame
        uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
    }
    key := getKey(stdpath.Join(dstDir.GetPath(), stream.GetName()), false)
+   contentType := stream.GetMimetype()
    log.Debugln("key:", key)
    input := &s3manager.UploadInput{
        Bucket: &d.Bucket,
        Key: &key,
        Body: stream,
+       ContentType: &contentType,
    }
    _, err := uploader.UploadWithContext(ctx, input)
    return err
@@ -7,17 +7,19 @@ import (

type Addition struct {
    driver.RootPath
    Bucket string `json:"bucket" required:"true"`
    Endpoint string `json:"endpoint" required:"true"`
    Region string `json:"region"`
    AccessKeyID string `json:"access_key_id" required:"true"`
    SecretAccessKey string `json:"secret_access_key" required:"true"`
-   CustomHost string `json:"custom_host"`
-   SignURLExpire int `json:"sign_url_expire" type:"number" default:"4"`
-   Placeholder string `json:"placeholder"`
-   ForcePathStyle bool `json:"force_path_style"`
-   ListObjectVersion string `json:"list_object_version" type:"select" options:"v1,v2" default:"v1"`
-   RemoveBucket bool `json:"remove_bucket" help:"Remove bucket name from path when using custom host."`
+   SessionToken string `json:"session_token"`
+   CustomHost string `json:"custom_host"`
+   SignURLExpire int `json:"sign_url_expire" type:"number" default:"4"`
+   Placeholder string `json:"placeholder"`
+   ForcePathStyle bool `json:"force_path_style"`
+   ListObjectVersion string `json:"list_object_version" type:"select" options:"v1,v2" default:"v1"`
+   RemoveBucket bool `json:"remove_bucket" help:"Remove bucket name from path when using custom host."`
+   AddFilenameToDisposition bool `json:"add_filename_to_disposition" help:"Add filename to Content-Disposition header."`
}

var config = driver.Config{
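The new AddFilenameToDisposition option switches the presigned GET between a bare RFC 5987 filename* parameter and a header that also carries the legacy quoted filename, which some older clients still require. A short sketch of the two header shapes the driver builds (the disposition helper and the sample name are illustrative):

package main

import (
    "fmt"
    "net/url"
)

func disposition(filename string, addPlainFilename bool) string {
    // filename* is the percent-encoded UTF-8 form from RFC 6266 / RFC 5987
    d := fmt.Sprintf(`attachment; filename*=UTF-8''%s`, url.PathEscape(filename))
    if addPlainFilename {
        // also include the quoted form for clients that ignore filename*
        d = fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`, filename, url.PathEscape(filename))
    }
    return d
}

func main() {
    fmt.Println(disposition("报告 2023.pdf", false))
    fmt.Println(disposition("报告 2023.pdf", true))
}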
@@ -22,7 +22,7 @@ import (

func (d *S3) initSession() error {
    cfg := &aws.Config{
-       Credentials: credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, ""),
+       Credentials: credentials.NewStaticCredentials(d.AccessKeyID, d.SecretAccessKey, d.SessionToken),
        Region: &d.Region,
        Endpoint: &d.Endpoint,
        S3ForcePathStyle: aws.Bool(d.ForcePathStyle),
@@ -69,7 +69,7 @@ func getPlaceholderName(placeholder string) string {
    return placeholder
}

- func (d *S3) listV1(prefix string) ([]model.Obj, error) {
+ func (d *S3) listV1(prefix string, args model.ListArgs) ([]model.Obj, error) {
    prefix = getKey(prefix, true)
    log.Debugf("list: %s", prefix)
    files := make([]model.Obj, 0)
@@ -97,7 +97,7 @@ func (d *S3) listV1(prefix string) ([]model.Obj, error) {
    }
    for _, object := range listObjectsResult.Contents {
        name := path.Base(*object.Key)
-       if name == getPlaceholderName(d.Placeholder) || name == d.Placeholder {
+       if !args.S3ShowPlaceholder && (name == getPlaceholderName(d.Placeholder) || name == d.Placeholder) {
            continue
        }
        file := model.Object{
@@ -120,7 +120,7 @@ func (d *S3) listV1(prefix string) ([]model.Obj, error) {
    return files, nil
}

- func (d *S3) listV2(prefix string) ([]model.Obj, error) {
+ func (d *S3) listV2(prefix string, args model.ListArgs) ([]model.Obj, error) {
    prefix = getKey(prefix, true)
    files := make([]model.Obj, 0)
    var continuationToken, startAfter *string
@@ -152,7 +152,7 @@ func (d *S3) listV2(prefix string) ([]model.Obj, error) {
        continue
    }
    name := path.Base(*object.Key)
-   if name == getPlaceholderName(d.Placeholder) || name == d.Placeholder {
+   if !args.S3ShowPlaceholder && (name == getPlaceholderName(d.Placeholder) || name == d.Placeholder) {
        continue
    }
    file := model.Object{
@@ -198,7 +198,7 @@ func (d *S3) copyFile(ctx context.Context, src string, dst string) error {
}

func (d *S3) copyDir(ctx context.Context, src string, dst string) error {
-   objs, err := op.List(ctx, d, src, model.ListArgs{})
+   objs, err := op.List(ctx, d, src, model.ListArgs{S3ShowPlaceholder: true})
    if err != nil {
        return err
    }
@@ -36,11 +36,14 @@ func (d *Seafile) request(method string, pathname string, callback base.ReqCallb
    if len(noRedirect) > 0 && noRedirect[0] {
        req = base.NoRedirectClient.R()
    }
-   var res resty.Response
+   req.SetHeader("Authorization", d.authorization)
+   callback(req)
+   var (
+       res *resty.Response
+       err error
+   )
    for i := 0; i < 2; i++ {
-       req.SetHeader("Authorization", d.authorization)
-       callback(req)
-       res, err := req.Execute(method, full)
+       res, err = req.Execute(method, full)
        if err != nil {
            return nil, err
        }
@@ -11,6 +11,7 @@ import (
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/pkg/sftp"
+   log "github.com/sirupsen/logrus"
)

type SFTP struct {
@@ -39,13 +40,15 @@ func (d *SFTP) Drop(ctx context.Context) error {
}

func (d *SFTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
+   log.Debugf("[sftp] list dir: %s", dir.GetPath())
    files, err := d.client.ReadDir(dir.GetPath())
    if err != nil {
        return nil, err
    }
-   return utils.SliceConvert(files, func(src os.FileInfo) (model.Obj, error) {
-       return fileToObj(src), nil
+   objs, err := utils.SliceConvert(files, func(src os.FileInfo) (model.Obj, error) {
+       return d.fileToObj(src, dir.GetPath())
    })
+   return objs, err
}

func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
@@ -2,15 +2,44 @@ package sftp

import (
    "os"
+   stdpath "path"
+   "strings"
+
    "github.com/alist-org/alist/v3/internal/model"
+   log "github.com/sirupsen/logrus"
)

- func fileToObj(f os.FileInfo) model.Obj {
-     return &model.Object{
-         Name: f.Name(),
-         Size: f.Size(),
-         Modified: f.ModTime(),
-         IsFolder: f.IsDir(),
-     }
+ func (d *SFTP) fileToObj(f os.FileInfo, dir string) (model.Obj, error) {
+     symlink := f.Mode()&os.ModeSymlink != 0
+     if !symlink {
+         return &model.Object{
+             Name: f.Name(),
+             Size: f.Size(),
+             Modified: f.ModTime(),
+             IsFolder: f.IsDir(),
+         }, nil
+     }
+     path := stdpath.Join(dir, f.Name())
+     // set target path
+     target, err := d.client.ReadLink(path)
+     if err != nil {
+         return nil, err
+     }
+     if !strings.HasPrefix(target, "/") {
+         target = stdpath.Join(dir, target)
+     }
+     _f, err := d.client.Stat(target)
+     if err != nil {
+         return nil, err
+     }
+     // set basic info
+     obj := &model.Object{
+         Name: f.Name(),
+         Size: _f.Size(),
+         Modified: _f.ModTime(),
+         IsFolder: _f.IsDir(),
+         Path: target,
+     }
+     log.Debugf("[sftp] obj: %+v, is symlink: %v", obj, symlink)
+     return obj, nil
}
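The new fileToObj above follows symlinks: it reads the link target, resolves a relative target against the directory that contains the entry, and then stats the target so size, mtime and the directory flag describe the real file. A hedged sketch of the same resolution using only the standard library against the local filesystem (resolve, "/tmp" and "link" are illustrative; the driver does the equivalent through its sftp client):

package main

import (
    "fmt"
    "os"
    stdpath "path"
)

// resolve mirrors the driver's logic: relative link targets are joined onto
// the directory that contains the symlink before the target is stat'ed.
func resolve(dir, name string) (os.FileInfo, string, error) {
    p := stdpath.Join(dir, name)
    fi, err := os.Lstat(p) // Lstat does not follow the link itself
    if err != nil {
        return nil, "", err
    }
    if fi.Mode()&os.ModeSymlink == 0 {
        return fi, p, nil // not a symlink, use the entry itself
    }
    target, err := os.Readlink(p)
    if err != nil {
        return nil, "", err
    }
    if !stdpath.IsAbs(target) {
        target = stdpath.Join(dir, target)
    }
    tfi, err := os.Stat(target) // Stat follows the link chain
    if err != nil {
        return nil, "", err
    }
    return tfi, target, nil
}

func main() {
    if fi, target, err := resolve("/tmp", "link"); err == nil {
        fmt.Println(target, fi.Size(), fi.IsDir())
    }
}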
@@ -3,15 +3,16 @@ package terbox
import (
    "encoding/base64"
    "fmt"
-   "github.com/alist-org/alist/v3/drivers/base"
-   "github.com/alist-org/alist/v3/internal/model"
-   "github.com/alist-org/alist/v3/pkg/utils"
-   "github.com/go-resty/resty/v2"
    "net/http"
    "net/url"
    "strconv"
    "strings"
    "time"
+
+   "github.com/alist-org/alist/v3/drivers/base"
+   "github.com/alist-org/alist/v3/internal/model"
+   "github.com/alist-org/alist/v3/pkg/utils"
+   "github.com/go-resty/resty/v2"
)

func (d *Terabox) request(furl string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
@@ -139,6 +140,11 @@ func (d *Terabox) linkOfficial(file model.Obj, args model.LinkArgs) (*model.Link
    if err != nil {
        return nil, err
    }
+
+   if len(resp.Dlink) == 0 {
+       return nil, fmt.Errorf("fid %s no dlink found, errno: %d", file.GetID(), resp.Errno)
+   }
+
    res, err := base.NoRedirectClient.R().SetHeader("Cookie", d.Cookie).SetHeader("User-Agent", base.UserAgent).Get(resp.Dlink[0].Dlink)
    if err != nil {
        return nil, err
@@ -3,7 +3,9 @@ package thunder
import (
    "context"
    "fmt"
+   "io"
    "net/http"
+   "os"
    "strings"

    "github.com/alist-org/alist/v3/drivers/base"
@@ -54,7 +56,7 @@ func (x *Thunder) Init(ctx context.Context) (err error) {
        "j",
        "4scKJNdd7F27Hv7tbt",
    },
-   DeviceID: utils.GetMD5Encode(x.Username + x.Password),
+   DeviceID: utils.GetMD5EncodeStr(x.Username + x.Password),
    ClientID: "Xp6vsxz_7IYVw2BB",
    ClientSecret: "Xp6vsy4tN9toTVdMSpomVdXpRmES",
    ClientVersion: "7.51.0.8196",
@@ -135,7 +137,7 @@ func (x *ThunderExpert) Init(ctx context.Context) (err error) {

    DeviceID: func() string {
        if len(x.DeviceID) != 32 {
-           return utils.GetMD5Encode(x.DeviceID)
+           return utils.GetMD5EncodeStr(x.DeviceID)
        }
        return x.DeviceID
    }(),
@@ -331,15 +333,32 @@ func (xc *XunLeiCommon) Remove(ctx context.Context, obj model.Obj) error {
}

func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+   tempFile, err := utils.CreateTempFile(stream.GetReadCloser())
+   if err != nil {
+       return err
+   }
+   defer func() {
+       _ = tempFile.Close()
+       _ = os.Remove(tempFile.Name())
+   }()
+
+   gcid, err := getGcid(tempFile, stream.GetSize())
+   if err != nil {
+       return err
+   }
+   if _, err := tempFile.Seek(0, io.SeekStart); err != nil {
+       return err
+   }
+
    var resp UploadTaskResponse
-   _, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
+   _, err = xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
        r.SetContext(ctx)
        r.SetBody(&base.Json{
            "kind": FILE,
            "parent_id": dstDir.GetID(),
            "name": stream.GetName(),
            "size": stream.GetSize(),
-           "hash": "1CF254FBC456E1B012CD45C546636AA62CF8350E",
+           "hash": gcid,
            "upload_type": UPLOAD_TYPE_RESUMABLE,
        })
    }, &resp)
@@ -362,7 +381,7 @@ func (xc *XunLeiCommon) Put(ctx context.Context, dstDir model.Obj, stream model.
        Bucket: aws.String(param.Bucket),
        Key: aws.String(param.Key),
        Expires: aws.Time(param.Expiration),
-       Body: stream,
+       Body: tempFile,
    })
    return err
}
@@ -78,7 +78,7 @@ type Addition struct {

// login fingerprint, used to decide whether to log in again (登录特征,用于判断是否重新登录)
func (i *Addition) GetIdentity() string {
-   return utils.GetMD5Encode(i.Username + i.Password)
+   return utils.GetMD5EncodeStr(i.Username + i.Password)
}

var config = driver.Config{
@@ -1,7 +1,10 @@
package thunder

import (
+   "crypto/sha1"
+   "encoding/hex"
    "fmt"
+   "io"
    "net/http"
    "regexp"
    "time"
@@ -97,7 +100,7 @@ func (c *Common) GetCaptchaSign() (timestamp, sign string) {
    timestamp = fmt.Sprint(time.Now().UnixMilli())
    str := fmt.Sprint(c.ClientID, c.ClientVersion, c.PackageName, c.DeviceID, timestamp)
    for _, algorithm := range c.Algorithms {
-       str = utils.GetMD5Encode(str + algorithm)
+       str = utils.GetMD5EncodeStr(str + algorithm)
    }
    sign = "1." + str
    return
@@ -171,3 +174,29 @@ func (c *Common) Request(url, method string, callback base.ReqCallback, resp int

    return res.Body(), nil
}
+
+ // calculate the file's gcid (计算文件Gcid)
+ func getGcid(r io.Reader, size int64) (string, error) {
+     calcBlockSize := func(j int64) int64 {
+         var psize int64 = 0x40000
+         for float64(j)/float64(psize) > 0x200 && psize < 0x200000 {
+             psize = psize << 1
+         }
+         return psize
+     }
+
+     hash1 := sha1.New()
+     hash2 := sha1.New()
+     readSize := calcBlockSize(size)
+     for {
+         hash2.Reset()
+         if n, err := io.CopyN(hash2, r, readSize); err != nil && n == 0 {
+             if err != io.EOF {
+                 return "", err
+             }
+             break
+         }
+         hash1.Write(hash2.Sum(nil))
+     }
+     return hex.EncodeToString(hash1.Sum(nil)), nil
+ }
@@ -81,7 +81,7 @@ func (d *USS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
    expireAt := time.Now().Add(downExp).Unix()
    upd := url.QueryEscape(path.Base(file.GetPath()))
    signStr := strings.Join([]string{d.OperatorPassword, fmt.Sprint(expireAt), fmt.Sprintf("/%s", key)}, "&")
-   upt := utils.GetMD5Encode(signStr)[12:20] + fmt.Sprint(expireAt)
+   upt := utils.GetMD5EncodeStr(signStr)[12:20] + fmt.Sprint(expireAt)
    link := fmt.Sprintf("%s?_upd=%s&_upt=%s", u, upd, upt)
    return &model.Link{URL: link}, nil
}
161	drivers/wopan/driver.go	Normal file
@@ -0,0 +1,161 @@
package template

import (
	"context"
	"fmt"

	"github.com/Xhofe/wopan-sdk-go"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/internal/op"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
)

type Wopan struct {
	model.Storage
	Addition
	client *wopan.WoClient
}

func (d *Wopan) Config() driver.Config {
	return config
}

func (d *Wopan) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *Wopan) Init(ctx context.Context) error {
	d.client = wopan.DefaultWithRefreshToken(d.RefreshToken)
	d.client.SetAccessToken(d.AccessToken)
	d.client.OnRefreshToken(func(accessToken, refreshToken string) {
		d.AccessToken = accessToken
		d.RefreshToken = refreshToken
		op.MustSaveDriverStorage(d)
	})
	return d.client.InitData()
}

func (d *Wopan) Drop(ctx context.Context) error {
	return nil
}

func (d *Wopan) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	var res []model.Obj
	pageNum := 0
	pageSize := 100
	for {
		data, err := d.client.QueryAllFiles(d.getSpaceType(), dir.GetID(), pageNum, pageSize, 0, d.FamilyID, func(req *resty.Request) {
			req.SetContext(ctx)
		})
		if err != nil {
			return nil, err
		}
		objs, err := utils.SliceConvert(data.Files, fileToObj)
		if err != nil {
			return nil, err
		}
		res = append(res, objs...)
		if len(data.Files) < pageSize {
			break
		}
		pageNum++
	}
	return res, nil
}

func (d *Wopan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	if f, ok := file.(*Object); ok {
		res, err := d.client.GetDownloadUrlV2([]string{f.FID}, func(req *resty.Request) {
			req.SetContext(ctx)
		})
		if err != nil {
			return nil, err
		}
		return &model.Link{
			URL: res.List[0].DownloadUrl,
		}, nil
	}
	return nil, fmt.Errorf("unable to convert file to Object")
}

func (d *Wopan) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
	_, err := d.client.CreateDirectory(d.getSpaceType(), parentDir.GetID(), dirName, d.FamilyID, func(req *resty.Request) {
		req.SetContext(ctx)
	})
	return err
}

func (d *Wopan) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
	dirList := make([]string, 0)
	fileList := make([]string, 0)
	if srcObj.IsDir() {
		dirList = append(dirList, srcObj.GetID())
	} else {
		fileList = append(fileList, srcObj.GetID())
	}
	return d.client.MoveFile(dirList, fileList, dstDir.GetID(),
		d.getSpaceType(), d.getSpaceType(),
		d.FamilyID, d.FamilyID, func(req *resty.Request) {
			req.SetContext(ctx)
		})
}

func (d *Wopan) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
	_type := 1
	if srcObj.IsDir() {
		_type = 0
	}
	return d.client.RenameFileOrDirectory(d.getSpaceType(), _type, srcObj.GetID(), newName, d.FamilyID, func(req *resty.Request) {
		req.SetContext(ctx)
	})
}

func (d *Wopan) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	dirList := make([]string, 0)
	fileList := make([]string, 0)
	if srcObj.IsDir() {
		dirList = append(dirList, srcObj.GetID())
	} else {
		fileList = append(fileList, srcObj.GetID())
	}
	return d.client.CopyFile(dirList, fileList, dstDir.GetID(),
		d.getSpaceType(), d.getSpaceType(),
		d.FamilyID, d.FamilyID, func(req *resty.Request) {
			req.SetContext(ctx)
		})
}

func (d *Wopan) Remove(ctx context.Context, obj model.Obj) error {
	dirList := make([]string, 0)
	fileList := make([]string, 0)
	if obj.IsDir() {
		dirList = append(dirList, obj.GetID())
	} else {
		fileList = append(fileList, obj.GetID())
	}
	return d.client.DeleteFile(d.getSpaceType(), dirList, fileList, func(req *resty.Request) {
		req.SetContext(ctx)
	})
}

func (d *Wopan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	_, err := d.client.Upload2C(d.getSpaceType(), wopan.Upload2CFile{
		Name:        stream.GetName(),
		Size:        stream.GetSize(),
		Content:     stream,
		ContentType: stream.GetMimetype(),
	}, dstDir.GetID(), d.FamilyID, wopan.Upload2COption{
		OnProgress: func(current, total int64) {
			up(int(100 * current / total))
		},
	})
	return err
}

//func (d *Wopan) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
//	return nil, errs.NotSupport
//}

var _ driver.Driver = (*Wopan)(nil)
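Wopan.List above pages through QueryAllFiles 100 entries at a time and stops at the first short page. The same loop shape, reduced to a generic fetcher so it runs without the wopan SDK (fetchPage is a stand-in, not an SDK call):

package main

import "fmt"

// fetchPage is a stand-in for an SDK call such as QueryAllFiles: it returns
// one page of results for pageNum, with at most pageSize items.
func fetchPage(all []string, pageNum, pageSize int) []string {
	start := pageNum * pageSize
	if start >= len(all) {
		return nil
	}
	end := start + pageSize
	if end > len(all) {
		end = len(all)
	}
	return all[start:end]
}

func main() {
	all := make([]string, 0, 250)
	for i := 0; i < 250; i++ {
		all = append(all, fmt.Sprintf("file-%d", i))
	}

	var res []string
	pageNum, pageSize := 0, 100
	for {
		page := fetchPage(all, pageNum, pageSize)
		res = append(res, page...)
		// Stop on the first short page, exactly as the List loop above does.
		if len(page) < pageSize {
			break
		}
		pageNum++
	}
	fmt.Println("collected", len(res), "objects")
}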
37	drivers/wopan/meta.go	Normal file
@@ -0,0 +1,37 @@
package template

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	// Usually one of two
	driver.RootID
	// define other
	RefreshToken string `json:"refresh_token" required:"true"`
	FamilyID     string `json:"family_id" help:"Keep it empty if you want to use your personal drive"`
	SortRule     string `json:"sort_rule" type:"select" options:"name_asc,name_desc,time_asc,time_desc,size_asc,size_desc" default:"name_asc"`

	AccessToken string `json:"access_token"`
}

var config = driver.Config{
	Name:              "WoPan",
	LocalSort:         false,
	OnlyLocal:         false,
	OnlyProxy:         false,
	NoCache:           false,
	NoUpload:          false,
	NeedMs:            false,
	DefaultRoot:       "0",
	CheckStatus:       false,
	Alert:             "",
	NoOverwriteUpload: true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Wopan{}
	})
}
34	drivers/wopan/types.go	Normal file
@@ -0,0 +1,34 @@
package template

import (
	"github.com/Xhofe/wopan-sdk-go"
	"github.com/alist-org/alist/v3/internal/model"
)

type Object struct {
	model.ObjThumb
	FID string
}

func fileToObj(file wopan.File) (model.Obj, error) {
	t, err := getTime(file.CreateTime)
	if err != nil {
		return nil, err
	}
	return &Object{
		ObjThumb: model.ObjThumb{
			Object: model.Object{
				ID: file.Id,
				//Path: "",
				Name:     file.Name,
				Size:     file.Size,
				Modified: t,
				IsFolder: file.Type == 0,
			},
			Thumbnail: model.Thumbnail{
				Thumbnail: file.ThumbUrl,
			},
		},
		FID: file.Fid,
	}, nil
}
40	drivers/wopan/util.go	Normal file
@@ -0,0 +1,40 @@
package template

import (
	"time"

	"github.com/Xhofe/wopan-sdk-go"
)

// do others that are not defined in the Driver interface

func (d *Wopan) getSortRule() int {
	switch d.SortRule {
	case "name_asc":
		return wopan.SortNameAsc
	case "name_desc":
		return wopan.SortNameDesc
	case "time_asc":
		return wopan.SortTimeAsc
	case "time_desc":
		return wopan.SortTimeDesc
	case "size_asc":
		return wopan.SortSizeAsc
	case "size_desc":
		return wopan.SortSizeDesc
	default:
		return wopan.SortNameAsc
	}
}

func (d *Wopan) getSpaceType() string {
	if d.FamilyID == "" {
		return wopan.SpaceTypePersonal
	}
	return wopan.SpaceTypeFamily
}

// 20230607214351
func getTime(str string) (time.Time, error) {
	return time.Parse("20060102150405", str)
}
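getTime above leans on Go's reference-time layout: "20060102150405" parses compact timestamps like the 20230607214351 example in the comment. A quick standalone check:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Wopan create times are compact "yyyyMMddHHmmss" strings; Go expresses
	// that layout with the reference time 2006-01-02 15:04:05.
	t, err := time.Parse("20060102150405", "20230607214351")
	if err != nil {
		panic(err)
	}
	fmt.Println(t) // 2023-06-07 21:43:51 +0000 UTC
}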
72	go.mod
@@ -5,36 +5,41 @@ go 1.20
 require (
 	github.com/SheltonZhu/115driver v1.0.14
 	github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a
+	github.com/Xhofe/wopan-sdk-go v0.1.1
+	github.com/avast/retry-go v3.0.0+incompatible
 	github.com/aws/aws-sdk-go v1.44.262
-	github.com/blevesearch/bleve/v2 v2.3.8
+	github.com/blevesearch/bleve/v2 v2.3.9
-	github.com/caarlos0/env/v7 v7.1.0
+	github.com/caarlos0/env/v9 v9.0.0
+	github.com/coreos/go-oidc v2.2.1+incompatible
 	github.com/deckarep/golang-set/v2 v2.3.0
 	github.com/disintegration/imaging v1.6.2
 	github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564
+	github.com/foxxorcat/mopan-sdk-go v0.1.1
 	github.com/gin-contrib/cors v1.4.0
-	github.com/gin-gonic/gin v1.9.0
+	github.com/gin-gonic/gin v1.9.1
 	github.com/go-resty/resty/v2 v2.7.0
 	github.com/golang-jwt/jwt/v4 v4.5.0
 	github.com/google/uuid v1.3.0
 	github.com/gorilla/websocket v1.5.0
 	github.com/hirochachacha/go-smb2 v1.1.0
 	github.com/ipfs/go-ipfs-api v0.6.0
-	github.com/jlaffaye/ftp v0.1.0
+	github.com/jlaffaye/ftp v0.2.0
 	github.com/json-iterator/go v1.1.12
 	github.com/maruel/natural v1.1.0
 	github.com/natefinch/lumberjack v2.0.0+incompatible
 	github.com/pkg/errors v0.9.1
 	github.com/pkg/sftp v1.13.5
 	github.com/pquerna/otp v1.4.0
-	github.com/sirupsen/logrus v1.9.0
+	github.com/sirupsen/logrus v1.9.3
 	github.com/spf13/cobra v1.7.0
 	github.com/t3rm1n4l/go-mega v0.0.0-20230228171823-a01a2cda13ca
 	github.com/u2takey/ffmpeg-go v0.4.1
 	github.com/upyun/go-sdk/v3 v3.0.4
 	github.com/winfsp/cgofuse v1.5.0
-	golang.org/x/crypto v0.9.0
+	golang.org/x/crypto v0.11.0
-	golang.org/x/image v0.7.0
+	golang.org/x/image v0.9.0
-	golang.org/x/net v0.10.0
+	golang.org/x/net v0.12.0
+	golang.org/x/oauth2 v0.10.0
 	gorm.io/driver/mysql v1.4.7
 	gorm.io/driver/postgres v1.4.8
 	gorm.io/driver/sqlite v1.4.4
@@ -43,7 +48,8 @@ require (
 
 require (
 	github.com/BurntSushi/toml v0.3.1 // indirect
-	github.com/RoaringBitmap/roaring v0.9.4 // indirect
+	github.com/RoaringBitmap/roaring v1.2.3 // indirect
+	github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4 // indirect
 	github.com/aead/ecdh v0.2.0 // indirect
 	github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible // indirect
 	github.com/andreburgaud/crypt2go v1.1.0 // indirect
@@ -54,32 +60,33 @@ require (
 	github.com/blevesearch/go-porterstemmer v1.0.3 // indirect
 	github.com/blevesearch/gtreap v0.1.1 // indirect
 	github.com/blevesearch/mmap-go v1.0.4 // indirect
-	github.com/blevesearch/scorch_segment_api/v2 v2.1.4 // indirect
+	github.com/blevesearch/scorch_segment_api/v2 v2.1.5 // indirect
 	github.com/blevesearch/segment v0.9.1 // indirect
 	github.com/blevesearch/snowballstem v0.9.0 // indirect
 	github.com/blevesearch/upsidedown_store_api v1.0.2 // indirect
-	github.com/blevesearch/vellum v1.0.9 // indirect
+	github.com/blevesearch/vellum v1.0.10 // indirect
-	github.com/blevesearch/zapx/v11 v11.3.7 // indirect
+	github.com/blevesearch/zapx/v11 v11.3.9 // indirect
-	github.com/blevesearch/zapx/v12 v12.3.7 // indirect
+	github.com/blevesearch/zapx/v12 v12.3.9 // indirect
-	github.com/blevesearch/zapx/v13 v13.3.7 // indirect
+	github.com/blevesearch/zapx/v13 v13.3.9 // indirect
-	github.com/blevesearch/zapx/v14 v14.3.7 // indirect
+	github.com/blevesearch/zapx/v14 v14.3.9 // indirect
-	github.com/blevesearch/zapx/v15 v15.3.10 // indirect
+	github.com/blevesearch/zapx/v15 v15.3.12 // indirect
 	github.com/bluele/gcache v0.0.2 // indirect
 	github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc // indirect
-	github.com/bytedance/sonic v1.8.0 // indirect
+	github.com/bytedance/sonic v1.9.1 // indirect
 	github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect
 	github.com/crackcomm/go-gitignore v0.0.0-20170627025303-887ab5e44cc3 // indirect
 	github.com/decred/dcrd/dcrec/secp256k1/v4 v4.1.0 // indirect
+	github.com/gabriel-vasile/mimetype v1.4.2 // indirect
 	github.com/gaoyb7/115drive-webdav v0.1.8 // indirect
 	github.com/geoffgarside/ber v1.1.0 // indirect
 	github.com/gin-contrib/sse v0.1.0 // indirect
 	github.com/go-playground/locales v0.14.1 // indirect
 	github.com/go-playground/universal-translator v0.18.1 // indirect
-	github.com/go-playground/validator/v10 v10.11.2 // indirect
+	github.com/go-playground/validator/v10 v10.14.0 // indirect
 	github.com/go-sql-driver/mysql v1.7.0 // indirect
-	github.com/goccy/go-json v0.10.0 // indirect
+	github.com/goccy/go-json v0.10.2 // indirect
 	github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 // indirect
-	github.com/golang/protobuf v1.5.2 // indirect
+	github.com/golang/protobuf v1.5.3 // indirect
 	github.com/golang/snappy v0.0.4 // indirect
 	github.com/hashicorp/errwrap v1.1.0 // indirect
 	github.com/hashicorp/go-multierror v1.1.1 // indirect
@@ -92,13 +99,13 @@ require (
 	github.com/jinzhu/inflection v1.0.0 // indirect
 	github.com/jinzhu/now v1.1.5 // indirect
 	github.com/jmespath/go-jmespath v0.4.0 // indirect
-	github.com/klauspost/cpuid/v2 v2.2.3 // indirect
+	github.com/klauspost/cpuid/v2 v2.2.4 // indirect
 	github.com/kr/fs v0.1.0 // indirect
-	github.com/leodido/go-urn v1.2.1 // indirect
+	github.com/leodido/go-urn v1.2.4 // indirect
 	github.com/libp2p/go-buffer-pool v0.1.0 // indirect
 	github.com/libp2p/go-flow-metrics v0.1.0 // indirect
 	github.com/libp2p/go-libp2p v0.26.3 // indirect
-	github.com/mattn/go-isatty v0.0.17 // indirect
+	github.com/mattn/go-isatty v0.0.19 // indirect
 	github.com/mattn/go-sqlite3 v1.14.15 // indirect
 	github.com/minio/sha256-simd v1.0.0 // indirect
 	github.com/mitchellh/go-homedir v1.1.0 // indirect
@@ -115,22 +122,25 @@ require (
 	github.com/multiformats/go-multistream v0.4.1 // indirect
 	github.com/multiformats/go-varint v0.0.7 // indirect
 	github.com/orzogc/fake115uploader v0.3.3-0.20221009101310-08b764073b77 // indirect
-	github.com/pelletier/go-toml/v2 v2.0.6 // indirect
+	github.com/pelletier/go-toml/v2 v2.0.8 // indirect
 	github.com/pierrec/lz4/v4 v4.1.17 // indirect
+	github.com/pquerna/cachecontrol v0.1.0 // indirect
 	github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e // indirect
 	github.com/spaolacci/murmur3 v1.1.0 // indirect
 	github.com/spf13/pflag v1.0.5 // indirect
 	github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
 	github.com/u2takey/go-utils v0.3.1 // indirect
-	github.com/ugorji/go/codec v1.2.9 // indirect
+	github.com/ugorji/go/codec v1.2.11 // indirect
 	github.com/whyrusleeping/tar-utils v0.0.0-20180509141711-8c6c8ba81d5c // indirect
-	go.etcd.io/bbolt v1.3.5 // indirect
+	go.etcd.io/bbolt v1.3.7 // indirect
-	golang.org/x/arch v0.0.0-20210923205945-b76863e36670 // indirect
+	golang.org/x/arch v0.3.0 // indirect
-	golang.org/x/sys v0.8.0 // indirect
+	golang.org/x/sys v0.10.0 // indirect
-	golang.org/x/text v0.9.0 // indirect
+	golang.org/x/text v0.11.0 // indirect
-	golang.org/x/time v0.0.0-20220922220347-f3bd1da661af // indirect
+	golang.org/x/time v0.3.0 // indirect
-	google.golang.org/protobuf v1.28.1 // indirect
+	google.golang.org/appengine v1.6.7 // indirect
+	google.golang.org/protobuf v1.31.0 // indirect
 	gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
+	gopkg.in/square/go-jose.v2 v2.6.0 // indirect
 	gopkg.in/yaml.v3 v3.0.1 // indirect
 	lukechampine.com/blake3 v1.1.7 // indirect
 )
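go.mod now adds github.com/coreos/go-oidc and golang.org/x/oauth2 (with pquerna/cachecontrol and gopkg.in/square/go-jose.v2 arriving as their indirect dependencies), which points at OpenID Connect single sign-on. As a hedged illustration only, and not necessarily how this repository wires them, the two libraries are typically combined like this; the issuer, client credentials and redirect URL are placeholders:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/coreos/go-oidc"
	"golang.org/x/oauth2"
)

func main() {
	ctx := context.Background()

	// Placeholder issuer; NewProvider discovers the endpoints via OIDC discovery.
	provider, err := oidc.NewProvider(ctx, "https://issuer.example.com")
	if err != nil {
		log.Fatal(err)
	}

	conf := oauth2.Config{
		ClientID:     "client-id",     // placeholder
		ClientSecret: "client-secret", // placeholder
		RedirectURL:  "https://example.com/callback",
		Endpoint:     provider.Endpoint(),
		Scopes:       []string{oidc.ScopeOpenID, "profile", "email"},
	}

	// Step 1: send the user to the authorization URL.
	fmt.Println(conf.AuthCodeURL("random-state"))

	// Step 2 (after the callback): exchange the code and verify the ID token.
	// token, _ := conf.Exchange(ctx, code)
	// rawIDToken, _ := token.Extra("id_token").(string)
	// idToken, _ := provider.Verifier(&oidc.Config{ClientID: conf.ClientID}).Verify(ctx, rawIDToken)
	// _ = idToken
}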
167	go.sum
@@ -1,30 +1,32 @@
 github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
-github.com/RoaringBitmap/roaring v0.9.4 h1:ckvZSX5gwCRaJYBNe7syNawCU5oruY9gQmjXlp4riwo=
+github.com/RoaringBitmap/roaring v1.2.3 h1:yqreLINqIrX22ErkKI0vY47/ivtJr6n+kMhVOVmhWBY=
-github.com/RoaringBitmap/roaring v0.9.4/go.mod h1:icnadbWcNyfEHlYdr+tDlOTih1Bf/h+rzPpv4sbomAA=
+github.com/RoaringBitmap/roaring v1.2.3/go.mod h1:plvDsJQpxOC5bw8LRteu/MLWHsHez/3y6cubLI4/1yE=
 github.com/SheltonZhu/115driver v1.0.14 h1:uW3dl8J9KDMw+3gPxQdhTysoGhw0/uI1484GT9xhfU4=
 github.com/SheltonZhu/115driver v1.0.14/go.mod h1:00ixivHH5HqDj4S7kAWbkuUrjtsJTxc7cGv5RMw3RVs=
 github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a h1:RenIAa2q4H8UcS/cqmwdT1WCWIAH5aumP8m8RpbqVsE=
 github.com/Xhofe/go-cache v0.0.0-20220723083548-714439c8af9a/go.mod h1:sSBbaOg90XwWKtpT56kVujF0bIeVITnPlssLclogS04=
+github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4 h1:WnvifFgYyogPz2ZFvaVLk4gI/Co0paF92FmxSR6U1zY=
+github.com/Xhofe/rateg v0.0.0-20230728072201-251a4e1adad4/go.mod h1:8pWlL2rpusvx7Xa6yYaIWOJ8bR3gPdFBUT7OystyGOY=
+github.com/Xhofe/wopan-sdk-go v0.1.1 h1:dSrTxNYclqNuo9libjtC+R6C4RCen/inh/dUXd12vpM=
+github.com/Xhofe/wopan-sdk-go v0.1.1/go.mod h1:xWcUS7PoFLDD9gy2BK2VQfilEsZngLMz2Vkx3oF2zJY=
 github.com/aead/ecdh v0.2.0 h1:pYop54xVaq/CEREFEcukHRZfTdjiWvYIsZDXXrBapQQ=
 github.com/aead/ecdh v0.2.0/go.mod h1:a9HHtXuSo8J1Js1MwLQx2mBhkXMT6YwUmVVEY4tTB8U=
 github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible h1:QoRMR0TCctLDqBCMyOu1eXdZyMw3F7uGA9qPn2J4+R8=
 github.com/aliyun/aliyun-oss-go-sdk v2.2.5+incompatible/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8=
 github.com/andreburgaud/crypt2go v1.1.0 h1:eitZxTPY1krUsxinsng3Qvt/Ud7q/aQmmYRh8p4hyPw=
 github.com/andreburgaud/crypt2go v1.1.0/go.mod h1:4qhZPzarj1dCIRmCkpdgCklwp+hBq9yEt0zPe9Ayuhc=
+github.com/avast/retry-go v3.0.0+incompatible h1:4SOWQ7Qs+oroOTQOYnAHqelpCO0biHSxpiH9JdtuBj0=
+github.com/avast/retry-go v3.0.0+incompatible/go.mod h1:XtSnn+n/sHqQIpZ10K1qAevBhOOCWBLXXy3hyiqqBrY=
 github.com/aws/aws-sdk-go v1.38.20/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
-github.com/aws/aws-sdk-go v1.44.194 h1:1ZDK+QDcc5oRbZGgRZSz561eR8XVizXCeGpoZKo33NU=
-github.com/aws/aws-sdk-go v1.44.194/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/aws/aws-sdk-go v1.44.262 h1:gyXpcJptWoNkK+DiAiaBltlreoWKQXjAIh6FRh60F+I=
 github.com/aws/aws-sdk-go v1.44.262/go.mod h1:aVsgQcEevwlmQ7qHE9I3h+dtQgpqhFB+i8Phjh7fkwI=
 github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
 github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
 github.com/bits-and-blooms/bitset v1.2.0 h1:Kn4yilvwNtMACtf1eYDlG8H77R07mZSPbMjLyS07ChA=
 github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
-github.com/blevesearch/bleve/v2 v2.3.7 h1:nIfIrhv28tvgBpbVF8Dq7/U1zW/YiwSqg/PBgE3x8bo=
+github.com/blevesearch/bleve/v2 v2.3.9 h1:pUMvK0mxAexqasZcVj8lazmWnEW5XiV0tASIqANiNTQ=
-github.com/blevesearch/bleve/v2 v2.3.7/go.mod h1:2tToYD6mDeseIA13jcZiEEqYrVLg6xdk0v6+F7dWquU=
+github.com/blevesearch/bleve/v2 v2.3.9/go.mod h1:1PibElcjlQMQHF9uS9mRv58ODQgj4pCWHA1Wfd+qagU=
-github.com/blevesearch/bleve/v2 v2.3.8 h1:IqFyMJ73n4gY8AmVqM8Sa6EtAZ5beE8yramVqCvs2kQ=
-github.com/blevesearch/bleve/v2 v2.3.8/go.mod h1:Lh9aZEHrLKxwPnW4z4lsBEGnflZQ1V/aWP/t+htsiDw=
 github.com/blevesearch/bleve_index_api v1.0.5 h1:Lc986kpC4Z0/n1g3gg8ul7H+lxgOQPcXb9SxvQGu+tw=
 github.com/blevesearch/bleve_index_api v1.0.5/go.mod h1:YXMDwaXFFXwncRS8UobWs7nvo0DmusriM1nztTlj1ms=
 github.com/blevesearch/geo v0.1.17 h1:AguzI6/5mHXapzB0gE9IKWo+wWPHZmXZoscHcjFgAFA=
@@ -35,41 +37,41 @@ github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZG
 github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
 github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
 github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
-github.com/blevesearch/scorch_segment_api/v2 v2.1.4 h1:LmGmo5twU3gV+natJbKmOktS9eMhokPGKWuR+jX84vk=
+github.com/blevesearch/scorch_segment_api/v2 v2.1.5 h1:1g713kpCQZ8u4a3stRGBfrwVOuGRnmxOVU5MQkUPrHU=
-github.com/blevesearch/scorch_segment_api/v2 v2.1.4/go.mod h1:PgVnbbg/t1UkgezPDu8EHLi1BHQ17xUwsFdU6NnOYS0=
+github.com/blevesearch/scorch_segment_api/v2 v2.1.5/go.mod h1:f2nOkKS1HcjgIWZgDAErgBdxmr2eyt0Kn7IY+FU1Xe4=
 github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
 github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
 github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
 github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
 github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A=
 github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
-github.com/blevesearch/vellum v1.0.9 h1:PL+NWVk3dDGPCV0hoDu9XLLJgqU4E5s/dOeEJByQ2uQ=
+github.com/blevesearch/vellum v1.0.10 h1:HGPJDT2bTva12hrHepVT3rOyIKFFF4t7Gf6yMxyMIPI=
-github.com/blevesearch/vellum v1.0.9/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
+github.com/blevesearch/vellum v1.0.10/go.mod h1:ul1oT0FhSMDIExNjIxHqJoGpVrBpKCdgDQNxfqgJt7k=
-github.com/blevesearch/zapx/v11 v11.3.7 h1:Y6yIAF/DVPiqZUA/jNgSLXmqewfzwHzuwfKyfdG+Xaw=
+github.com/blevesearch/zapx/v11 v11.3.9 h1:y3ijS4h4MJdmQ07MHASxat4owAixreK2xdo76w9ncrw=
-github.com/blevesearch/zapx/v11 v11.3.7/go.mod h1:Xk9Z69AoAWIOvWudNDMlxJDqSYGf90LS0EfnaAIvXCA=
+github.com/blevesearch/zapx/v11 v11.3.9/go.mod h1:jcAYnQwlr+LqD2vLjDWjWiZDXDXGFqPbpPDRTd3XmS4=
-github.com/blevesearch/zapx/v12 v12.3.7 h1:DfQ6rsmZfEK4PzzJJRXjiM6AObG02+HWvprlXQ1Y7eI=
+github.com/blevesearch/zapx/v12 v12.3.9 h1:MXGLlZ03oxXH3DMJTZaBaRj2xb6t4wQVZeZK/wu1M6w=
-github.com/blevesearch/zapx/v12 v12.3.7/go.mod h1:SgEtYIBGvM0mgIBn2/tQE/5SdrPXaJUaT/kVqpAPxm0=
+github.com/blevesearch/zapx/v12 v12.3.9/go.mod h1:QXCMwmOkdLnMDgTN1P4CcuX5F851iUOtOwXbw0HMBYs=
-github.com/blevesearch/zapx/v13 v13.3.7 h1:igIQg5eKmjw168I7av0Vtwedf7kHnQro/M+ubM4d2l8=
+github.com/blevesearch/zapx/v13 v13.3.9 h1:+VAz9V0VmllHXlZV4DCvfYj0nqaZHgF3MeEHwOyRBwQ=
-github.com/blevesearch/zapx/v13 v13.3.7/go.mod h1:yyrB4kJ0OT75UPZwT/zS+Ru0/jYKorCOOSY5dBzAy+s=
+github.com/blevesearch/zapx/v13 v13.3.9/go.mod h1:s+WjNp4WSDtrBVBpa37DUOd7S/Gr/jTZ7ST/MbCVj/0=
-github.com/blevesearch/zapx/v14 v14.3.7 h1:gfe+fbWslDWP/evHLtp/GOvmNM3sw1BbqD7LhycBX20=
+github.com/blevesearch/zapx/v14 v14.3.9 h1:wuqxATgsTCNHM9xsOFOeFp8H2heZ/gMX/tsl9lRK8U4=
-github.com/blevesearch/zapx/v14 v14.3.7/go.mod h1:9J/RbOkqZ1KSjmkOes03AkETX7hrXT0sFMpWH4ewC4w=
+github.com/blevesearch/zapx/v14 v14.3.9/go.mod h1:MWZ4v8AzFBRurhDzkLvokFW8ljcq9Evm27mkWe8OGbM=
-github.com/blevesearch/zapx/v15 v15.3.9 h1:/s9zqKxFaZKQTTcMO2b/Tup0ch5MSztlvw+frVDfIBk=
+github.com/blevesearch/zapx/v15 v15.3.12 h1:w/kU9aHyfMDEdwHGZzCiakC3HZ9z5gYlXaALDC4Dct8=
-github.com/blevesearch/zapx/v15 v15.3.9/go.mod h1:m7Y6m8soYUvS7MjN9eKlz1xrLCcmqfFadmu7GhWIrLY=
+github.com/blevesearch/zapx/v15 v15.3.12/go.mod h1:tx53gDJS/7Oa3Je820cmVurqCuJ4dqdAy1kiDMV/IUo=
-github.com/blevesearch/zapx/v15 v15.3.10 h1:bQ9ZxJCj6rKp873EuVJu2JPxQ+EWQZI1cjJGeroovaQ=
-github.com/blevesearch/zapx/v15 v15.3.10/go.mod h1:m7Y6m8soYUvS7MjN9eKlz1xrLCcmqfFadmu7GhWIrLY=
 github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
 github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc h1:biVzkmvwrH8WK8raXaxBx6fRVTlJILwEwQGL1I/ByEI=
 github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
 github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM=
-github.com/bytedance/sonic v1.8.0 h1:ea0Xadu+sHlu7x5O3gKhRpQ1IKiMrSiHttPF0ybECuA=
+github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s=
-github.com/bytedance/sonic v1.8.0/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
+github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U=
-github.com/caarlos0/env/v7 v7.1.0 h1:9lzTF5amyQeWHZzuZeKlCb5FWSUxpG1js43mhbY8ozg=
+github.com/caarlos0/env/v9 v9.0.0 h1:SI6JNsOA+y5gj9njpgybykATIylrRMklbs5ch6wO6pc=
-github.com/caarlos0/env/v7 v7.1.0/go.mod h1:LPPWniDUq4JaO6Q41vtlyikhMknqymCLBw0eX4dcH1E=
+github.com/caarlos0/env/v9 v9.0.0/go.mod h1:ye5mlCVMYh6tZ+vCgrs/B95sj88cg5Tlnc0XIzgZ020=
 github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764=
 github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams=
 github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk=
+github.com/coreos/go-oidc v2.2.1+incompatible h1:mh48q/BqXqgjVHpy2ZY7WnWAbenxRjsz9N1i1YxjHAk=
+github.com/coreos/go-oidc v2.2.1+incompatible/go.mod h1:CgnwVTmzoESiwO9qyAFEMiHoZ1nMCKZlZ9V6mm3/LKc=
 github.com/cpuguy83/go-md2man/v2 v2.0.2/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o=
 github.com/crackcomm/go-gitignore v0.0.0-20170627025303-887ab5e44cc3 h1:HVTnpeuvF6Owjd5mniCL8DEXo7uYXdQEmOP4FJbV5tg=
 github.com/crackcomm/go-gitignore v0.0.0-20170627025303-887ab5e44cc3/go.mod h1:p1d6YEZWvFzEh4KLyvBcVSnrfNDDvK2zfK/4x2v/4pE=
@@ -86,7 +88,11 @@ github.com/disintegration/imaging v1.6.2 h1:w1LecBlG2Lnp8B3jk5zSuNqd7b4DXhcjwek1
 github.com/disintegration/imaging v1.6.2/go.mod h1:44/5580QXChDfwIclfc/PCwrr44amcmDAg8hxG0Ewe4=
 github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564 h1:I6KUy4CI6hHjqnyJLNCEi7YHVMkwwtfSr2k9splgdSM=
 github.com/dustinxie/ecc v0.0.0-20210511000915-959544187564/go.mod h1:yekO+3ZShy19S+bsmnERmznGy9Rfg6dWWWpiGJjNAz8=
+github.com/foxxorcat/mopan-sdk-go v0.1.1 h1:JYMeCu4PFpqgHapvOz4jPMT7CxR6Yebu3aWkgGMDeIU=
+github.com/foxxorcat/mopan-sdk-go v0.1.1/go.mod h1:LpBPmwezjQNyhaNo3HGzgFtQbhvxmF5ZybSVuKi7OVA=
 github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
+github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
 github.com/gaoyb7/115drive-webdav v0.1.8 h1:EJt4PSmcbvBY4KUh2zSo5p6fN9LZFNkIzuKejipubVw=
 github.com/gaoyb7/115drive-webdav v0.1.8/go.mod h1:BKbeY6j8SKs3+rzBFFALznGxbPmefEm3vA+dGhqgOGU=
 github.com/geoffgarside/ber v1.1.0 h1:qTmFG4jJbwiSzSXoNJeHcOprVzZ8Ulde2Rrrifu5U9w=
@@ -96,8 +102,8 @@ github.com/gin-contrib/cors v1.4.0/go.mod h1:bs9pNM0x/UsmHPBWT2xZz9ROh8xYjYkiURU
 github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
 github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
 github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
-github.com/gin-gonic/gin v1.9.0 h1:OjyFBKICoexlu99ctXNR2gg+c5pKrKMuyjgARg9qeY8=
+github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
-github.com/gin-gonic/gin v1.9.0/go.mod h1:W1Me9+hsUSyj3CePGrd1/QrKJMSJ1Tu/0hFEH89961k=
+github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
 github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
 github.com/go-playground/assert/v2 v2.0.1/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4=
 github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s=
@@ -109,27 +115,28 @@ github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJn
 github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY=
 github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos=
 github.com/go-playground/validator/v10 v10.11.0/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU=
-github.com/go-playground/validator/v10 v10.11.2 h1:q3SHpufmypg+erIExEKUmsgmhDTyhcJ38oeKGACXohU=
+github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js=
-github.com/go-playground/validator/v10 v10.11.2/go.mod h1:NieE624vt4SCTJtD87arVLvdmjPAeV8BQlHtMnw9D7s=
+github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
 github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
 github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
 github.com/go-sql-driver/mysql v1.7.0 h1:ueSltNNllEqE3qcWBTD0iQd3IpL/6U+mJxLkazJ7YPc=
 github.com/go-sql-driver/mysql v1.7.0/go.mod h1:OXbVy3sEdcQ2Doequ6Z5BW6fXNQTmx+9S1MCJN5yJMI=
 github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
-github.com/goccy/go-json v0.10.0 h1:mXKd9Qw4NuzShiRlOXKews24ufknHO7gx30lsDyokKA=
+github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
-github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
+github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
 github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
 github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
 github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
 github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFiguSNcXmS6rkKT+X7FdIrTtfo=
 github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
+github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
-github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
+github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg=
-github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
+github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
 github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM=
 github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
-github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
+github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
 github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
@@ -164,8 +171,8 @@ github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkr
 github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
 github.com/jinzhu/now v1.1.5 h1:/o9tlHleP7gOFmsnYNz3RGnqzefHA47wQpKrrdTIwXQ=
 github.com/jinzhu/now v1.1.5/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
-github.com/jlaffaye/ftp v0.1.0 h1:DLGExl5nBoSFoNshAUHwXAezXwXBvFdx7/qwhucWNSE=
+github.com/jlaffaye/ftp v0.2.0 h1:lXNvW7cBu7R/68bknOX3MrRIIqZ61zELs1P2RAiA3lg=
-github.com/jlaffaye/ftp v0.1.0/go.mod h1:hhq4G4crv+nW2qXtNYcuzLeOudG92Ps37HEKeg2e3lE=
+github.com/jlaffaye/ftp v0.2.0/go.mod h1:is2Ds5qkhceAPy2xD6RLI6hmp/qysSoymZ+Z2uTnspI=
 github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
 github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
 github.com/jmespath/go-jmespath/internal/testify v1.5.1 h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=
@@ -177,8 +184,8 @@ github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQL
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/klauspost/cpuid/v2 v2.0.4/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
 github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
-github.com/klauspost/cpuid/v2 v2.2.3 h1:sxCkb+qR91z4vsqw4vGGZlDgPz3G7gjaLyK3V8y70BU=
+github.com/klauspost/cpuid/v2 v2.2.4 h1:acbojRNwl3o09bUq+yDCtZFc1aiwaAAxtcn8YkZXnvk=
-github.com/klauspost/cpuid/v2 v2.2.3/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
+github.com/klauspost/cpuid/v2 v2.2.4/go.mod h1:RVVoqg1df56z8g3pUjL/3lE5UfnlrJX8tyFgg4nqhuY=
 github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
 github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
 github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
@@ -189,8 +196,9 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
 github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
-github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w=
 github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
+github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
+github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
 github.com/libp2p/go-buffer-pool v0.1.0 h1:oK4mSFcQz7cTQIfqbe4MIj9gLW+mnanjyFtc6cdF0Y8=
 github.com/libp2p/go-buffer-pool v0.1.0/go.mod h1:N+vh8gMqimBzdKkSMVuydVDq+UV5QTWy5HSiZacSbPg=
 github.com/libp2p/go-flow-metrics v0.1.0 h1:0iPhMI8PskQwzh57jB9WxIuIOQ0r+15PChFGkx3Q3WM=
@@ -200,10 +208,11 @@ github.com/libp2p/go-libp2p v0.26.3/go.mod h1:x75BN32YbwuY0Awm2Uix4d4KOz+/4piInk
 github.com/maruel/natural v1.1.0 h1:2z1NgP/Vae+gYrtC0VuvrTJ6U35OuyUqDdfluLqMWuQ=
 github.com/maruel/natural v1.1.0/go.mod h1:eFVhYCcUOfZFxXoDZam8Ktya72wa79fNC3lc/leA0DQ=
 github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
-github.com/mattn/go-isatty v0.0.17 h1:BTarxUcIeDqL27Mc+vyvdWYSL28zpIhv3RoTdsLMPng=
+github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
-github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
+github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
 github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI=
 github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg=
+github.com/minio/blake2b-simd v0.0.0-20160723061019-3f5f724cb5b1/go.mod h1:pD8RvIylQ358TN4wwqatJ8rNavkEINozVn9DtGI3dfQ=
 github.com/minio/sha256-simd v1.0.0 h1:v1ta+49hkWZyvaKwrQB8elexRqm6Y0aMLjCNsrYxo6g=
 github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy5RFepssQM=
 github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -241,8 +250,8 @@ github.com/orzogc/fake115uploader v0.3.3-0.20221009101310-08b764073b77 h1:dg/Eaa
 github.com/orzogc/fake115uploader v0.3.3-0.20221009101310-08b764073b77/go.mod h1:FD9a09Vw07CSMTdT0Y7ttStOa1WZsnPBslliMw2DkeM=
 github.com/panjf2000/ants/v2 v2.4.2/go.mod h1:f6F0NZVFsGCp5A7QW/Zj/m92atWwOkY0OIhFxRNFr4A=
 github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
-github.com/pelletier/go-toml/v2 v2.0.6 h1:nrzqCb7j9cDFj2coyLNLaZuJTLjWjlaz6nvTvIwycIU=
+github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ=
-github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek=
+github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4=
 github.com/pierrec/lz4/v4 v4.1.17 h1:kV4Ip+/hUBC+8T6+2EgburRtkE9ef4nbY3f4dFhGjMc=
 github.com/pierrec/lz4/v4 v4.1.17/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
 github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -253,6 +262,8 @@ github.com/pkg/sftp v1.13.5 h1:a3RLUqkyjYRtBTZJZ1VRrKbN3zhuPLlUc3sphVz81go=
 github.com/pkg/sftp v1.13.5/go.mod h1:wHDZ0IZX6JcBYRK1TH9bcVq8G7TLpVHYIGJRFnmPfxg=
 github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
 github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc=
+github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI=
 github.com/pquerna/otp v1.4.0 h1:wZvl1TIVxKRThZIBiwOOHOGP/1+nZyWBil9Y2XNEDzg=
 github.com/pquerna/otp v1.4.0/go.mod h1:dkJfzwRKNiegxyNb54X/3fLwhCynbMspSyWKnvi1AEg=
 github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
@@ -260,8 +271,8 @@ github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUA
 github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
 github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
 github.com/sirupsen/logrus v1.8.1/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
-github.com/sirupsen/logrus v1.9.0 h1:trlNQbNUG3OdDrDil03MCb1H2o9nJ1x4/5LYw7byDE0=
+github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ=
-github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
+github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ=
 github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e h1:MRM5ITcdelLK2j1vwZ3Je0FKVCfqOLp5zO6trqMLYs0=
 github.com/skip2/go-qrcode v0.0.0-20200617195104-da1b6568686e/go.mod h1:XV66xRDqSt+GTGFMVlhk3ULuV0y9ZmzeVGR4mloJI3M=
 github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
@@ -284,7 +295,9 @@ github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
 github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
 github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
 github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
 github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
 github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.3 h1:RP3t2pwF7cMEbC1dqtB6poj3niw/9gnV4Cjg5oW5gtY=
+github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
 github.com/t3rm1n4l/go-mega v0.0.0-20230228171823-a01a2cda13ca h1:I9rVnNXdIkij4UvMT7OmKhH9sOIvS8iXkxfPdnn9wQA=
 github.com/t3rm1n4l/go-mega v0.0.0-20230228171823-a01a2cda13ca/go.mod h1:suDIky6yrK07NnaBadCB4sS0CqFOvUK91lH7CR+JlDA=
@@ -295,8 +308,8 @@ github.com/u2takey/go-utils v0.3.1 h1:TaQTgmEZZeDHQFYfd+AdUT1cT4QJgJn/XVPELhHw4y
 github.com/u2takey/go-utils v0.3.1/go.mod h1:6e+v5vEZ/6gu12w/DC2ixZdZtCrNokVxD0JUklcqdCs=
 github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
 github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
-github.com/ugorji/go/codec v1.2.9 h1:rmenucSohSTiyL09Y+l2OCk+FrMxGMzho2+tjr5ticU=
+github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
-github.com/ugorji/go/codec v1.2.9/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
+github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
 github.com/upyun/go-sdk/v3 v3.0.4 h1:2DCJa/Yi7/3ZybT9UCPATSzvU3wpPPxhXinNlb1Hi8Q=
 github.com/upyun/go-sdk/v3 v3.0.4/go.mod h1:P/SnuuwhrIgAVRd/ZpzDWqCsBAf/oHg7UggbAxyZa0E=
 github.com/valyala/fastjson v1.6.3 h1:tAKFnnwmeMGPbwJ7IwxcTPCNr3uIzoIj3/Fh90ra4xc=
@@ -305,11 +318,12 @@ github.com/whyrusleeping/tar-utils v0.0.0-20180509141711-8c6c8ba81d5c/go.mod h1:
 github.com/winfsp/cgofuse v1.5.0 h1:MsBP7Mi/LiJf/7/F3O/7HjjR009ds6KCdqXzKpZSWxI=
 github.com/winfsp/cgofuse v1.5.0/go.mod h1:h3awhoUOcn2VYVKCwDaYxSLlZwnyK+A8KaDoLUp2lbU=
 github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
-go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=
+go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
-go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
+go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
 gocv.io/x/gocv v0.25.0/go.mod h1:Rar2PS6DV+T4FL+PM535EImD/h13hGVaHhnCu1xarBs=
-golang.org/x/arch v0.0.0-20210923205945-b76863e36670 h1:18EFjUmQOcUvxNYSkA6jO9VAiXCnxFY6NyDX0bHDmkU=
 golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
+golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k=
+golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
 golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
@ -320,16 +334,15 @@ golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0
|
|||||||
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||||
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
|
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
|
||||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||||
golang.org/x/crypto v0.8.0 h1:pd9TJtTueMTVQXzk8E2XESSMQDj/U7OUu0PqJqPXQjQ=
|
golang.org/x/crypto v0.11.0 h1:6Ewdq3tDic1mg5xRO4milcWCfMVQhI4NkqWWvqejpuA=
|
||||||
golang.org/x/crypto v0.8.0/go.mod h1:mRqEX+O9/h5TFCrQhkgjo2yKi0yYA+9ecGkdQoHrywE=
|
golang.org/x/crypto v0.11.0/go.mod h1:xgJhtzW8F9jGdVFWZESrid1U1bjeNy4zgy5cRr/CIio=
|
||||||
golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g=
|
|
||||||
golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0=
|
|
||||||
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
golang.org/x/image v0.0.0-20191009234506-e7c1f5e7dbb8/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||||
golang.org/x/image v0.7.0 h1:gzS29xtG1J5ybQlv0PuyfE3nmc6R4qB73m6LUUmvFuw=
|
golang.org/x/image v0.9.0 h1:QrzfX26snvCM20hIhBwuHI/ThTg18b/+kcKdXHvnR+g=
|
||||||
golang.org/x/image v0.7.0/go.mod h1:nd/q4ef1AKKYl/4kft7g+6UyGbdiqWqTP1ZAbRoV7Rg=
|
golang.org/x/image v0.9.0/go.mod h1:jtrku+n79PfroUbvDdeUWMAI+heR786BofxrbiSF+J0=
|
||||||
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
|
||||||
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
|
||||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||||
|
golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks=
|
||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
|
||||||
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
|
||||||
@ -338,17 +351,16 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qx
|
|||||||
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
|
||||||
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
|
golang.org/x/net v0.1.0/go.mod h1:Cx3nUiGt4eDBEyega/BKRp+/AlGL8hYe7U9odMt2Cco=
|
||||||
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
|
||||||
golang.org/x/net v0.9.0 h1:aWJ/m6xSmxWBx+V0XRHTlrYrPG56jKsLdTFmsSsCzOM=
|
golang.org/x/net v0.12.0 h1:cfawfvKITfUsFCeJIHJrbSxpeu/E81khclypR0GVT50=
|
||||||
golang.org/x/net v0.9.0/go.mod h1:d48xBJpPfHeWQsugry2m+kC02ZBRGRgulfHnEXEuWns=
|
golang.org/x/net v0.12.0/go.mod h1:zEVYFnQC7m/vmpQFELhcD1EWkZlX69l4oqgmer6hfKA=
|
||||||
golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M=
|
golang.org/x/oauth2 v0.10.0 h1:zHCpF2Khkwy4mMB4bv0U37YtJdTGW8jI0glAApi0Kh8=
|
||||||
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
|
golang.org/x/oauth2 v0.10.0/go.mod h1:kTpgurOux7LqtuxjuyZa4Gj2gdezIt/jQtGnNFfypQI=
|
||||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
|
||||||
golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200602225109-6fdc65e7d980/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
@ -362,18 +374,16 @@ golang.org/x/sys v0.0.0-20220702020025-31831981b65f/go.mod h1:oPkhp1MJrh7nUepCBc
|
|||||||
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220704084225-05e143d24a9e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
||||||
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.7.0 h1:3jlCCIQZPdOYu1h8BkNvLz8Kgwtae2cagcG/VamtZRU=
|
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
golang.org/x/sys v0.10.0 h1:SqMFp9UcQJZa+pmYuAKjd9xq1f0j5rLcDIk0mj4qAsA=
|
||||||
golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU=
|
golang.org/x/sys v0.10.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||||
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
|
||||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||||
golang.org/x/term v0.7.0 h1:BEvjmm5fURWqcfbSKTdpkDXYBrUS1c0m8agp14W48vQ=
|
golang.org/x/term v0.10.0 h1:3R7pNqamzBraeqj/Tj8qt1aQ2HpmlC+Cx/qL/7hn4/c=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||||
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
||||||
@ -381,12 +391,14 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
|
|||||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||||
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
|
||||||
golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE=
|
golang.org/x/text v0.11.0 h1:LAntKIrcmeSKERyiOh0XMV39LXS8IE9UL2yP7+f5ij4=
|
||||||
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
|
golang.org/x/text v0.11.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
|
||||||
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20220722155302-e5dcc9cfc0b9/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af h1:Yx9k8YCG3dvF87UAn2tu2HQLf2dt/eR1bXxpLMWeH+Y=
|
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af h1:Yx9k8YCG3dvF87UAn2tu2HQLf2dt/eR1bXxpLMWeH+Y=
|
||||||
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
golang.org/x/time v0.0.0-20220922220347-f3bd1da661af/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
|
golang.org/x/time v0.3.0 h1:rg5rLMjNzMS1RkNLzCG38eapWhnYLFYXDXj2gOlr8j4=
|
||||||
|
golang.org/x/time v0.3.0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
|
||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
@ -394,12 +406,13 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
|
|||||||
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
|
google.golang.org/appengine v1.6.7 h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=
|
||||||
|
google.golang.org/appengine v1.6.7/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||||
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
|
google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8=
|
||||||
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||||
@ -407,6 +420,8 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EV
|
|||||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=
|
gopkg.in/natefinch/lumberjack.v2 v2.0.0 h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=
|
||||||
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
|
gopkg.in/natefinch/lumberjack.v2 v2.0.0/go.mod h1:l0ndWWf7gzL7RNwBG7wST/UCcT4T24xpD6X8LsfU/+k=
|
||||||
|
gopkg.in/square/go-jose.v2 v2.6.0 h1:NGk74WTnPKBNUhNzQX7PYcTLUjoq7mzKk2OKbvwk2iI=
|
||||||
|
gopkg.in/square/go-jose.v2 v2.6.0/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
|
@@ -10,7 +10,7 @@ import (
    "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/conf"
    "github.com/alist-org/alist/v3/pkg/utils"
-   "github.com/caarlos0/env/v7"
+   "github.com/caarlos0/env/v9"
    log "github.com/sirupsen/logrus"
)

@@ -87,7 +87,7 @@ func confFromEnv() {
        prefix = ""
    }
    log.Infof("load config from env with prefix: %s", prefix)
-   if err := env.Parse(conf.Conf, env.Options{
+   if err := env.ParseWithOptions(conf.Conf, env.Options{
        Prefix: prefix,
    }); err != nil {
        log.Fatalf("load config from env error: %+v", err)
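The loader above now goes through env/v9's ParseWithOptions instead of the env/v7 Parse call. A minimal, self-contained sketch of that call shape; the Cfg struct and the ALIST_ prefix here are illustrative stand-ins, not the real conf.Conf:

package main

import (
	"fmt"
	"log"

	"github.com/caarlos0/env/v9"
)

// Cfg is a stand-in for conf.Conf; the field names here are only illustrative.
type Cfg struct {
	SiteURL  string `env:"SITE_URL"`
	HttpPort int    `env:"HTTP_PORT"`
}

func main() {
	cfg := &Cfg{}
	// In env/v9 the options struct is passed to ParseWithOptions,
	// mirroring the call in the hunk above.
	if err := env.ParseWithOptions(cfg, env.Options{Prefix: "ALIST_"}); err != nil {
		log.Fatalf("load config from env error: %+v", err)
	}
	fmt.Printf("%+v\n", cfg)
}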
@@ -154,13 +154,16 @@ func InitialSettings() []model.SettingItem {

        // SSO settings
        {Key: conf.SSOLoginEnabled, Value: "false", Type: conf.TypeBool, Group: model.SSO, Flag: model.PUBLIC},
-       {Key: conf.SSOLoginplatform, Type: conf.TypeSelect, Options: "Casdoor,Github,Microsoft,Google,Dingtalk", Group: model.SSO, Flag: model.PUBLIC},
+       {Key: conf.SSOLoginPlatform, Type: conf.TypeSelect, Options: "Casdoor,Github,Microsoft,Google,Dingtalk,OIDC", Group: model.SSO, Flag: model.PUBLIC},
        {Key: conf.SSOClientId, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
        {Key: conf.SSOClientSecret, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
        {Key: conf.SSOOrganizationName, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
        {Key: conf.SSOApplicationName, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
        {Key: conf.SSOEndpointName, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
        {Key: conf.SSOJwtPublicKey, Value: "", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
+       {Key: conf.SSOAutoRegister, Value: "false", Type: conf.TypeBool, Group: model.SSO, Flag: model.PRIVATE},
+       {Key: conf.SSODefaultDir, Value: "/", Type: conf.TypeString, Group: model.SSO, Flag: model.PRIVATE},
+       {Key: conf.SSODefaultPermission, Value: "0", Type: conf.TypeNumber, Group: model.SSO, Flag: model.PRIVATE},

        // qbittorrent settings
        {Key: conf.QbittorrentUrl, Value: "http://admin:adminadmin@localhost:8080/", Type: conf.TypeString, Group: model.SINGLE, Flag: model.PRIVATE},
@@ -20,9 +20,14 @@ type Database struct {
}

type Scheme struct {
-   Https    bool   `json:"https" env:"HTTPS"`
-   CertFile string `json:"cert_file" env:"CERT_FILE"`
-   KeyFile  string `json:"key_file" env:"KEY_FILE"`
+   Address      string `json:"address" env:"ADDR"`
+   HttpPort     int    `json:"http_port" env:"HTTP_PORT"`
+   HttpsPort    int    `json:"https_port" env:"HTTPS_PORT"`
+   ForceHttps   bool   `json:"force_https" env:"FORCE_HTTPS"`
+   CertFile     string `json:"cert_file" env:"CERT_FILE"`
+   KeyFile      string `json:"key_file" env:"KEY_FILE"`
+   UnixFile     string `json:"unix_file" env:"UNIX_FILE"`
+   UnixFilePerm string `json:"unix_file_perm" env:"UNIX_FILE_PERM"`
}

type LogConfig struct {
@@ -36,8 +41,6 @@ type LogConfig struct {

type Config struct {
    Force     bool   `json:"force" env:"FORCE"`
-   Address   string `json:"address" env:"ADDR"`
-   Port      int    `json:"port" env:"PORT"`
    SiteURL   string `json:"site_url" env:"SITE_URL"`
    Cdn       string `json:"cdn" env:"CDN"`
    JwtSecret string `json:"jwt_secret" env:"JWT_SECRET"`
@@ -47,6 +50,7 @@ type Config struct {
    TempDir  string    `json:"temp_dir" env:"TEMP_DIR"`
    BleveDir string    `json:"bleve_dir" env:"BLEVE_DIR"`
    Log      LogConfig `json:"log"`
+   DelayedStart          int  `json:"delayed_start" env:"DELAYED_START"`
    MaxConnections        int  `json:"max_connections" env:"MAX_CONNECTIONS"`
    TlsInsecureSkipVerify bool `json:"tls_insecure_skip_verify" env:"TLS_INSECURE_SKIP_VERIFY"`
}
@@ -57,8 +61,15 @@ func DefaultConfig() *Config {
    logPath := filepath.Join(flags.DataDir, "log/log.log")
    dbPath := filepath.Join(flags.DataDir, "data.db")
    return &Config{
-       Address: "0.0.0.0",
-       Port:    5244,
+       Scheme: Scheme{
+           Address:    "0.0.0.0",
+           UnixFile:   "",
+           HttpPort:   5244,
+           HttpsPort:  -1,
+           ForceHttps: false,
+           CertFile:   "",
+           KeyFile:    "",
+       },
        JwtSecret:      random.String(16),
        TokenExpiresIn: 48,
        TempDir:        tempDir,
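The Scheme block now owns the listen address, both ports, and the unix-socket settings that used to sit at the top level of Config. A small sketch that prints the default scheme section as JSON, using a trimmed copy of the struct above to show which keys a config file would use; the defaults mirror DefaultConfig (HTTP on 5244, HTTPS disabled with -1), everything else is illustrative:

package main

import (
	"encoding/json"
	"fmt"
)

// Trimmed copy of the Scheme struct from the hunk above, kept only to show
// how the json tags map onto the config file; not the full conf package.
type Scheme struct {
	Address      string `json:"address"`
	HttpPort     int    `json:"http_port"`
	HttpsPort    int    `json:"https_port"`
	ForceHttps   bool   `json:"force_https"`
	CertFile     string `json:"cert_file"`
	KeyFile      string `json:"key_file"`
	UnixFile     string `json:"unix_file"`
	UnixFilePerm string `json:"unix_file_perm"`
}

func main() {
	// Defaults as set in DefaultConfig above: HTTP on 5244, HTTPS disabled (-1).
	s := Scheme{Address: "0.0.0.0", HttpPort: 5244, HttpsPort: -1}
	b, _ := json.MarshalIndent(s, "", "  ")
	fmt.Println(string(b))
}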
@@ -57,14 +57,17 @@ const (
    IndexProgress = "index_progress"

    //SSO
    SSOClientId          = "sso_client_id"
    SSOClientSecret      = "sso_client_secret"
    SSOLoginEnabled      = "sso_login_enabled"
-   SSOLoginplatform     = "sso_login_platform"
+   SSOLoginPlatform     = "sso_login_platform"
    SSOOrganizationName  = "sso_organization_name"
    SSOApplicationName   = "sso_application_name"
    SSOEndpointName      = "sso_endpoint_name"
    SSOJwtPublicKey      = "sso_jwt_public_key"
+   SSOAutoRegister      = "sso_auto_register"
+   SSODefaultDir        = "sso_default_dir"
+   SSODefaultPermission = "sso_default_permission"

    // qbittorrent
    QbittorrentUrl = "qbittorrent_url"
@@ -74,7 +74,7 @@ func SearchNode(req model.SearchReq, useFullText bool) ([]model.SearchNode, int6
    }
    var count int64
    if err := searchDB.Count(&count).Error; err != nil {
-       return nil, 0, errors.Wrapf(err, "failed get users count")
+       return nil, 0, errors.Wrapf(err, "failed get search items count")
    }
    var files []model.SearchNode
    if err := searchDB.Offset((req.Page - 1) * req.PerPage).Limit(req.PerPage).Find(&files).Error; err != nil {
@@ -3,6 +3,7 @@ package fs
import (
    "context"
    "fmt"
+   "net/http"
    stdpath "path"
    "sync/atomic"

@@ -87,7 +88,9 @@ func copyFileBetween2Storages(tsk *task.Task[uint64], srcStorage, dstStorage dri
    if err != nil {
        return errors.WithMessagef(err, "failed get src [%s] file", srcFilePath)
    }
-   link, _, err := op.Link(tsk.Ctx, srcStorage, srcFilePath, model.LinkArgs{})
+   link, _, err := op.Link(tsk.Ctx, srcStorage, srcFilePath, model.LinkArgs{
+       Header: http.Header{},
+   })
    if err != nil {
        return errors.WithMessagef(err, "failed get [%s] link", srcFilePath)
    }
@@ -36,7 +36,7 @@ func putAsTask(dstDirPath string, file *model.FileStream) error {
    UploadTaskManager.Submit(task.WithCancelCtx(&task.Task[uint64]{
        Name: fmt.Sprintf("upload %s to [%s](%s)", file.GetName(), storage.GetStorage().MountPath, dstDirActualPath),
        Func: func(task *task.Task[uint64]) error {
-           return op.Put(task.Ctx, storage, dstDirActualPath, file, nil, true)
+           return op.Put(task.Ctx, storage, dstDirActualPath, file, task.SetProgress, true)
        },
    }))
    return nil
@@ -8,32 +8,15 @@ import (
    stdpath "path"
    "strings"

-   "github.com/alist-org/alist/v3/drivers/base"
    "github.com/alist-org/alist/v3/internal/conf"
    "github.com/alist-org/alist/v3/internal/model"
-   "github.com/alist-org/alist/v3/internal/op"
    "github.com/alist-org/alist/v3/pkg/utils"
+   "github.com/alist-org/alist/v3/server/common"
    "github.com/google/uuid"
    "github.com/pkg/errors"
+   log "github.com/sirupsen/logrus"
)

-func ClearCache(path string) {
-   storage, actualPath, err := op.GetStorageAndActualPath(path)
-   if err != nil {
-       return
-   }
-   op.ClearCache(storage, actualPath)
-}
-
-func containsByName(files []model.Obj, file model.Obj) bool {
-   for _, f := range files {
-       if f.GetName() == file.GetName() {
-           return true
-       }
-   }
-   return false
-}
-
func getFileStreamFromLink(file model.Obj, link *model.Link) (*model.FileStream, error) {
    var rc io.ReadCloser
    mimetype := utils.GetMimeType(file.GetName())
@@ -51,6 +34,16 @@ func getFileStreamFromLink(file model.Obj, link *model.Link) (*model.FileStream,
            return nil, errors.Wrapf(err, "failed to open file %s", *link.FilePath)
        }
        rc = f
+   } else if link.Writer != nil {
+       r, w := io.Pipe()
+       go func() {
+           err := link.Writer(w)
+           err = w.CloseWithError(err)
+           if err != nil {
+               log.Errorf("[getFileStreamFromLink] failed to write: %v", err)
+           }
+       }()
+       rc = r
    } else {
        req, err := http.NewRequest(http.MethodGet, link.URL, nil)
        if err != nil {
@@ -59,7 +52,7 @@ func getFileStreamFromLink(file model.Obj, link *model.Link) (*model.FileStream,
        for h, val := range link.Header {
            req.Header[h] = val
        }
-       res, err := base.HttpClient.Do(req)
+       res, err := common.HttpClient().Do(req)
        if err != nil {
            return nil, errors.Wrapf(err, "failed to get response for %s", link.URL)
        }
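The new branch above turns a push-style link.Writer callback into an io.ReadCloser with io.Pipe. A standalone sketch of the same pattern, with a toy writer callback standing in for a driver; the names here are illustrative and mirror the WriterFunc type introduced in the model changes below, they are not part of the alist API:

package main

import (
	"fmt"
	"io"
	"log"
)

// WriterFunc mirrors the callback type added in model below.
type WriterFunc func(w io.Writer) error

// writeGreeting is a toy stand-in for a driver's writer callback.
func writeGreeting(w io.Writer) error {
	_, err := io.WriteString(w, "hello from the writer callback")
	return err
}

// asReadCloser converts a push-style writer callback into a pull-style reader,
// the same io.Pipe pattern used by getFileStreamFromLink above.
func asReadCloser(write WriterFunc) io.ReadCloser {
	r, w := io.Pipe()
	go func() {
		// CloseWithError(nil) closes the pipe so the reader sees io.EOF;
		// a non-nil error is surfaced to the reader instead.
		err := write(w)
		if cerr := w.CloseWithError(err); cerr != nil {
			log.Printf("failed to close pipe: %v", cerr)
		}
	}()
	return r
}

func main() {
	rc := asReadCloser(writeGreeting)
	defer rc.Close()
	data, err := io.ReadAll(rc)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(data))
}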
@@ -7,23 +7,27 @@ import (
)

type ListArgs struct {
    ReqPath string
+   S3ShowPlaceholder bool
}

type LinkArgs struct {
    IP     string
    Header http.Header
    Type   string
+   HttpReq *http.Request
}

type Link struct {
    URL    string      `json:"url"`
-   Header http.Header `json:"header"` // needed header
+   Header http.Header `json:"header"` // needed header (for url) or response header(for data or writer)
    Data   io.ReadCloser // return file reader directly
    Status int           // status maybe 200 or 206, etc
    FilePath   *string        // local file, return the filepath
    Expiration *time.Duration // url expiration time
-   Handle func(w http.ResponseWriter, r *http.Request) error `json:"-"` // custom handler
+   IPCacheKey bool // add ip to cache key
+   //Handle func(w http.ResponseWriter, r *http.Request) error `json:"-"` // custom handler
+   Writer WriterFunc `json:"-"` // custom writer
}

type OtherArgs struct {
@@ -37,3 +41,5 @@ type FsOtherArgs struct {
    Method string      `json:"method" form:"method"`
    Data   interface{} `json:"data" form:"data"`
}
+
+type WriterFunc func(w io.Writer) error
@@ -33,7 +33,7 @@ type User struct {
    // 10: can add qbittorrent tasks
    Permission int32  `json:"permission"`
    OtpSecret  string `json:"-"`
-   SsoID      string `json:"sso_id"`
+   SsoID      string `json:"sso_id"` // unique by sso platform
}

func (u User) IsGuest() bool {
@@ -243,7 +243,7 @@ func Link(ctx context.Context, storage driver.Driver, path string, args model.Li
    if file.IsDir() {
        return nil, nil, errors.WithStack(errs.NotFile)
    }
-   key := Key(storage, path) + ":" + args.IP
+   key := Key(storage, path)
    if link, ok := linkCache.Get(key); ok {
        return link, file, nil
    }
@@ -253,6 +253,9 @@
        return nil, errors.Wrapf(err, "failed get link")
    }
    if link.Expiration != nil {
+       if link.IPCacheKey {
+           key = key + ":" + args.IP
+       }
        linkCache.Set(key, link, cache.WithEx[*model.Link](*link.Expiration))
    }
    return link, nil
@@ -563,6 +566,9 @@ func Put(ctx context.Context, storage driver.Driver, dstDirPath string, file *mo
            err := Remove(ctx, storage, tempPath)
            if err != nil {
                return err
+           } else {
+               key := Key(storage, stdpath.Join(dstDirPath, file.GetName()))
+               linkCache.Del(key)
            }
        }
    }
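With the change above, the client IP is only folded into the link cache key when the driver opts in via IPCacheKey, so most cached links are shared across clients. A rough sketch of that key scheme with a plain map standing in for linkCache; the link struct here is a trimmed, hypothetical stand-in for model.Link:

package main

import "fmt"

// link is a trimmed stand-in for model.Link; only the fields relevant to the
// cache-key decision above are kept.
type link struct {
	URL        string
	IPCacheKey bool
}

// cacheKey reproduces the decision in the Link hunk above: the IP is appended
// only when the driver asks for per-IP caching.
func cacheKey(base, ip string, l *link) string {
	if l.IPCacheKey {
		return base + ":" + ip
	}
	return base
}

func main() {
	shared := &link{URL: "https://example.com/a"}
	perIP := &link{URL: "https://example.com/b", IPCacheKey: true}

	cache := map[string]*link{}
	cache[cacheKey("storage:/a", "203.0.113.7", shared)] = shared
	cache[cacheKey("storage:/b", "203.0.113.7", perIP)] = perIP

	for k := range cache {
		fmt.Println(k) // "storage:/a" and "storage:/b:203.0.113.7"
	}
}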
@@ -3,12 +3,13 @@ package qbittorrent
import (
    "bytes"
    "errors"
-   "github.com/alist-org/alist/v3/pkg/utils"
    "io"
    "mime/multipart"
    "net/http"
    "net/http/cookiejar"
    "net/url"
+
+   "github.com/alist-org/alist/v3/pkg/utils"
)

type Client interface {
@@ -213,7 +214,7 @@ type TorrentInfo struct {
    Hash           string `json:"hash"`             //
    LastActivity   int    `json:"last_activity"`    // time of last activity (Unix Epoch)
    MagnetURI      string `json:"magnet_uri"`       // Magnet URI corresponding to this torrent
-   MaxRatio       int     `json:"max_ratio"`       // maximum share ratio before seeding/uploading is stopped
+   MaxRatio       float64 `json:"max_ratio"`       // maximum share ratio before seeding/uploading is stopped
    MaxSeedingTime int    `json:"max_seeding_time"` // maximum seeding time (seconds) before seeding is stopped
    Name           string `json:"name"`             //
    NumComplete    int    `json:"num_complete"`     //
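qBittorrent can report max_ratio as a fractional value such as 1.5, which does not unmarshal into an int field; widening the field to float64 accepts it. A minimal sketch with a trimmed struct and a made-up payload:

package main

import (
	"encoding/json"
	"fmt"
	"log"
)

// torrentInfo is a trimmed stand-in for TorrentInfo with just the field that changed type.
type torrentInfo struct {
	Name     string  `json:"name"`
	MaxRatio float64 `json:"max_ratio"`
}

func main() {
	// A fractional ratio like 1.5 fails to unmarshal into an int field,
	// which is why the field above was widened to float64.
	payload := []byte(`{"name":"example.iso","max_ratio":1.5}`)

	var ti torrentInfo
	if err := json.Unmarshal(payload, &ti); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%s max ratio: %.1f\n", ti.Name, ti.MaxRatio)
}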
@@ -1,114 +0,0 @@
-package utils
-
-import (
-   "context"
-   "reflect"
-   "time"
-)
-
-func LimitRateReflect(f interface{}, interval time.Duration) func(...interface{}) []interface{} {
-   // Use closures to save the time of the last function call
-   var lastCall time.Time
-
-   fValue := reflect.ValueOf(f)
-   fType := fValue.Type()
-
-   if fType.Kind() != reflect.Func {
-       panic("f must be a function")
-   }
-
-   //if fType.NumOut() == 0 {
-   //  panic("f must have at least one output parameter")
-   //}
-
-   outCount := fType.NumOut()
-   outTypes := make([]reflect.Type, outCount)
-
-   for i := 0; i < outCount; i++ {
-       outTypes[i] = fType.Out(i)
-   }
-
-   // Returns a new function, which is used to limit the function to be called only once at a specified time interval
-   return func(args ...interface{}) []interface{} {
-       // Calculate the time interval since the last function call
-       elapsed := time.Since(lastCall)
-       // If the interval is less than the specified time, wait for the remaining time
-       if elapsed < interval {
-           time.Sleep(interval - elapsed)
-       }
-       // Update the time of the last function call
-       lastCall = time.Now()
-
-       inCount := fType.NumIn()
-       in := make([]reflect.Value, inCount)
-
-       if len(args) != inCount {
-           panic("wrong number of arguments")
-       }
-
-       for i := 0; i < inCount; i++ {
-           in[i] = reflect.ValueOf(args[i])
-       }
-
-       out := fValue.Call(in)
-
-       if len(out) != outCount {
-           panic("function returned wrong number of values")
-       }
-
-       result := make([]interface{}, outCount)
-
-       for i := 0; i < outCount; i++ {
-           result[i] = out[i].Interface()
-       }
-
-       return result
-   }
-}
-
-type Fn[T any, R any] func(T) (R, error)
-type FnCtx[T any, R any] func(context.Context, T) (R, error)
-
-func LimitRate[T any, R any](f Fn[T, R], interval time.Duration) Fn[T, R] {
-   // Use closures to save the time of the last function call
-   var lastCall time.Time
-   // Returns a new function, which is used to limit the function to be called only once at a specified time interval
-   return func(t T) (R, error) {
-       // Calculate the time interval since the last function call
-       elapsed := time.Since(lastCall)
-       // If the interval is less than the specified time, wait for the remaining time
-       if elapsed < interval {
-           time.Sleep(interval - elapsed)
-       }
-       // Update the time of the last function call
-       lastCall = time.Now()
-       // Execute the function that needs to be limited
-       return f(t)
-   }
-}
-
-func LimitRateCtx[T any, R any](f FnCtx[T, R], interval time.Duration) FnCtx[T, R] {
-   // Use closures to save the time of the last function call
-   var lastCall time.Time
-   // Returns a new function, which is used to limit the function to be called only once at a specified time interval
-   return func(ctx context.Context, t T) (R, error) {
-       // Calculate the time interval since the last function call
-       elapsed := time.Since(lastCall)
-       // If the interval is less than the specified time, wait for the remaining time
-       if elapsed < interval {
-           t := time.NewTimer(interval - elapsed)
-           select {
-           case <-ctx.Done():
-               t.Stop()
-               var zero R
-               return zero, ctx.Err()
-           case <-t.C:
-
-           }
-       }
-       // Update the time of the last function call
-       lastCall = time.Now()
-       // Execute the function that needs to be limited
-       return f(ctx, t)
-   }
-}
@@ -1,59 +0,0 @@
-package utils_test
-
-import (
-   "context"
-   "testing"
-   "time"
-
-   "github.com/alist-org/alist/v3/pkg/utils"
-)
-
-func myFunction(a int) (int, error) {
-   // do something
-   return a + 1, nil
-}
-
-func TestLimitRate(t *testing.T) {
-   myLimitedFunction := utils.LimitRate(myFunction, time.Second)
-   result, _ := myLimitedFunction(1)
-   t.Log(result) // Output: 2
-   result, _ = myLimitedFunction(2)
-   t.Log(result) // Output: 3
-}
-
-type Test struct {
-   limitFn func(string) (string, error)
-}
-
-func (t *Test) myFunction(a string) (string, error) {
-   // do something
-   return a + " world", nil
-}
-
-func TestLimitRateStruct(t *testing.T) {
-   test := &Test{}
-   test.limitFn = utils.LimitRate(test.myFunction, time.Second)
-   result, _ := test.limitFn("hello")
-   t.Log(result) // Output: hello world
-   result, _ = test.limitFn("hi")
-   t.Log(result) // Output: hi world
-}
-
-func myFunctionCtx(ctx context.Context, a int) (int, error) {
-   // do something
-   return a + 1, nil
-}
-func TestLimitRateCtx(t *testing.T) {
-   myLimitedFunction := utils.LimitRateCtx(myFunctionCtx, time.Second)
-   result, _ := myLimitedFunction(context.Background(), 1)
-   t.Log(result) // Output: 2
-   ctx, cancel := context.WithCancel(context.Background())
-   go func() {
-       time.Sleep(500 * time.Millisecond)
-       cancel()
-   }()
-   result, err := myLimitedFunction(ctx, 2)
-   t.Log(result, err) // Output: 0 context canceled
-   result, _ = myLimitedFunction(context.Background(), 3)
-   t.Log(result) // Output: 4
-}
|
|||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
|
||||||
func GetSHA1Encode(data string) string {
|
func GetSHA1Encode(data []byte) string {
|
||||||
h := sha1.New()
|
h := sha1.New()
|
||||||
h.Write([]byte(data))
|
h.Write(data)
|
||||||
return hex.EncodeToString(h.Sum(nil))
|
return hex.EncodeToString(h.Sum(nil))
|
||||||
}
|
}
|
||||||
|
|
||||||
func GetSHA256Encode(data string) string {
|
func GetSHA256Encode(data []byte) string {
|
||||||
h := sha256.New()
|
h := sha256.New()
|
||||||
h.Write([]byte(data))
|
h.Write(data)
|
||||||
return hex.EncodeToString(h.Sum(nil))
|
return hex.EncodeToString(h.Sum(nil))
|
||||||
}
|
}
|
||||||
|
|
||||||
func GetMD5Encode(data string) string {
|
func GetMD5Encode(data []byte) string {
|
||||||
h := md5.New()
|
h := md5.New()
|
||||||
h.Write([]byte(data))
|
h.Write(data)
|
||||||
return hex.EncodeToString(h.Sum(nil))
|
return hex.EncodeToString(h.Sum(nil))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func GetMD5EncodeStr(data string) string {
|
||||||
|
return GetMD5Encode([]byte(data))
|
||||||
|
}
|
||||||
|
|
||||||
var DEC = map[string]string{
|
var DEC = map[string]string{
|
||||||
"-": "+",
|
"-": "+",
|
||||||
"_": "/",
|
"_": "/",
|
||||||
|
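The hash helpers now take raw []byte, with GetMD5EncodeStr kept as the string convenience wrapper. A standalone equivalent using only the standard library, with a usage example; the lower-case names are local stand-ins for the exported helpers above:

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

// getMD5Encode mirrors the []byte-based helper above using the standard library.
func getMD5Encode(data []byte) string {
	h := md5.New()
	h.Write(data)
	return hex.EncodeToString(h.Sum(nil))
}

// getMD5EncodeStr is the string convenience wrapper, matching GetMD5EncodeStr above.
func getMD5EncodeStr(data string) string {
	return getMD5Encode([]byte(data))
}

func main() {
	raw := []byte{0xde, 0xad, 0xbe, 0xef}
	fmt.Println(getMD5Encode(raw))        // hash of raw bytes, no string round-trip
	fmt.Println(getMD5EncodeStr("hello")) // 5d41402abc4b2a76b9719d911017c592
}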
@@ -46,9 +46,9 @@ func IsSubPath(path string, subPath string) bool {
func Ext(path string) string {
    ext := stdpath.Ext(path)
    if strings.HasPrefix(ext, ".") {
-       return ext[1:]
+       ext = ext[1:]
    }
-   return ext
+   return strings.ToLower(ext)
}

func EncodePath(path string, all ...bool) string {
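Ext now strips the leading dot and lower-cases the result, so extension lookups behave the same for "Trailer.MP4" and "trailer.mp4". A standalone equivalent of the updated helper; the local name is a stand-in:

package main

import (
	"fmt"
	stdpath "path"
	"strings"
)

// ext mirrors the updated Ext helper above: dot stripped, result lower-cased.
func ext(p string) string {
	e := stdpath.Ext(p)
	if strings.HasPrefix(e, ".") {
		e = e[1:]
	}
	return strings.ToLower(e)
}

func main() {
	fmt.Println(ext("/movies/Trailer.MP4")) // "mp4"
	fmt.Println(ext("/docs/readme"))        // ""
}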
Some files were not shown because too many files have changed in this diff.