Compare commits
178 Commits
Commit SHA1s:
a3446720a2 3c4c2ad4e0 077a525961 5be79eb26e ddc19ab699 ddfca5a29b c19166be1c daad61443c
4b0c01158d f97f1d532e e15755fef0 ea88998325 74d971aa8a d41d868a8d 555cc26cbf ab4215080b
9502f5acd7 b03879403f ee4ac81677 b69fc8c306 ee6c31332d 9fa16bd5fc c77ed5fcb0 822be17fb9
7e3b13ea2d f8fb48fb32 4bf46268da b7ea73b3c2 9fbc54314d cf8ab29a17 51cadd2d49 2bae8e129e
9d55ad3af6 36cd504783 49f13b9b90 adb0739dfe 340cb940e3 8711f2a1c5 7f35aab071 ecd167d2f9
220fd30830 5cba10446e a9bdb15205 c5f6a90f54 46f9aefb04 fdcad9c154 027025361a f1245153b9
570b8be022 86a773674a 75fd0ee185 cc43238bd1 c0a6beecea c77eebb035 b1efb86b28 0707449c8f
0f8a84f67e a475783b00 67413015e8 3a311a47af 9ccd802126 0acba7cd22 3cdb8e7a81 d3efee2ea1
4ec274e748 3b07c72f88 0c5820a98f 86beadc0ed be62d64dba 112363031a 48dc3552a6 663814c9ef
bd892e6a63 4fd2c09845 0eab31bdf5 c6af22b97e b2a5110672 c628992ea6 c65d868e09 aeb48b2ecc
cefec1a663 e7ad830aa8 b27eed265a 3abe26473c 023107226c 8b109cfe40 b48e97d406 6c91cfeb90
bfd1f25972 8c0defce09 a1e88cfa05 443f5ffbcc b8bc94306d d9795ff22f c4108007cd f3db23a41e
4741a75c92 301756ba03 3b2703a5e5 2a601f06cb adc3a56552 4d9a29bddd 666e02f0c3 6aaec19c1c
1091e1b740 d06c605421 43de823058 02d0aef611 5596661ce8 2379cb8d67 8c0ebe0841 fd868bac84
ebcbb29a0f 00ff0a43a7 3d3f23ec9e d484219c48 dd4c97393e 07b8ff25a7 0d5c3c5080 75b4429f73
34ef6bd18d c915313ec9 12a095a1d6 dc000f640a aa1c5b2be3 1d4ec3c50d ebfeef52f4 c595fd7f94
421052f88a 603681fbe6 f442185aa5 ca9e739465 53a1c4283b 93dd768234 c9c4d6bc7e 81e10f8939
4dd753de52 79df63d319 ec54831162 c8f3e8ab4d 4be8524d80 0d3146b51d f95d843969 28aee8c493
de3ea82eb9 268ba3d069 309d6558fb c08fdfc868 1b28e6af3e 8655e33e60 50579fef84 e39299bfe2
d1ab2443f1 658cf368bb fd36ce59f6 95b3b87672 0d07d81802 923937b530 09492193c4 40b26a81a0
4293a0ba8c 6c2f3486fc 3c7512f64a 84219d3d70 05d3727335 ee77c3b113 fcaf485e0b bd83469bb1
90f111b24f 7d1034c569 236c17176c 6ee4c10e8f 3798634028 567ba5ccd4 ae2ee1821a 805b1e4fa3
d92c10da56 6659f6d367
.github/stale.yml (vendored, new file, 19 lines)

@@ -0,0 +1,19 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 44
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 10
# Issues with these labels will never be considered stale
exemptLabels:
  - accepted
  - security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: >
  This issue was closed due to inactive more than 52 days. You can reopen or
  recreate it if you think it should continue. Thank you for your contributions again.

.github/workflows/auto_lang.yml (vendored, 6 lines changed)

@@ -16,12 +16,12 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ 1.19 ]
go-version: [ '1.20' ]
name: auto generate lang.json
runs-on: ${{ matrix.platform }}
steps:
- name: Setup go
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}

@@ -54,7 +54,7 @@ jobs:
cd alist-web
git add .
git config --local user.email "i@nn.ci"
git config --local user.name "Noah Hsu"
git config --local user.name "Andy Hsu"
git commit -m "chore: auto update i18n file" -a 2>/dev/null || :
cd ..

.github/workflows/build.yml (vendored, 4 lines changed)

@@ -11,12 +11,12 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [1.19]
go-version: [ '1.20' ]
name: Build
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}

.github/workflows/changelog.yml (vendored, new file, 19 lines)

@@ -0,0 +1,19 @@
name: auto changelog

on:
push:
tags:
- '*'

jobs:
changelog:
name: Create Release
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 0
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
env:
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}

.github/workflows/issue_check_inactive.yml (vendored, 17 lines deleted)

@@ -1,17 +0,0 @@
name: Check inactive

on:
schedule:
- cron: "0 0 1 * *"

jobs:
check-inactive:
runs-on: ubuntu-latest
steps:
- name: check-inactive
uses: actions-cool/issues-helper@v3
with:
actions: 'check-inactive'
token: ${{ secrets.GITHUB_TOKEN }}
inactive-day: 30
body: Hello, this issue has been inactive for more than 30 days and will be closed if inactive for another 30 days.

.github/workflows/issue_close_question.yml (vendored, 8 lines changed)

@@ -2,7 +2,7 @@ name: Close need info

on:
schedule:
- cron: "0 0 */7 * *"
- cron: "0 0 */1 * *"
workflow_dispatch:

jobs:

@@ -15,8 +15,8 @@ jobs:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'question'
inactive-day: 7
inactive-day: 3
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 7 days.
你好 @${{ github.event.issue.user.login }},此issue因超过7天未回复被关闭。
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 3 days.
你好 @${{ github.event.issue.user.login }},此issue因超过3天未回复被关闭。

@@ -14,8 +14,8 @@
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'inactive'
inactive-day: 30
labels: 'stale'
inactive-day: 8
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 60 days. You can reopen or recreate it if you think it should continue.
Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 52 days. You can reopen or recreate it if you think it should continue.

.github/workflows/issue_question.yml (vendored, 4 lines changed)

@@ -16,5 +16,5 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 7 days.
你好 @${{ github.event.issue.user.login }},请按照issue模板填写, 并详细说明问题/复现步骤/复现链接/实现思路或提供更多信息等, 7天内未回复issue自动关闭。
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 3 days.
你好 @${{ github.event.issue.user.login }},请按照issue模板填写, 并详细说明问题/日志记录/复现步骤/复现链接/实现思路或提供更多信息等, 3天内未回复issue自动关闭。

.github/workflows/release.yml (vendored, 68 lines changed)

@@ -1,33 +1,27 @@
name: release

on:
push:
tags:
- '*'
release:
types: [ published ]

jobs:
changelog:
name: Create Release
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 0
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
env:
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
release:
needs: changelog
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [1.19]
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Prerelease
uses: irongut/EditRelease@v1.2.0
with:
token: ${{ secrets.MY_TOKEN }}
id: ${{ github.event.release.id }}
prerelease: true

- name: Setup Go
uses: actions/setup-go@v3
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}

@@ -47,7 +41,41 @@ jobs:
run: |
bash build.sh release

- name: Release
- name: Release latest
uses: irongut/EditRelease@v1.2.0
with:
token: ${{ secrets.MY_TOKEN }}
id: ${{ github.event.release.id }}
prerelease: false

- name: Upload assets
uses: softprops/action-gh-release@v1
with:
files: build/compress/*
files: build/compress/*

release_desktop:
needs: release
name: Release desktop
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
with:
repository: alist-org/desktop-release
ref: main
persist-credentials: false
fetch-depth: 0

- name: Add tag
run: |
git config --local user.email "i@nn.ci"
git config --local user.name "Andy Hsu"
version=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
git tag -a $version -m "release $version"

- name: Push tags
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/desktop-release

.github/workflows/release_docker.yml (vendored, 2 lines changed)

@@ -57,7 +57,7 @@ jobs:
- name: Add tag
run: |
git config --local user.email "i@nn.ci"
git config --local user.name "Noah Hsu"
git config --local user.name "Andy Hsu"
git tag -a ${{ github.ref_name }} -m "release ${{ github.ref_name }}"

- name: Push tags

@@ -6,7 +6,7 @@

Prerequisites:

- [git](https://nodejs.org/zh-cn/)
- [git](https://git-scm.com)
- [Go 1.19+](https://golang.org/doc/install)
- [gcc](https://gcc.gnu.org/)
- [nodejs](https://nodejs.org/)

@@ -1,11 +1,11 @@
FROM alpine:edge as builder
FROM alpine:3.17 as builder
LABEL stage=go-builder
WORKDIR /app/
COPY ./ ./
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker

FROM alpine:edge
FROM alpine:3.17
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/

@@ -15,4 +15,4 @@ RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244
ENTRYPOINT [ "/entrypoint.sh" ]
CMD [ "/entrypoint.sh" ]

README_cn.md (14 lines changed)

@@ -41,7 +41,7 @@

[English](./README.md) | 中文 | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)

## Features
## 功能

- [x] 多种存储
- [x] 本地存储

@@ -89,7 +89,7 @@
- [x] 离线下载
- [x] 跨存储复制文件

## Document
## 文档

<https://alist.nn.ci/zh/>

@@ -97,21 +97,21 @@

<https://al.nn.ci>

## Discussion
## 讨论

一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) ,**issue仅针对错误报告和功能请求。**

## Sponsor
## 赞助

AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我继续下去,请考虑赞助我或提供一个单一的捐款!感谢所有的爱和支持:https://alist.nn.ci/zh/guide/sponsor.html

### Special sponsors
### 特别赞助

- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
- [KinhDown 百度云盘不限速下载!永久免费!已稳定运行3年!非常可靠!Q群 -> 786799372](https://kinhdown.com)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)

## Contributors
## 贡献者

Thanks goes to these wonderful people:

@@ -130,4 +130,4 @@ Thanks goes to these wonderful people:

---

> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)

build.sh (8 lines changed)

@@ -43,6 +43,8 @@ FetchWebRelease() {

BuildWinArm64() {
echo building for windows-arm64
chmod +x ./wrapper/zcc-arm64
chmod +x ./wrapper/zcxx-arm64
export GOOS=windows
export GOARCH=arm64
export CC=$(pwd)/wrapper/zcc-arm64

@@ -57,7 +59,8 @@ BuildDev() {
mv alist-* dist
cd dist
upx -9 ./alist-linux*
upx -9 ./alist-windows-amd64.exe
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
upx -9 ./alist-windows-amd64-upx.exe
find . -type f -print0 | xargs -0 md5sum >md5.txt
cat md5.txt
}

@@ -93,7 +96,8 @@ BuildRelease() {
xgo -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
# why? Because some target platforms seem to have issues with upx compression
upx -9 ./alist-linux-amd64
upx -9 ./alist-windows-amd64.exe
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
upx -9 ./alist-windows-amd64-upx.exe
mv alist-* build
}

@@ -22,6 +22,8 @@ var Cancel2FACmd = &cobra.Command{
err := op.Cancel2FAByUser(admin)
if err != nil {
utils.Log.Errorf("failed to cancel 2FA: %+v", err)
} else {
utils.Log.Info("2FA canceled")
}
}
},

@@ -6,4 +6,5 @@ var (
NoPrefix bool
Dev bool
ForceBinDir bool
LogStd bool
)

@@ -78,10 +78,19 @@ func writeFile(name string, data interface{}) {
func generateDriversJson() {
drivers := make(Drivers)
drivers["drivers"] = make(KV[interface{}])
drivers["config"] = make(KV[interface{}])
driverInfoMap := op.GetDriverInfoMap()
for k, v := range driverInfoMap {
drivers["drivers"][k] = convert(k)
items := make(KV[interface{}])
config := map[string]string{}
if v.Config.Alert != "" {
alert := strings.SplitN(v.Config.Alert, "|", 2)
if len(alert) > 1 {
config["alert"] = alert[1]
}
}
drivers["config"][k] = config
for i := range v.Additional {
item := v.Additional[i]
items[item.Name] = convert(item.Name)

@@ -29,4 +29,5 @@ func init() {
RootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
RootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
RootCmd.PersistentFlags().BoolVar(&flags.ForceBinDir, "force-bin-dir", false, "Force to use the directory where the binary file is located as data directory")
RootCmd.PersistentFlags().BoolVar(&flags.LogStd, "log-std", false, "Force to log to std")
}

cmd/storage.go (new file, 52 lines)

@@ -0,0 +1,52 @@
/*
Copyright © 2023 NAME HERE <EMAIL ADDRESS>
*/
package cmd

import (
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/spf13/cobra"
)

// storageCmd represents the storage command
var storageCmd = &cobra.Command{
Use: "storage",
Short: "Manage storage",
}

func init() {
var mountPath string
var disable = &cobra.Command{
Use: "disable",
Short: "Disable a storage",
Run: func(cmd *cobra.Command, args []string) {
Init()
storage, err := db.GetStorageByMountPath(mountPath)
if err != nil {
utils.Log.Errorf("failed to query storage: %+v", err)
} else {
storage.Disabled = true
err = db.UpdateStorage(storage)
if err != nil {
utils.Log.Errorf("failed to update storage: %+v", err)
} else {
utils.Log.Infof("Storage with mount path [%s] have been disabled", mountPath)
}
}
},
}
disable.Flags().StringVarP(&mountPath, "mount-path", "m", "", "The mountPath of storage")
RootCmd.AddCommand(storageCmd)
storageCmd.AddCommand(disable)

// Here you will define your flags and configuration settings.

// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// storageCmd.PersistentFlags().String("foo", "", "A help for foo")

// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// storageCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}

@@ -44,7 +44,11 @@ func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
}

func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
downloadInfo, err := d.client.Download(file.(driver115.File).PickCode)
downloadInfo, err := d.client.
SetUserAgent(driver115.UA115Browser).
Download(file.(driver115.File).PickCode)
// recover for upload
d.client.SetUserAgent(driver115.UA115Desktop)
if err != nil {
return nil, err
}

@@ -6,16 +6,18 @@ import (
)

type Addition struct {
Cookie string `json:"cookie"`
QRCodeToken string `json:"qrcode_token"`
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
driver.RootID
}

var config = driver.Config{
Name: "115 Cloud",
DefaultRoot: "0",
OnlyProxy: true,
OnlyLocal: true,
Name: "115 Cloud",
DefaultRoot: "0",
OnlyProxy: true,
OnlyLocal: true,
NoOverwriteUpload: true,
}

func init() {

@@ -4,10 +4,11 @@ import (
"fmt"

"github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/pkg/errors"
)

var UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/106.0.0.0 Safari/537.36 115Browser/23.9.3.2 115disk/30.1.0"
var UserAgent = driver.UA115Desktop

func (d *Pan115) login() error {
var err error

@@ -15,6 +16,7 @@ func (d *Pan115) login() error {
driver.UA(UserAgent),
}
d.client = driver.New(opts...)
d.client.SetHttpClient(base.HttpClient)
cr := &driver.Credential{}
if d.Addition.QRCodeToken != "" {
s := &driver.QRCodeSession{

@@ -38,7 +40,10 @@ func (d *Pan115) login() error {

func (d *Pan115) getFiles(fileId string) ([]driver.File, error) {
res := make([]driver.File, 0)
files, err := d.client.List(fileId)
if d.PageSize <= 0 {
d.PageSize = driver.FileListLimit
}
files, err := d.client.ListWithLimit(fileId, d.PageSize)
if err != nil {
return nil, err
}

@@ -29,7 +29,6 @@ import (
type Pan123 struct {
model.Storage
Addition
AccessToken string
}

func (d *Pan123) Config() driver.Config {

@@ -41,7 +40,8 @@ func (d *Pan123) GetAddition() driver.Additional {
}

func (d *Pan123) Init(ctx context.Context) error {
return d.login()
_, err := d.request(UserInfo, http.MethodGet, nil, nil)
return err
}

func (d *Pan123) Drop(ctx context.Context) error {

@@ -77,7 +77,7 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
"size": f.Size,
"type": f.Type,
}
resp, err := d.request("https://www.123pan.com/api/file/download_info", http.MethodPost, func(req *resty.Request) {
resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetHeaders(headers)
}, nil)
if err != nil {

@@ -97,7 +98,8 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
}
}
u_ := u.String()
res, err := base.NoRedirectClient.R().SetQueryParamsFromValues(u.Query()).Head(u_)
log.Debug("download url: ", u_)
res, err := base.NoRedirectClient.R().Get(u_)
if err != nil {
return nil, err
}

@@ -108,6 +109,8 @@ func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
log.Debugln("res code: ", res.StatusCode())
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
} else if res.StatusCode() == 200 {
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
}
return &link, nil
} else {

@@ -124,7 +127,7 @@ func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
"size": 0,
"type": 1,
}
_, err := d.request("https://www.123pan.com/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
_, err := d.request(Mkdir, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err

@@ -135,7 +138,7 @@ func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
"fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
"parentFileId": dstDir.GetID(),
}
_, err := d.request("https://www.123pan.com/api/file/mod_pid", http.MethodPost, func(req *resty.Request) {
_, err := d.request(Move, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err

@@ -147,7 +150,7 @@ func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) e
"fileId": srcObj.GetID(),
"fileName": newName,
}
_, err := d.request("https://www.123pan.com/api/file/rename", http.MethodPost, func(req *resty.Request) {
_, err := d.request(Rename, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err

@@ -164,7 +167,7 @@ func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
"operation": true,
"fileTrashInfoList": []File{f},
}
_, err := d.request("https://www.123pan.com/b/api/file/trash", http.MethodPost, func(req *resty.Request) {
_, err := d.request(Trash, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err

@@ -220,36 +223,41 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
"type": 0,
}
var resp UploadResp
_, err := d.request("https://www.123pan.com/a/api/file/upload_request", http.MethodPost, func(req *resty.Request) {
res, err := d.request(UploadRequest, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, &resp)
if err != nil {
return err
}
log.Debugln("upload request res: ", string(res))
if resp.Data.Reuse || resp.Data.Key == "" {
return nil
}
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
Region: aws.String("123pan"),
Endpoint: aws.String("file.123pan.com"),
S3ForcePathStyle: aws.Bool(true),
if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
err = d.newUpload(ctx, &resp, stream, uploadFile, up)
} else {
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
Region: aws.String("123pan"),
Endpoint: aws.String(resp.Data.EndPoint),
S3ForcePathStyle: aws.Bool(true),
}
s, err := session.NewSession(cfg)
if err != nil {
return err
}
uploader := s3manager.NewUploader(s)
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
Body: uploadFile,
}
_, err = uploader.UploadWithContext(ctx, input)
}
s, err := session.NewSession(cfg)
if err != nil {
return err
}
uploader := s3manager.NewUploader(s)
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
Body: uploadFile,
}
_, err = uploader.UploadWithContext(ctx, input)
if err != nil {
return err
}
_, err = d.request("https://www.123pan.com/api/file/upload_complete", http.MethodPost, func(req *resty.Request) {
_, err = d.request(UploadComplete, http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileId": resp.Data.FileId,
}).SetContext(ctx)

@@ -6,14 +6,13 @@ import (
)

type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
driver.RootID
// define other
StreamUpload bool `json:"stream_upload"`
//Field string `json:"field" type:"select" required:"true" options:"a,b,c" default:"a"`
StreamUpload bool `json:"stream_upload"`
AccessToken string
}

var config = driver.Config{

@@ -7,18 +7,6 @@ import (
"github.com/alist-org/alist/v3/internal/model"
)

//type BaseResp struct {
//	Code    interface{} `json:"code"`
//	Message string      `json:"message"`
//}

type TokenResp struct {
//BaseResp
Data struct {
Token string `json:"token"`
} `json:"data"`
}

type File struct {
FileName string `json:"FileName"`
Size int64 `json:"Size"`

@@ -86,5 +74,14 @@ type UploadResp struct {
SessionToken string `json:"SessionToken"`
FileId int64 `json:"FileId"`
Reuse bool `json:"Reuse"`
EndPoint string `json:"EndPoint"`
StorageNode string `json:"StorageNode"`
UploadId string `json:"UploadId"`
} `json:"data"`
}

type S3PreSignedURLs struct {
Data struct {
PreSignedUrls map[string]string `json:"presignedUrls"`
} `json:"data"`
}

drivers/123/upload.go (new file, 127 lines)

@@ -0,0 +1,127 @@
package _123

import (
"context"
"fmt"
"io"
"math"
"net/http"
"strconv"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)

func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
data := base.Json{
"bucket": upReq.Data.Bucket,
"key": upReq.Data.Key,
"partNumberEnd": end,
"partNumberStart": start,
"uploadId": upReq.Data.UploadId,
"StorageNode": upReq.Data.StorageNode,
}
var s3PreSignedUrls S3PreSignedURLs
_, err := d.request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, &s3PreSignedUrls)
if err != nil {
return nil, err
}
return &s3PreSignedUrls, nil
}

func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp) error {
data := base.Json{
"bucket": upReq.Data.Bucket,
"key": upReq.Data.Key,
"uploadId": upReq.Data.UploadId,
"StorageNode": upReq.Data.StorageNode,
}
_, err := d.request(S3Complete, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, nil)
return err
}

func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.FileStreamer, reader io.Reader, up driver.UpdateProgress) error {
chunkSize := int64(1024 * 1024 * 5)
// fetch s3 pre signed urls
chunkCount := int(math.Ceil(float64(file.GetSize()) / float64(chunkSize)))
// upload 10 chunks each batch
batchSize := 10
for i := 1; i <= chunkCount; i += batchSize {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
start := i
end := i + batchSize
if end > chunkCount+1 {
end = chunkCount + 1
}
s3PreSignedUrls, err := d.getS3PreSignedUrls(ctx, upReq, start, end)
if err != nil {
return err
}
// upload each chunk
for j := start; j < end; j++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
curSize := chunkSize
if j == chunkCount {
curSize = file.GetSize() - (int64(chunkCount)-1)*chunkSize
}
err = d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, j, end, io.LimitReader(reader, chunkSize), curSize, false)
if err != nil {
return err
}
up(j * 100 / chunkCount)
}
}
// complete s3 upload
return d.completeS3(ctx, upReq)
}

func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader io.Reader, curSize int64, retry bool) error {
uploadUrl := s3PreSignedUrls.Data.PreSignedUrls[strconv.Itoa(cur)]
if uploadUrl == "" {
return fmt.Errorf("upload url is empty, s3PreSignedUrls: %+v", s3PreSignedUrls)
}
req, err := http.NewRequest("PUT", uploadUrl, reader)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.ContentLength = curSize
//req.Header.Set("Content-Length", strconv.FormatInt(curSize, 10))
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode == http.StatusForbidden {
if retry {
return fmt.Errorf("upload s3 chunk %d failed, status code: %d", cur, res.StatusCode)
}
// refresh s3 pre signed urls
newS3PreSignedUrls, err := d.getS3PreSignedUrls(ctx, upReq, cur, end)
if err != nil {
return err
}
s3PreSignedUrls.Data.PreSignedUrls = newS3PreSignedUrls.Data.PreSignedUrls
// retry
return d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, cur, end, reader, curSize, true)
}
if res.StatusCode != http.StatusOK {
body, err := io.ReadAll(res.Body)
if err != nil {
return err
}
return fmt.Errorf("upload s3 chunk %d failed, status code: %d, body: %s", cur, res.StatusCode, body)
}
return nil
}

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"net/http"
"strconv"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"

@@ -13,9 +14,24 @@ import (

// do others that not defined in Driver interface

const (
API = "https://www.123pan.com/b/api"
SignIn = API + "/user/sign_in"
UserInfo = API + "/user/info"
FileList = API + "/file/list/new"
DownloadInfo = "https://www.123pan.com/a/api/file/download_info"
Mkdir = API + "/file/upload_request"
Move = API + "/file/mod_pid"
Rename = API + "/file/rename"
Trash = API + "/file/trash"
UploadRequest = API + "/file/upload_request"
UploadComplete = API + "/file/upload_complete"
S3PreSignedUrls = API + "/file/s3_repare_upload_parts_batch"
S3Complete = API + "/file/s3_complete_multipart_upload"
)

func (d *Pan123) login() error {
var body base.Json
url := "https://www.123pan.com/a/api/user/sign_in"
if utils.IsEmailFormat(d.Username) {
body = base.Json{
"mail": d.Username,

@@ -28,17 +44,15 @@ func (d *Pan123) login() error {
"password": d.Password,
}
}
var resp TokenResp
res, err := base.RestyClient.R().
SetResult(&resp).
SetBody(body).Post(url)
SetBody(body).Post(SignIn)
if err != nil {
return err
}
if utils.Json.Get(res.Body(), "code").ToInt() != 200 {
err = fmt.Errorf(utils.Json.Get(res.Body(), "message").ToString())
} else {
d.AccessToken = resp.Data.Token
d.AccessToken = utils.Json.Get(res.Body(), "data", "token").ToString()
}
return err
}

@@ -77,27 +91,31 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
}

func (d *Pan123) getFiles(parentId string) ([]File, error) {
next := "0"
page := 1
res := make([]File, 0)
for next != "-1" {
for {
var resp Files
query := map[string]string{
"driveId": "0",
"limit": "100",
"next": next,
"next": "0",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"trashed": "false",
"Page": strconv.Itoa(page),
}
_, err := d.request("https://www.123pan.com/api/file/list/new", http.MethodGet, func(req *resty.Request) {
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
next = resp.Data.Next
page++
res = append(res, resp.Data.InfoList...)
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
break
}
}
return res, nil
}

@@ -30,12 +30,9 @@ func (d *Cloud189) GetAddition() driver.Additional {
}

func (d *Cloud189) Init(ctx context.Context) error {
d.client = resty.New().
SetTimeout(base.DefaultTimeout).
SetRetryCount(3).
SetHeader("Referer", "https://cloud.189.cn/").
SetHeader("User-Agent", base.UserAgent)
return d.login()
d.client = base.NewRestyClient().
SetHeader("Referer", "https://cloud.189.cn/")
return d.newLogin()
}

func (d *Cloud189) Drop(ctx context.Context) error {

drivers/189/login.go (new file, 126 lines)

@@ -0,0 +1,126 @@
package _189

import (
"errors"
"strconv"

"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)

type AppConf struct {
Data struct {
AccountType string `json:"accountType"`
AgreementCheck string `json:"agreementCheck"`
AppKey string `json:"appKey"`
ClientType int `json:"clientType"`
IsOauth2 bool `json:"isOauth2"`
LoginSort string `json:"loginSort"`
MailSuffix string `json:"mailSuffix"`
PageKey string `json:"pageKey"`
ParamId string `json:"paramId"`
RegReturnUrl string `json:"regReturnUrl"`
ReqId string `json:"reqId"`
ReturnUrl string `json:"returnUrl"`
ShowFeedback string `json:"showFeedback"`
ShowPwSaveName string `json:"showPwSaveName"`
ShowQrSaveName string `json:"showQrSaveName"`
ShowSmsSaveName string `json:"showSmsSaveName"`
Sso string `json:"sso"`
} `json:"data"`
Msg string `json:"msg"`
Result string `json:"result"`
}

type EncryptConf struct {
Result int `json:"result"`
Data struct {
UpSmsOn string `json:"upSmsOn"`
Pre string `json:"pre"`
PreDomain string `json:"preDomain"`
PubKey string `json:"pubKey"`
} `json:"data"`
}

func (d *Cloud189) newLogin() error {
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
res, err := d.client.R().Get(url)
if err != nil {
return err
}
// Is logged in
redirectURL := res.RawResponse.Request.URL
if redirectURL.String() == "https://cloud.189.cn/web/main" {
return nil
}
lt := redirectURL.Query().Get("lt")
reqId := redirectURL.Query().Get("reqId")
appId := redirectURL.Query().Get("appId")
headers := map[string]string{
"lt": lt,
"reqid": reqId,
"referer": redirectURL.String(),
"origin": "https://open.e.189.cn",
}
// get app Conf
var appConf AppConf
res, err = d.client.R().SetHeaders(headers).SetFormData(map[string]string{
"version": "2.0",
"appKey": appId,
}).SetResult(&appConf).Post("https://open.e.189.cn/api/logbox/oauth2/appConf.do")
if err != nil {
return err
}
log.Debugf("189 AppConf resp body: %s", res.String())
if appConf.Result != "0" {
return errors.New(appConf.Msg)
}
// get encrypt conf
var encryptConf EncryptConf
res, err = d.client.R().SetHeaders(headers).SetFormData(map[string]string{
"appId": appId,
}).Post("https://open.e.189.cn/api/logbox/config/encryptConf.do")
if err != nil {
return err
}
err = utils.Json.Unmarshal(res.Body(), &encryptConf)
if err != nil {
return err
}
log.Debugf("189 EncryptConf resp body: %s\n%+v", res.String(), encryptConf)
if encryptConf.Result != 0 {
return errors.New("get EncryptConf error:" + res.String())
}
// TODO: getUUID? needcaptcha
// login
loginData := map[string]string{
"version": "v2.0",
"apToken": "",
"appKey": appId,
"accountType": appConf.Data.AccountType,
"userName": encryptConf.Data.Pre + RsaEncode([]byte(d.Username), encryptConf.Data.PubKey, true),
"epd": encryptConf.Data.Pre + RsaEncode([]byte(d.Password), encryptConf.Data.PubKey, true),
"captchaType": "",
"validateCode": "",
"smsValidateCode": "",
"captchaToken": "",
"returnUrl": appConf.Data.ReturnUrl,
"mailSuffix": appConf.Data.MailSuffix,
"dynamicCheck": "FALSE",
"clientType": strconv.Itoa(appConf.Data.ClientType),
"cb_SaveName": "3",
"isOauth2": strconv.FormatBool(appConf.Data.IsOauth2),
"state": "",
"paramId": appConf.Data.ParamId,
}
res, err = d.client.R().SetHeaders(headers).SetFormData(loginData).Post("https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do")
if err != nil {
return err
}
log.Debugf("189 login resp body: %s", res.String())
loginResult := utils.Json.Get(res.Body(), "result").ToInt()
if loginResult != 0 {
return errors.New(utils.Json.Get(res.Body(), "msg").ToString())
}
return nil
}

@@ -8,6 +8,7 @@ import (
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
Cookie string `json:"cookie" help:"Fill in the cookie if need captcha"`
driver.RootID
}

@@ -15,6 +16,7 @@ var config = driver.Config{
Name: "189Cloud",
LocalSort: true,
DefaultRoot: "-11",
Alert: `info|You can try to use 189PC driver if this driver does not work.`,
}

func init() {

@@ -11,16 +11,13 @@ import (
"io"
"math"
"net/http"
"regexp"
"strconv"
"strings"
"time"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
myrand "github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/go-resty/resty/v2"

@@ -30,118 +27,118 @@ import (

// do others that not defined in Driver interface

func (d *Cloud189) login() error {
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
b := ""
lt := ""
ltText := regexp.MustCompile(`lt = "(.+?)"`)
var res *resty.Response
var err error
for i := 0; i < 3; i++ {
res, err = d.client.R().Get(url)
if err != nil {
return err
}
// already logged in
if res.RawResponse.Request.URL.String() == "https://cloud.189.cn/web/main" {
return nil
}
b = res.String()
ltTextArr := ltText.FindStringSubmatch(b)
if len(ltTextArr) > 0 {
lt = ltTextArr[1]
break
} else {
<-time.After(time.Second)
}
}
if lt == "" {
return fmt.Errorf("get page: %s \nstatus: %d \nrequest url: %s\nredirect url: %s",
b, res.StatusCode(), res.RawResponse.Request.URL.String(), res.Header().Get("location"))
}
captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
//reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
vCodeRS := ""
if vCodeID != "" {
// need ValidateCode
log.Debugf("try to identify verification codes")
timeStamp := strconv.FormatInt(time.Now().UnixNano()/1e6, 10)
u := "https://open.e.189.cn/api/logbox/oauth2/picCaptcha.do?token=" + vCodeID + timeStamp
imgRes, err := d.client.R().SetHeaders(map[string]string{
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
"Referer": "https://open.e.189.cn/api/logbox/oauth2/unifyAccountLogin.do",
"Sec-Fetch-Dest": "image",
"Sec-Fetch-Mode": "no-cors",
"Sec-Fetch-Site": "same-origin",
}).Get(u)
if err != nil {
return err
}
// Enter the verification code manually
//err = message.GetMessenger().WaitSend(message.Message{
//	Type:    "image",
//	Content: "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgRes.Body()),
//}, 10)
//if err != nil {
//	return err
//}
//vCodeRS, err = message.GetMessenger().WaitReceive(30)
// use ocr api
vRes, err := base.RestyClient.R().SetMultipartField(
"image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
Post(setting.GetStr(conf.OcrApi))
if err != nil {
return err
}
if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
}
vCodeRS = jsoniter.Get(vRes.Body(), "result").ToString()
log.Debugln("code: ", vCodeRS)
}
userRsa := RsaEncode([]byte(d.Username), jRsakey, true)
passwordRsa := RsaEncode([]byte(d.Password), jRsakey, true)
url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
var loginResp LoginResp
res, err = d.client.R().
SetHeaders(map[string]string{
"lt": lt,
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
"Referer": "https://open.e.189.cn/",
"accept": "application/json;charset=UTF-8",
}).SetFormData(map[string]string{
"appKey": "cloud",
"accountType": "01",
"userName": "{RSA}" + userRsa,
"password": "{RSA}" + passwordRsa,
"validateCode": vCodeRS,
"captchaToken": captchaToken,
"returnUrl": returnUrl,
"mailSuffix": "@pan.cn",
"paramId": paramId,
"clientType": "10010",
"dynamicCheck": "FALSE",
"cb_SaveName": "1",
"isOauth2": "false",
}).Post(url)
if err != nil {
return err
}
err = utils.Json.Unmarshal(res.Body(), &loginResp)
if err != nil {
log.Error(err.Error())
return err
}
if loginResp.Result != 0 {
return fmt.Errorf(loginResp.Msg)
}
_, err = d.client.R().Get(loginResp.ToUrl)
return err
}
//func (d *Cloud189) login() error {
//	url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
//	b := ""
//	lt := ""
//	ltText := regexp.MustCompile(`lt = "(.+?)"`)
//	var res *resty.Response
//	var err error
//	for i := 0; i < 3; i++ {
//		res, err = d.client.R().Get(url)
//		if err != nil {
//			return err
//		}
//		// already logged in
//		if res.RawResponse.Request.URL.String() == "https://cloud.189.cn/web/main" {
//			return nil
//		}
//		b = res.String()
//		ltTextArr := ltText.FindStringSubmatch(b)
//		if len(ltTextArr) > 0 {
//			lt = ltTextArr[1]
//			break
//		} else {
//			<-time.After(time.Second)
//		}
//	}
//	if lt == "" {
//		return fmt.Errorf("get page: %s \nstatus: %d \nrequest url: %s\nredirect url: %s",
//			b, res.StatusCode(), res.RawResponse.Request.URL.String(), res.Header().Get("location"))
//	}
//	captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
//	returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
//	paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
//	//reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
//	jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
//	vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
//	vCodeRS := ""
//	if vCodeID != "" {
//		// need ValidateCode
//		log.Debugf("try to identify verification codes")
//		timeStamp := strconv.FormatInt(time.Now().UnixNano()/1e6, 10)
//		u := "https://open.e.189.cn/api/logbox/oauth2/picCaptcha.do?token=" + vCodeID + timeStamp
//		imgRes, err := d.client.R().SetHeaders(map[string]string{
//			"User-Agent":     "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
//			"Referer":        "https://open.e.189.cn/api/logbox/oauth2/unifyAccountLogin.do",
//			"Sec-Fetch-Dest": "image",
//			"Sec-Fetch-Mode": "no-cors",
//			"Sec-Fetch-Site": "same-origin",
//		}).Get(u)
//		if err != nil {
//			return err
//		}
//		// Enter the verification code manually
//		//err = message.GetMessenger().WaitSend(message.Message{
//		//	Type:    "image",
//		//	Content: "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgRes.Body()),
//		//}, 10)
//		//if err != nil {
//		//	return err
//		//}
//		//vCodeRS, err = message.GetMessenger().WaitReceive(30)
//		// use ocr api
//		vRes, err := base.RestyClient.R().SetMultipartField(
//			"image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
//			Post(setting.GetStr(conf.OcrApi))
//		if err != nil {
//			return err
//		}
//		if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
//			return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
//		}
//		vCodeRS = jsoniter.Get(vRes.Body(), "result").ToString()
//		log.Debugln("code: ", vCodeRS)
//	}
//	userRsa := RsaEncode([]byte(d.Username), jRsakey, true)
//	passwordRsa := RsaEncode([]byte(d.Password), jRsakey, true)
//	url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
//	var loginResp LoginResp
//	res, err = d.client.R().
//		SetHeaders(map[string]string{
//			"lt":         lt,
//			"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
//			"Referer":    "https://open.e.189.cn/",
//			"accept":     "application/json;charset=UTF-8",
//		}).SetFormData(map[string]string{
//		"appKey":       "cloud",
//		"accountType":  "01",
//		"userName":     "{RSA}" + userRsa,
//		"password":     "{RSA}" + passwordRsa,
//		"validateCode": vCodeRS,
//		"captchaToken": captchaToken,
//		"returnUrl":    returnUrl,
//		"mailSuffix":   "@pan.cn",
//		"paramId":      paramId,
//		"clientType":   "10010",
//		"dynamicCheck": "FALSE",
//		"cb_SaveName":  "1",
//		"isOauth2":     "false",
//	}).Post(url)
//	if err != nil {
//		return err
//	}
//	err = utils.Json.Unmarshal(res.Body(), &loginResp)
//	if err != nil {
//		log.Error(err.Error())
//		return err
//	}
//	if loginResp.Result != 0 {
//		return fmt.Errorf(loginResp.Msg)
//	}
//	_, err = d.client.R().Get(loginResp.ToUrl)
//	return err
//}

func (d *Cloud189) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
var e Error

@@ -163,7 +160,7 @@ func (d *Cloud189) request(url string, method string, callback base.ReqCallback,
//log.Debug(res.String())
if e.ErrorCode != "" {
if e.ErrorCode == "InvalidSessionKey" {
err = d.login()
err = d.newLogin()
if err != nil {
return nil, err
}

drivers/alias/driver.go (new file, 114 lines)

@@ -0,0 +1,114 @@
package alias

import (
"context"
"errors"
"strings"

"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
)

type Alias struct {
model.Storage
Addition
pathMap map[string][]string
autoFlatten bool
oneKey string
}

func (d *Alias) Config() driver.Config {
return config
}

func (d *Alias) GetAddition() driver.Additional {
return &d.Addition
}

func (d *Alias) Init(ctx context.Context) error {
if d.Paths == "" {
return errors.New("paths is required")
}
d.pathMap = make(map[string][]string)
for _, path := range strings.Split(d.Paths, "\n") {
path = strings.TrimSpace(path)
if path == "" {
continue
}
k, v := getPair(path)
d.pathMap[k] = append(d.pathMap[k], v)
}
if len(d.pathMap) == 1 {
for k := range d.pathMap {
d.oneKey = k
}
d.autoFlatten = true
}
return nil
}

func (d *Alias) Drop(ctx context.Context) error {
d.pathMap = nil
return nil
}

func (d *Alias) Get(ctx context.Context, path string) (model.Obj, error) {
if utils.PathEqual(path, "/") {
return &model.Object{
Name: "Root",
IsFolder: true,
Path: "/",
}, nil
}
root, sub := d.getRootAndPath(path)
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
for _, dst := range dsts {
obj, err := d.get(ctx, path, dst, sub)
if err == nil {
return obj, nil
}
}
return nil, errs.ObjectNotFound
}

func (d *Alias) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
path := dir.GetPath()
if utils.PathEqual(path, "/") && !d.autoFlatten {
return d.listRoot(), nil
}
root, sub := d.getRootAndPath(path)
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
var objs []model.Obj
for _, dst := range dsts {
tmp, err := d.list(ctx, dst, sub)
if err == nil {
objs = append(objs, tmp...)
}
}
return objs, nil
}

func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
root, sub := d.getRootAndPath(file.GetPath())
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
for _, dst := range dsts {
link, err := d.link(ctx, dst, sub, args)
if err == nil {
return link, nil
}
}
return nil, errs.ObjectNotFound
}

var _ driver.Driver = (*Alias)(nil)

drivers/alias/meta.go (new file, 27 lines)

@@ -0,0 +1,27 @@
package alias

import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
// Usually one of two
// driver.RootPath
// define other
Paths string `json:"paths" required:"true" type:"text"`
}

var config = driver.Config{
Name: "Alias",
LocalSort: true,
NoCache: true,
NoUpload: true,
DefaultRoot: "/",
}

func init() {
op.RegisterDriver(func() driver.Driver {
return &Alias{}
})
}

drivers/alias/types.go (new file, 1 line)

@@ -0,0 +1 @@
package alias

113
drivers/alias/util.go
Normal file
113
drivers/alias/util.go
Normal file
@ -0,0 +1,113 @@
package alias

import (
    "context"
    "fmt"
    stdpath "path"
    "strings"

    "github.com/alist-org/alist/v3/internal/fs"
    "github.com/alist-org/alist/v3/internal/model"
    "github.com/alist-org/alist/v3/internal/sign"
    "github.com/alist-org/alist/v3/pkg/utils"
    "github.com/alist-org/alist/v3/server/common"
)

func (d *Alias) listRoot() []model.Obj {
    var objs []model.Obj
    for k := range d.pathMap {
        obj := model.Object{
            Name:     k,
            IsFolder: true,
            Modified: d.Modified,
        }
        objs = append(objs, &obj)
    }
    return objs
}

// do others that not defined in Driver interface
func getPair(path string) (string, string) {
    //path = strings.TrimSpace(path)
    if strings.Contains(path, ":") {
        pair := strings.SplitN(path, ":", 2)
        if !strings.Contains(pair[0], "/") {
            return pair[0], pair[1]
        }
    }
    return stdpath.Base(path), path
}

func (d *Alias) getRootAndPath(path string) (string, string) {
    if d.autoFlatten {
        return d.oneKey, path
    }
    path = strings.TrimPrefix(path, "/")
    parts := strings.SplitN(path, "/", 2)
    if len(parts) == 1 {
        return parts[0], ""
    }
    return parts[0], parts[1]
}

func (d *Alias) get(ctx context.Context, path string, dst, sub string) (model.Obj, error) {
    obj, err := fs.Get(ctx, stdpath.Join(dst, sub), &fs.GetArgs{NoLog: true})
    if err != nil {
        return nil, err
    }
    return &model.Object{
        Path:     path,
        Name:     obj.GetName(),
        Size:     obj.GetSize(),
        Modified: obj.ModTime(),
        IsFolder: obj.IsDir(),
    }, nil
}

func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error) {
    objs, err := fs.List(ctx, stdpath.Join(dst, sub), &fs.ListArgs{NoLog: true})
    // the obj must implement the model.SetPath interface
    // return objs, err
    if err != nil {
        return nil, err
    }
    return utils.SliceConvert(objs, func(obj model.Obj) (model.Obj, error) {
        thumb, ok := model.GetThumb(obj)
        objRes := model.Object{
            Name:     obj.GetName(),
            Size:     obj.GetSize(),
            Modified: obj.ModTime(),
            IsFolder: obj.IsDir(),
        }
        if !ok {
            return &objRes, nil
        }
        return &model.ObjThumb{
            Object: objRes,
            Thumbnail: model.Thumbnail{
                Thumbnail: thumb,
            },
        }, nil
    })
}

func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs) (*model.Link, error) {
    reqPath := stdpath.Join(dst, sub)
    storage, err := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
    if err != nil {
        return nil, err
    }
    _, err = fs.Get(ctx, reqPath, &fs.GetArgs{NoLog: true})
    if err != nil {
        return nil, err
    }
    if common.ShouldProxy(storage, stdpath.Base(sub)) {
        return &model.Link{
            URL: fmt.Sprintf("/p%s?sign=%s",
                utils.EncodePath(reqPath, true),
                sign.Sign(reqPath)),
        }, nil
    }
    link, _, err := fs.Link(ctx, reqPath, args)
    return link, err
}
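A quick worked example of the ShouldProxy branch above, with invented values: for dst "/aliyun" and sub "/movies/a.mkv", reqPath is "/aliyun/movies/a.mkv" and the returned link points back at alist's own proxy endpoint rather than the upstream storage.

package main

import "fmt"

func main() {
    // Invented values: dst "/aliyun", sub "/movies/a.mkv".
    reqPath := "/aliyun/movies/a.mkv" // stdpath.Join(dst, sub)
    // The real sign value comes from sign.Sign(reqPath); "<token>" is a placeholder.
    fmt.Printf("/p%s?sign=%s\n", reqPath, "<token>")
    // Output: /p/aliyun/movies/a.mkv?sign=<token>
}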
@ -2,15 +2,19 @@ package alist_v3
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type AListV3 struct {
|
||||
@ -28,9 +32,39 @@ func (d *AListV3) GetAddition() driver.Additional {
|
||||
|
||||
func (d *AListV3) Init(ctx context.Context) error {
|
||||
d.Addition.Address = strings.TrimSuffix(d.Addition.Address, "/")
|
||||
// TODO login / refresh token
|
||||
//op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
var resp common.Resp[MeResp]
|
||||
_, err := d.request("/me", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// if the username is not empty and the username is not the same as the current username, then login again
|
||||
if d.Username != "" && d.Username != resp.Data.Username {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// re-get the user info
|
||||
_, err = d.request("/me", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Data.Role == model.GUEST {
|
||||
url := d.Address + "/api/public/settings"
|
||||
res, err := base.RestyClient.R().Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
allowMounted := utils.Json.Get(res.Body(), "data", conf.AllowMounted).ToString() == "true"
|
||||
if !allowMounted {
|
||||
return fmt.Errorf("the site does not allow mounted")
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Drop(ctx context.Context) error {
|
||||
@ -38,20 +72,18 @@ func (d *AListV3) Drop(ctx context.Context) error {
|
||||
}
|
||||
|
||||
func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
url := d.Address + "/api/fs/list"
|
||||
var resp common.Resp[FsListResp]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(ListReq{
|
||||
_, err := d.request("/fs/list", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(ListReq{
|
||||
PageReq: model.PageReq{
|
||||
Page: 1,
|
||||
PerPage: 0,
|
||||
},
|
||||
Path: dir.GetPath(),
|
||||
Password: d.Password,
|
||||
Password: d.MetaPassword,
|
||||
Refresh: false,
|
||||
}).Post(url)
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -72,15 +104,13 @@ func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs)
|
||||
}
|
||||
|
||||
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
url := d.Address + "/api/fs/get"
|
||||
var resp common.Resp[FsGetResp]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(FsGetReq{
|
||||
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(FsGetReq{
|
||||
Path: file.GetPath(),
|
||||
Password: d.Password,
|
||||
}).Post(url)
|
||||
Password: d.MetaPassword,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -90,86 +120,64 @@ func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
|
||||
}
|
||||
|
||||
func (d *AListV3) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
url := d.Address + "/api/fs/mkdir"
|
||||
var resp common.Resp[interface{}]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(MkdirOrLinkReq{
|
||||
_, err := d.request("/fs/mkdir", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(MkdirOrLinkReq{
|
||||
Path: path.Join(parentDir.GetPath(), dirName),
|
||||
}).Post(url)
|
||||
return checkResp(resp, err)
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
url := d.Address + "/api/fs/move"
|
||||
var resp common.Resp[interface{}]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(MoveCopyReq{
|
||||
_, err := d.request("/fs/move", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(MoveCopyReq{
|
||||
SrcDir: path.Dir(srcObj.GetPath()),
|
||||
DstDir: dstDir.GetPath(),
|
||||
Names: []string{srcObj.GetName()},
|
||||
}).Post(url)
|
||||
return checkResp(resp, err)
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
url := d.Address + "/api/fs/rename"
|
||||
var resp common.Resp[interface{}]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(RenameReq{
|
||||
_, err := d.request("/fs/rename", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(RenameReq{
|
||||
Path: srcObj.GetPath(),
|
||||
Name: newName,
|
||||
}).Post(url)
|
||||
return checkResp(resp, err)
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
url := d.Address + "/api/fs/copy"
|
||||
var resp common.Resp[interface{}]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(MoveCopyReq{
|
||||
_, err := d.request("/fs/copy", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(MoveCopyReq{
|
||||
SrcDir: path.Dir(srcObj.GetPath()),
|
||||
DstDir: dstDir.GetPath(),
|
||||
Names: []string{srcObj.GetName()},
|
||||
}).Post(url)
|
||||
return checkResp(resp, err)
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
|
||||
url := d.Address + "/api/fs/remove"
|
||||
var resp common.Resp[interface{}]
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetBody(RemoveReq{
|
||||
_, err := d.request("/fs/remove", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(RemoveReq{
|
||||
Dir: path.Dir(obj.GetPath()),
|
||||
Names: []string{obj.GetName()},
|
||||
}).Post(url)
|
||||
return checkResp(resp, err)
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
url := d.Address + "/api/fs/put"
|
||||
var resp common.Resp[interface{}]
|
||||
fileBytes, err := io.ReadAll(stream.GetReadCloser())
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
_, err = base.RestyClient.R().SetContext(ctx).
|
||||
SetResult(&resp).
|
||||
SetHeader("Authorization", d.AccessToken).
|
||||
SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||
SetHeader("Password", d.Password).
|
||||
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||
SetBody(fileBytes).Put(url)
|
||||
return checkResp(resp, err)
|
||||
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||
SetHeader("Password", d.MetaPassword).
|
||||
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||
SetBody(stream.GetReadCloser())
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
|
@ -7,15 +7,18 @@ import (
|
||||
|
||||
type Addition struct {
|
||||
driver.RootPath
|
||||
Address string `json:"url" required:"true"`
|
||||
Password string `json:"password"`
|
||||
AccessToken string `json:"access_token"`
|
||||
Address string `json:"url" required:"true"`
|
||||
MetaPassword string `json:"meta_password"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "AList V3",
|
||||
LocalSort: true,
|
||||
DefaultRoot: "/",
|
||||
CheckStatus: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
@ -63,3 +63,19 @@ type RemoveReq struct {
|
||||
Dir string `json:"dir"`
|
||||
Names []string `json:"names"`
|
||||
}
|
||||
|
||||
type LoginResp struct {
|
||||
Token string `json:"token"`
|
||||
}
|
||||
|
||||
type MeResp struct {
|
||||
Id int `json:"id"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
BasePath string `json:"base_path"`
|
||||
Role int `json:"role"`
|
||||
Disabled bool `json:"disabled"`
|
||||
Permission int `json:"permission"`
|
||||
SsoId string `json:"sso_id"`
|
||||
Otp bool `json:"otp"`
|
||||
}
|
||||
|
@ -1,17 +1,56 @@
|
||||
package alist_v3
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
func checkResp(resp common.Resp[interface{}], err error) error {
|
||||
func (d *AListV3) login() error {
|
||||
var resp common.Resp[LoginResp]
|
||||
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetResult(&resp).SetBody(base.Json{
|
||||
"username": d.Username,
|
||||
"password": d.Password,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Message == "success" {
|
||||
return nil
|
||||
}
|
||||
return errors.New(resp.Message)
|
||||
d.Token = resp.Data.Token
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AListV3) request(api, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||
url := d.Address + "/api" + api
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", d.Token)
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if res.StatusCode() >= 400 {
|
||||
return nil, fmt.Errorf("request failed, status: %s", res.Status())
|
||||
}
|
||||
code := utils.Json.Get(res.Body(), "code").ToInt()
|
||||
if code != 200 {
|
||||
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(api, method, callback, true)
|
||||
}
|
||||
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
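The request helper above centralizes the Authorization header and retries exactly once after re-login when the API answers 401/403. A stripped-down, standalone sketch of that retry shape (doCall, reauth and errAuth are placeholders, not names from this codebase):

package main

import (
    "errors"
    "fmt"
)

// errAuth stands in for the 401/403 condition that d.request checks via the response code.
var errAuth = errors.New("unauthorized")

func callWithReauth(doCall func() ([]byte, error), reauth func() error, retried bool) ([]byte, error) {
    body, err := doCall()
    if err == nil || retried || !errors.Is(err, errAuth) {
        return body, err
    }
    // Re-authenticate once, then replay the original call.
    if err := reauth(); err != nil {
        return nil, err
    }
    return callWithReauth(doCall, reauth, true)
}

func main() {
    loggedIn := false
    body, err := callWithReauth(
        func() ([]byte, error) {
            if !loggedIn {
                return nil, errAuth
            }
            return []byte(`{"code":200}`), nil
        },
        func() error { loggedIn = true; return nil },
        false,
    )
    fmt.Println(string(body), err) // {"code":200} <nil>
}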
|
||||
|
@ -71,14 +71,14 @@ func (d *AliDrive) Init(ctx context.Context) error {
|
||||
// init privateKey
|
||||
privateKey, _ := NewPrivateKeyFromHex(deviceID)
|
||||
state := State{
|
||||
nonce: -1,
|
||||
privateKey: privateKey,
|
||||
deviceID: deviceID,
|
||||
}
|
||||
// store state
|
||||
global.Store(d.UserID, &state)
|
||||
// init signature
|
||||
return d.reSign()
|
||||
d.sign()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliDrive) Drop(ctx context.Context) error {
|
||||
@ -226,7 +226,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
delete(reqBody, "pre_hash")
|
||||
h := sha1.New()
|
||||
if localFile != nil {
|
||||
if _, err = io.Copy(h, localFile); err != nil {
|
||||
if err = utils.CopyWithCtx(ctx, h, localFile, 0, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
if _, err = localFile.Seek(0, io.SeekStart); err != nil {
|
||||
@ -241,7 +241,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, stream model.FileS
|
||||
_ = tempFile.Close()
|
||||
_ = os.Remove(tempFile.Name())
|
||||
}()
|
||||
if _, err = io.Copy(io.MultiWriter(tempFile, h), file); err != nil {
|
||||
if err = utils.CopyWithCtx(ctx, io.MultiWriter(tempFile, h), file, 0, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
localFile = tempFile
|
||||
@ -337,6 +337,7 @@ func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}
|
||||
case "video_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
||||
data["category"] = "live_transcoding"
|
||||
data["url_expire_sec"] = 14400
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
|
@ -9,7 +9,7 @@ import (
|
||||
type State struct {
|
||||
deviceID string
|
||||
signature string
|
||||
nonce int
|
||||
retry int
|
||||
privateKey *ecdsa.PrivateKey
|
||||
}
|
||||
|
||||
|
@ -18,6 +18,9 @@ type Addition struct {
|
||||
var config = driver.Config{
|
||||
Name: "Aliyundrive",
|
||||
DefaultRoot: "root",
|
||||
Alert: `warning|There may be an infinite loop bug in this driver.
|
||||
Deprecated, no longer maintained and will be removed in a future version.
|
||||
We recommend using the official driver AliyundriveOpen.`,
|
||||
}
|
||||
|
||||
func init() {
|
||||
|
@ -13,7 +13,6 @@ import (
|
||||
"github.com/dustinxie/ecc"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/google/uuid"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func (d *AliDrive) createSession() error {
|
||||
@ -21,56 +20,39 @@ func (d *AliDrive) createSession() error {
|
||||
if !ok {
|
||||
return fmt.Errorf("can't load user state, user_id: %s", d.UserID)
|
||||
}
|
||||
d.sign()
|
||||
state.retry++
|
||||
if state.retry > 3 {
|
||||
state.retry = 0
|
||||
return fmt.Errorf("createSession failed after three retries")
|
||||
}
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"deviceName": "samsung",
|
||||
"modelName": "SM-G9810",
|
||||
"nonce": state.nonce,
|
||||
"nonce": 0,
|
||||
"pubKey": PublicKeyToHex(&state.privateKey.PublicKey),
|
||||
"refreshToken": d.RefreshToken,
|
||||
})
|
||||
}, nil)
|
||||
if err == nil {
|
||||
state.retry = 0
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliDrive) renewSession() error {
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
||||
return err
|
||||
}
|
||||
// func (d *AliDrive) renewSession() error {
|
||||
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
||||
// return err
|
||||
// }
|
||||
|
||||
func (d *AliDrive) sign() {
|
||||
state, ok := global.Load(d.UserID)
|
||||
if !ok {
|
||||
log.Errorf("can't load user state, user_id: %s", d.UserID)
|
||||
return
|
||||
}
|
||||
state, _ := global.Load(d.UserID)
|
||||
secpAppID := "5dde4e1bdf9e4966b387ba58f4b3fdc3"
|
||||
singdata := fmt.Sprintf("%s:%s:%s:%d", secpAppID, state.deviceID, d.UserID, state.nonce)
|
||||
singdata := fmt.Sprintf("%s:%s:%s:%d", secpAppID, state.deviceID, d.UserID, 0)
|
||||
hash := sha256.Sum256([]byte(singdata))
|
||||
data, _ := ecc.SignBytes(state.privateKey, hash[:], ecc.RecID|ecc.LowerS)
|
||||
state.signature = hex.EncodeToString(data)
|
||||
}
|
||||
|
||||
func (d *AliDrive) reSign() error {
|
||||
state, ok := global.Load(d.UserID)
|
||||
if !ok {
|
||||
return fmt.Errorf("can't load user state, user_id: %s", d.UserID)
|
||||
}
|
||||
state.nonce++
|
||||
if state.nonce >= 1073741823 {
|
||||
state.nonce = 0
|
||||
}
|
||||
d.sign()
|
||||
if state.nonce == 0 {
|
||||
return d.createSession()
|
||||
}
|
||||
err := d.renewSession()
|
||||
if err != nil && err.Error() == "device session signature error" {
|
||||
state.nonce = 0
|
||||
d.sign()
|
||||
return d.createSession()
|
||||
}
|
||||
return nil
|
||||
state.signature = hex.EncodeToString(data) //strconv.Itoa(state.nonce)
|
||||
}
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
@ -141,7 +123,7 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
|
||||
return nil, err, e
|
||||
}
|
||||
case "DeviceSessionSignatureInvalid":
|
||||
err = d.reSign()
|
||||
err = d.createSession()
|
||||
if err != nil {
|
||||
return nil, err, e
|
||||
}
|
||||
|
232 drivers/aliyundrive_open/driver.go Normal file
@@ -0,0 +1,232 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type AliyundriveOpen struct {
|
||||
model.Storage
|
||||
Addition
|
||||
base string
|
||||
|
||||
DriveId string
|
||||
|
||||
limitList func(ctx context.Context, dir model.Obj) ([]model.Obj, error)
|
||||
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Init(ctx context.Context) error {
|
||||
res, err := d.request("/adrive/v1.0/user/getDriveInfo", http.MethodPost, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.DriveId = utils.Json.Get(res, "default_drive_id").ToString()
|
||||
d.limitList = utils.LimitRateCtx(d.list, time.Second/4)
|
||||
d.limitLink = utils.LimitRateCtx(d.link, time.Second)
|
||||
return nil
|
||||
}
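utils.LimitRateCtx above appears to throttle list to one call per 250 ms and link to one call per second. A rough standalone sketch of that kind of wrapper using golang.org/x/time/rate; this is an assumption about the behavior, not the project's actual implementation:

package main

import (
    "context"
    "fmt"
    "time"

    "golang.org/x/time/rate"
)

// limitRate is a guess at what a LimitRateCtx-style wrapper does: allow at most
// one call per interval, blocking (and honoring ctx cancellation) in between.
func limitRate[T any](f func(context.Context) (T, error), interval time.Duration) func(context.Context) (T, error) {
    limiter := rate.NewLimiter(rate.Every(interval), 1)
    return func(ctx context.Context) (T, error) {
        if err := limiter.Wait(ctx); err != nil {
            var zero T
            return zero, err
        }
        return f(ctx)
    }
}

func main() {
    list := limitRate(func(ctx context.Context) (int, error) { return 42, nil }, time.Second/4)
    start := time.Now()
    for i := 0; i < 3; i++ {
        _, _ = list(context.Background())
    }
    fmt.Println("3 calls took", time.Since(start)) // roughly 500ms with a 250ms interval
}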
|
||||
|
||||
func (d *AliyundriveOpen) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) list(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
|
||||
files, err := d.getFiles(dir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return fileToObj(src), nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
return d.limitList(ctx, dir)
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link, error) {
|
||||
res, err := d.request("/adrive/v1.0/openFile/getDownloadUrl", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": file.GetID(),
|
||||
"expire_sec": 14400,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
url := utils.Json.Get(res, "url").ToString()
|
||||
exp := time.Hour
|
||||
return &model.Link{
|
||||
URL: url,
|
||||
Expiration: &exp,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
return d.limitLink(ctx, file)
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"parent_file_id": parentDir.GetID(),
|
||||
"name": dirName,
|
||||
"type": "folder",
|
||||
"check_name_mode": "refuse",
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/move", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": srcObj.GetID(),
|
||||
"to_parent_file_id": dstDir.GetID(),
|
||||
"check_name_mode": "refuse", // optional:ignore,auto_rename,refuse
|
||||
//"new_name": "newName", // The new name to use when a file of the same name exists
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/update", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": srcObj.GetID(),
|
||||
"name": newName,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
_, err := d.request("/adrive/v1.0/openFile/copy", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": srcObj.GetID(),
|
||||
"to_parent_file_id": dstDir.GetID(),
|
||||
"auto_rename": true,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
|
||||
uri := "/adrive/v1.0/openFile/recyclebin/trash"
|
||||
if d.RemoveWay == "delete" {
|
||||
uri = "/adrive/v1.0/openFile/delete"
|
||||
}
|
||||
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": obj.GetID(),
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
// rapid_upload is not currently supported
|
||||
// 1. create
|
||||
const DEFAULT int64 = 20971520
|
||||
createData := base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"parent_file_id": dstDir.GetID(),
|
||||
"name": stream.GetName(),
|
||||
"type": "file",
|
||||
"check_name_mode": "ignore",
|
||||
}
|
||||
count := 1
|
||||
if stream.GetSize() > DEFAULT {
|
||||
count = int(math.Ceil(float64(stream.GetSize()) / float64(DEFAULT)))
|
||||
createData["part_info_list"] = makePartInfos(count)
|
||||
}
|
||||
var createResp CreateResp
|
||||
_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(createData).SetResult(&createResp)
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// 2. upload
|
||||
preTime := time.Now()
|
||||
for i := 1; i <= len(createResp.PartInfoList); i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
err = d.uploadPart(ctx, i, count, utils.NewMultiReadable(io.LimitReader(stream, DEFAULT)), &createResp, true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if count > 0 {
|
||||
up(i * 100 / count)
|
||||
}
|
||||
// refresh upload url if 50 minutes passed
|
||||
if time.Since(preTime) > 50*time.Minute {
|
||||
createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
preTime = time.Now()
|
||||
}
|
||||
}
|
||||
// 3. complete
|
||||
_, err = d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": createResp.FileId,
|
||||
"upload_id": createResp.UploadId,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
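Put above slices the stream into fixed 20 MiB parts (DEFAULT = 20971520 bytes) and asks for ceil(size / 20 MiB) part URLs. A tiny worked example with an invented 53 MiB file:

package main

import (
    "fmt"
    "math"
)

func main() {
    const partSize int64 = 20971520 // 20 MiB, same constant as DEFAULT above
    size := int64(53) * 1024 * 1024 // hypothetical 53 MiB upload
    count := 1
    if size > partSize {
        count = int(math.Ceil(float64(size) / float64(partSize)))
    }
    fmt.Println(count) // 3 parts: two full 20 MiB parts and one ~13 MiB tail
}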
|
||||
|
||||
func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
var resp base.Json
|
||||
var uri string
|
||||
data := base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": args.Obj.GetID(),
|
||||
}
|
||||
switch args.Method {
|
||||
case "video_preview":
|
||||
uri = "/adrive/v1.0/openFile/getVideoPreviewPlayInfo"
|
||||
data["category"] = "live_transcoding"
|
||||
data["url_expire_sec"] = 14400
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*AliyundriveOpen)(nil)
|
39 drivers/aliyundrive_open/meta.go Normal file
@@ -0,0 +1,39 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootID
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
|
||||
OauthTokenURL string `json:"oauth_token_url" default:"https://api.nn.ci/alist/ali_open/token"`
|
||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
|
||||
InternalUpload bool `json:"internal_upload" help:"If you are using an Aliyun ECS instance located in Beijing, you can turn this on to boost the upload speed"`
|
||||
AccessToken string
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "AliyundriveOpen",
|
||||
LocalSort: false,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: false,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "root",
|
||||
NoOverwriteUpload: true,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &AliyundriveOpen{
|
||||
base: "https://openapi.aliyundrive.com",
|
||||
}
|
||||
})
|
||||
}
|
69 drivers/aliyundrive_open/types.go Normal file
@@ -0,0 +1,69 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type ErrResp struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type Files struct {
|
||||
Items []File `json:"items"`
|
||||
NextMarker string `json:"next_marker"`
|
||||
}
|
||||
|
||||
type File struct {
|
||||
DriveId string `json:"drive_id"`
|
||||
FileId string `json:"file_id"`
|
||||
ParentFileId string `json:"parent_file_id"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
FileExtension string `json:"file_extension"`
|
||||
ContentHash string `json:"content_hash"`
|
||||
Category string `json:"category"`
|
||||
Type string `json:"type"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Url string `json:"url"`
|
||||
CreatedAt *time.Time `json:"created_at"`
|
||||
UpdatedAt time.Time `json:"updated_at"`
|
||||
}
|
||||
|
||||
func fileToObj(f File) *model.ObjThumb {
|
||||
return &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: f.FileId,
|
||||
Name: f.Name,
|
||||
Size: f.Size,
|
||||
Modified: f.UpdatedAt,
|
||||
IsFolder: f.Type == "folder",
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
|
||||
}
|
||||
}
|
||||
|
||||
type PartInfo struct {
|
||||
Etag interface{} `json:"etag"`
|
||||
PartNumber int `json:"part_number"`
|
||||
PartSize interface{} `json:"part_size"`
|
||||
UploadUrl string `json:"upload_url"`
|
||||
ContentType string `json:"content_type"`
|
||||
}
|
||||
|
||||
type CreateResp struct {
|
||||
//Type string `json:"type"`
|
||||
//ParentFileId string `json:"parent_file_id"`
|
||||
//DriveId string `json:"drive_id"`
|
||||
FileId string `json:"file_id"`
|
||||
//RevisionId string `json:"revision_id"`
|
||||
//EncryptMode string `json:"encrypt_mode"`
|
||||
//DomainId string `json:"domain_id"`
|
||||
//FileName string `json:"file_name"`
|
||||
UploadId string `json:"upload_id"`
|
||||
//Location string `json:"location"`
|
||||
RapidUpload bool `json:"rapid_upload"`
|
||||
PartInfoList []PartInfo `json:"part_info_list"`
|
||||
}
|
167 drivers/aliyundrive_open/util.go Normal file
@@ -0,0 +1,167 @@
|
||||
package aliyundrive_open
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
func (d *AliyundriveOpen) refreshToken() error {
|
||||
url := d.base + "/oauth/access_token"
|
||||
if d.OauthTokenURL != "" && d.ClientID == "" {
|
||||
url = d.OauthTokenURL
|
||||
}
|
||||
var resp base.TokenResp
|
||||
var e ErrResp
|
||||
_, err := base.RestyClient.R().
|
||||
ForceContentType("application/json").
|
||||
SetBody(base.Json{
|
||||
"client_id": d.ClientID,
|
||||
"client_secret": d.ClientSecret,
|
||||
"grant_type": "refresh_token",
|
||||
"refresh_token": d.RefreshToken,
|
||||
}).
|
||||
SetResult(&resp).
|
||||
SetError(&e).
|
||||
Post(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if e.Code != "" {
|
||||
return fmt.Errorf("failed to refresh token: %s", e.Message)
|
||||
}
|
||||
if resp.RefreshToken == "" {
|
||||
return errors.New("failed to refresh token: refresh token is empty")
|
||||
}
|
||||
d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
// TODO check whether access_token is expired
|
||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||
if method == http.MethodPost {
|
||||
req.SetHeader("Content-Type", "application/json")
|
||||
}
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
var e ErrResp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, d.base+uri)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
isRetry := len(retry) > 0 && retry[0]
|
||||
if e.Code != "" {
|
||||
if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
|
||||
err = d.refreshToken()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(uri, method, callback, true)
|
||||
}
|
||||
return nil, fmt.Errorf("%s:%s", e.Code, e.Message)
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) getFiles(fileId string) ([]File, error) {
|
||||
marker := "first"
|
||||
res := make([]File, 0)
|
||||
for marker != "" {
|
||||
if marker == "first" {
|
||||
marker = ""
|
||||
}
|
||||
var resp Files
|
||||
data := base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"limit": 200,
|
||||
"marker": marker,
|
||||
"order_by": d.OrderBy,
|
||||
"order_direction": d.OrderDirection,
|
||||
"parent_file_id": fileId,
|
||||
//"category": "",
|
||||
//"type": "",
|
||||
//"video_thumbnail_time": 120000,
|
||||
//"video_thumbnail_width": 480,
|
||||
//"image_thumbnail_width": 480,
|
||||
}
|
||||
_, err := d.request("/adrive/v1.0/openFile/list", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
marker = resp.NextMarker
|
||||
res = append(res, resp.Items...)
|
||||
}
|
||||
return res, nil
|
||||
}
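getFiles above pages with a marker: the "first" sentinel only forces one request with an empty marker, and the loop ends when next_marker comes back empty. The same control flow in a generic, standalone form (fetchPage and the sample pages are placeholders):

package main

import "fmt"

// collectAll shows the marker-paging shape: start with an empty marker and
// keep requesting until the server stops returning a next marker.
func collectAll(fetchPage func(marker string) (items []int, next string, err error)) ([]int, error) {
    var all []int
    marker := ""
    for {
        items, next, err := fetchPage(marker)
        if err != nil {
            return nil, err
        }
        all = append(all, items...)
        if next == "" {
            return all, nil
        }
        marker = next
    }
}

func main() {
    pages := map[string]struct {
        items []int
        next  string
    }{
        "":   {[]int{1, 2}, "m1"},
        "m1": {[]int{3}, ""},
    }
    all, _ := collectAll(func(m string) ([]int, string, error) {
        p := pages[m]
        return p.items, p.next, nil
    })
    fmt.Println(all) // [1 2 3]
}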
|
||||
|
||||
func makePartInfos(size int) []base.Json {
|
||||
partInfoList := make([]base.Json, size)
|
||||
for i := 0; i < size; i++ {
|
||||
partInfoList[i] = base.Json{"part_number": 1 + i}
|
||||
}
|
||||
return partInfoList
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
|
||||
partInfoList := makePartInfos(count)
|
||||
var resp CreateResp
|
||||
_, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": fileId,
|
||||
"part_info_list": partInfoList,
|
||||
"upload_id": uploadId,
|
||||
}).SetResult(&resp)
|
||||
})
|
||||
return resp.PartInfoList, err
|
||||
}
|
||||
|
||||
func (d *AliyundriveOpen) uploadPart(ctx context.Context, i, count int, reader *utils.MultiReadable, resp *CreateResp, retry bool) error {
|
||||
partInfo := resp.PartInfoList[i-1]
|
||||
uploadUrl := partInfo.UploadUrl
|
||||
if d.InternalUpload {
|
||||
uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
|
||||
}
|
||||
req, err := http.NewRequest("PUT", uploadUrl, reader)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
if retry {
|
||||
reader.Reset()
|
||||
return d.uploadPart(ctx, i, count, reader, resp, false)
|
||||
}
|
||||
return err
|
||||
}
|
||||
res.Body.Close()
|
||||
if retry && res.StatusCode == http.StatusForbidden {
|
||||
resp.PartInfoList, err = d.getUploadUrl(count, resp.FileId, resp.UploadId)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
reader.Reset()
|
||||
return d.uploadPart(ctx, i, count, reader, resp, false)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
|
||||
return fmt.Errorf("upload status: %d", res.StatusCode)
|
||||
}
|
||||
return nil
|
||||
}
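uploadPart above depends on reader.Reset() so the same part body can be re-sent after a transport error or a 403 from an expired upload URL. A standalone sketch of one way such a re-readable body can work (buffer the part once, rewind before retrying); this is an illustration, not the project's utils.MultiReadable:

package main

import (
    "bytes"
    "fmt"
    "io"
    "strings"
)

// rewindable buffers a bounded chunk of src so the same bytes can be replayed.
type rewindable struct{ r *bytes.Reader }

func newRewindable(src io.Reader, n int64) (*rewindable, error) {
    buf, err := io.ReadAll(io.LimitReader(src, n))
    if err != nil {
        return nil, err
    }
    return &rewindable{r: bytes.NewReader(buf)}, nil
}

func (b *rewindable) Read(p []byte) (int, error) { return b.r.Read(p) }
func (b *rewindable) Reset()                     { _, _ = b.r.Seek(0, io.SeekStart) }

func main() {
    part, _ := newRewindable(strings.NewReader("part payload"), 1024)
    first, _ := io.ReadAll(part)
    part.Reset() // e.g. before retrying an upload
    second, _ := io.ReadAll(part)
    fmt.Println(string(first) == string(second)) // true
}

The trade-off of any buffering approach like this is memory: holding a full 20 MiB part keeps the retry path simple at the cost of keeping that part in RAM.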
|
@ -2,15 +2,16 @@ package aliyundrive_share
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/cron"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
@ -77,40 +78,43 @@ func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.
|
||||
"share_id": d.ShareId,
|
||||
}
|
||||
var resp ShareLinkResp
|
||||
var e ErrorResp
|
||||
_, err := base.RestyClient.R().
|
||||
SetError(&e).SetBody(data).SetResult(&resp).
|
||||
SetHeader("content-type", "application/json").
|
||||
SetHeader("Authorization", "Bearer\t"+d.AccessToken).
|
||||
SetHeader("x-share-token", d.ShareToken).
|
||||
Post("https://api.aliyundrive.com/v2/file/get_share_link_download_url")
|
||||
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var u string
|
||||
if e.Code != "" {
|
||||
if e.Code == "AccessTokenInvalid" || e.Code == "ShareLinkTokenInvalid" {
|
||||
if e.Code == "AccessTokenInvalid" {
|
||||
err = d.refreshToken()
|
||||
} else {
|
||||
err = d.getShareToken()
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.Link(ctx, file, args)
|
||||
} else {
|
||||
return nil, errors.New(e.Code + ": " + e.Message)
|
||||
}
|
||||
} else {
|
||||
u = resp.DownloadUrl
|
||||
}
|
||||
return &model.Link{
|
||||
Header: http.Header{
|
||||
"Referer": []string{"https://www.aliyundrive.com/"},
|
||||
},
|
||||
URL: u,
|
||||
URL: resp.DownloadUrl,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
var resp base.Json
|
||||
var url string
|
||||
data := base.Json{
|
||||
"share_id": d.ShareId,
|
||||
"file_id": args.Obj.GetID(),
|
||||
}
|
||||
switch args.Method {
|
||||
case "doc_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
|
||||
case "video_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
||||
data["category"] = "live_transcoding"
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*AliyundriveShare)(nil)
|
||||
|
@ -52,6 +52,40 @@ func (d *AliyundriveShare) getShareToken() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveShare) request(url, method string, callback base.ReqCallback) ([]byte, error) {
|
||||
var e ErrorResp
|
||||
req := base.RestyClient.R().
|
||||
SetError(&e).
|
||||
SetHeader("content-type", "application/json").
|
||||
SetHeader("Authorization", "Bearer\t"+d.AccessToken).
|
||||
SetHeader("x-share-token", d.ShareToken)
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
} else {
|
||||
req.SetBody("{}")
|
||||
}
|
||||
resp, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if e.Code != "" {
|
||||
if e.Code == "AccessTokenInvalid" || e.Code == "ShareLinkTokenInvalid" {
|
||||
if e.Code == "AccessTokenInvalid" {
|
||||
err = d.refreshToken()
|
||||
} else {
|
||||
err = d.getShareToken()
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(url, method, callback)
|
||||
} else {
|
||||
return nil, errors.New(e.Code + ": " + e.Message)
|
||||
}
|
||||
}
|
||||
return resp.Body(), nil
|
||||
}
|
||||
|
||||
func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
|
||||
files := make([]File, 0)
|
||||
data := base.Json{
|
||||
|
@ -6,21 +6,26 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/139"
|
||||
_ "github.com/alist-org/alist/v3/drivers/189"
|
||||
_ "github.com/alist-org/alist/v3/drivers/189pc"
|
||||
_ "github.com/alist-org/alist/v3/drivers/alias"
|
||||
_ "github.com/alist-org/alist/v3/drivers/alist_v2"
|
||||
_ "github.com/alist-org/alist/v3/drivers/alist_v3"
|
||||
_ "github.com/alist-org/alist/v3/drivers/aliyundrive"
|
||||
_ "github.com/alist-org/alist/v3/drivers/aliyundrive_open"
|
||||
_ "github.com/alist-org/alist/v3/drivers/aliyundrive_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
|
||||
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
|
||||
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
||||
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
||||
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
||||
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
||||
_ "github.com/alist-org/alist/v3/drivers/local"
|
||||
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
|
||||
_ "github.com/alist-org/alist/v3/drivers/mega"
|
||||
_ "github.com/alist-org/alist/v3/drivers/onedrive"
|
||||
_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
|
||||
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
||||
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/quark"
|
||||
@ -31,6 +36,8 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/teambition"
|
||||
_ "github.com/alist-org/alist/v3/drivers/terabox"
|
||||
_ "github.com/alist-org/alist/v3/drivers/thunder"
|
||||
_ "github.com/alist-org/alist/v3/drivers/trainbit"
|
||||
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
||||
_ "github.com/alist-org/alist/v3/drivers/uss"
|
||||
_ "github.com/alist-org/alist/v3/drivers/virtual"
|
||||
_ "github.com/alist-org/alist/v3/drivers/webdav"
|
||||
|
@ -154,7 +154,7 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
|
||||
"target": fmt.Sprintf("[\"%s\"]", file.GetPath()),
|
||||
"dlink": "1",
|
||||
"web": "5",
|
||||
"origin": "dlna",
|
||||
//"origin": "dlna",
|
||||
}
|
||||
_, err := d.request("https://pan.baidu.com/api/filemetas", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(param)
|
||||
@ -165,7 +165,7 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
|
||||
return &model.Link{
|
||||
URL: resp.Info[0].Dlink,
|
||||
Header: http.Header{
|
||||
"User-Agent": []string{"pan.baidu.com"},
|
||||
"User-Agent": []string{"netdisk"},
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
@ -249,7 +249,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
"isdir": "0",
|
||||
"rtype": "1",
|
||||
"ctype": "11",
|
||||
"path": stream.GetName(),
|
||||
"path": fmt.Sprintf("/%s", stream.GetName()),
|
||||
"size": fmt.Sprint(stream.GetSize()),
|
||||
"slice-md5": slice_md5,
|
||||
"content-md5": content_md5,
|
||||
|
@ -18,7 +18,7 @@ func getTid() string {
|
||||
|
||||
// check the name
|
||||
func checkName(name string) bool {
|
||||
return len(name) <= 20 && regexp.MustCompile("[\u4e00-\u9fa5A-Za-z0-9_-]").MatchString(name)
|
||||
return len(name) <= 50 && regexp.MustCompile("[\u4e00-\u9fa5A-Za-z0-9_-]").MatchString(name)
|
||||
}
|
||||
|
||||
func toTime(t int64) *time.Time {
|
||||
|
@ -23,7 +23,7 @@ const (
|
||||
)
|
||||
|
||||
var (
|
||||
ErrNotSupportName = errors.New("only chinese and english, numbers and underscores are supported, and the length is no more than 20")
|
||||
ErrNotSupportName = errors.New("only chinese and english, numbers and underscores are supported, and the length is no more than 50")
|
||||
)
|
||||
|
||||
func (d *BaiduPhoto) Request(furl string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
|
251 drivers/baidu_share/driver.go Normal file
@@ -0,0 +1,251 @@
|
||||
package baidu_share
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type BaiduShare struct {
|
||||
model.Storage
|
||||
Addition
|
||||
client *resty.Client
|
||||
info struct {
|
||||
Root string
|
||||
Seckey string
|
||||
Shareid string
|
||||
Uk string
|
||||
}
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *BaiduShare) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Init(ctx context.Context) error {
|
||||
// TODO login / refresh token
|
||||
//op.MustSaveDriverStorage(d)
|
||||
d.client = resty.New().
|
||||
SetBaseURL("https://pan.baidu.com").
|
||||
SetHeader("User-Agent", "netdisk").
|
||||
SetCookie(&http.Cookie{Name: "BDUSS", Value: d.BDUSS}).
|
||||
SetCookie(&http.Cookie{Name: "ndut_fmt"})
|
||||
respJson := struct {
|
||||
Errno int64 `json:"errno"`
|
||||
Data struct {
|
||||
List [1]struct {
|
||||
Path string `json:"path"`
|
||||
} `json:"list"`
|
||||
Uk json.Number `json:"uk"`
|
||||
Shareid json.Number `json:"shareid"`
|
||||
Seckey string `json:"seckey"`
|
||||
} `json:"data"`
|
||||
}{}
|
||||
resp, err := d.client.R().
|
||||
SetBody(url.Values{
|
||||
"pwd": {d.Pwd},
|
||||
"root": {"1"},
|
||||
"shorturl": {d.Surl},
|
||||
}.Encode()).
|
||||
SetResult(&respJson).
|
||||
Post("share/wxlist?channel=weixin&version=2.2.2&clienttype=25&web=1")
|
||||
if err == nil {
|
||||
if resp.IsSuccess() && respJson.Errno == 0 {
|
||||
d.info.Root = path.Dir(respJson.Data.List[0].Path)
|
||||
d.info.Seckey = respJson.Data.Seckey
|
||||
d.info.Shareid = respJson.Data.Shareid.String()
|
||||
d.info.Uk = respJson.Data.Uk.String()
|
||||
} else {
|
||||
err = fmt.Errorf(" %s; %s; ", resp.Status(), resp.Body())
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
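For reference, a self-contained sketch of the share/wxlist response shape that Init above reads, with obviously fake sample values; only the fields Init actually uses (errno, data.list[0].path, uk, shareid, seckey) are shown.

package main

import (
    "encoding/json"
    "fmt"
    "path"
)

// Fake sample payload; the field names mirror the anonymous struct in Init above.
const sample = `{
  "errno": 0,
  "data": {
    "list": [{"path": "/sharelink123-456/some-folder"}],
    "uk": 1234567890,
    "shareid": 9876543210,
    "seckey": "xxXXxx-placeholder"
  }
}`

func main() {
    var resp struct {
        Errno int64 `json:"errno"`
        Data  struct {
            List [1]struct {
                Path string `json:"path"`
            } `json:"list"`
            Uk      json.Number `json:"uk"`
            Shareid json.Number `json:"shareid"`
            Seckey  string      `json:"seckey"`
        } `json:"data"`
    }
    _ = json.Unmarshal([]byte(sample), &resp)
    fmt.Println(path.Dir(resp.Data.List[0].Path)) // "/sharelink123-456" becomes d.info.Root
    fmt.Println(resp.Data.Shareid.String(), resp.Data.Uk.String(), resp.Data.Seckey)
}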
|
||||
|
||||
func (d *BaiduShare) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *BaiduShare) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
// TODO return the files list, required
|
||||
reqDir := dir.GetPath()
|
||||
isRoot := "0"
|
||||
if reqDir == d.RootFolderPath {
|
||||
reqDir = path.Join(d.info.Root, reqDir)
|
||||
}
|
||||
if reqDir == d.info.Root {
|
||||
isRoot = "1"
|
||||
}
|
||||
objs := []model.Obj{}
|
||||
var err error
|
||||
var page uint64 = 1
|
||||
more := true
|
||||
for more && err == nil {
|
||||
respJson := struct {
|
||||
Errno int64 `json:"errno"`
|
||||
Data struct {
|
||||
More bool `json:"has_more"`
|
||||
List []struct {
|
||||
Fsid json.Number `json:"fs_id"`
|
||||
Isdir json.Number `json:"isdir"`
|
||||
Path string `json:"path"`
|
||||
Name string `json:"server_filename"`
|
||||
Mtime json.Number `json:"server_mtime"`
|
||||
Size json.Number `json:"size"`
|
||||
} `json:"list"`
|
||||
} `json:"data"`
|
||||
}{}
|
||||
resp, e := d.client.R().
|
||||
SetBody(url.Values{
|
||||
"dir": {reqDir},
|
||||
"num": {"1000"},
|
||||
"order": {"time"},
|
||||
"page": {fmt.Sprint(page)},
|
||||
"pwd": {d.Pwd},
|
||||
"root": {isRoot},
|
||||
"shorturl": {d.Surl},
|
||||
}.Encode()).
|
||||
SetResult(&respJson).
|
||||
Post("share/wxlist?channel=weixin&version=2.2.2&clienttype=25&web=1")
|
||||
err = e
|
||||
if err == nil {
|
||||
if resp.IsSuccess() && respJson.Errno == 0 {
|
||||
page++
|
||||
more = respJson.Data.More
|
||||
for _, v := range respJson.Data.List {
|
||||
size, _ := v.Size.Int64()
|
||||
mtime, _ := v.Mtime.Int64()
|
||||
objs = append(objs, &model.Object{
|
||||
ID: v.Fsid.String(),
|
||||
Path: v.Path,
|
||||
Name: v.Name,
|
||||
Size: size,
|
||||
Modified: time.Unix(mtime, 0),
|
||||
IsFolder: v.Isdir.String() == "1",
|
||||
})
|
||||
}
|
||||
} else {
|
||||
err = fmt.Errorf(" %s; %s; ", resp.Status(), resp.Body())
|
||||
}
|
||||
}
|
||||
}
|
||||
return objs, err
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
// TODO return link of file, required
|
||||
link := model.Link{Header: d.client.Header}
|
||||
sign := ""
|
||||
stamp := ""
|
||||
signJson := struct {
|
||||
Errno int64 `json:"errno"`
|
||||
Data struct {
|
||||
Stamp json.Number `json:"timestamp"`
|
||||
Sign string `json:"sign"`
|
||||
} `json:"data"`
|
||||
}{}
|
||||
resp, err := d.client.R().
|
||||
SetQueryParam("surl", d.Surl).
|
||||
SetResult(&signJson).
|
||||
Get("share/tplconfig?fields=sign,timestamp&channel=chunlei&web=1&app_id=250528&clienttype=0")
|
||||
if err == nil {
|
||||
if resp.IsSuccess() && signJson.Errno == 0 {
|
||||
stamp = signJson.Data.Stamp.String()
|
||||
sign = signJson.Data.Sign
|
||||
} else {
|
||||
err = fmt.Errorf(" %s; %s; ", resp.Status(), resp.Body())
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
respJson := struct {
|
||||
Errno int64 `json:"errno"`
|
||||
List [1]struct {
|
||||
Dlink string `json:"dlink"`
|
||||
} `json:"list"`
|
||||
}{}
|
||||
resp, err = d.client.R().
|
||||
SetQueryParam("sign", sign).
|
||||
SetQueryParam("timestamp", stamp).
|
||||
SetBody(url.Values{
|
||||
"encrypt": {"0"},
|
||||
"extra": {fmt.Sprintf(`{"sekey":"%s"}`, d.info.Seckey)},
|
||||
"fid_list": {fmt.Sprintf("[%s]", file.GetID())},
|
||||
"primaryid": {d.info.Shareid},
|
||||
"product": {"share"},
|
||||
"type": {"nolimit"},
|
||||
"uk": {d.info.Uk},
|
||||
}.Encode()).
|
||||
SetResult(&respJson).
|
||||
Post("api/sharedownload?app_id=250528&channel=chunlei&clienttype=12&web=1")
|
||||
if err == nil {
|
||||
if resp.IsSuccess() && respJson.Errno == 0 && respJson.List[0].Dlink != "" {
|
||||
link.URL = respJson.List[0].Dlink
|
||||
} else {
|
||||
err = fmt.Errorf(" %s; %s; ", resp.Status(), resp.Body())
|
||||
}
|
||||
}
|
||||
if err == nil {
|
||||
resp, err = d.client.R().
|
||||
SetDoNotParseResponse(true).
|
||||
Get(link.URL)
|
||||
if err == nil {
|
||||
defer resp.RawBody().Close()
|
||||
if resp.IsError() {
|
||||
byt, _ := io.ReadAll(resp.RawBody())
|
||||
err = fmt.Errorf(" %s; %s; ", resp.Status(), byt)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return &link, err
|
||||
}
|
||||
|
||||
func (d *BaiduShare) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
// TODO create folder, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
// TODO move obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
// TODO rename obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
// TODO copy obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Remove(ctx context.Context, obj model.Obj) error {
|
||||
// TODO remove obj, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
func (d *BaiduShare) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
// TODO upload file, optional
|
||||
return errs.NotSupport
|
||||
}
|
||||
|
||||
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
// return nil, errs.NotSupport
|
||||
//}
|
||||
|
||||
var _ driver.Driver = (*BaiduShare)(nil)
|
37 drivers/baidu_share/meta.go Normal file
@@ -0,0 +1,37 @@
|
||||
package baidu_share
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
// Usually one of two
|
||||
driver.RootPath
|
||||
// driver.RootID
|
||||
// define other
|
||||
// Field string `json:"field" type:"select" required:"true" options:"a,b,c" default:"a"`
|
||||
Surl string `json:"surl"`
|
||||
Pwd string `json:"pwd"`
|
||||
BDUSS string `json:"BDUSS"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "BaiduShare",
|
||||
LocalSort: true,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: true,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "/",
|
||||
CheckStatus: false,
|
||||
Alert: "",
|
||||
NoOverwriteUpload: false,
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &BaiduShare{}
|
||||
})
|
||||
}
|
1 drivers/baidu_share/types.go Normal file
@@ -0,0 +1 @@
|
||||
package baidu_share
|
3 drivers/baidu_share/util.go Normal file
@@ -0,0 +1,3 @@
|
||||
package baidu_share
|
||||
|
||||
// do others that not defined in Driver interface
|
@ -1,31 +1,48 @@
|
||||
package base
|
||||
|
||||
import (
|
||||
"crypto/tls"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
var NoRedirectClient *resty.Client
|
||||
var RestyClient = NewRestyClient()
|
||||
var HttpClient = &http.Client{}
|
||||
var (
|
||||
NoRedirectClient *resty.Client
|
||||
RestyClient *resty.Client
|
||||
HttpClient *http.Client
|
||||
)
|
||||
var UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36"
|
||||
var DefaultTimeout = time.Second * 30
|
||||
|
||||
func init() {
|
||||
func InitClient() {
|
||||
NoRedirectClient = resty.New().SetRedirectPolicy(
|
||||
resty.RedirectPolicyFunc(func(req *http.Request, via []*http.Request) error {
|
||||
return http.ErrUseLastResponse
|
||||
}),
|
||||
)
|
||||
).SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||
NoRedirectClient.SetHeader("user-agent", UserAgent)
|
||||
|
||||
RestyClient = NewRestyClient()
|
||||
HttpClient = NewHttpClient()
|
||||
}
|
||||
|
||||
func NewRestyClient() *resty.Client {
|
||||
client := resty.New().
|
||||
SetHeader("user-agent", UserAgent).
|
||||
SetRetryCount(3).
|
||||
SetTimeout(DefaultTimeout)
|
||||
SetTimeout(DefaultTimeout).
|
||||
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||
return client
|
||||
}
|
||||
|
||||
func NewHttpClient() *http.Client {
|
||||
return &http.Client{
|
||||
Timeout: DefaultTimeout,
|
||||
Transport: &http.Transport{
|
||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
30 drivers/base/util.go Normal file
@@ -0,0 +1,30 @@
|
||||
package base
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
)
|
||||
|
||||
func HandleRange(link *model.Link, file io.ReadSeekCloser, header http.Header, size int64) {
|
||||
if header.Get("Range") != "" {
|
||||
r, err := http_range.ParseRange(header.Get("Range"), size)
|
||||
if err == nil && len(r) > 0 {
|
||||
_, err := file.Seek(r[0].Start, io.SeekStart)
|
||||
if err == nil {
|
||||
link.Data = utils.NewLimitReadCloser(file, func() error {
|
||||
return file.Close()
|
||||
}, r[0].Length)
|
||||
link.Status = http.StatusPartialContent
|
||||
link.Header = http.Header{
|
||||
"Content-Range": []string{r[0].ContentRange(size)},
|
||||
"Content-Length": []string{strconv.FormatInt(r[0].Length, 10)},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
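HandleRange seeks the underlying ReadSeekCloser to the start of the requested range and swaps the link body for a length-limited reader, so proxied drivers (FTP, SFTP and SMB below) can answer Range requests. A rough stdlib-only equivalent, using a simplistic single-range parser instead of the project's http_range package:

package main

import (
    "fmt"
    "io"
    "net/http"
    "os"
    "strconv"
    "strings"
)

// serveRange is a rough stand-in for HandleRange: it understands only a single
// "bytes=start-end" range and falls back to the full body otherwise.
func serveRange(w http.ResponseWriter, r *http.Request, file io.ReadSeeker, size int64) error {
    rg := r.Header.Get("Range")
    if !strings.HasPrefix(rg, "bytes=") {
        w.Header().Set("Content-Length", strconv.FormatInt(size, 10))
        _, err := io.Copy(w, file)
        return err
    }
    parts := strings.SplitN(strings.TrimPrefix(rg, "bytes="), "-", 2)
    start, _ := strconv.ParseInt(parts[0], 10, 64)
    end := size - 1
    if len(parts) == 2 && parts[1] != "" {
        end, _ = strconv.ParseInt(parts[1], 10, 64)
    }
    length := end - start + 1
    if _, err := file.Seek(start, io.SeekStart); err != nil {
        return err
    }
    w.Header().Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", start, end, size))
    w.Header().Set("Content-Length", strconv.FormatInt(length, 10))
    w.WriteHeader(http.StatusPartialContent)
    _, err := io.Copy(w, io.LimitReader(file, length))
    return err
}

func main() {
    http.HandleFunc("/file", func(w http.ResponseWriter, r *http.Request) {
        f, err := os.Open("example.bin") // any seekable file
        if err != nil {
            http.Error(w, err.Error(), http.StatusNotFound)
            return
        }
        defer f.Close()
        st, _ := f.Stat()
        _ = serveRange(w, r, f, st.Size())
    })
    _ = http.ListenAndServe(":8080", nil)
}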
@@ -2,21 +2,20 @@ package cloudreve

import (
	"context"
	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	"io"
	"net/http"
	"strconv"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
)

type Cloudreve struct {
	model.Storage
	Addition
	Cookie string
}

func (d *Cloudreve) Config() driver.Config {
@@ -28,6 +27,9 @@ func (d *Cloudreve) GetAddition() driver.Additional {
}

func (d *Cloudreve) Init(ctx context.Context) error {
	if d.Cookie != "" {
		return nil
	}
	return d.login()
}

@@ -10,8 +10,9 @@ type Addition struct {
	driver.RootPath
	// define other
	Address  string `json:"address" required:"true"`
	Username string `json:"username" required:"true"`
	Password string `json:"password" required:"true"`
	Username string `json:"username"`
	Password string `json:"password"`
	Cookie   string `json:"cookie"`
}

var config = driver.Config{
@@ -1,8 +1,9 @@
package cloudreve

import (
	"github.com/alist-org/alist/v3/internal/model"
	"time"

	"github.com/alist-org/alist/v3/internal/model"
)

type Resp struct {
@@ -52,3 +53,8 @@ func objectToObj(f Object) *model.Object {
		IsFolder: f.Type == "dir",
	}
}

type Config struct {
	LoginCaptcha bool   `json:"loginCaptcha"`
	CaptchaType  string `json:"captcha_type"`
}
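The new Config struct mirrors the relevant part of Cloudreve's /site/config response, which the login code queries to decide whether a captcha is required. A small sketch of decoding such a payload with encoding/json; the JSON body here is made up:

package main

import (
    "encoding/json"
    "fmt"
)

// Config mirrors the fields the driver cares about in /site/config.
type Config struct {
    LoginCaptcha bool   `json:"loginCaptcha"`
    CaptchaType  string `json:"captcha_type"`
}

func main() {
    // hypothetical response body, not real Cloudreve output
    body := []byte(`{"loginCaptcha": true, "captcha_type": "normal", "title": "demo"}`)
    var cfg Config
    if err := json.Unmarshal(body, &cfg); err != nil {
        panic(err)
    }
    // unknown fields such as "title" are simply ignored
    fmt.Printf("need captcha: %v (type %q)\n", cfg.LoginCaptcha, cfg.CaptchaType)
}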
@ -1,13 +1,19 @@
|
||||
package cloudreve
|
||||
|
||||
import (
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/setting"
|
||||
"github.com/alist-org/alist/v3/pkg/cookie"
|
||||
"github.com/go-resty/resty/v2"
|
||||
json "github.com/json-iterator/go"
|
||||
"net/http"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
@ -43,13 +49,15 @@ func (d *Cloudreve) request(method string, path string, callback base.ReqCallbac
|
||||
|
||||
// 刷新 cookie
|
||||
if r.Code == http.StatusUnauthorized && path != loginPath {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return err
|
||||
if d.Username != "" && d.Password != "" {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return d.request(method, path, callback, out)
|
||||
}
|
||||
return d.request(method, path, callback, out)
|
||||
}
|
||||
|
||||
|
||||
return errors.New(r.Msg)
|
||||
}
|
||||
sess := cookie.GetCookie(resp.Cookies(), "cloudreve-session")
|
||||
@ -72,13 +80,57 @@ func (d *Cloudreve) request(method string, path string, callback base.ReqCallbac
|
||||
}
|
||||
|
||||
func (d *Cloudreve) login() error {
|
||||
return d.request(http.MethodPost, loginPath, func(req *resty.Request) {
|
||||
var siteConfig Config
|
||||
err := d.request(http.MethodGet, "/site/config", nil, &siteConfig)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for i := 0; i < 5; i++ {
|
||||
err = d.doLogin(siteConfig.LoginCaptcha)
|
||||
if err == nil {
|
||||
break
|
||||
}
|
||||
if err != nil && err.Error() != "CAPTCHA not match." {
|
||||
break
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Cloudreve) doLogin(needCaptcha bool) error {
|
||||
var captchaCode string
|
||||
var err error
|
||||
if needCaptcha {
|
||||
var captcha string
|
||||
err = d.request(http.MethodGet, "/site/captcha", nil, &captcha)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if len(captcha) == 0 {
|
||||
return errors.New("can not get captcha")
|
||||
}
|
||||
i := strings.Index(captcha, ",")
|
||||
dec := base64.NewDecoder(base64.StdEncoding, strings.NewReader(captcha[i+1:]))
|
||||
vRes, err := base.RestyClient.R().SetMultipartField(
|
||||
"image", "validateCode.png", "image/png", dec).
|
||||
Post(setting.GetStr(conf.OcrApi))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
|
||||
return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
|
||||
}
|
||||
captchaCode = jsoniter.Get(vRes.Body(), "result").ToString()
|
||||
}
|
||||
var resp Resp
|
||||
err = d.request(http.MethodPost, loginPath, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"username": d.Addition.Username,
|
||||
"Password": d.Addition.Password,
|
||||
"captchaCode": "",
|
||||
"captchaCode": captchaCode,
|
||||
})
|
||||
}, nil)
|
||||
}, &resp)
|
||||
return err
|
||||
}
|
||||
|
||||
func convertSrc(obj model.Obj) map[string]interface{} {
|
||||
|
@@ -4,6 +4,7 @@ import (
	"context"
	stdpath "path"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
@@ -44,8 +45,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
		return nil, err
	}
	res := make([]model.Obj, 0)
	for i, _ := range entries {
		entry := entries[i]
	for _, entry := range entries {
		if entry.Name == "." || entry.Name == ".." {
			continue
		}
@@ -64,13 +64,13 @@ func (d *FTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
	if err := d.login(); err != nil {
		return nil, err
	}
	resp, err := d.conn.Retr(file.GetPath())
	if err != nil {
		return nil, err

	r := NewFTPFileReader(d.conn, file.GetPath())
	link := &model.Link{
		Data: r,
	}
	return &model.Link{
		Data: resp,
	}, nil
	base.HandleRange(link, r, args.Header, file.GetSize())
	return link, nil
}

func (d *FTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
@ -1,6 +1,13 @@
|
||||
package ftp
|
||||
|
||||
import "github.com/jlaffaye/ftp"
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/jlaffaye/ftp"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
@ -11,7 +18,7 @@ func (d *FTP) login() error {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
conn, err := ftp.Dial(d.Address)
|
||||
conn, err := ftp.Dial(d.Address, ftp.DialWithShutTimeout(10*time.Second))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -22,3 +29,81 @@ func (d *FTP) login() error {
|
||||
d.conn = conn
|
||||
return nil
|
||||
}
|
||||
|
||||
// An FTP file reader that implements io.ReadSeekCloser for seeking.
|
||||
type FTPFileReader struct {
|
||||
conn *ftp.ServerConn
|
||||
resp *ftp.Response
|
||||
offset int64
|
||||
mu sync.Mutex
|
||||
path string
|
||||
}
|
||||
|
||||
func NewFTPFileReader(conn *ftp.ServerConn, path string) *FTPFileReader {
|
||||
return &FTPFileReader{
|
||||
conn: conn,
|
||||
path: path,
|
||||
}
|
||||
}
|
||||
|
||||
func (r *FTPFileReader) Read(buf []byte) (n int, err error) {
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
if r.resp == nil {
|
||||
r.resp, err = r.conn.RetrFrom(r.path, uint64(r.offset))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
|
||||
n, err = r.resp.Read(buf)
|
||||
r.offset += int64(n)
|
||||
return
|
||||
}
|
||||
|
||||
func (r *FTPFileReader) Seek(offset int64, whence int) (int64, error) {
|
||||
r.mu.Lock()
|
||||
defer r.mu.Unlock()
|
||||
|
||||
oldOffset := r.offset
|
||||
var newOffset int64
|
||||
switch whence {
|
||||
case io.SeekStart:
|
||||
newOffset = offset
|
||||
case io.SeekCurrent:
|
||||
newOffset = oldOffset + offset
|
||||
case io.SeekEnd:
|
||||
size, err := r.conn.FileSize(r.path)
|
||||
if err != nil {
|
||||
return oldOffset, err
|
||||
}
|
||||
newOffset = offset + int64(size)
|
||||
default:
|
||||
return -1, os.ErrInvalid
|
||||
}
|
||||
|
||||
if newOffset < 0 {
|
||||
// offset out of range
|
||||
return oldOffset, os.ErrInvalid
|
||||
}
|
||||
if newOffset == oldOffset {
|
||||
// offset not changed, so return directly
|
||||
return oldOffset, nil
|
||||
}
|
||||
r.offset = newOffset
|
||||
|
||||
if r.resp != nil {
|
||||
// close the existing ftp data connection, otherwise the next read will be blocked
|
||||
_ = r.resp.Close() // we do not care about whether it returns an error
|
||||
r.resp = nil
|
||||
}
|
||||
return newOffset, nil
|
||||
}
|
||||
|
||||
func (r *FTPFileReader) Close() error {
|
||||
if r.resp != nil {
|
||||
return r.resp.Close()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@@ -17,7 +17,9 @@ import (
type GoogleDrive struct {
	model.Storage
	Addition
	AccessToken string
	AccessToken            string
	ServiceAccountFile     int
	ServiceAccountFileList []string
}

func (d *GoogleDrive) Config() driver.Config {
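The ServiceAccountFile fields feed the refreshToken change that follows: it reads a Google service-account JSON file, signs a JWT assertion with the account's private key, and posts it with grant_type urn:ietf:params:oauth:grant-type:jwt-bearer to obtain an access token. A compact sketch of just the signing step, using the same golang-jwt/v4 calls; buildAssertion and its arguments are illustrative names:

package gdsa

import (
    "crypto/x509"
    "encoding/pem"
    "fmt"
    "time"

    "github.com/golang-jwt/jwt/v4"
)

// buildAssertion signs the JWT bearer assertion that refreshToken exchanges
// for an access token. keyPEM, email and tokenURI come from the
// service-account JSON file.
func buildAssertion(keyPEM, email, tokenURI, scope string) (string, error) {
    block, _ := pem.Decode([]byte(keyPEM))
    if block == nil {
        return "", fmt.Errorf("no PEM block in private_key")
    }
    key, err := x509.ParsePKCS8PrivateKey(block.Bytes)
    if err != nil {
        return "", err
    }
    now := time.Now()
    token := jwt.NewWithClaims(jwt.SigningMethodRS256, jwt.MapClaims{
        "iss":   email,
        "scope": scope,
        "aud":   tokenURI,
        "iat":   now.Unix(),
        "exp":   now.Add(time.Hour).Unix(),
    })
    return token.SignedString(key)
}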
@ -2,21 +2,134 @@ package google_drive
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/x509"
|
||||
"encoding/pem"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"github.com/golang-jwt/jwt/v4"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
type googleDriveServiceAccount struct {
|
||||
//Type string `json:"type"`
|
||||
//ProjectID string `json:"project_id"`
|
||||
//PrivateKeyID string `json:"private_key_id"`
|
||||
PrivateKey string `json:"private_key"`
|
||||
ClientEMail string `json:"client_email"`
|
||||
//ClientID string `json:"client_id"`
|
||||
//AuthURI string `json:"auth_uri"`
|
||||
TokenURI string `json:"token_uri"`
|
||||
//AuthProviderX509CertURL string `json:"auth_provider_x509_cert_url"`
|
||||
//ClientX509CertURL string `json:"client_x509_cert_url"`
|
||||
}
|
||||
|
||||
func (d *GoogleDrive) refreshToken() error {
|
||||
// googleDriveServiceAccountFile gdsaFile
|
||||
gdsaFile, gdsaFileErr := os.Stat(d.RefreshToken)
|
||||
if gdsaFileErr == nil {
|
||||
gdsaFileThis := d.RefreshToken
|
||||
if gdsaFile.IsDir() {
|
||||
if len(d.ServiceAccountFileList) <= 0 {
|
||||
gdsaReadDir, gdsaDirErr := ioutil.ReadDir(d.RefreshToken)
|
||||
if gdsaDirErr != nil {
|
||||
log.Error("read dir fail")
|
||||
return gdsaDirErr
|
||||
}
|
||||
var gdsaFileList []string
|
||||
for _, fi := range gdsaReadDir {
|
||||
if !fi.IsDir() {
|
||||
match, _ := regexp.MatchString("^.*\\.json$", fi.Name())
|
||||
if !match {
|
||||
continue
|
||||
}
|
||||
gdsaDirText := d.RefreshToken
|
||||
if d.RefreshToken[len(d.RefreshToken)-1:] != "/" {
|
||||
gdsaDirText = d.RefreshToken + "/"
|
||||
}
|
||||
gdsaFileList = append(gdsaFileList, gdsaDirText+fi.Name())
|
||||
}
|
||||
}
|
||||
d.ServiceAccountFileList = gdsaFileList
|
||||
gdsaFileThis = d.ServiceAccountFileList[d.ServiceAccountFile]
|
||||
d.ServiceAccountFile++
|
||||
} else {
|
||||
if d.ServiceAccountFile < len(d.ServiceAccountFileList) {
|
||||
d.ServiceAccountFile++
|
||||
} else {
|
||||
d.ServiceAccountFile = 0
|
||||
}
|
||||
gdsaFileThis = d.ServiceAccountFileList[d.ServiceAccountFile]
|
||||
}
|
||||
}
|
||||
|
||||
gdsaFileThisContent, err := ioutil.ReadFile(gdsaFileThis)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Now let's unmarshal the data into `payload`
|
||||
var jsonData googleDriveServiceAccount
|
||||
err = utils.Json.Unmarshal(gdsaFileThisContent, &jsonData)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
gdsaScope := "https://www.googleapis.com/auth/drive https://www.googleapis.com/auth/drive.appdata https://www.googleapis.com/auth/drive.file https://www.googleapis.com/auth/drive.metadata https://www.googleapis.com/auth/drive.metadata.readonly https://www.googleapis.com/auth/drive.readonly https://www.googleapis.com/auth/drive.scripts"
|
||||
|
||||
timeNow := time.Now()
|
||||
var timeStart int64 = timeNow.Unix()
|
||||
var timeEnd int64 = timeNow.Add(time.Minute * 60).Unix()
|
||||
|
||||
// load private key from string
|
||||
privateKeyPem, _ := pem.Decode([]byte(jsonData.PrivateKey))
|
||||
privateKey, _ := x509.ParsePKCS8PrivateKey(privateKeyPem.Bytes)
|
||||
|
||||
jwtToken := jwt.NewWithClaims(jwt.SigningMethodRS256,
|
||||
jwt.MapClaims{
|
||||
"iss": jsonData.ClientEMail,
|
||||
"scope": gdsaScope,
|
||||
"aud": jsonData.TokenURI,
|
||||
"exp": timeEnd,
|
||||
"iat": timeStart,
|
||||
})
|
||||
assertion, err := jwtToken.SignedString(privateKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
var resp base.TokenResp
|
||||
var e TokenError
|
||||
res, err := base.RestyClient.R().SetResult(&resp).SetError(&e).
|
||||
SetFormData(map[string]string{
|
||||
"assertion": assertion,
|
||||
"grant_type": "urn:ietf:params:oauth:grant-type:jwt-bearer",
|
||||
}).Post(jsonData.TokenURI)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Debug(res.String())
|
||||
if e.Error != "" {
|
||||
return fmt.Errorf(e.Error)
|
||||
}
|
||||
d.AccessToken = resp.AccessToken
|
||||
return nil
|
||||
}
|
||||
if gdsaFileErr != nil && os.IsExist(gdsaFileErr) {
|
||||
return gdsaFileErr
|
||||
}
|
||||
url := "https://www.googleapis.com/oauth2/v4/token"
|
||||
var resp base.TokenResp
|
||||
var e TokenError
|
||||
|
128
drivers/ipfs_api/driver.go
Normal file
128
drivers/ipfs_api/driver.go
Normal file
@ -0,0 +1,128 @@
|
||||
package ipfs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
stdpath "path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
shell "github.com/ipfs/go-ipfs-api"
|
||||
)
|
||||
|
||||
type IPFS struct {
|
||||
model.Storage
|
||||
Addition
|
||||
sh *shell.Shell
|
||||
gateURL *url.URL
|
||||
}
|
||||
|
||||
func (d *IPFS) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *IPFS) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *IPFS) Init(ctx context.Context) error {
|
||||
d.sh = shell.NewShell(d.Endpoint)
|
||||
gateURL, err := url.Parse(d.Gateway)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.gateURL = gateURL
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *IPFS) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
path := dir.GetPath()
|
||||
if path[len(path):] != "/" {
|
||||
path += "/"
|
||||
}
|
||||
|
||||
path_cid, err := d.sh.FilesStat(ctx, path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
dirs, err := d.sh.List(path_cid.Hash)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
objlist := []model.Obj{}
|
||||
for _, file := range dirs {
|
||||
gateurl := *d.gateURL
|
||||
gateurl.Path = "ipfs/" + file.Hash
|
||||
gateurl.RawQuery = "filename=" + file.Name
|
||||
objlist = append(objlist, &model.ObjectURL{
|
||||
Object: model.Object{ID: file.Hash, Name: file.Name, Size: int64(file.Size), IsFolder: file.Type == 1},
|
||||
Url: model.Url{Url: gateurl.String()},
|
||||
})
|
||||
}
|
||||
|
||||
return objlist, nil
|
||||
}
|
||||
|
||||
func (d *IPFS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + file.GetName()
|
||||
return &model.Link{URL: link}, nil
|
||||
}
|
||||
|
||||
func (d *IPFS) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
path := parentDir.GetPath()
|
||||
if path[len(path):] != "/" {
|
||||
path += "/"
|
||||
}
|
||||
return d.sh.FilesMkdir(ctx, path+dirName)
|
||||
}
|
||||
|
||||
func (d *IPFS) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
return d.sh.FilesMv(ctx, srcObj.GetPath(), dstDir.GetPath())
|
||||
}
|
||||
|
||||
func (d *IPFS) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
newFileName := filepath.Dir(srcObj.GetPath()) + "/" + newName
|
||||
return d.sh.FilesMv(ctx, srcObj.GetPath(), strings.ReplaceAll(newFileName, "\\", "/"))
|
||||
}
|
||||
|
||||
func (d *IPFS) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
// TODO copy obj, optional
|
||||
fmt.Println(srcObj.GetPath())
|
||||
fmt.Println(dstDir.GetPath())
|
||||
newFileName := dstDir.GetPath() + "/" + filepath.Base(srcObj.GetPath())
|
||||
fmt.Println(newFileName)
|
||||
return d.sh.FilesCp(ctx, srcObj.GetPath(), strings.ReplaceAll(newFileName, "\\", "/"))
|
||||
}
|
||||
|
||||
func (d *IPFS) Remove(ctx context.Context, obj model.Obj) error {
|
||||
// TODO remove obj, optional
|
||||
return d.sh.FilesRm(ctx, obj.GetPath(), true)
|
||||
}
|
||||
|
||||
func (d *IPFS) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
// TODO upload file, optional
|
||||
_, err := d.sh.Add(stream, ToFiles(stdpath.Join(dstDir.GetPath(), stream.GetName())))
|
||||
return err
|
||||
}
|
||||
|
||||
func ToFiles(dstDir string) shell.AddOpts {
|
||||
return func(rb *shell.RequestBuilder) error {
|
||||
rb.Option("to-files", dstDir)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
// return nil, errs.NotSupport
|
||||
//}
|
||||
|
||||
var _ driver.Driver = (*IPFS)(nil)
|
25 drivers/ipfs_api/meta.go Normal file
@@ -0,0 +1,25 @@
package ipfs

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	// Usually one of two
	driver.RootPath
	Endpoint string `json:"endpoint" default:"http://127.0.0.1:5001"`
	Gateway  string `json:"gateway" default:"https://ipfs.io"`
}

var config = driver.Config{
	Name:        "IPFS API",
	DefaultRoot: "/",
	LocalSort:   true,
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &IPFS{}
	})
}
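The IPFS driver above talks to a local node through go-ipfs-api: FilesStat resolves an MFS path to a CID and List enumerates the directory by that CID. A minimal standalone sketch of that lookup, assuming a daemon on the default API port:

package main

import (
    "context"
    "fmt"

    shell "github.com/ipfs/go-ipfs-api"
)

func main() {
    // matches the driver's default Endpoint
    sh := shell.NewShell("http://127.0.0.1:5001")

    // resolve the MFS root to a CID, then list the directory by CID,
    // mirroring what the driver's List does
    stat, err := sh.FilesStat(context.Background(), "/")
    if err != nil {
        panic(err)
    }
    entries, err := sh.List(stat.Hash)
    if err != nil {
        panic(err)
    }
    for _, e := range entries {
        fmt.Printf("%s  %s  %d bytes (type %d)\n", e.Hash, e.Name, e.Size, e.Type)
    }
}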
@@ -5,7 +5,6 @@ import (
	"fmt"
	"net/http"
	"regexp"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
@@ -15,8 +14,6 @@ import (
	"github.com/go-resty/resty/v2"
)

var upClient = base.NewRestyClient().SetTimeout(120 * time.Second)

type LanZou struct {
	Addition
	model.Storage
@@ -209,11 +206,11 @@ func (d *LanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
	var resp RespText[[]FileOrFolder]
	_, err := d._post(d.BaseUrl+"/fileup.php", func(req *resty.Request) {
		req.SetFormData(map[string]string{
			"task": "1",
			"vie":  "2",
			"ve":   "2",
			"id":   "WU_FILE_0",
			"name": stream.GetName(),
			"task":           "1",
			"vie":            "2",
			"ve":             "2",
			"id":             "WU_FILE_0",
			"name":           stream.GetName(),
			"folder_id_bb_n": dstDir.GetID(),
		}).SetFileReader("upload_file", stream.GetName(), stream).SetContext(ctx)
	}, &resp, true)

@@ -7,6 +7,7 @@ import (
	"regexp"
	"strconv"
	"strings"
	"sync"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
@@ -16,6 +17,9 @@ import (
	log "github.com/sirupsen/logrus"
)

var upClient *resty.Client
var once sync.Once

func (d *LanZou) doupload(callback base.ReqCallback, resp interface{}) ([]byte, error) {
	return d.post(d.BaseUrl+"/doupload.php", func(req *resty.Request) {
		req.SetQueryParam("uid", d.uid)
@@ -64,6 +68,9 @@ func (d *LanZou) _post(url string, callback base.ReqCallback, resp interface{},
func (d *LanZou) request(url string, method string, callback base.ReqCallback, up bool) ([]byte, error) {
	var req *resty.Request
	if up {
		once.Do(func() {
			upClient = base.NewRestyClient().SetTimeout(120 * time.Second)
		})
		req = upClient.R()
	} else {
		req = base.RestyClient.R()
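The LanZou change drops the package-level upClient, which was built as soon as the package loaded, and instead creates it lazily behind a sync.Once, so the 120-second upload client only exists once an upload actually runs. The pattern in isolation, with plain net/http standing in for resty:

package main

import (
    "fmt"
    "net/http"
    "sync"
    "time"
)

var (
    uploadClient *http.Client
    once         sync.Once
)

// getUploadClient builds the long-timeout client on first use only,
// the same pattern the LanZou driver now uses for upClient.
func getUploadClient() *http.Client {
    once.Do(func() {
        fmt.Println("building upload client")
        uploadClient = &http.Client{Timeout: 120 * time.Second}
    })
    return uploadClient
}

func main() {
    a := getUploadClient()
    b := getUploadClient() // no second construction
    fmt.Println("same instance:", a == b)
}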
@ -1,12 +1,10 @@
|
||||
package local
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
stdpath "path"
|
||||
@ -21,13 +19,13 @@ import (
|
||||
"github.com/alist-org/alist/v3/internal/sign"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
"github.com/disintegration/imaging"
|
||||
_ "golang.org/x/image/webp"
|
||||
)
|
||||
|
||||
type Local struct {
|
||||
model.Storage
|
||||
Addition
|
||||
mkdirPerm int32
|
||||
}
|
||||
|
||||
func (d *Local) Config() driver.Config {
|
||||
@ -35,6 +33,15 @@ func (d *Local) Config() driver.Config {
|
||||
}
|
||||
|
||||
func (d *Local) Init(ctx context.Context) error {
|
||||
if d.MkdirPerm == "" {
|
||||
d.mkdirPerm = 0777
|
||||
} else {
|
||||
v, err := strconv.ParseUint(d.MkdirPerm, 8, 32)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.mkdirPerm = int32(v)
|
||||
}
|
||||
if !utils.Exists(d.GetRootPath()) {
|
||||
return fmt.Errorf("root folder %s not exists", d.GetRootPath())
|
||||
}
|
||||
@ -45,6 +52,12 @@ func (d *Local) Init(ctx context.Context) error {
|
||||
}
|
||||
d.Addition.RootFolderPath = abs
|
||||
}
|
||||
if d.ThumbCacheFolder != "" && !utils.Exists(d.ThumbCacheFolder) {
|
||||
err := os.MkdirAll(d.ThumbCacheFolder, os.FileMode(d.mkdirPerm))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -58,7 +71,7 @@ func (d *Local) GetAddition() driver.Additional {
|
||||
|
||||
func (d *Local) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
fullPath := dir.GetPath()
|
||||
rawFiles, err := ioutil.ReadDir(fullPath)
|
||||
rawFiles, err := readDir(fullPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -68,10 +81,13 @@ func (d *Local) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
||||
continue
|
||||
}
|
||||
thumb := ""
|
||||
if d.Thumbnail && utils.GetFileType(f.Name()) == conf.IMAGE {
|
||||
thumb = common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, f.Name())
|
||||
thumb = utils.EncodePath(thumb, true)
|
||||
thumb += "?type=thumb&sign=" + sign.Sign(stdpath.Join(args.ReqPath, f.Name()))
|
||||
if d.Thumbnail {
|
||||
typeName := utils.GetFileType(f.Name())
|
||||
if typeName == conf.IMAGE || typeName == conf.VIDEO {
|
||||
thumb = common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, f.Name())
|
||||
thumb = utils.EncodePath(thumb, true)
|
||||
thumb += "?type=thumb&sign=" + sign.Sign(stdpath.Join(args.ReqPath, f.Name()))
|
||||
}
|
||||
}
|
||||
isFolder := f.IsDir() || isSymlinkDir(f, fullPath)
|
||||
var size int64
|
||||
@ -123,25 +139,18 @@ func (d *Local) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
fullPath := file.GetPath()
|
||||
var link model.Link
|
||||
if args.Type == "thumb" && utils.Ext(file.GetName()) != "svg" {
|
||||
imgData, err := ioutil.ReadFile(fullPath)
|
||||
buf, thumbPath, err := d.getThumb(file)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
srcBuf := bytes.NewBuffer(imgData)
|
||||
image, err := imaging.Decode(srcBuf)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
thumbImg := imaging.Resize(image, 144, 0, imaging.Lanczos)
|
||||
var buf bytes.Buffer
|
||||
err = imaging.Encode(&buf, thumbImg, imaging.PNG)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
size := buf.Len()
|
||||
link.Data = io.NopCloser(&buf)
|
||||
link.Header = http.Header{
|
||||
"Content-Length": []string{strconv.Itoa(size)},
|
||||
"Content-Type": []string{"image/png"},
|
||||
}
|
||||
if thumbPath != nil {
|
||||
link.FilePath = thumbPath
|
||||
} else {
|
||||
link.Data = io.NopCloser(buf)
|
||||
link.Header.Set("Content-Length", strconv.Itoa(buf.Len()))
|
||||
}
|
||||
} else {
|
||||
link.FilePath = &fullPath
|
||||
@ -151,7 +160,7 @@ func (d *Local) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
|
||||
func (d *Local) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
fullPath := filepath.Join(parentDir.GetPath(), dirName)
|
||||
err := os.MkdirAll(fullPath, 0777)
|
||||
err := os.MkdirAll(fullPath, os.FileMode(d.mkdirPerm))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
@@ -7,8 +7,10 @@ import (

type Addition struct {
	driver.RootPath
	Thumbnail  bool `json:"thumbnail" required:"true" help:"enable thumbnail"`
	ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
	Thumbnail        bool   `json:"thumbnail" required:"true" help:"enable thumbnail"`
	ThumbCacheFolder string `json:"thumb_cache_folder"`
	ShowHidden       bool   `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
	MkdirPerm        string `json:"mkdir_perm" default:"777"`
}

var config = driver.Config{
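MkdirPerm is a plain string such as "777"; Local.Init parses it as octal via strconv.ParseUint(s, 8, 32) and feeds the result to os.MkdirAll. A stdlib sketch of that conversion (the cache path in main is hypothetical):

package main

import (
    "fmt"
    "os"
    "strconv"
)

// parsePerm turns a string such as "755" into an os.FileMode, defaulting to
// 0777 when empty, the same way Local.Init treats MkdirPerm.
func parsePerm(s string) (os.FileMode, error) {
    if s == "" {
        return 0777, nil
    }
    v, err := strconv.ParseUint(s, 8, 32) // base 8: "755" -> 0o755
    if err != nil {
        return 0, err
    }
    return os.FileMode(v), nil
}

func main() {
    perm, err := parsePerm("755")
    if err != nil {
        panic(err)
    }
    fmt.Printf("mode: %o\n", perm)
    // the driver then uses it like this (hypothetical path):
    _ = os.MkdirAll("/tmp/alist-thumb-cache", perm)
}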
@ -1,9 +1,19 @@
|
||||
package local
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/disintegration/imaging"
|
||||
ffmpeg "github.com/u2takey/ffmpeg-go"
|
||||
)
|
||||
|
||||
func isSymlinkDir(f fs.FileInfo, path string) bool {
|
||||
@ -23,3 +33,79 @@ func isSymlinkDir(f fs.FileInfo, path string) bool {
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func GetSnapshot(videoPath string, frameNum int) (imgData *bytes.Buffer, err error) {
|
||||
srcBuf := bytes.NewBuffer(nil)
|
||||
err = ffmpeg.Input(videoPath).Filter("select", ffmpeg.Args{fmt.Sprintf("gte(n,%d)", frameNum)}).
|
||||
Output("pipe:", ffmpeg.KwArgs{"vframes": 1, "format": "image2", "vcodec": "mjpeg"}).
|
||||
WithOutput(srcBuf, os.Stdout).
|
||||
Run()
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcBuf, nil
|
||||
}
|
||||
|
||||
func readDir(dirname string) ([]fs.FileInfo, error) {
|
||||
f, err := os.Open(dirname)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
list, err := f.Readdir(-1)
|
||||
f.Close()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sort.Slice(list, func(i, j int) bool { return list[i].Name() < list[j].Name() })
|
||||
return list, nil
|
||||
}
|
||||
|
||||
func (d *Local) getThumb(file model.Obj) (*bytes.Buffer, *string, error) {
|
||||
fullPath := file.GetPath()
|
||||
thumbPrefix := "alist_thumb_"
|
||||
thumbName := thumbPrefix + utils.GetMD5Encode(fullPath) + ".png"
|
||||
if d.ThumbCacheFolder != "" {
|
||||
// skip if the file is a thumbnail
|
||||
if strings.HasPrefix(file.GetName(), thumbPrefix) {
|
||||
return nil, &fullPath, nil
|
||||
}
|
||||
thumbPath := filepath.Join(d.ThumbCacheFolder, thumbName)
|
||||
if utils.Exists(thumbPath) {
|
||||
return nil, &thumbPath, nil
|
||||
}
|
||||
}
|
||||
var srcBuf *bytes.Buffer
|
||||
if utils.GetFileType(file.GetName()) == conf.VIDEO {
|
||||
videoBuf, err := GetSnapshot(fullPath, 10)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
srcBuf = videoBuf
|
||||
} else {
|
||||
imgData, err := os.ReadFile(fullPath)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
imgBuf := bytes.NewBuffer(imgData)
|
||||
srcBuf = imgBuf
|
||||
}
|
||||
|
||||
image, err := imaging.Decode(srcBuf)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
thumbImg := imaging.Resize(image, 144, 0, imaging.Lanczos)
|
||||
var buf bytes.Buffer
|
||||
err = imaging.Encode(&buf, thumbImg, imaging.PNG)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if d.ThumbCacheFolder != "" {
|
||||
err = os.WriteFile(filepath.Join(d.ThumbCacheFolder, thumbName), buf.Bytes(), 0666)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
}
|
||||
return &buf, nil, nil
|
||||
}
|
||||
|
@ -4,6 +4,7 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"path"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
@ -75,9 +76,19 @@ func (d *Onedrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName str
|
||||
}
|
||||
|
||||
func (d *Onedrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
parentPath := ""
|
||||
if dstDir.GetID() == "" {
|
||||
parentPath = dstDir.GetPath()
|
||||
if utils.PathEqual(parentPath, "/") {
|
||||
parentPath = path.Join("/drive/root", parentPath)
|
||||
} else {
|
||||
parentPath = path.Join("/drive/root:/", parentPath)
|
||||
}
|
||||
}
|
||||
data := base.Json{
|
||||
"parentReference": base.Json{
|
||||
"id": dstDir.GetID(),
|
||||
"id": dstDir.GetID(),
|
||||
"path": parentPath,
|
||||
},
|
||||
"name": srcObj.GetName(),
|
||||
}
|
||||
@ -89,13 +100,15 @@ func (d *Onedrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *Onedrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
//dstDir, err := op.GetUnwrap(ctx, d, stdpath.Dir(srcObj.GetPath()))
|
||||
var parentID string
|
||||
if o, ok := srcObj.(*Object); ok {
|
||||
parentID = o.ParentID
|
||||
} else {
|
||||
return fmt.Errorf("srcObj is not Object")
|
||||
}
|
||||
if parentID == "" {
|
||||
parentID = "root"
|
||||
}
|
||||
data := base.Json{
|
||||
"parentReference": base.Json{
|
||||
"id": parentID,
|
||||
|
@@ -11,7 +11,7 @@ type Addition struct {
	IsSharepoint bool   `json:"is_sharepoint"`
	ClientID     string `json:"client_id" required:"true"`
	ClientSecret string `json:"client_secret" required:"true"`
	RedirectUri  string `json:"redirect_uri" required:"true" default:"https://tool.nn.ci/onedrive/callback"`
	RedirectUri  string `json:"redirect_uri" required:"true" default:"https://alist.nn.ci/tool/onedrive/callback"`
	RefreshToken string `json:"refresh_token" required:"true"`
	SiteId       string `json:"site_id"`
	ChunkSize    int64  `json:"chunk_size" type:"number" default:"5"`

@@ -193,6 +193,9 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
	req.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, stream.GetSize()))
	finish += byteSize
	res, err := base.HttpClient.Do(req)
	if err != nil {
		return err
	}
	if res.StatusCode != 201 && res.StatusCode != 202 {
		data, _ := io.ReadAll(res.Body)
		res.Body.Close()
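The upBig hunk above drives a resumable upload session: each PUT carries a Content-Range of the form bytes start-end/total, and any response other than 201 or 202 aborts the upload. A stdlib sketch of how those ranges are produced for a given file and chunk size:

package main

import "fmt"

// chunkRanges returns the Content-Range values a resumable upload session
// sends for a file of the given size, mirroring the loop in upBig: each chunk
// covers bytes [finish, finish+byteSize-1] out of the total size.
func chunkRanges(size, chunk int64) []string {
    var out []string
    for finish := int64(0); finish < size; {
        byteSize := chunk
        if left := size - finish; left < chunk {
            byteSize = left
        }
        out = append(out, fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, size))
        finish += byteSize
    }
    return out
}

func main() {
    // 12 MiB file uploaded in 5 MiB chunks (the driver's default ChunkSize)
    for _, r := range chunkRanges(12*1024*1024, 5*1024*1024) {
        fmt.Println(r)
    }
}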
160
drivers/onedrive_app/driver.go
Normal file
160
drivers/onedrive_app/driver.go
Normal file
@ -0,0 +1,160 @@
|
||||
package onedrive_app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"path"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
type OnedriveAPP struct {
|
||||
model.Storage
|
||||
Addition
|
||||
AccessToken string
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Init(ctx context.Context) error {
|
||||
if d.ChunkSize < 1 {
|
||||
d.ChunkSize = 5
|
||||
}
|
||||
return d.accessToken()
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.getFiles(dir.GetPath())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return fileToObj(src, dir.GetID()), nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
f, err := d.GetFile(file.GetPath())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if f.File == nil {
|
||||
return nil, errs.NotFile
|
||||
}
|
||||
return &model.Link{
|
||||
URL: f.Url,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
url := d.GetMetaUrl(false, parentDir.GetPath()) + "/children"
|
||||
data := base.Json{
|
||||
"name": dirName,
|
||||
"folder": base.Json{},
|
||||
"@microsoft.graph.conflictBehavior": "rename",
|
||||
}
|
||||
_, err := d.Request(url, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
parentPath := ""
|
||||
if dstDir.GetID() == "" {
|
||||
parentPath = dstDir.GetPath()
|
||||
if utils.PathEqual(parentPath, "/") {
|
||||
parentPath = path.Join("/drive/root", parentPath)
|
||||
} else {
|
||||
parentPath = path.Join("/drive/root:/", parentPath)
|
||||
}
|
||||
}
|
||||
data := base.Json{
|
||||
"parentReference": base.Json{
|
||||
"id": dstDir.GetID(),
|
||||
"path": parentPath,
|
||||
},
|
||||
"name": srcObj.GetName(),
|
||||
}
|
||||
url := d.GetMetaUrl(false, srcObj.GetPath())
|
||||
_, err := d.Request(url, http.MethodPatch, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
var parentID string
|
||||
if o, ok := srcObj.(*Object); ok {
|
||||
parentID = o.ParentID
|
||||
} else {
|
||||
return fmt.Errorf("srcObj is not Object")
|
||||
}
|
||||
if parentID == "" {
|
||||
parentID = "root"
|
||||
}
|
||||
data := base.Json{
|
||||
"parentReference": base.Json{
|
||||
"id": parentID,
|
||||
},
|
||||
"name": newName,
|
||||
}
|
||||
url := d.GetMetaUrl(false, srcObj.GetPath())
|
||||
_, err := d.Request(url, http.MethodPatch, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
dst, err := d.GetFile(dstDir.GetPath())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
data := base.Json{
|
||||
"parentReference": base.Json{
|
||||
"driveId": dst.ParentReference.DriveId,
|
||||
"id": dst.Id,
|
||||
},
|
||||
"name": srcObj.GetName(),
|
||||
}
|
||||
url := d.GetMetaUrl(false, srcObj.GetPath()) + "/copy"
|
||||
_, err = d.Request(url, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Remove(ctx context.Context, obj model.Obj) error {
|
||||
url := d.GetMetaUrl(false, obj.GetPath())
|
||||
_, err := d.Request(url, http.MethodDelete, nil, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
var err error
|
||||
if stream.GetSize() <= 4*1024*1024 {
|
||||
err = d.upSmall(ctx, dstDir, stream)
|
||||
} else {
|
||||
err = d.upBig(ctx, dstDir, stream, up)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*OnedriveAPP)(nil)
|
28
drivers/onedrive_app/meta.go
Normal file
28
drivers/onedrive_app/meta.go
Normal file
@ -0,0 +1,28 @@
|
||||
package onedrive_app
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootPath
|
||||
Region string `json:"region" type:"select" required:"true" options:"global,cn,us,de" default:"global"`
|
||||
ClientID string `json:"client_id" required:"true"`
|
||||
ClientSecret string `json:"client_secret" required:"true"`
|
||||
TenantID string `json:"tenant_id"`
|
||||
Email string `json:"email"`
|
||||
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
Name: "OnedriveAPP",
|
||||
LocalSort: true,
|
||||
DefaultRoot: "/",
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &OnedriveAPP{}
|
||||
})
|
||||
}
|
74
drivers/onedrive_app/types.go
Normal file
74
drivers/onedrive_app/types.go
Normal file
@ -0,0 +1,74 @@
|
||||
package onedrive_app
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type Host struct {
|
||||
Oauth string
|
||||
Api string
|
||||
}
|
||||
|
||||
type TokenErr struct {
|
||||
Error string `json:"error"`
|
||||
ErrorDescription string `json:"error_description"`
|
||||
}
|
||||
|
||||
type RespErr struct {
|
||||
Error struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
} `json:"error"`
|
||||
}
|
||||
|
||||
type File struct {
|
||||
Id string `json:"id"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
LastModifiedDateTime time.Time `json:"lastModifiedDateTime"`
|
||||
Url string `json:"@microsoft.graph.downloadUrl"`
|
||||
File *struct {
|
||||
MimeType string `json:"mimeType"`
|
||||
} `json:"file"`
|
||||
Thumbnails []struct {
|
||||
Medium struct {
|
||||
Url string `json:"url"`
|
||||
} `json:"medium"`
|
||||
} `json:"thumbnails"`
|
||||
ParentReference struct {
|
||||
DriveId string `json:"driveId"`
|
||||
} `json:"parentReference"`
|
||||
}
|
||||
|
||||
type Object struct {
|
||||
model.ObjThumb
|
||||
ParentID string
|
||||
}
|
||||
|
||||
func fileToObj(f File, parentID string) *Object {
|
||||
thumb := ""
|
||||
if len(f.Thumbnails) > 0 {
|
||||
thumb = f.Thumbnails[0].Medium.Url
|
||||
}
|
||||
return &Object{
|
||||
ObjThumb: model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: f.Id,
|
||||
Name: f.Name,
|
||||
Size: f.Size,
|
||||
Modified: f.LastModifiedDateTime,
|
||||
IsFolder: f.File == nil,
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: thumb},
|
||||
//Url: model.Url{Url: f.Url},
|
||||
},
|
||||
ParentID: parentID,
|
||||
}
|
||||
}
|
||||
|
||||
type Files struct {
|
||||
Value []File `json:"value"`
|
||||
NextLink string `json:"@odata.nextLink"`
|
||||
}
|
199
drivers/onedrive_app/util.go
Normal file
199
drivers/onedrive_app/util.go
Normal file
@ -0,0 +1,199 @@
|
||||
package onedrive_app
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
stdpath "path"
|
||||
"strconv"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
var onedriveHostMap = map[string]Host{
|
||||
"global": {
|
||||
Oauth: "https://login.microsoftonline.com",
|
||||
Api: "https://graph.microsoft.com",
|
||||
},
|
||||
"cn": {
|
||||
Oauth: "https://login.chinacloudapi.cn",
|
||||
Api: "https://microsoftgraph.chinacloudapi.cn",
|
||||
},
|
||||
"us": {
|
||||
Oauth: "https://login.microsoftonline.us",
|
||||
Api: "https://graph.microsoft.us",
|
||||
},
|
||||
"de": {
|
||||
Oauth: "https://login.microsoftonline.de",
|
||||
Api: "https://graph.microsoft.de",
|
||||
},
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) GetMetaUrl(auth bool, path string) string {
|
||||
host, _ := onedriveHostMap[d.Region]
|
||||
path = utils.EncodePath(path, true)
|
||||
if auth {
|
||||
return host.Oauth
|
||||
}
|
||||
if path == "/" || path == "\\" {
|
||||
return fmt.Sprintf("%s/v1.0/users/%s/drive/root", host.Api, d.Email)
|
||||
}
|
||||
return fmt.Sprintf("%s/v1.0/users/%s/drive/root:%s:", host.Api, d.Email, path)
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) accessToken() error {
|
||||
var err error
|
||||
for i := 0; i < 3; i++ {
|
||||
err = d._accessToken()
|
||||
if err == nil {
|
||||
break
|
||||
}
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) _accessToken() error {
|
||||
url := d.GetMetaUrl(true, "") + "/" + d.TenantID + "/oauth2/token"
|
||||
var resp base.TokenResp
|
||||
var e TokenErr
|
||||
_, err := base.RestyClient.R().SetResult(&resp).SetError(&e).SetFormData(map[string]string{
|
||||
"grant_type": "client_credentials",
|
||||
"client_id": d.ClientID,
|
||||
"client_secret": d.ClientSecret,
|
||||
"resource": "https://graph.microsoft.com/",
|
||||
"scope": "https://graph.microsoft.com/.default",
|
||||
}).Post(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if e.Error != "" {
|
||||
return fmt.Errorf("%s", e.ErrorDescription)
|
||||
}
|
||||
if resp.AccessToken == "" {
|
||||
return errs.EmptyToken
|
||||
}
|
||||
d.AccessToken = resp.AccessToken
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if resp != nil {
|
||||
req.SetResult(resp)
|
||||
}
|
||||
var e RespErr
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if e.Error.Code != "" {
|
||||
if e.Error.Code == "InvalidAuthenticationToken" {
|
||||
err = d.accessToken()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.Request(url, method, callback, resp)
|
||||
}
|
||||
return nil, errors.New(e.Error.Message)
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) getFiles(path string) ([]File, error) {
|
||||
var res []File
|
||||
nextLink := d.GetMetaUrl(false, path) + "/children?$top=5000&$expand=thumbnails($select=medium)&$select=id,name,size,lastModifiedDateTime,content.downloadUrl,file,parentReference"
|
||||
for nextLink != "" {
|
||||
var files Files
|
||||
_, err := d.Request(nextLink, http.MethodGet, nil, &files)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
res = append(res, files.Value...)
|
||||
nextLink = files.NextLink
|
||||
}
|
||||
return res, nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) GetFile(path string) (*File, error) {
|
||||
var file File
|
||||
u := d.GetMetaUrl(false, path)
|
||||
_, err := d.Request(u, http.MethodGet, nil, &file)
|
||||
return &file, err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) upSmall(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) error {
|
||||
url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/content"
|
||||
data, err := io.ReadAll(stream)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = d.Request(url, http.MethodPut, func(req *resty.Request) {
|
||||
req.SetBody(data).SetContext(ctx)
|
||||
}, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) upBig(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
url := d.GetMetaUrl(false, stdpath.Join(dstDir.GetPath(), stream.GetName())) + "/createUploadSession"
|
||||
res, err := d.Request(url, http.MethodPost, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
uploadUrl := jsoniter.Get(res, "uploadUrl").ToString()
|
||||
var finish int64 = 0
|
||||
DEFAULT := d.ChunkSize * 1024 * 1024
|
||||
for finish < stream.GetSize() {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
log.Debugf("upload: %d", finish)
|
||||
var byteSize int64 = DEFAULT
|
||||
left := stream.GetSize() - finish
|
||||
if left < DEFAULT {
|
||||
byteSize = left
|
||||
}
|
||||
byteData := make([]byte, byteSize)
|
||||
n, err := io.ReadFull(stream, byteData)
|
||||
log.Debug(err, n)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req, err := http.NewRequest("PUT", uploadUrl, bytes.NewBuffer(byteData))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Length", strconv.Itoa(int(byteSize)))
|
||||
req.Header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", finish, finish+byteSize-1, stream.GetSize()))
|
||||
finish += byteSize
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode != 201 && res.StatusCode != 202 {
|
||||
data, _ := io.ReadAll(res.Body)
|
||||
res.Body.Close()
|
||||
return errors.New(string(data))
|
||||
}
|
||||
res.Body.Close()
|
||||
up(int(finish * 100 / stream.GetSize()))
|
||||
}
|
||||
return nil
|
||||
}
|
@@ -8,6 +8,7 @@ import (
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
	log "github.com/sirupsen/logrus"
)

type PikPakShare struct {
@@ -71,10 +70,6 @@ func (d *PikPakShare) Link(ctx context.Context, file model.Obj, args model.LinkA
	link := model.Link{
		URL: resp.FileInfo.WebContentLink,
	}
	if len(resp.FileInfo.Medias) > 0 && resp.FileInfo.Medias[0].Link.Url != "" {
		log.Debugln("use media link")
		link.URL = resp.FileInfo.Medias[0].Link.Url
	}
	return &link, nil
}
@ -5,15 +5,18 @@ import (
|
||||
"crypto/md5"
|
||||
"crypto/sha1"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
@ -56,17 +59,66 @@ func (d *Quark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
"fids": []string{file.GetID()},
|
||||
}
|
||||
var resp DownResp
|
||||
ua := "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch"
|
||||
_, err := d.request("/file/download", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
req.SetHeader("User-Agent", ua).
|
||||
SetBody(data)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
u := resp.Data[0].DownloadUrl
|
||||
start, end := int64(0), file.GetSize()
|
||||
return &model.Link{
|
||||
URL: resp.Data[0].DownloadUrl,
|
||||
Header: http.Header{
|
||||
"Cookie": []string{d.Cookie},
|
||||
"Referer": []string{"https://pan.quark.cn"},
|
||||
Handle: func(w http.ResponseWriter, r *http.Request) error {
|
||||
if rg := r.Header.Get("Range"); rg != "" {
|
||||
parseRange, err := http_range.ParseRange(rg, file.GetSize())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
start, end = parseRange[0].Start, parseRange[0].Start+parseRange[0].Length
|
||||
w.Header().Set("Content-Range", parseRange[0].ContentRange(file.GetSize()))
|
||||
w.Header().Set("Content-Length", strconv.FormatInt(parseRange[0].Length, 10))
|
||||
w.WriteHeader(http.StatusPartialContent)
|
||||
} else {
|
||||
w.Header().Set("Content-Length", strconv.FormatInt(file.GetSize(), 10))
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
// request 10 MB at a time
|
||||
chunkSize := int64(10 * 1024 * 1024)
|
||||
for start < end {
|
||||
_end := start + chunkSize
|
||||
if _end > end {
|
||||
_end = end
|
||||
}
|
||||
_range := "bytes=" + strconv.FormatInt(start, 10) + "-" + strconv.FormatInt(_end-1, 10)
|
||||
start = _end
|
||||
err = func() error {
|
||||
req, err := http.NewRequest(r.Method, u, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Set("Range", _range)
|
||||
req.Header.Set("User-Agent", ua)
|
||||
req.Header.Set("Cookie", d.Cookie)
|
||||
req.Header.Set("Referer", "https://pan.quark.cn")
|
||||
resp, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
if resp.StatusCode != http.StatusPartialContent {
|
||||
return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
|
||||
}
|
||||
_, err = io.Copy(w, resp.Body)
|
||||
return err
|
||||
}()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
|
@@ -13,9 +13,10 @@ type Addition struct {
}

var config = driver.Config{
	Name:        "Quark",
	OnlyProxy:   true,
	DefaultRoot: "0",
	Name:              "Quark",
	OnlyLocal:         true,
	DefaultRoot:       "0",
	NoOverwriteUpload: true,
}

func init() {
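The Quark Link change above no longer hands the client a raw URL with Cookie and Referer headers; instead a Handle callback streams the file itself, fetching it from the download URL in 10 MB ranged requests with the cloud-drive User-Agent, Cookie and Referer attached. A stripped-down sketch of that chunked proxy loop (package and names are illustrative):

package chunkproxy

import (
    "fmt"
    "io"
    "net/http"
    "strconv"
)

// proxyInChunks pulls [start, end) from src in fixed-size ranged requests and
// writes each chunk to w, roughly what the driver's Handle callback does.
func proxyInChunks(w io.Writer, src string, start, end, chunk int64, hdr http.Header) error {
    for start < end {
        stop := start + chunk
        if stop > end {
            stop = end
        }
        req, err := http.NewRequest(http.MethodGet, src, nil)
        if err != nil {
            return err
        }
        for k, v := range hdr { // forward UA, Cookie, Referer, ...
            req.Header[k] = v
        }
        req.Header.Set("Range", "bytes="+strconv.FormatInt(start, 10)+"-"+strconv.FormatInt(stop-1, 10))
        resp, err := http.DefaultClient.Do(req)
        if err != nil {
            return err
        }
        if resp.StatusCode != http.StatusPartialContent {
            resp.Body.Close()
            return fmt.Errorf("unexpected status code: %d", resp.StatusCode)
        }
        if _, err = io.Copy(w, resp.Body); err != nil {
            resp.Body.Close()
            return err
        }
        resp.Body.Close()
        start = stop
    }
    return nil
}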
@@ -7,6 +7,7 @@ import (
	"io"
	"net/url"
	stdpath "path"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/internal/driver"
@@ -59,7 +60,8 @@ func (d *S3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]mo

func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	path := getKey(file.GetPath(), false)
	disposition := fmt.Sprintf(`attachment;filename="%s"`, url.QueryEscape(stdpath.Base(path)))
	filename := stdpath.Base(path)
	disposition := fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`, filename, url.PathEscape(filename))
	input := &s3.GetObjectInput{
		Bucket: &d.Bucket,
		Key:    &path,
@@ -74,6 +76,9 @@ func (d *S3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*mo
	if d.CustomHost != "" {
		err = req.Build()
		link = req.HTTPRequest.URL.String()
		if d.RemoveBucket {
			link = strings.Replace(link, "/"+d.Bucket, "", 1)
		}
	} else {
		link, err = req.Presign(time.Hour * time.Duration(d.SignURLExpire))
	}
@@ -127,6 +132,9 @@ func (d *S3) Remove(ctx context.Context, obj model.Obj) error {

func (d *S3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	uploader := s3manager.NewUploader(d.Session)
	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
	}
	key := getKey(stdpath.Join(dstDir.GetPath(), stream.GetName()), false)
	log.Debugln("key:", key)
	input := &s3manager.UploadInput{

@@ -17,6 +17,7 @@ type Addition struct {
	Placeholder       string `json:"placeholder"`
	ForcePathStyle    bool   `json:"force_path_style"`
	ListObjectVersion string `json:"list_object_version" type:"select" options:"v1,v2" default:"v1"`
	RemoveBucket      bool   `json:"remove_bucket" help:"Remove bucket name from path when using custom host."`
}

var config = driver.Config{
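The new Content-Disposition adds an RFC 5987 filename* parameter alongside the plain filename, so objects with non-ASCII names keep their name on download; url.PathEscape produces the percent-encoded UTF-8 form. A stdlib sketch:

package main

import (
    "fmt"
    "net/url"
    stdpath "path"
)

// contentDisposition builds the same header value the S3 driver now requests:
// a plain filename for older clients plus an RFC 5987 filename* for UTF-8.
func contentDisposition(key string) string {
    filename := stdpath.Base(key)
    return fmt.Sprintf(`attachment; filename="%s"; filename*=UTF-8''%s`,
        filename, url.PathEscape(filename))
}

func main() {
    fmt.Println(contentDisposition("docs/报告.pdf"))
    // attachment; filename="报告.pdf"; filename*=UTF-8''%E6%8A%A5%E5%91%8A.pdf
}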
@@ -5,6 +5,7 @@ import (
	"os"
	"path"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
@@ -52,9 +53,11 @@ func (d *SFTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*
	if err != nil {
		return nil, err
	}
	return &model.Link{
	link := &model.Link{
		Data: remoteFile,
	}, nil
	}
	base.HandleRange(link, remoteFile, args.Header, file.GetSize())
	return link, nil
}

func (d *SFTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
@@ -5,8 +5,8 @@ import (
	"errors"
	"path/filepath"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
@@ -15,10 +15,10 @@ import (
)

type SMB struct {
	lastConnTime int64
	model.Storage
	Addition
	fs *smb2.Share
	lastConnTime time.Time
	fs           *smb2.Share
}

func (d *SMB) Config() driver.Config {
@@ -79,10 +79,12 @@ func (d *SMB) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
		d.cleanLastConnTime()
		return nil, err
	}
	d.updateLastConnTime()
	return &model.Link{
	link := &model.Link{
		Data: remoteFile,
	}, nil
	}
	base.HandleRange(link, remoteFile, args.Header, file.GetSize())
	d.updateLastConnTime()
	return link, nil
}

func (d *SMB) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {

@@ -6,17 +6,22 @@ import (
	"net"
	"os"
	"path/filepath"
	"sync/atomic"
	"time"

	"github.com/hirochachacha/go-smb2"
)

func (d *SMB) updateLastConnTime() {
	d.lastConnTime = time.Now()
	atomic.StoreInt64(&d.lastConnTime, time.Now().Unix())
}

func (d *SMB) cleanLastConnTime() {
	d.lastConnTime = time.Now().AddDate(0, 0, -1)
	atomic.StoreInt64(&d.lastConnTime, 0)
}

func (d *SMB) getLastConnTime() time.Time {
	return time.Unix(atomic.LoadInt64(&d.lastConnTime), 0)
}

func (d *SMB) initFS() error {
@@ -43,7 +48,7 @@ func (d *SMB) initFS() error {
}

func (d *SMB) checkConn() error {
	if time.Since(d.lastConnTime) < 5*time.Minute {
	if time.Since(d.getLastConnTime()) < 5*time.Minute {
		return nil
	}
	if d.fs != nil {
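Storing lastConnTime as an int64 Unix timestamp and going through sync/atomic lets concurrent requests bump or read the marker without a mutex; checkConn then compares it against a 5-minute window. The same pattern in isolation:

package main

import (
    "fmt"
    "sync"
    "sync/atomic"
    "time"
)

type conn struct {
    lastUsed int64 // Unix seconds, always accessed via sync/atomic
}

func (c *conn) touch()          { atomic.StoreInt64(&c.lastUsed, time.Now().Unix()) }
func (c *conn) reset()          { atomic.StoreInt64(&c.lastUsed, 0) }
func (c *conn) last() time.Time { return time.Unix(atomic.LoadInt64(&c.lastUsed), 0) }

// stale reports whether the connection has been idle long enough to recheck,
// mirroring SMB.checkConn's 5-minute window.
func (c *conn) stale(window time.Duration) bool {
    return time.Since(c.last()) >= window
}

func main() {
    c := &conn{}
    var wg sync.WaitGroup
    for i := 0; i < 8; i++ { // concurrent touches are safe without a mutex
        wg.Add(1)
        go func() { defer wg.Done(); c.touch() }()
    }
    wg.Wait()
    fmt.Println("stale after 5m?", c.stale(5*time.Minute))
    c.reset()
}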
@@ -8,6 +8,7 @@ import (
	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/model"
	"github.com/alist-org/alist/v3/pkg/utils"
	"github.com/go-resty/resty/v2"
)

@@ -124,11 +125,14 @@ func (d *Teambition) Remove(ctx context.Context, obj model.Obj) error {
}

func (d *Teambition) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	res, err := d.request("/projects", http.MethodGet, nil, nil)
	if d.UseS3UploadMethod {
		return d.newUpload(ctx, dstDir, stream, up)
	}
	res, err := d.request("/api/v2/users/me", http.MethodGet, nil, nil)
	if err != nil {
		return err
	}
	token := GetBetweenStr(string(res), "strikerAuth\":\"", "\",\"phoneForLogin")
	token := utils.Json.Get(res, "strikerAuth").ToString()
	var newFile *FileUpload
	if stream.GetSize() <= 20971520 {
		// post upload

@@ -10,8 +10,9 @@ type Addition struct {
	Cookie    string `json:"cookie" required:"true"`
	ProjectID string `json:"project_id" required:"true"`
	driver.RootID
	OrderBy        string `json:"order_by" type:"select" options:"fileName,fileSize,updated,created" default:"fileName"`
	OrderDirection string `json:"order_direction" type:"select" options:"Asc,Desc" default:"Asc"`
	OrderBy           string `json:"order_by" type:"select" options:"fileName,fileSize,updated,created" default:"fileName"`
	OrderDirection    string `json:"order_direction" type:"select" options:"Asc,Desc" default:"Asc"`
	UseS3UploadMethod bool   `json:"use_s3_upload_method" default:"true"`
}

var config = driver.Config{
drivers/teambition/types.go

@@ -66,3 +66,24 @@ type ChunkUpload struct {
 	PreviewExt     string      `json:"previewExt"`
 	LastUploadTime interface{} `json:"lastUploadTime"`
 }
+
+type UploadToken struct {
+	Sdk struct {
+		Endpoint         string `json:"endpoint"`
+		Region           string `json:"region"`
+		S3ForcePathStyle bool   `json:"s3ForcePathStyle"`
+		Credentials      struct {
+			AccessKeyId     string `json:"accessKeyId"`
+			SecretAccessKey string `json:"secretAccessKey"`
+			SessionToken    string `json:"sessionToken"`
+		} `json:"credentials"`
+	} `json:"sdk"`
+	Upload struct {
+		Bucket             string `json:"Bucket"`
+		Key                string `json:"Key"`
+		ContentDisposition string `json:"ContentDisposition"`
+		ContentType        string `json:"ContentType"`
+	} `json:"upload"`
+	Token       string `json:"token"`
+	DownloadUrl string `json:"downloadUrl"`
+}
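The UploadToken struct mirrors the JSON returned by the upload-token endpoint. A small sketch of decoding such a response with encoding/json; the sample payload is invented, the field names follow the struct, and the driver itself feeds the response through its request helper instead:

package main

import (
	"encoding/json"
	"fmt"
)

// uploadToken is a trimmed copy of the struct above, enough to show the decode.
type uploadToken struct {
	Sdk struct {
		Endpoint string `json:"endpoint"`
		Region   string `json:"region"`
	} `json:"sdk"`
	Upload struct {
		Bucket string `json:"Bucket"`
		Key    string `json:"Key"`
	} `json:"upload"`
	Token string `json:"token"`
}

func main() {
	sample := []byte(`{"sdk":{"endpoint":"https://s3.example.com","region":"cn-north-1"},"upload":{"Bucket":"tb-files","Key":"work/abc"},"token":"t0k3n"}`)
	var tok uploadToken
	if err := json.Unmarshal(sample, &tok); err != nil {
		panic(err)
	}
	fmt.Println(tok.Sdk.Endpoint, tok.Upload.Bucket, tok.Token)
}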
drivers/teambition/upload.go

@@ -7,13 +7,16 @@ import (
 	"io"
 	"net/http"
 	"strconv"
-	"strings"
 	"time"

 	"github.com/alist-org/alist/v3/drivers/base"
 	"github.com/alist-org/alist/v3/internal/driver"
 	"github.com/alist-org/alist/v3/internal/model"
 	"github.com/alist-org/alist/v3/pkg/utils"
+	"github.com/aws/aws-sdk-go/aws"
+	"github.com/aws/aws-sdk-go/aws/credentials"
+	"github.com/aws/aws-sdk-go/aws/session"
+	"github.com/aws/aws-sdk-go/service/s3/s3manager"
 	"github.com/go-resty/resty/v2"
 	log "github.com/sirupsen/logrus"
 )
@@ -210,17 +213,56 @@ func (d *Teambition) finishUpload(file *FileUpload, parentId string) error {
 	return err
 }

-func GetBetweenStr(str, start, end string) string {
-	n := strings.Index(str, start)
-	if n == -1 {
-		return ""
-	}
-	n = n + len(start)
-	str = string([]byte(str)[n:])
-	m := strings.Index(str, end)
-	if m == -1 {
-		return ""
-	}
-	str = string([]byte(str)[:m])
-	return str
-}
+func (d *Teambition) newUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
+	var uploadToken UploadToken
+	_, err := d.request("/api/awos/upload-token", http.MethodPost, func(req *resty.Request) {
+		req.SetBody(base.Json{
+			"category": "work",
+			"fileName": stream.GetName(),
+			"fileSize": stream.GetSize(),
+			"fileType": stream.GetMimetype(),
+			"payload": base.Json{
+				"involveMembers": []struct{}{},
+				"visible":        "members",
+			},
+			"scope": "project:" + d.ProjectID,
+		})
+	}, &uploadToken)
+	if err != nil {
+		return err
+	}
+	cfg := &aws.Config{
+		Credentials: credentials.NewStaticCredentials(
+			uploadToken.Sdk.Credentials.AccessKeyId, uploadToken.Sdk.Credentials.SecretAccessKey, uploadToken.Sdk.Credentials.SessionToken),
+		Region:           &uploadToken.Sdk.Region,
+		Endpoint:         &uploadToken.Sdk.Endpoint,
+		S3ForcePathStyle: &uploadToken.Sdk.S3ForcePathStyle,
+	}
+	ss, err := session.NewSession(cfg)
+	if err != nil {
+		return err
+	}
+	uploader := s3manager.NewUploader(ss)
+	input := &s3manager.UploadInput{
+		Bucket:             &uploadToken.Upload.Bucket,
+		Key:                &uploadToken.Upload.Key,
+		ContentDisposition: &uploadToken.Upload.ContentDisposition,
+		ContentType:        &uploadToken.Upload.ContentType,
+		Body:               stream,
+	}
+	_, err = uploader.UploadWithContext(ctx, input)
+	if err != nil {
+		return err
+	}
+	// finish upload
+	_, err = d.request("/api/works", http.MethodPost, func(req *resty.Request) {
+		req.SetBody(base.Json{
+			"fileTokens":     []string{uploadToken.Token},
+			"involveMembers": []struct{}{},
+			"visible":        "members",
+			"works":          []struct{}{},
+			"_parentId":      dstDir.GetID(),
+		})
+	}, nil)
+	return err
+}
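For reference, a self-contained version of the S3 half of newUpload, with the values that would normally come from UploadToken replaced by placeholders. It uses the same aws-sdk-go v1 calls as the driver (static credentials, session.NewSession, an s3manager uploader); treat it as a sketch rather than a drop-in:

package main

import (
	"fmt"
	"strings"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"
)

func main() {
	// Build a session from temporary credentials; endpoint, region, bucket
	// and key below are placeholders, not real Teambition values.
	cfg := &aws.Config{
		Credentials:      credentials.NewStaticCredentials("AKID", "SECRET", "SESSION"),
		Region:           aws.String("cn-north-1"),
		Endpoint:         aws.String("https://s3.example.com"),
		S3ForcePathStyle: aws.Bool(true),
	}
	sess, err := session.NewSession(cfg)
	if err != nil {
		panic(err)
	}
	uploader := s3manager.NewUploader(sess)
	out, err := uploader.Upload(&s3manager.UploadInput{
		Bucket: aws.String("tb-files"),
		Key:    aws.String("work/example.txt"),
		Body:   strings.NewReader("hello"), // the driver streams the upload body here
	})
	if err != nil {
		panic(err)
	}
	fmt.Println(out.Location)
}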
drivers/template/driver.go

@@ -32,42 +32,42 @@ func (d *Template) Drop(ctx context.Context) error {
 }

 func (d *Template) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
-	// TODO return the files list
+	// TODO return the files list, required
 	return nil, errs.NotImplement
 }

 func (d *Template) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
-	// TODO return link of file
+	// TODO return link of file, required
 	return nil, errs.NotImplement
 }

 func (d *Template) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
-	// TODO create folder
+	// TODO create folder, optional
 	return errs.NotImplement
 }

 func (d *Template) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
-	// TODO move obj
+	// TODO move obj, optional
 	return errs.NotImplement
 }

 func (d *Template) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
-	// TODO rename obj
+	// TODO rename obj, optional
 	return errs.NotImplement
 }

 func (d *Template) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
-	// TODO copy obj
+	// TODO copy obj, optional
 	return errs.NotImplement
 }

 func (d *Template) Remove(ctx context.Context, obj model.Obj) error {
-	// TODO remove obj
+	// TODO remove obj, optional
 	return errs.NotImplement
 }

 func (d *Template) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
-	// TODO upload file
+	// TODO upload file, optional
 	return errs.NotImplement
 }

drivers/template/meta.go

@@ -14,14 +14,17 @@ type Addition struct {
 }

 var config = driver.Config{
 	Name:        "Template",
 	LocalSort:   false,
 	OnlyLocal:   false,
 	OnlyProxy:   false,
 	NoCache:     false,
 	NoUpload:    false,
 	NeedMs:      false,
 	DefaultRoot: "root, / or other",
+	CheckStatus:       false,
+	Alert:             "",
+	NoOverwriteUpload: false,
 }

 func init() {
drivers/trainbit/driver.go (new file, 143 lines)

@@ -0,0 +1,143 @@
package trainbit

import (
	"context"
	"encoding/json"
	"fmt"
	"io"
	"math"
	"net/http"
	"net/url"
	"strings"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/errs"
	"github.com/alist-org/alist/v3/internal/model"
)

type Trainbit struct {
	model.Storage
	Addition
}

var apiExpiredate, guid string

func (d *Trainbit) Config() driver.Config {
	return config
}

func (d *Trainbit) GetAddition() driver.Additional {
	return &d.Addition
}

func (d *Trainbit) Init(ctx context.Context) error {
	// Stop following redirects so Link can read the Location header directly.
	base.HttpClient.CheckRedirect = func(req *http.Request, via []*http.Request) error {
		return http.ErrUseLastResponse
	}
	var err error
	apiExpiredate, guid, err = getToken(d.ApiKey, d.AUSHELLPORTAL)
	if err != nil {
		return err
	}
	return nil
}

func (d *Trainbit) Drop(ctx context.Context) error {
	return nil
}

func (d *Trainbit) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
	form := make(url.Values)
	form.Set("parentid", strings.Split(dir.GetID(), "_")[0])
	res, err := postForm("https://trainbit.com/lib/api/v1/listoffiles", form, apiExpiredate, d.ApiKey, d.AUSHELLPORTAL)
	if err != nil {
		return nil, err
	}
	data, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	var jsonData any
	err = json.Unmarshal(data, &jsonData)
	if err != nil {
		return nil, err
	}
	object, err := parseRawFileObject(jsonData.(map[string]any)["items"].([]any))
	if err != nil {
		return nil, err
	}
	return object, nil
}

func (d *Trainbit) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
	res, err := get(fmt.Sprintf("https://trainbit.com/files/%s/", strings.Split(file.GetID(), "_")[0]), d.ApiKey, d.AUSHELLPORTAL)
	if err != nil {
		return nil, err
	}
	return &model.Link{
		URL: res.Header.Get("Location"),
	}, nil
}

func (d *Trainbit) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
	form := make(url.Values)
	form.Set("name", local2provider(dirName, true))
	form.Set("parentid", strings.Split(parentDir.GetID(), "_")[0])
	_, err := postForm("https://trainbit.com/lib/api/v1/createfolder", form, apiExpiredate, d.ApiKey, d.AUSHELLPORTAL)
	return err
}

func (d *Trainbit) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
	form := make(url.Values)
	form.Set("sourceid", strings.Split(srcObj.GetID(), "_")[0])
	form.Set("destinationid", strings.Split(dstDir.GetID(), "_")[0])
	_, err := postForm("https://trainbit.com/lib/api/v1/move", form, apiExpiredate, d.ApiKey, d.AUSHELLPORTAL)
	return err
}

func (d *Trainbit) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
	form := make(url.Values)
	form.Set("id", strings.Split(srcObj.GetID(), "_")[0])
	form.Set("name", local2provider(newName, srcObj.IsDir()))
	_, err := postForm("https://trainbit.com/lib/api/v1/edit", form, apiExpiredate, d.ApiKey, d.AUSHELLPORTAL)
	return err
}

func (d *Trainbit) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
	return errs.NotImplement
}

func (d *Trainbit) Remove(ctx context.Context, obj model.Obj) error {
	form := make(url.Values)
	form.Set("id", strings.Split(obj.GetID(), "_")[0])
	_, err := postForm("https://trainbit.com/lib/api/v1/delete", form, apiExpiredate, d.ApiKey, d.AUSHELLPORTAL)
	return err
}

func (d *Trainbit) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
	endpoint, _ := url.Parse("https://tb28.trainbit.com/api/upload/send_raw/")
	query := &url.Values{}
	// Object IDs are stored as "<id>_<uploadcode>" (see parseRawFileObject);
	// the upload endpoint wants the code part.
	query.Add("q", strings.Split(dstDir.GetID(), "_")[1])
	query.Add("guid", guid)
	query.Add("name", url.QueryEscape(local2provider(stream.GetName(), false)+"."))
	endpoint.RawQuery = query.Encode()
	var total int64
	total = 0
	progressReader := &ProgressReader{
		stream,
		func(byteNum int) {
			total += int64(byteNum)
			up(int(math.Round(float64(total) / float64(stream.GetSize()) * 100)))
		},
	}
	req, err := http.NewRequest(http.MethodPost, endpoint.String(), progressReader)
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "text/json; charset=UTF-8")
	_, err = base.HttpClient.Do(req)
	return err
}

var _ driver.Driver = (*Trainbit)(nil)
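Put streams the upload body through the ProgressReader defined in util.go, turning bytes read into a percentage for the UpdateProgress callback. A runnable sketch of that wrapping pattern outside the driver (progressReader below is a local stand-in, not the driver's type):

package main

import (
	"fmt"
	"io"
	"math"
	"strings"
)

// progressReader wraps a reader and reports how many bytes each Read consumed,
// the same idea the Trainbit Put implementation uses.
type progressReader struct {
	io.Reader
	report func(n int)
}

func (p *progressReader) Read(buf []byte) (int, error) {
	n, err := p.Reader.Read(buf)
	p.report(n)
	return n, err
}

func main() {
	payload := strings.NewReader("example upload body")
	size := payload.Len()
	var total int
	pr := &progressReader{
		Reader: payload,
		report: func(n int) {
			total += n
			fmt.Printf("%d%%\n", int(math.Round(float64(total)/float64(size)*100)))
		},
	}
	// io.Copy drives Read; in the driver the HTTP client plays this role.
	if _, err := io.Copy(io.Discard, pr); err != nil {
		panic(err)
	}
}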
drivers/trainbit/meta.go (new file, 29 lines)

@@ -0,0 +1,29 @@
package trainbit

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	driver.RootID
	AUSHELLPORTAL string `json:"AUSHELLPORTAL" required:"true"`
	ApiKey        string `json:"apikey" required:"true"`
}

var config = driver.Config{
	Name:        "Trainbit",
	LocalSort:   false,
	OnlyLocal:   false,
	OnlyProxy:   false,
	NoCache:     false,
	NoUpload:    false,
	NeedMs:      false,
	DefaultRoot: "0_000",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Trainbit{}
	})
}
drivers/trainbit/types.go (new file, 1 line)

@@ -0,0 +1 @@
package trainbit
drivers/trainbit/util.go (new file, 135 lines)

@@ -0,0 +1,135 @@
package trainbit

import (
	"html"
	"io"
	"net/http"
	"net/url"
	"regexp"
	"strings"
	"time"

	"github.com/alist-org/alist/v3/drivers/base"
	"github.com/alist-org/alist/v3/internal/model"
)

// ProgressReader wraps an io.Reader and reports how many bytes each Read consumed.
type ProgressReader struct {
	io.Reader
	reporter func(byteNum int)
}

func (progressReader *ProgressReader) Read(data []byte) (int, error) {
	byteNum, err := progressReader.Reader.Read(data)
	progressReader.reporter(byteNum)
	return byteNum, err
}

func get(url string, apiKey string, AUSHELLPORTAL string) (*http.Response, error) {
	req, err := http.NewRequest(http.MethodGet, url, nil)
	if err != nil {
		return nil, err
	}
	req.AddCookie(&http.Cookie{
		Name:   ".AUSHELLPORTAL",
		Value:  AUSHELLPORTAL,
		MaxAge: 2 * 60,
	})
	req.AddCookie(&http.Cookie{
		Name:   "retkeyapi",
		Value:  apiKey,
		MaxAge: 2 * 60,
	})
	res, err := base.HttpClient.Do(req)
	return res, err
}

func postForm(endpoint string, data url.Values, apiExpiredate string, apiKey string, AUSHELLPORTAL string) (*http.Response, error) {
	extData := make(url.Values)
	for key, value := range data {
		extData[key] = make([]string, len(value))
		copy(extData[key], value)
	}
	extData.Set("apikey", apiKey)
	extData.Set("expiredate", apiExpiredate)
	req, err := http.NewRequest(http.MethodPost, endpoint, strings.NewReader(extData.Encode()))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
	req.AddCookie(&http.Cookie{
		Name:   ".AUSHELLPORTAL",
		Value:  AUSHELLPORTAL,
		MaxAge: 2 * 60,
	})
	req.AddCookie(&http.Cookie{
		Name:   "retkeyapi",
		Value:  apiKey,
		MaxAge: 2 * 60,
	})
	res, err := base.HttpClient.Do(req)
	return res, err
}

// getToken scrapes the expiry date and upload guid out of the files page.
func getToken(apiKey string, AUSHELLPORTAL string) (string, string, error) {
	res, err := get("https://trainbit.com/files/", apiKey, AUSHELLPORTAL)
	if err != nil {
		return "", "", err
	}
	data, err := io.ReadAll(res.Body)
	if err != nil {
		return "", "", err
	}
	text := string(data)
	apiExpiredateReg := regexp.MustCompile(`core.api.expiredate = '([^']*)';`)
	result := apiExpiredateReg.FindAllStringSubmatch(text, -1)
	apiExpiredate := result[0][1]
	guidReg := regexp.MustCompile(`app.vars.upload.guid = '([^']*)';`)
	result = guidReg.FindAllStringSubmatch(text, -1)
	guid := result[0][1]
	return apiExpiredate, guid, nil
}

func local2provider(filename string, isFolder bool) string {
	if isFolder {
		return filename
	}
	return filename + ".delete_suffix"
}

func provider2local(filename string) string {
	filename = html.UnescapeString(filename)
	index := strings.LastIndex(filename, ".delete_suffix")
	if index != -1 {
		filename = filename[:index]
	}
	return filename
}

func parseRawFileObject(rawObject []any) ([]model.Obj, error) {
	objectList := make([]model.Obj, 0)
	for _, each := range rawObject {
		object := each.(map[string]any)
		if object["id"].(string) == "0" {
			continue
		}
		isFolder := int64(object["ty"].(float64)) == 1
		var name string
		if object["ext"].(string) != "" {
			name = strings.Join([]string{object["name"].(string), object["ext"].(string)}, ".")
		} else {
			name = object["name"].(string)
		}
		modified, err := time.Parse("2006/01/02 15:04:05", object["modified"].(string))
		if err != nil {
			return nil, err
		}
		objectList = append(objectList, model.Obj(&model.Object{
			ID:   strings.Join([]string{object["id"].(string), strings.Split(object["uploadurl"].(string), "=")[1]}, "_"),
			Name: provider2local(name),
			Size: int64(object["byte"].(float64)),
			// The API's timestamps appear to be offset by 3.5 hours, hence the shift.
			Modified: modified.Add(-210 * time.Minute),
			IsFolder: isFolder,
		}))
	}
	return objectList, nil
}
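local2provider and provider2local implement a small naming convention: regular files get a throwaway ".delete_suffix" appended before upload and have it stripped again when listing (the code does not state why; the extra trailing dot added in Put points at Trainbit's name/extension handling). A tiny round-trip sketch of that convention, with stand-in function names:

package main

import (
	"fmt"
	"strings"
)

// toProvider / toLocal mirror local2provider and provider2local above.
func toProvider(name string, isFolder bool) string {
	if isFolder {
		return name
	}
	return name + ".delete_suffix"
}

func toLocal(name string) string {
	if i := strings.LastIndex(name, ".delete_suffix"); i != -1 {
		return name[:i]
	}
	return name
}

func main() {
	fmt.Println(toProvider("report.tar.gz", false))            // report.tar.gz.delete_suffix
	fmt.Println(toLocal("report.tar.gz.delete_suffix"))        // report.tar.gz
}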
Some files were not shown because too many files have changed in this diff.