Compare commits


1 Commit

Commit 06f3618897: refactor: filesystem struct (2022-07-27 17:09:48 +08:00)
456 changed files with 2915 additions and 40709 deletions

.all-contributorsrc (new file, +98 lines)

@ -0,0 +1,98 @@
{
"files": [
"CONTRIBUTORS.md"
],
"imageSize": 100,
"commit": false,
"contributors": [
{
"login": "Xhofe",
"name": "Xhofe",
"avatar_url": "https://avatars.githubusercontent.com/u/36558727?v=4",
"profile": "http://nn.ci",
"contributions": [
"code",
"ideas",
"doc"
]
},
{
"login": "foxxorcat",
"name": "foxxorcat",
"avatar_url": "https://avatars.githubusercontent.com/u/95907542?v=4",
"profile": "https://github.com/foxxorcat",
"contributions": [
"code"
]
},
{
"login": "DaoChen6",
"name": "道辰",
"avatar_url": "https://avatars.githubusercontent.com/u/63903027?v=4",
"profile": "https://www.iflu.cf/",
"contributions": [
"doc"
]
},
{
"login": "vg-land",
"name": "vg-land",
"avatar_url": "https://avatars.githubusercontent.com/u/16739728?v=4",
"profile": "https://vg-land.github.io/",
"contributions": [
"code"
]
},
{
"login": "Clansty",
"name": "凌莞~(=^▽^=)",
"avatar_url": "https://avatars.githubusercontent.com/u/18461360?v=4",
"profile": "https://c5y.moe",
"contributions": [
"doc"
]
},
{
"login": "Windman1320",
"name": "Windman",
"avatar_url": "https://avatars.githubusercontent.com/u/9999486?v=4",
"profile": "https://github.com/Windman1320",
"contributions": [
"code"
]
},
{
"login": "ericarena",
"name": "ericarena",
"avatar_url": "https://avatars.githubusercontent.com/u/4518927?v=4",
"profile": "https://github.com/ericarena",
"contributions": [
"code"
]
},
{
"login": "WntFlm",
"name": "WntFlm",
"avatar_url": "https://avatars.githubusercontent.com/u/34620278?v=4",
"profile": "https://github.com/WntFlm",
"contributions": [
"code"
]
},
{
"login": "XZB-1248",
"name": "XZB-1248",
"avatar_url": "https://avatars.githubusercontent.com/u/28593573?v=4",
"profile": "https://github.com/XZB-1248",
"contributions": [
"code"
]
}
],
"contributorsPerLine": 7,
"projectName": "alist",
"projectOwner": "alist-org",
"repoType": "github",
"repoHost": "https://github.com",
"skipCi": true
}

.github/FUNDING.yml (vendored, deleted, 13 lines)

@ -1,13 +0,0 @@
# These are supported funding model platforms
github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: xhofe # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: ['https://alist.nn.ci/guide/sponsor.html']

@ -7,44 +7,27 @@ body:
value: |
Thanks for taking the time to fill out this bug report. Please **confirm that your issue is not a duplicate and is not caused by your own operation or version issues**
感谢您花时间填写此错误报告,请**务必确认您的issue不是重复的且不是因为您的操作或版本问题**
- type: checkboxes
attributes:
label: Please make sure of the following things
description: |
You must check all the following, otherwise your issue may be closed directly. Or you can go to the [discussions](https://github.com/alist-org/alist/discussions)
您必须勾选以下所有内容否则您的issue可能会被直接关闭。或者您可以去[讨论区](https://github.com/alist-org/alist/discussions)
description: You may select more than one, even select all.
options:
- label: |
I have read the [documentation](https://alist.nn.ci).
我已经阅读了[文档](https://alist.nn.ci)。
- label: |
I'm sure there are no duplicate issues or discussions.
我确定没有重复的issue或讨论。
- label: |
I'm sure it's due to `AList` and not something else(such as [Network](https://alist.nn.ci/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host) ,`Dependencies` or `Operational`).
我确定是`AList`的问题,而不是其他原因(例如[网络](https://alist.nn.ci/zh/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host)`依赖`或`操作`)。
- label: |
I'm sure this issue is not fixed in the latest version.
我确定这个问题在最新版本中没有被修复。
- label: I have read the [documentation](https://alist-doc.nn.ci).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure it's due to `alist` and not something else(such as `Dependencies` or `Operational`).
- type: input
id: version
attributes:
label: AList Version / AList 版本
description: |
What version of our software are you running? Do not use `latest` or `master` as an answer.
您使用的是哪个版本的软件?请不要使用`latest`或`master`作为答案。
placeholder: v3.xx.xx
label: Alist Version / Alist 版本
description: What version of our software are you running?
placeholder: v2.0.0
validations:
required: true
- type: input
id: driver
attributes:
label: Driver used / 使用的存储驱动
description: |
What storage driver are you using?
您使用的是哪个存储驱动?
description: What storage driver are you using?
placeholder: "for example: Onedrive"
validations:
required: true
@ -59,17 +42,8 @@ body:
attributes:
label: Reproduction / 复现链接
description: |
Please provide a link to a repo that can reproduce the problem you ran into. Please be aware that your issue may be closed directly if you don't provide it.
请提供能复现此问题的链接请知悉如果不提供它你的issue可能会被直接关闭。
validations:
required: true
- type: textarea
id: config
attributes:
label: Config / 配置
description: |
Please provide the configuration file of your `AList` application and take a screenshot of the relevant storage configuration. (hide privacy field)
请提供您的`AList`应用的配置文件,并截图相关存储配置。(隐藏隐私字段)
Please provide a link to a repo that can reproduce the problem you ran into.
请提供能复现此问题的链接
validations:
required: true
- type: textarea
@ -79,3 +53,4 @@ body:
description: |
Please copy and paste any relevant log output.
请复制粘贴错误日志,或者截图
render: shell

@ -7,7 +7,7 @@ body:
label: Please make sure of the following things
description: You may select more than one, even select all.
options:
- label: I have read the [documentation](https://alist.nn.ci).
- label: I have read the [documentation](https://alist-doc.nn.ci).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure this feature is not implemented.
- label: I'm sure it's a reasonable and popular requirement.

.github/stale.yml (vendored, deleted, 19 lines)

@ -1,19 +0,0 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 44
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 20
# Issues with these labels will never be considered stale
exemptLabels:
- accepted
- security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: >
This issue was closed because it has been inactive for more than 52 days. You can
reopen or recreate it if you think it should continue. Thank you again for your contributions.

@ -1,67 +0,0 @@
name: auto_lang
on:
push:
branches:
- 'main'
paths:
- 'drivers/**'
- 'internal/bootstrap/data/setting.go'
- 'internal/conf/const.go'
- 'cmd/lang.go'
workflow_dispatch:
jobs:
auto_lang:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
name: auto generate lang.json
runs-on: ${{ matrix.platform }}
steps:
- name: Setup go
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout alist
uses: actions/checkout@v3
with:
path: alist
- name: Checkout alist-web
uses: actions/checkout@v3
with:
repository: 'alist-org/alist-web'
ref: main
persist-credentials: false
fetch-depth: 0
path: alist-web
- name: Generate lang
run: |
cd alist
go run ./main.go lang
cd ..
- name: Copy lang file
run: |
cp -f ./alist/lang/*.json ./alist-web/src/lang/en/ 2>/dev/null || :
- name: Commit git
run: |
cd alist-web
git add .
git config --local user.email "bot@nn.ci"
git config --local user.name "IlaBot"
git commit -m "chore: auto update i18n file" -a 2>/dev/null || :
cd ..
- name: Push lang files
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
directory: alist-web
repository: alist-org/alist-web

@ -1,41 +0,0 @@
name: build
on:
push:
branches: [ 'main' ]
pull_request:
branches: [ 'main' ]
jobs:
build:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [ '1.20' ]
name: Build
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
- name: Install dependencies
run: |
sudo snap install zig --classic --beta
docker pull crazymax/xgo:latest
go install github.com/crazy-max/xgo@latest
sudo apt install upx
- name: Build
run: |
bash build.sh dev
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: alist
path: dist

@ -1,65 +0,0 @@
name: build_docker
on:
push:
branches: [ main ]
jobs:
build_docker:
name: Build docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: xhofe/alist
- name: Replace release with dev
run: |
sed -i 's/release/dev/g' Dockerfile
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64
build_docker_with_aria2:
needs: build_docker
name: Build docker with aria2
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
with:
repository: alist-org/with_aria2
ref: main
persist-credentials: false
fetch-depth: 0
- name: Commit
run: |
git config --local user.email "bot@nn.ci"
git config --local user.name "IlaBot"
git commit --allow-empty -m "Trigger build for ${{ github.sha }}"
- name: Push commit
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/with_aria2

@ -1,19 +0,0 @@
name: auto changelog
on:
push:
tags:
- '*'
jobs:
changelog:
name: Create Release
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
with:
fetch-depth: 0
- run: npx changelogithub # or pin to changelogithub@0.12 to ensure a stable result
env:
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}

@ -1,22 +1,20 @@
name: Close need info
name: Check need info
on:
schedule:
- cron: "0 0 */1 * *"
workflow_dispatch:
- cron: "0 0 */7 * *"
jobs:
close-need-info:
check-need-info:
runs-on: ubuntu-latest
steps:
- name: close-issues
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'question'
inactive-day: 3
close-reason: 'not_planned'
inactive-day: 7
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 3 days.
你好 @${{ github.event.issue.user.login }}此issue因超过3天未回复被关闭。
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 7 days.
你好 @${{ github.event.issue.user.login }}此issue因超过7天未回复被关闭。

@ -1,21 +0,0 @@
name: Close inactive
on:
schedule:
- cron: "0 0 */7 * *"
workflow_dispatch:
jobs:
close-inactive:
runs-on: ubuntu-latest
steps:
- name: close-issues
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'stale'
inactive-day: 8
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed because it has been inactive for more than 52 days. You can reopen or recreate it if you think it should continue. Thank you again for your contributions.

@ -10,7 +10,7 @@ jobs:
if: github.event.label.name == 'duplicate'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
@ -19,7 +19,7 @@ jobs:
Hello @${{ github.event.issue.user.login }}, your issue is a duplicate and will be closed.
你好 @${{ github.event.issue.user.login }}你的issue是重复的将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

@ -10,7 +10,7 @@ jobs:
if: github.event.label.name == 'invalid'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
@ -19,7 +19,7 @@ jobs:
Hello @${{ github.event.issue.user.login }}, your issue is invalid and will be closed.
你好 @${{ github.event.issue.user.login }}你的issue无效将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

@ -0,0 +1,16 @@
name: Issue Month Statistics
on:
schedule:
- cron: "0 1 1 * *"
jobs:
month-statistics:
runs-on: ubuntu-latest
steps:
- name: month-statistics
uses: actions-cool/issues-month-statistics@v1
with:
count-lables: true
count-comments: true
emoji: 'eyes'

@ -10,11 +10,11 @@ jobs:
if: github.event.label.name == 'question'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3.5.2
uses: actions-cool/issues-helper@v2.0.0
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 3 days.
你好 @${{ github.event.issue.user.login }}请按照issue模板填写, 并详细说明问题/日志记录/复现步骤/复现链接/实现思路或提供更多信息等, 3天内未回复issue自动关闭。
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 7 days.
你好 @${{ github.event.issue.user.login }}请按照issue模板填写, 并详细说明问题/复现步骤/复现链接/实现思路或提供更多信息等, 7天内未回复issue自动关闭。

@ -1,17 +0,0 @@
name: Remove working label when issue closed
on:
issues:
types: [closed]
jobs:
rm-working:
runs-on: ubuntu-latest
steps:
- name: Remove working label
uses: actions-cool/issues-helper@v3
with:
actions: 'remove-labels'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: 'working'

@ -1,19 +0,0 @@
name: Issues Similarity Analysis
on:
issues:
types: [opened, edited]
jobs:
similarity-analysis:
runs-on: ubuntu-latest
steps:
- name: analysis
uses: actions-cool/issues-similarity-analysis@v1
with:
filter-threshold: 0.5
comment-title: '### See'
comment-body: '${index}. ${similarity} #${number}'
show-footer: false
show-mentioned: true
since-days: 730

@ -1,13 +0,0 @@
name: Translation Helper
on:
pull_request_target:
types: [opened]
issues:
types: [opened]
jobs:
translate:
runs-on: ubuntu-latest
steps:
- uses: actions-cool/translation-helper@v1.2.0

@ -10,7 +10,7 @@ jobs:
if: github.event.label.name == 'wontfix'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
@ -19,7 +19,7 @@ jobs:
Hello @${{ github.event.issue.user.login }}, this issue will not be worked on and will be closed.
你好 @${{ github.event.issue.user.login }},这不会被处理,将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v3
uses: actions-cool/issues-helper@v2
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

@ -1,75 +0,0 @@
name: release
on:
release:
types: [ published ]
jobs:
release:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Prerelease
uses: irongut/EditRelease@v1.2.0
with:
token: ${{ secrets.MY_TOKEN }}
id: ${{ github.event.release.id }}
prerelease: true
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Install dependencies
run: |
sudo snap install zig --classic --beta
docker pull crazymax/xgo:latest
go install github.com/crazy-max/xgo@latest
sudo apt install upx
- name: Build
run: |
bash build.sh release
- name: Upload assets
uses: softprops/action-gh-release@v1
with:
files: build/compress/*
prerelease: false
release_desktop:
needs: release
name: Release desktop
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
with:
repository: alist-org/desktop-release
ref: main
persist-credentials: false
fetch-depth: 0
- name: Add tag
run: |
git config --local user.email "bot@nn.ci"
git config --local user.name "IlaBot"
version=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
git tag -a $version -m "release $version"
- name: Push tags
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/desktop-release

@ -1,68 +0,0 @@
name: release_docker
on:
push:
tags:
- '*'
jobs:
release_docker:
name: Release Docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v4
with:
images: xhofe/alist
- name: Set up QEMU
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v2
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
with:
context: .
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
release_docker_with_aria2:
needs: release_docker
name: Release docker with aria2
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
with:
repository: alist-org/with_aria2
ref: main
persist-credentials: false
fetch-depth: 0
- name: Add tag
run: |
git config --local user.email "bot@nn.ci"
git config --local user.name "IlaBot"
git tag -a ${{ github.ref_name }} -m "release ${{ github.ref_name }}"
- name: Push tags
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/with_aria2

@ -1,34 +0,0 @@
name: release_linux_musl
on:
release:
types: [ published ]
jobs:
release_linux_musl:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Build
run: |
bash build.sh release linux_musl
- name: Upload assets
uses: softprops/action-gh-release@v1
with:
files: build/compress/*

@ -1,34 +0,0 @@
name: release_linux_musl_arm
on:
release:
types: [ published ]
jobs:
release_linux_musl_arm:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Build
run: |
bash build.sh release linux_musl_arm
- name: Upload assets
uses: softprops/action-gh-release@v1
with:
files: build/compress/*

.gitignore (vendored, 20 lines changed)

@ -1,7 +1,7 @@
.idea/
.DS_Store
output/
/dist/
dist/
# Binaries for programs and plugins
*.exe
@ -20,14 +20,12 @@ output/
# Dependency directories (remove the comment below to include it)
# vendor/
/bin/*
bin/*
/alist
/alist.exe
*.json
/build
/data/
/log/
/lang/
/daemon/
/public/dist/*
/!public/dist/README.md
.VSCodeCounter
public/*.html
public/assets/
public/public/
/data
log/

@ -1,128 +0,0 @@
# Contributor Covenant Code of Conduct
## Our Pledge
We as members, contributors, and leaders pledge to make participation in our
community a harassment-free experience for everyone, regardless of age, body
size, visible or invisible disability, ethnicity, sex characteristics, gender
identity and expression, level of experience, education, socio-economic status,
nationality, personal appearance, race, religion, or sexual identity
and orientation.
We pledge to act and interact in ways that contribute to an open, welcoming,
diverse, inclusive, and healthy community.
## Our Standards
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
Community leaders are responsible for clarifying and enforcing our standards of
acceptable behavior and will take appropriate and fair corrective action in
response to any behavior that they deem inappropriate, threatening, offensive,
or harmful.
Community leaders have the right and responsibility to remove, edit, or reject
comments, commits, code, wiki edits, issues, and other contributions that are
not aligned to this Code of Conduct, and will communicate reasons for moderation
decisions when appropriate.
## Scope
This Code of Conduct applies within all community spaces, and also applies when
an individual is officially representing the community in public spaces.
Examples of representing our community include using an official e-mail address,
posting via an official social media account, or acting as an appointed
representative at an online or offline event.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
i@nn.ci.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the
reporter of any incident.
## Enforcement Guidelines
Community leaders will follow these Community Impact Guidelines in determining
the consequences for any action they deem in violation of this Code of Conduct:
### 1. Correction
**Community Impact**: Use of inappropriate language or other behavior deemed
unprofessional or unwelcome in the community.
**Consequence**: A private, written warning from community leaders, providing
clarity around the nature of the violation and an explanation of why the
behavior was inappropriate. A public apology may be requested.
### 2. Warning
**Community Impact**: A violation through a single incident or series
of actions.
**Consequence**: A warning with consequences for continued behavior. No
interaction with the people involved, including unsolicited interaction with
those enforcing the Code of Conduct, for a specified period of time. This
includes avoiding interactions in community spaces as well as external channels
like social media. Violating these terms may lead to a temporary or
permanent ban.
### 3. Temporary Ban
**Community Impact**: A serious violation of community standards, including
sustained inappropriate behavior.
**Consequence**: A temporary ban from any sort of interaction or public
communication with the community for a specified period of time. No public or
private interaction with the people involved, including unsolicited interaction
with those enforcing the Code of Conduct, is allowed during this period.
Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
the community.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
Community Impact Guidelines were inspired by [Mozilla's code of conduct
enforcement ladder](https://github.com/mozilla/diversity).
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at
https://www.contributor-covenant.org/translations.

@ -6,32 +6,29 @@
Prerequisites:
- [git](https://git-scm.com)
- [Go 1.20+](https://golang.org/doc/install)
- [nodejs](https://nodejs.org/zh-cn/)
- [Go 1.18+](https://golang.org/doc/install)
- [gcc](https://gcc.gnu.org/)
- [nodejs](https://nodejs.org/)
Clone `alist` and `alist-web` anywhere:
```shell
$ git clone https://github.com/alist-org/alist.git
$ git clone --recurse-submodules https://github.com/alist-org/alist-web.git
$ git clone https://github.com/Xhofe/alist.git
$ git clone https://github.com/Xhofe/alist-web.git
```
You should switch to the `main` branch for development.
## Preview your change
### backend
```shell
$ go run main.go
$ go run cmd/alist.go
```
### frontend
```shell
$ pnpm dev
$ yarn dev
```
## Add a new driver
Copy the `drivers/template` folder, rename it, and follow the comments in it (a rough sketch of the idea follows).
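A purely illustrative sketch of what the copied folder ends up holding; the names below are placeholders and the comments in `drivers/template` remain the authoritative reference:
```go
// Hypothetical outline of a driver package copied from drivers/template.
// The identifiers here are illustrative, not the real interface.
package exampledriver

// ExampleDriver holds the per-storage settings that the admin UI fills in.
type ExampleDriver struct {
	RefreshToken string `json:"refresh_token"` // example, driver-specific field
}

// The template's comments typically walk through:
//   - initialising and releasing the storage (Init / Drop style hooks)
//   - listing the objects under a directory
//   - resolving a file to a downloadable link
// plus an init() that registers the driver so it becomes selectable.
```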
## Create a commit
Commit messages should be well formatted; to keep them standardized, use the conventions below.
@ -76,6 +73,7 @@ Must be one of the following:
* **chore**: Changes to the build process or auxiliary tools and libraries such as documentation
generation
* **release**: Release a new version
* **workflow**: Workflow related file modification
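For instance, a title of the form `<type>(<scope>): <subject>`, such as the purely illustrative `fix(onedrive): refresh expired access token`, follows this convention.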
### Scope
The scope could be anything specifying place of the commit change. For example `$location`,

CONTRIBUTORS.md (new file, +33 lines)

@ -0,0 +1,33 @@
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
[![All Contributors](https://img.shields.io/badge/all_contributors-9-orange.svg?style=flat-square)](#contributors-)
<!-- ALL-CONTRIBUTORS-BADGE:END -->
## Contributors ✨
Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section -->
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<table>
<tr>
<td align="center"><a href="http://nn.ci"><img src="https://avatars.githubusercontent.com/u/36558727?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Xhofe</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=Xhofe" title="Code">💻</a> <a href="#ideas-Xhofe" title="Ideas, Planning, & Feedback">🤔</a> <a href="https://github.com/alist-org/alist/commits?author=Xhofe" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/foxxorcat"><img src="https://avatars.githubusercontent.com/u/95907542?v=4?s=100" width="100px;" alt=""/><br /><sub><b>foxxorcat</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=foxxorcat" title="Code">💻</a></td>
<td align="center"><a href="https://www.iflu.cf/"><img src="https://avatars.githubusercontent.com/u/63903027?v=4?s=100" width="100px;" alt=""/><br /><sub><b>道辰</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=DaoChen6" title="Documentation">📖</a></td>
<td align="center"><a href="https://vg-land.github.io/"><img src="https://avatars.githubusercontent.com/u/16739728?v=4?s=100" width="100px;" alt=""/><br /><sub><b>vg-land</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=vg-land" title="Code">💻</a></td>
<td align="center"><a href="https://c5y.moe"><img src="https://avatars.githubusercontent.com/u/18461360?v=4?s=100" width="100px;" alt=""/><br /><sub><b>凌莞~(=^▽^=)</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=Clansty" title="Documentation">📖</a></td>
<td align="center"><a href="https://github.com/Windman1320"><img src="https://avatars.githubusercontent.com/u/9999486?v=4?s=100" width="100px;" alt=""/><br /><sub><b>Windman</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=Windman1320" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/ericarena"><img src="https://avatars.githubusercontent.com/u/4518927?v=4?s=100" width="100px;" alt=""/><br /><sub><b>ericarena</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=ericarena" title="Code">💻</a></td>
</tr>
<tr>
<td align="center"><a href="https://github.com/WntFlm"><img src="https://avatars.githubusercontent.com/u/34620278?v=4?s=100" width="100px;" alt=""/><br /><sub><b>WntFlm</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=WntFlm" title="Code">💻</a></td>
<td align="center"><a href="https://github.com/XZB-1248"><img src="https://avatars.githubusercontent.com/u/28593573?v=4?s=100" width="100px;" alt=""/><br /><sub><b>XZB-1248</b></sub></a><br /><a href="https://github.com/alist-org/alist/commits?author=XZB-1248" title="Code">💻</a></td>
</tr>
</table>
<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->
<!-- ALL-CONTRIBUTORS-LIST:END -->
This project follows the [all-contributors](https://github.com/all-contributors/all-contributors) specification. Contributions of any kind welcome!

@ -1,18 +0,0 @@
FROM alpine:3.18 as builder
LABEL stage=go-builder
WORKDIR /app/
COPY ./ ./
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker
FROM alpine:3.18
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]

README.md (124 lines changed, mode: Normal file → Executable file)

@ -1,127 +1,31 @@
<div align="center">
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂A file list program that supports multiple storages, powered by Gin and Solidjs.</em></p>
<div>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
<p><em>🗂A file list program that supports multiple storage, powered by Gin and React.</em></p>
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/release/Xhofe/alist?style=flat-square" alt="latest version"></a>
<a href="https://github.com/Xhofe/alist/discussions"><img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936&style=flat-square" alt="discussions"></a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild"><img src="https://img.shields.io/github/workflow/status/Xhofe/alist/build?style=flat-square" alt="Build status"></a>
<a href="https://github.com/Xhofe/alist/releases"><img src="https://img.shields.io/github/downloads/Xhofe/alist/total?style=flat-square&color=%239F7AEA" alt="Downloads"></a>
<a href="https://github.com/Xhofe/alist/blob/v2/LICENSE"><img src="https://img.shields.io/github/license/Xhofe/alist?style=flat-square" alt="License"></a>
<a href="https://pay.xhofe.top">
<img src="https://img.shields.io/badge/%24-donate-ff69b4.svg?style=flat-square" alt="donate">
</a>
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
<img src="https://badges.crowdin.net/alist/localized.svg">
</a>
</div>
<div>
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://discord.gg/F4ymsH4xv2">
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
</a>
<a href="https://hub.docker.com/r/xhofe/alist">
<img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
</a>
<a href="https://alist.nn.ci/guide/sponsor.html">
<img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
</a>
</div>
</div>
---
English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
[Contributors](./CONTRIBUTORS.md) | [Contributing](./CONTRIBUTING.md)
## Features
- [x] Multiple storage
- [x] Local storage
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
- [x] [123pan](https://www.123pan.com/)
- [x] FTP / SFTP
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [S3](https://aws.amazon.com/s3/)
- [x] [Seafile](https://seafile.com/)
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family)
- [x] [YandexDisk](https://disk.yandex.com/)
- [x] [BaiduNetdisk](http://pan.baidu.com/)
- [x] [Terabox](https://www.terabox.com/main)
- [x] [UC](https://drive.uc.cn)
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)
- [x] SMB
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode
- [x] Video and audio preview, support lyrics and subtitles
- [x] Office documents preview (docx, pptx, xlsx, ...)
- [x] `README.md` preview rendering
- [x] File permalink copy and direct file download
- [x] Dark mode
- [x] I18n
- [x] Protected routes (password protection and authentication)
- [x] WebDav (see https://alist.nn.ci/guide/webdav.html for details)
- [x] [Docker Deploy](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare workers proxy
- [x] File/Folder package download
- [x] Web upload(Can allow visitors to upload), delete, mkdir, rename, move and copy
- [x] Offline download
- [x] Copy files between two storage
- [x] Multi-thread downloading acceleration for single-thread download/stream
## Document
<https://alist.nn.ci/>
## Demo
<https://al.nn.ci>
> ### A new version is under development. Check out the [v2](https://github.com/alist-org/alist/tree/v2) branch for the latest version.
## Discussion
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions; **issues are for bug reports and feature requests only.**
## Sponsor
AList is open-source software. If you happen to like this project and want me to keep going, please consider sponsoring me or making a one-time donation! Thanks for all the love and support:
https://alist.nn.ci/guide/sponsor.html
### Special sponsors
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
## Special sponsors
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.la/)
- [KinhDown 百度云盘不限速下载永久免费已稳定运行3年非常可靠!](https://kinhdown.com/?Type=Tutorials)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## Contributors
Thanks goes to these wonderful people:
[![Contributors](http://contributors.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
## License
`AList` is open-source software licensed under the AGPL-3.0 license.
@ -135,4 +39,4 @@ The `AList` is open-source software licensed under the AGPL-3.0 license.
---
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@QQGroup](https://jq.qq.com/?_wv=1027&k=YJJj2Gwb)

@ -1,136 +0,0 @@
<div align="center">
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂一个支持多存储的文件列表程序,使用 Gin 和 Solidjs。</em></p>
<div>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
<img src="https://badges.crowdin.net/alist/localized.svg">
</a>
</div>
<div>
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://discord.gg/F4ymsH4xv2">
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
</a>
<a href="https://hub.docker.com/r/xhofe/alist">
<img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
</a>
<a href="https://alist.nn.ci/zh/guide/sponsor.html">
<img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
</a>
</div>
</div>
---
[English](./README.md) | 中文 | [日本語](./README_ja.md) | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
## 功能
- [x] 多种存储
- [x] 本地存储
- [x] [阿里云盘](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint[国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
- [x] [GoogleDrive](https://drive.google.com/)
- [x] [123云盘](https://www.123pan.com/)
- [x] FTP / SFTP
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [S3](https://aws.amazon.com/cn/s3/)
- [x] [Seafile](https://seafile.com/)
- [x] [又拍云对象存储](https://www.upyun.com/products/file-storage)
- [x] WebDav(支持无API的OneDrive/SharePoint)
- [x] Teambition[中国](https://www.teambition.com/ )[国际](https://us.teambition.com/ )
- [x] [分秒帧](https://www.mediatrack.cn/)
- [x] [和彩云](https://yun.139.com/) (个人云, 家庭云)
- [x] [Yandex.Disk](https://disk.yandex.com/)
- [x] [百度网盘](http://pan.baidu.com/)
- [x] [UC网盘](https://drive.uc.cn)
- [x] [夸克网盘](https://pan.quark.cn)
- [x] [迅雷网盘](https://pan.xunlei.com)
- [x] [蓝奏云](https://www.lanzou.com/)
- [x] [阿里云盘分享](https://www.aliyundrive.com/)
- [x] [谷歌相册](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [一刻相册](https://photo.baidu.com/)
- [x] SMB
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] 部署方便,开箱即用
- [x] 文件预览PDF、markdown、代码、纯文本……
- [x] 画廊模式下的图像预览
- [x] 视频和音频预览,支持歌词和字幕
- [x] Office 文档预览docx、pptx、xlsx、...
- [x] `README.md` 预览渲染
- [x] 文件永久链接复制和直接文件下载
- [x] 黑暗模式
- [x] 国际化
- [x] 受保护的路由(密码保护和身份验证)
- [x] WebDav (具体见 https://alist.nn.ci/zh/guide/webdav.html)
- [x] [Docker 部署](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare workers 中转
- [x] 文件/文件夹打包下载
- [x] 网页上传(可以允许访客上传),删除,新建文件夹,重命名,移动,复制
- [x] 离线下载
- [x] 跨存储复制文件
- [x] 单线程下载/串流的多线程下载加速
## 文档
<https://alist.nn.ci/zh/>
## Demo
<https://al.nn.ci>
## 讨论
一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) **issue仅针对错误报告和功能请求。**
## 赞助
AList 是一个开源软件如果你碰巧喜欢这个项目并希望我继续下去请考虑赞助我或提供一个单一的捐款感谢所有的爱和支持https://alist.nn.ci/zh/guide/sponsor.html
### 特别赞助
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## 贡献者
Thanks goes to these wonderful people:
[![Contributors](http://contributors.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
## 许可
`AList` 是在 AGPL-3.0 许可下许可的开源软件。
## 免责声明
- 本程序为免费开源项目旨在分享网盘文件方便下载以及学习golang使用时请遵守相关法律法规请勿滥用
- 本程序通过调用官方sdk/接口实现,无破坏官方接口行为;
- 本程序仅做302重定向/流量转发,不拦截、存储、篡改任何用户数据;
- 在使用本程序之前你应了解并承担相应的风险包括但不限于账号被ban下载限速等与本程序无关
- 如有侵权,请通过[邮件](mailto:i@nn.ci)与我联系,会及时处理。
---
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)

@ -1,138 +0,0 @@
<div align="center">
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂Gin と Solidjs による、複数のストレージをサポートするファイルリストプログラム。</em></p>
<div>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
<img src="https://badges.crowdin.net/alist/localized.svg">
</a>
</div>
<div>
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://discord.gg/F4ymsH4xv2">
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
</a>
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
</a>
<a href="https://hub.docker.com/r/xhofe/alist">
<img src="https://img.shields.io/docker/pulls/xhofe/alist?color=%2348BB78&logo=docker&label=pulls" alt="Downloads" />
</a>
<a href="https://alist.nn.ci/guide/sponsor.html">
<img src="https://img.shields.io/badge/%24-sponsor-F87171.svg" alt="sponsor" />
</a>
</div>
</div>
---
[English](./README.md) | [中文](./README_cn.md) | 日本語 | [Contributing](./CONTRIBUTING.md) | [CODE_OF_CONDUCT](./CODE_OF_CONDUCT.md)
## 特徴
- [x] マルチストレージ
- [x] ローカルストレージ
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
- [x] [123pan](https://www.123pan.com/)
- [x] FTP / SFTP
- [x] [PikPak](https://www.mypikpak.com/)
- [x] [S3](https://aws.amazon.com/s3/)
- [x] [Seafile](https://seafile.com/)
- [x] [UPYUN Storage Service](https://www.upyun.com/products/file-storage)
- [x] WebDav(Support OneDrive/SharePoint without API)
- [x] Teambition([China](https://www.teambition.com/ ),[International](https://us.teambition.com/ ))
- [x] [Mediatrack](https://www.mediatrack.cn/)
- [x] [139yun](https://yun.139.com/) (Personal, Family)
- [x] [YandexDisk](https://disk.yandex.com/)
- [x] [BaiduNetdisk](http://pan.baidu.com/)
- [x] [Terabox](https://www.terabox.com/main)
- [x] [UC](https://drive.uc.cn)
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)
- [x] SMB
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] デプロイが簡単で、すぐに使える
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
- [x] ギャラリーモードでの画像プレビュー
- [x] ビデオとオーディオのプレビュー、歌詞と字幕のサポート
- [x] Office ドキュメントのプレビュー (docx, pptx, xlsx, ...)
- [x] `README.md` のプレビューレンダリング
- [x] ファイルのパーマリンクコピーと直接ダウンロード
- [x] ダークモード
- [x] 国際化
- [x] 保護されたルート (パスワード保護と認証)
- [x] WebDav (詳細は https://alist.nn.ci/guide/webdav.html を参照)
- [x] [Docker デプロイ](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare ワーカープロキシ
- [x] ファイル/フォルダパッケージのダウンロード
- [x] ウェブアップロード(訪問者にアップロードを許可できる), 削除, mkdir, 名前変更, 移動, コピー
- [x] オフラインダウンロード
- [x] 二つのストレージ間でファイルをコピー
- [x] シングルスレッドのダウンロード/ストリーム向けのマルチスレッド ダウンロード アクセラレーション
## ドキュメント
<https://alist.nn.ci/>
## デモ
<https://al.nn.ci>
## ディスカッション
一般的なご質問は[ディスカッションフォーラム](https://github.com/Xhofe/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
## スポンサー
AList はオープンソースのソフトウェアです。もしあなたがこのプロジェクトを気に入ってくださり、続けて欲しいと思ってくださるなら、ぜひスポンサーになってくださるか、1口でも寄付をしてくださるようご検討くださいすべての愛とサポートに感謝します:
https://alist.nn.ci/guide/sponsor.html
### スペシャルスポンサー
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## コントリビューター
これらの素晴らしい人々に感謝します:
[![Contributors](http://contributors.nn.ci/api?repo=alist-org/alist&repo=alist-org/alist-web&repo=alist-org/docs)](https://github.com/alist-org/alist/graphs/contributors)
## ライセンス
`AList` は AGPL-3.0 ライセンスの下でライセンスされたオープンソースソフトウェアです。
## 免責事項
- このプログラムはフリーでオープンソースのプロジェクトです。ネットワークディスク上でファイルを共有するように設計されており、golang のダウンロードや学習に便利です。利用にあたっては関連法規を遵守し、悪用しないようお願いします;
- このプログラムは、公式インターフェースの動作を破壊することなく、公式 sdk/インターフェースを呼び出すことで実装されています;
- このプログラムは、302リダイレクト/トラフィック転送のみを行い、いかなるユーザーデータも傍受、保存、改ざんしません;
- このプログラムを使用する前に、アカウントの禁止、ダウンロード速度の制限など、対応するリスクを理解し、負担する必要があります;
- もし侵害があれば、[メール](mailto:i@nn.ci)で私に連絡してください。
---
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)

build.sh (deleted, 209 lines)

@ -1,209 +0,0 @@
appName="alist"
builtAt="$(date +'%F %T %z')"
goVersion=$(go version | sed 's/go version //')
gitAuthor="Xhofe <i@nn.ci>"
gitCommit=$(git log --pretty=format:"%h" -1)
if [ "$1" = "dev" ]; then
version="dev"
webVersion="dev"
else
version=$(git describe --abbrev=0 --tags)
webVersion=$(wget -qO- -t1 -T2 "https://api.github.com/repos/alist-org/alist-web/releases/latest" | grep "tag_name" | head -n 1 | awk -F ":" '{print $2}' | sed 's/\"//g;s/,//g;s/ //g')
fi
echo "backend version: $version"
echo "frontend version: $webVersion"
ldflags="\
-w -s \
-X 'github.com/alist-org/alist/v3/internal/conf.BuiltAt=$builtAt' \
-X 'github.com/alist-org/alist/v3/internal/conf.GoVersion=$goVersion' \
-X 'github.com/alist-org/alist/v3/internal/conf.GitAuthor=$gitAuthor' \
-X 'github.com/alist-org/alist/v3/internal/conf.GitCommit=$gitCommit' \
-X 'github.com/alist-org/alist/v3/internal/conf.Version=$version' \
-X 'github.com/alist-org/alist/v3/internal/conf.WebVersion=$webVersion' \
"
FetchWebDev() {
curl -L https://codeload.github.com/alist-org/web-dist/tar.gz/refs/heads/dev -o web-dist-dev.tar.gz
tar -zxvf web-dist-dev.tar.gz
rm -rf public/dist
mv -f web-dist-dev/dist public
rm -rf web-dist-dev web-dist-dev.tar.gz
}
FetchWebRelease() {
curl -L https://github.com/alist-org/alist-web/releases/latest/download/dist.tar.gz -o dist.tar.gz
tar -zxvf dist.tar.gz
rm -rf public/dist
mv -f dist public
rm -rf dist.tar.gz
}
BuildWinArm64() {
echo building for windows-arm64
chmod +x ./wrapper/zcc-arm64
chmod +x ./wrapper/zcxx-arm64
export GOOS=windows
export GOARCH=arm64
export CC=$(pwd)/wrapper/zcc-arm64
export CXX=$(pwd)/wrapper/zcxx-arm64
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
}
BuildDev() {
rm -rf .git/
mkdir -p "dist"
muslflags="--extldflags '-static -fpic' $ldflags"
BASE="https://musl.nn.ci/"
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
curl -L -o "${i}.tgz" "${url}"
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
done
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64)
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
echo building for ${os_arch}
export GOOS=${os_arch%%-*}
export GOARCH=${os_arch##*-}
export CC=${cgo_cc}
export CGO_ENABLED=1
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
mv alist-* dist
cd dist
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
upx -9 ./alist-windows-amd64-upx.exe
find . -type f -print0 | xargs -0 md5sum >md5.txt
cat md5.txt
}
BuildDocker() {
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
}
BuildRelease() {
rm -rf .git/
mkdir -p "build"
BuildWinArm64 ./build/alist-windows-arm64.exe
xgo -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
# why? Because some target platforms seem to have issues with upx compression
upx -9 ./alist-linux-amd64
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
upx -9 ./alist-windows-amd64-upx.exe
mv alist-* build
}
BuildReleaseLinuxMusl() {
rm -rf .git/
mkdir -p "build"
muslflags="--extldflags '-static -fpic' $ldflags"
BASE="https://musl.nn.ci/"
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross mips-linux-musl-cross mips64-linux-musl-cross mips64el-linux-musl-cross mipsel-linux-musl-cross powerpc64le-linux-musl-cross s390x-linux-musl-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
curl -L -o "${i}.tgz" "${url}"
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
rm -f "${i}.tgz"
done
OS_ARCHES=(linux-musl-amd64 linux-musl-arm64 linux-musl-mips linux-musl-mips64 linux-musl-mips64le linux-musl-mipsle linux-musl-ppc64le linux-musl-s390x)
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc mips-linux-musl-gcc mips64-linux-musl-gcc mips64el-linux-musl-gcc mipsel-linux-musl-gcc powerpc64le-linux-musl-gcc s390x-linux-musl-gcc)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
echo building for ${os_arch}
export GOOS=${os_arch%%-*}
export GOARCH=${os_arch##*-}
export CC=${cgo_cc}
export CGO_ENABLED=1
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
}
BuildReleaseLinuxMuslArm() {
rm -rf .git/
mkdir -p "build"
muslflags="--extldflags '-static -fpic' $ldflags"
BASE="https://musl.nn.ci/"
# FILES=(arm-linux-musleabi-cross arm-linux-musleabihf-cross armeb-linux-musleabi-cross armeb-linux-musleabihf-cross armel-linux-musleabi-cross armel-linux-musleabihf-cross armv5l-linux-musleabi-cross armv5l-linux-musleabihf-cross armv6-linux-musleabi-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross armv7m-linux-musleabi-cross armv7r-linux-musleabihf-cross)
FILES=(arm-linux-musleabi-cross arm-linux-musleabihf-cross armel-linux-musleabi-cross armel-linux-musleabihf-cross armv5l-linux-musleabi-cross armv5l-linux-musleabihf-cross armv6-linux-musleabi-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross armv7m-linux-musleabi-cross armv7r-linux-musleabihf-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
curl -L -o "${i}.tgz" "${url}"
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
rm -f "${i}.tgz"
done
# OS_ARCHES=(linux-musleabi-arm linux-musleabihf-arm linux-musleabi-armeb linux-musleabihf-armeb linux-musleabi-armel linux-musleabihf-armel linux-musleabi-armv5l linux-musleabihf-armv5l linux-musleabi-armv6 linux-musleabihf-armv6 linux-musleabihf-armv7l linux-musleabi-armv7m linux-musleabihf-armv7r)
# CGO_ARGS=(arm-linux-musleabi-gcc arm-linux-musleabihf-gcc armeb-linux-musleabi-gcc armeb-linux-musleabihf-gcc armel-linux-musleabi-gcc armel-linux-musleabihf-gcc armv5l-linux-musleabi-gcc armv5l-linux-musleabihf-gcc armv6-linux-musleabi-gcc armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc armv7m-linux-musleabi-gcc armv7r-linux-musleabihf-gcc)
# GOARMS=('' '' '' '' '' '' '5' '5' '6' '6' '7' '7' '7')
OS_ARCHES=(linux-musleabi-arm linux-musleabihf-arm linux-musleabi-armel linux-musleabihf-armel linux-musleabi-armv5l linux-musleabihf-armv5l linux-musleabi-armv6 linux-musleabihf-armv6 linux-musleabihf-armv7l linux-musleabi-armv7m linux-musleabihf-armv7r)
CGO_ARGS=(arm-linux-musleabi-gcc arm-linux-musleabihf-gcc armel-linux-musleabi-gcc armel-linux-musleabihf-gcc armv5l-linux-musleabi-gcc armv5l-linux-musleabihf-gcc armv6-linux-musleabi-gcc armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc armv7m-linux-musleabi-gcc armv7r-linux-musleabihf-gcc)
GOARMS=('' '' '' '' '5' '5' '6' '6' '7' '7' '7')
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
arm=${GOARMS[$i]}
echo building for ${os_arch}
export GOOS=linux
export GOARCH=arm
export CC=${cgo_cc}
export CGO_ENABLED=1
export GOARM=${arm}
go build -o ./build/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
}
MakeRelease() {
cd build
mkdir compress
for i in $(find . -type f -name "$appName-linux-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-darwin-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-windows-*"); do
cp "$i" alist.exe
zip compress/$(echo $i | sed 's/\.[^.]*$//').zip alist.exe
rm -f alist.exe
done
cd compress
find . -type f -print0 | xargs -0 md5sum >"$1"
cat "$1"
cd ../..
}
if [ "$1" = "dev" ]; then
FetchWebDev
if [ "$2" = "docker" ]; then
BuildDocker
else
BuildDev
fi
elif [ "$1" = "release" ]; then
FetchWebRelease
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "linux_musl_arm" ]; then
BuildReleaseLinuxMuslArm
MakeRelease "md5-linux-musl-arm.txt"
elif [ "$2" = "linux_musl" ]; then
BuildReleaseLinuxMusl
MakeRelease "md5-linux-musl.txt"
else
BuildRelease
MakeRelease "md5.txt"
fi
else
echo -e "Parameter error"
fi
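For context, those `-X` flags are assumed to target plain package-level string variables in `internal/conf`; a minimal sketch of what that package presumably declares, with names taken from the flags above and from the `conf.*` references in `cmd/alist.go` further down:
```go
// Sketch only: variables expected to be overridden via -ldflags "-X ...".
package conf

var (
	BuiltAt    string // build timestamp, e.g. "2022-07-27 17:09:48 +0800"
	GoVersion  string // output of `go version`
	GitAuthor  string // "Xhofe <i@nn.ci>" in build.sh
	GitCommit  string // short hash from `git log -1`
	Version    string // latest backend tag, or "dev"
	WebVersion string // latest alist-web release tag, or "dev"
)
```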

@ -1,97 +0,0 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/spf13/cobra"
)
// AdminCmd represents the password command
var AdminCmd = &cobra.Command{
Use: "admin",
Aliases: []string{"password"},
Short: "Show admin user's info and some operations about admin user's password",
Run: func(cmd *cobra.Command, args []string) {
Init()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
} else {
utils.Log.Infof("Admin user's username: %s", admin.Username)
utils.Log.Infof("The password can only be output at the first startup, and then stored as a hash value, which cannot be reversed")
utils.Log.Infof("You can reset the password with a random string by running [alist admin random]")
utils.Log.Infof("You can also set a new password by running [alist admin set NEW_PASSWORD]")
}
},
}
var RandomPasswordCmd = &cobra.Command{
Use: "random",
Short: "Reset admin user's password to a random string",
Run: func(cmd *cobra.Command, args []string) {
newPwd := random.String(8)
setAdminPassword(newPwd)
},
}
var SetPasswordCmd = &cobra.Command{
Use: "set",
Short: "Set admin user's password",
Run: func(cmd *cobra.Command, args []string) {
if len(args) == 0 {
utils.Log.Errorf("Please enter the new password")
return
}
setAdminPassword(args[0])
},
}
var ShowTokenCmd = &cobra.Command{
Use: "token",
Short: "Show admin token",
Run: func(cmd *cobra.Command, args []string) {
Init()
token := setting.GetStr(conf.Token)
utils.Log.Infof("Admin token: %s", token)
},
}
func setAdminPassword(pwd string) {
Init()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
return
}
admin.SetPassword(pwd)
if err := op.UpdateUser(admin); err != nil {
utils.Log.Errorf("failed update admin user: %+v", err)
return
}
utils.Log.Infof("admin user has been updated:")
utils.Log.Infof("username: %s", admin.Username)
utils.Log.Infof("password: %s", pwd)
DelAdminCacheOnline()
}
func init() {
RootCmd.AddCommand(AdminCmd)
AdminCmd.AddCommand(RandomPasswordCmd)
AdminCmd.AddCommand(SetPasswordCmd)
AdminCmd.AddCommand(ShowTokenCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// passwordCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// passwordCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
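// Usage sketch for the commands defined above (illustrative, not part of the original file):
//   ./alist admin                     // print the admin username and password hints
//   ./alist admin random              // reset the password to a random 8-character string
//   ./alist admin set NEW_PASSWORD    // set an explicit password
//   ./alist admin token               // print the admin token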

cmd/alist.go Normal file

@ -0,0 +1,59 @@
package main
import (
"flag"
"fmt"
bootstrap2 "github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/server"
"os"
"github.com/alist-org/alist/v3/cmd/args"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
)
func init() {
flag.StringVar(&args.Config, "conf", "data/config.json", "config file")
flag.BoolVar(&args.Debug, "debug", false, "start with debug mode")
flag.BoolVar(&args.Version, "version", false, "print version info")
flag.BoolVar(&args.Password, "password", false, "print current password")
flag.BoolVar(&args.NoPrefix, "no-prefix", false, "disable env prefix")
flag.BoolVar(&args.Dev, "dev", false, "start with dev mode")
flag.Parse()
}
func Init() {
if args.Version {
fmt.Printf("Built At: %s\nGo Version: %s\nAuthor: %s\nCommit ID: %s\nVersion: %s\nWebVersion: %s\n",
conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.Version, conf.WebVersion)
os.Exit(0)
}
bootstrap2.InitConfig()
bootstrap2.Log()
bootstrap2.InitDB()
data.InitData()
bootstrap2.InitAria2()
}
func main() {
Init()
if !args.Debug && !args.Dev {
gin.SetMode(gin.ReleaseMode)
}
r := gin.New()
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
server.Init(r)
base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
log.Infof("start server @ %s", base)
var err error
if conf.Conf.Scheme.Https {
err = r.RunTLS(base, conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
} else {
err = r.Run(base)
}
if err != nil {
log.Errorf("failed to start: %s", err.Error())
}
}
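// Usage sketch for the flags registered in init above (illustrative):
//   ./alist -conf data/config.json -debug
//   ./alist -version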

cmd/args/config.go Normal file

@ -0,0 +1,10 @@
package args
var (
Config string // config file
Debug bool
Version bool
Password bool
NoPrefix bool
Dev bool
)


@ -1,45 +0,0 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/spf13/cobra"
)
// Cancel2FACmd represents the delete2fa command
var Cancel2FACmd = &cobra.Command{
Use: "cancel2fa",
Short: "Delete 2FA of admin user",
Run: func(cmd *cobra.Command, args []string) {
Init()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed to get admin user: %+v", err)
} else {
err := op.Cancel2FAByUser(admin)
if err != nil {
utils.Log.Errorf("failed to cancel 2FA: %+v", err)
} else {
utils.Log.Info("2FA canceled")
DelAdminCacheOnline()
}
}
},
}
func init() {
RootCmd.AddCommand(Cancel2FACmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// cancel2FACmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// cancel2FACmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}


@ -1,44 +0,0 @@
package cmd
import (
"os"
"path/filepath"
"strconv"
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
func Init() {
bootstrap.InitConfig()
bootstrap.Log()
bootstrap.InitDB()
data.InitData()
bootstrap.InitIndex()
}
var pid = -1
var pidFile string
func initDaemon() {
ex, err := os.Executable()
if err != nil {
log.Fatal(err)
}
exPath := filepath.Dir(ex)
_ = os.MkdirAll(filepath.Join(exPath, "daemon"), 0700)
pidFile = filepath.Join(exPath, "daemon/pid")
if utils.Exists(pidFile) {
bytes, err := os.ReadFile(pidFile)
if err != nil {
log.Fatal("failed to read pid file", err)
}
id, err := strconv.Atoi(string(bytes))
if err != nil {
log.Fatal("failed to parse pid data", err)
}
pid = id
}
}


@ -1,10 +0,0 @@
package flags
var (
DataDir string
Debug bool
NoPrefix bool
Dev bool
ForceBinDir bool
LogStd bool
)


@ -1,161 +0,0 @@
/*
Package cmd
Copyright © 2022 Noah Hsu<i@nn.ci>
*/
package cmd
import (
"fmt"
"io"
"os"
"reflect"
"strings"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
type KV[V any] map[string]V
type Drivers KV[KV[interface{}]]
func firstUpper(s string) string {
if s == "" {
return ""
}
return strings.ToUpper(s[:1]) + s[1:]
}
func convert(s string) string {
ss := strings.Split(s, "_")
ans := strings.Join(ss, " ")
return firstUpper(ans)
}
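// For example (illustrative): convert("order_direction") returns "Order direction";
// firstUpper only upper-cases the first byte, so convert("url") returns "Url".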
func writeFile(name string, data interface{}) {
f, err := os.Open(fmt.Sprintf("../alist-web/src/lang/en/%s.json", name))
if err != nil {
log.Errorf("failed to open %s.json: %+v", name, err)
return
}
defer f.Close()
content, err := io.ReadAll(f)
if err != nil {
log.Errorf("failed to read %s.json: %+v", name, err)
return
}
oldData := make(map[string]interface{})
newData := make(map[string]interface{})
err = utils.Json.Unmarshal(content, &oldData)
if err != nil {
log.Errorf("failed to unmarshal %s.json: %+v", name, err)
return
}
content, err = utils.Json.Marshal(data)
if err != nil {
log.Errorf("failed to marshal json: %+v", err)
return
}
err = utils.Json.Unmarshal(content, &newData)
if err != nil {
log.Errorf("failed to unmarshal json: %+v", err)
return
}
if reflect.DeepEqual(oldData, newData) {
log.Infof("%s.json no changed, skip", name)
} else {
log.Infof("%s.json changed, update file", name)
//log.Infof("old: %+v\nnew:%+v", oldData, data)
utils.WriteJsonToFile(fmt.Sprintf("lang/%s.json", name), newData, true)
}
}
func generateDriversJson() {
drivers := make(Drivers)
drivers["drivers"] = make(KV[interface{}])
drivers["config"] = make(KV[interface{}])
driverInfoMap := op.GetDriverInfoMap()
for k, v := range driverInfoMap {
drivers["drivers"][k] = convert(k)
items := make(KV[interface{}])
config := map[string]string{}
if v.Config.Alert != "" {
alert := strings.SplitN(v.Config.Alert, "|", 2)
if len(alert) > 1 {
config["alert"] = alert[1]
}
}
drivers["config"][k] = config
for i := range v.Additional {
item := v.Additional[i]
items[item.Name] = convert(item.Name)
if item.Help != "" {
items[fmt.Sprintf("%s-tips", item.Name)] = item.Help
}
if item.Type == conf.TypeSelect && len(item.Options) > 0 {
options := make(KV[string])
_options := strings.Split(item.Options, ",")
for _, o := range _options {
options[o] = convert(o)
}
items[fmt.Sprintf("%ss", item.Name)] = options
}
}
drivers[k] = items
}
writeFile("drivers", drivers)
}
func generateSettingsJson() {
settings := data.InitialSettings()
settingsLang := make(KV[any])
for _, setting := range settings {
settingsLang[setting.Key] = convert(setting.Key)
if setting.Help != "" {
settingsLang[fmt.Sprintf("%s-tips", setting.Key)] = setting.Help
}
if setting.Type == conf.TypeSelect && len(setting.Options) > 0 {
options := make(KV[string])
_options := strings.Split(setting.Options, ",")
for _, o := range _options {
options[o] = convert(o)
}
settingsLang[fmt.Sprintf("%ss", setting.Key)] = options
}
}
writeFile("settings", settingsLang)
//utils.WriteJsonToFile("lang/settings.json", settingsLang)
}
// LangCmd represents the lang command
var LangCmd = &cobra.Command{
Use: "lang",
Short: "Generate language json file",
Run: func(cmd *cobra.Command, args []string) {
err := os.MkdirAll("lang", 0777)
if err != nil {
utils.Log.Fatal("failed create folder: %s", err.Error())
}
generateDriversJson()
generateSettingsJson()
},
}
func init() {
RootCmd.AddCommand(LangCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// langCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// langCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}


@ -1,32 +0,0 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"github.com/spf13/cobra"
)
// RestartCmd represents the restart command
var RestartCmd = &cobra.Command{
Use: "restart",
Short: "Restart alist server by daemon/pid file",
Run: func(cmd *cobra.Command, args []string) {
stop()
start()
},
}
func init() {
RootCmd.AddCommand(RestartCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// restartCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// restartCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}


@ -1,33 +0,0 @@
package cmd
import (
"fmt"
"os"
"github.com/alist-org/alist/v3/cmd/flags"
"github.com/spf13/cobra"
)
var RootCmd = &cobra.Command{
Use: "alist",
Short: "A file list program that supports multiple storage.",
Long: `A file list program that supports multiple storage,
built with love by Xhofe and friends in Go/Solid.js.
Complete documentation is available at https://alist.nn.ci/`,
}
func Execute() {
if err := RootCmd.Execute(); err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
}
}
func init() {
RootCmd.PersistentFlags().StringVar(&flags.DataDir, "data", "data", "data folder")
RootCmd.PersistentFlags().BoolVar(&flags.Debug, "debug", false, "start with debug mode")
RootCmd.PersistentFlags().BoolVar(&flags.NoPrefix, "no-prefix", false, "disable env prefix")
RootCmd.PersistentFlags().BoolVar(&flags.Dev, "dev", false, "start with dev mode")
RootCmd.PersistentFlags().BoolVar(&flags.ForceBinDir, "force-bin-dir", false, "Force to use the directory where the binary file is located as data directory")
RootCmd.PersistentFlags().BoolVar(&flags.LogStd, "log-std", false, "Force to log to std")
}


@ -1,160 +0,0 @@
package cmd
import (
"context"
"fmt"
"net"
"net/http"
"os"
"os/signal"
"strconv"
"sync"
"syscall"
"time"
"github.com/alist-org/alist/v3/cmd/flags"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server"
"github.com/gin-gonic/gin"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// ServerCmd represents the server command
var ServerCmd = &cobra.Command{
Use: "server",
Short: "Start the server at the specified address",
Long: `Start the server at the specified address
the address is defined in the config file`,
Run: func(cmd *cobra.Command, args []string) {
Init()
if conf.Conf.DelayedStart != 0 {
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
}
bootstrap.InitAria2()
bootstrap.InitQbittorrent()
bootstrap.LoadStorages()
if !flags.Debug && !flags.Dev {
gin.SetMode(gin.ReleaseMode)
}
r := gin.New()
r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
server.Init(r)
var httpSrv, httpsSrv, unixSrv *http.Server
if conf.Conf.Scheme.HttpPort != -1 {
httpBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpPort)
utils.Log.Infof("start HTTP server @ %s", httpBase)
httpSrv = &http.Server{Addr: httpBase, Handler: r}
go func() {
err := httpSrv.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start http: %s", err.Error())
}
}()
}
if conf.Conf.Scheme.HttpsPort != -1 {
httpsBase := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.Scheme.HttpsPort)
utils.Log.Infof("start HTTPS server @ %s", httpsBase)
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
go func() {
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start https: %s", err.Error())
}
}()
}
if conf.Conf.Scheme.UnixFile != "" {
utils.Log.Infof("start unix server @ %s", conf.Conf.Scheme.UnixFile)
unixSrv = &http.Server{Handler: r}
go func() {
listener, err := net.Listen("unix", conf.Conf.Scheme.UnixFile)
if err != nil {
utils.Log.Fatalf("failed to listen unix: %+v", err)
}
// set socket file permission
mode, err := strconv.ParseUint(conf.Conf.Scheme.UnixFilePerm, 8, 32)
if err != nil {
utils.Log.Errorf("failed to parse socket file permission: %+v", err)
} else {
err = os.Chmod(conf.Conf.Scheme.UnixFile, os.FileMode(mode))
if err != nil {
utils.Log.Errorf("failed to chmod socket file: %+v", err)
}
}
err = unixSrv.Serve(listener)
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start unix: %s", err.Error())
}
}()
}
// Wait for interrupt signal to gracefully shutdown the server with
// a timeout of 1 second.
quit := make(chan os.Signal, 1)
// kill (no param) sends syscall.SIGTERM by default
// kill -2 is syscall.SIGINT
// kill -9 is syscall.SIGKILL, but it cannot be caught, so there is no need to add it
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit
utils.Log.Println("Shutdown server...")
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
defer cancel()
var wg sync.WaitGroup
if conf.Conf.Scheme.HttpPort != -1 {
wg.Add(1)
go func() {
defer wg.Done()
if err := httpSrv.Shutdown(ctx); err != nil {
utils.Log.Fatal("HTTP server shutdown err: ", err)
}
}()
}
if conf.Conf.Scheme.HttpsPort != -1 {
wg.Add(1)
go func() {
defer wg.Done()
if err := httpsSrv.Shutdown(ctx); err != nil {
utils.Log.Fatal("HTTPS server shutdown err: ", err)
}
}()
}
if conf.Conf.Scheme.UnixFile != "" {
wg.Add(1)
go func() {
defer wg.Done()
if err := unixSrv.Shutdown(ctx); err != nil {
utils.Log.Fatal("Unix server shutdown err: ", err)
}
}()
}
wg.Wait()
utils.Log.Println("Server exit")
},
}
func init() {
RootCmd.AddCommand(ServerCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// serverCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// serverCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
// OutAlistInit exposes a function for starting the server from external code
func OutAlistInit() {
var (
cmd *cobra.Command
args []string
)
ServerCmd.Run(cmd, args)
}


@ -1,71 +0,0 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"os"
"os/exec"
"path/filepath"
"strconv"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// StartCmd represents the start command
var StartCmd = &cobra.Command{
Use: "start",
Short: "Silent start alist server with `--force-bin-dir`",
Run: func(cmd *cobra.Command, args []string) {
start()
},
}
func start() {
initDaemon()
if pid != -1 {
_, err := os.FindProcess(pid)
if err == nil {
log.Info("alist already started, pid ", pid)
return
}
}
args := os.Args
args[1] = "server"
args = append(args, "--force-bin-dir")
cmd := &exec.Cmd{
Path: args[0],
Args: args,
Env: os.Environ(),
}
stdout, err := os.OpenFile(filepath.Join(filepath.Dir(pidFile), "start.log"), os.O_WRONLY|os.O_APPEND|os.O_CREATE, 0666)
if err != nil {
log.Fatal(os.Getpid(), ": failed to open start log file:", err)
}
cmd.Stderr = stdout
cmd.Stdout = stdout
err = cmd.Start()
if err != nil {
log.Fatal("failed to start children process: ", err)
}
log.Infof("success start pid: %d", cmd.Process.Pid)
err = os.WriteFile(pidFile, []byte(strconv.Itoa(cmd.Process.Pid)), 0666)
if err != nil {
log.Warn("failed to record pid, you may not be able to stop the program with `./alist stop`")
}
}
func init() {
RootCmd.AddCommand(StartCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// startCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// startCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}


@ -1,58 +0,0 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"os"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
// StopCmd represents the stop command
var StopCmd = &cobra.Command{
Use: "stop",
Short: "Stop alist server by daemon/pid file",
Run: func(cmd *cobra.Command, args []string) {
stop()
},
}
func stop() {
initDaemon()
if pid == -1 {
log.Info("Seems not have been started. Try use `alist start` to start server.")
return
}
process, err := os.FindProcess(pid)
if err != nil {
log.Errorf("failed to find process by pid: %d, reason: %v", pid, process)
return
}
err = process.Kill()
if err != nil {
log.Errorf("failed to kill process %d: %v", pid, err)
} else {
log.Info("killed process: ", pid)
}
err = os.Remove(pidFile)
if err != nil {
log.Errorf("failed to remove pid file")
}
pid = -1
}
func init() {
RootCmd.AddCommand(StopCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// stopCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// stopCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}


@ -1,161 +0,0 @@
/*
Copyright © 2023 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"os"
"strconv"
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/charmbracelet/bubbles/table"
tea "github.com/charmbracelet/bubbletea"
"github.com/charmbracelet/lipgloss"
"github.com/spf13/cobra"
)
// storageCmd represents the storage command
var storageCmd = &cobra.Command{
Use: "storage",
Short: "Manage storage",
}
var disableStorageCmd = &cobra.Command{
Use: "disable",
Short: "Disable a storage",
Run: func(cmd *cobra.Command, args []string) {
if len(args) < 1 {
utils.Log.Errorf("mount path is required")
return
}
mountPath := args[0]
Init()
storage, err := db.GetStorageByMountPath(mountPath)
if err != nil {
utils.Log.Errorf("failed to query storage: %+v", err)
} else {
storage.Disabled = true
err = db.UpdateStorage(storage)
if err != nil {
utils.Log.Errorf("failed to update storage: %+v", err)
} else {
utils.Log.Infof("Storage with mount path [%s] have been disabled", mountPath)
}
}
},
}
var baseStyle = lipgloss.NewStyle().
BorderStyle(lipgloss.NormalBorder()).
BorderForeground(lipgloss.Color("240"))
type model struct {
table table.Model
}
func (m model) Init() tea.Cmd { return nil }
func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) {
var cmd tea.Cmd
switch msg := msg.(type) {
case tea.KeyMsg:
switch msg.String() {
case "esc":
if m.table.Focused() {
m.table.Blur()
} else {
m.table.Focus()
}
case "q", "ctrl+c":
return m, tea.Quit
//case "enter":
// return m, tea.Batch(
// tea.Printf("Let's go to %s!", m.table.SelectedRow()[1]),
// )
}
}
m.table, cmd = m.table.Update(msg)
return m, cmd
}
func (m model) View() string {
return baseStyle.Render(m.table.View()) + "\n"
}
var storageTableHeight int
var listStorageCmd = &cobra.Command{
Use: "list",
Short: "List all storages",
Run: func(cmd *cobra.Command, args []string) {
Init()
storages, _, err := db.GetStorages(1, -1)
if err != nil {
utils.Log.Errorf("failed to query storages: %+v", err)
} else {
utils.Log.Infof("Found %d storages", len(storages))
columns := []table.Column{
{Title: "ID", Width: 4},
{Title: "Driver", Width: 16},
{Title: "Mount Path", Width: 30},
{Title: "Enabled", Width: 7},
}
var rows []table.Row
for i := range storages {
storage := storages[i]
enabled := "true"
if storage.Disabled {
enabled = "false"
}
rows = append(rows, table.Row{
strconv.Itoa(int(storage.ID)),
storage.Driver,
storage.MountPath,
enabled,
})
}
t := table.New(
table.WithColumns(columns),
table.WithRows(rows),
table.WithFocused(true),
table.WithHeight(storageTableHeight),
)
s := table.DefaultStyles()
s.Header = s.Header.
BorderStyle(lipgloss.NormalBorder()).
BorderForeground(lipgloss.Color("240")).
BorderBottom(true).
Bold(false)
s.Selected = s.Selected.
Foreground(lipgloss.Color("229")).
Background(lipgloss.Color("57")).
Bold(false)
t.SetStyles(s)
m := model{t}
if _, err := tea.NewProgram(m).Run(); err != nil {
utils.Log.Errorf("failed to run program: %+v", err)
os.Exit(1)
}
}
},
}
func init() {
RootCmd.AddCommand(storageCmd)
storageCmd.AddCommand(disableStorageCmd)
storageCmd.AddCommand(listStorageCmd)
storageCmd.PersistentFlags().IntVarP(&storageTableHeight, "height", "H", 10, "Table height")
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// storageCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// storageCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
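// Usage sketch (illustrative):
//   ./alist storage list -H 20            // render the interactive table with height 20
//   ./alist storage disable /mount/path   // mark the storage at that mount path as disabled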


@ -1,52 +0,0 @@
package cmd
import (
"crypto/tls"
"fmt"
"time"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
func DelAdminCacheOnline() {
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("[del_admin_cache] get admin error: %+v", err)
return
}
DelUserCacheOnline(admin.Username)
}
func DelUserCacheOnline(username string) {
client := resty.New().SetTimeout(1 * time.Second).SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
token := setting.GetStr(conf.Token)
port := conf.Conf.Scheme.HttpPort
u := fmt.Sprintf("http://localhost:%d/api/admin/user/del_cache", port)
if port == -1 {
if conf.Conf.Scheme.HttpsPort == -1 {
utils.Log.Warnf("[del_user_cache] no open port")
return
}
u = fmt.Sprintf("https://localhost:%d/api/admin/user/del_cache", conf.Conf.Scheme.HttpsPort)
}
res, err := client.R().SetHeader("Authorization", token).SetQueryParam("username", username).Post(u)
if err != nil {
utils.Log.Warnf("[del_user_cache_online] failed: %+v", err)
return
}
if res.StatusCode() != 200 {
utils.Log.Warnf("[del_user_cache_online] failed: %+v", res.String())
return
}
code := utils.Json.Get(res.Body(), "code").ToInt()
msg := utils.Json.Get(res.Body(), "message").ToString()
if code != 200 {
utils.Log.Errorf("[del_user_cache_online] error: %s", msg)
return
}
utils.Log.Debugf("[del_user_cache_online] del user [%s] cache success", username)
}


@ -1,43 +0,0 @@
/*
Copyright © 2022 NAME HERE <EMAIL ADDRESS>
*/
package cmd
import (
"fmt"
"os"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/spf13/cobra"
)
// VersionCmd represents the version command
var VersionCmd = &cobra.Command{
Use: "version",
Short: "Show current version of AList",
Run: func(cmd *cobra.Command, args []string) {
fmt.Printf(`Built At: %s
Go Version: %s
Author: %s
Commit ID: %s
Version: %s
WebVersion: %s
`,
conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.Version, conf.WebVersion)
os.Exit(0)
},
}
func init() {
RootCmd.AddCommand(VersionCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// versionCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// versionCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}


@ -1,16 +0,0 @@
version: '3.3'
services:
alist:
restart: always
volumes:
- '/etc/alist:/opt/alist/data'
ports:
- '5244:5244'
- '5245:5245'
environment:
- PUID=0
- PGID=0
- UMASK=022
- TZ=UTC
container_name: alist
image: 'xhofe/alist:latest'
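# Usage sketch (assumes this file is saved as docker-compose.yml):
#   docker compose up -d      # start alist on ports 5244/5245 with data persisted under /etc/alist
#   docker compose pull && docker compose up -d   # update to the latest image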


@ -1,149 +0,0 @@
package _115
import (
"context"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
"strings"
)
type Pan115 struct {
model.Storage
Addition
client *driver115.Pan115Client
}
func (d *Pan115) Config() driver.Config {
return config
}
func (d *Pan115) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan115) Init(ctx context.Context) error {
return d.login()
}
func (d *Pan115) Drop(ctx context.Context) error {
return nil
}
func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil && !errors.Is(err, driver115.ErrNotExist) {
return nil, err
}
return utils.SliceConvert(files, func(src FileObj) (model.Obj, error) {
return &src, nil
})
}
func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
downloadInfo, err := d.client.
SetUserAgent(driver115.UA115Browser).
Download(file.(*FileObj).PickCode)
// restore the desktop User-Agent for uploads
d.client.SetUserAgent(driver115.UA115Desktop)
if err != nil {
return nil, err
}
link := &model.Link{
URL: downloadInfo.Url.Url,
Header: downloadInfo.Header,
}
return link, nil
}
func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
return err
}
return nil
}
func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return d.client.Move(dstDir.GetID(), srcObj.GetID())
}
func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return d.client.Rename(srcObj.GetID(), newName)
}
func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return d.client.Copy(dstDir.GetID(), srcObj.GetID())
}
func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
return d.client.Delete(obj.GetID())
}
func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
var (
fastInfo *driver115.UploadInitResp
dirID = dstDir.GetID()
)
if ok, err := d.client.UploadAvailable(); err != nil || !ok {
return err
}
if stream.GetSize() > d.client.UploadMetaInfo.SizeLimit {
return driver115.ErrUploadTooLarge
}
//if digest, err = d.client.GetDigestResult(stream); err != nil {
// return err
//}
const PreHashSize int64 = 128 * utils.KB
hashSize := PreHashSize
if stream.GetSize() < PreHashSize {
hashSize = stream.GetSize()
}
reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: hashSize})
if err != nil {
return err
}
preHash, err := utils.HashReader(utils.SHA1, reader)
if err != nil {
return err
}
preHash = strings.ToUpper(preHash)
fullHash := stream.GetHash().GetHash(utils.SHA1)
if len(fullHash) <= 0 {
tmpF, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
fullHash, err = utils.HashFile(utils.SHA1, tmpF)
if err != nil {
return err
}
}
fullHash = strings.ToUpper(fullHash)
// rapid-upload
// note that 115 add timeout for rapid-upload,
// and "sig invalid" err is thrown even when the hash is correct after timeout.
if fastInfo, err = d.rapidUpload(stream.GetSize(), stream.GetName(), dirID, preHash, fullHash, stream); err != nil {
return err
}
if matched, err := fastInfo.Ok(); err != nil {
return err
} else if matched {
return nil
}
// rapid upload failed, fall back to a real upload
if stream.GetSize() <= utils.KB { // files of 1KB or less are uploaded in normal (non-multipart) mode
return d.client.UploadByOSS(&fastInfo.UploadOSSParams, stream, dirID)
}
// multipart upload
return d.UploadByMultipart(&fastInfo.UploadOSSParams, stream.GetSize(), stream, dirID)
}
var _ driver.Driver = (*Pan115)(nil)


@ -1,27 +0,0 @@
package _115
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
driver.RootID
}
var config = driver.Config{
Name: "115 Cloud",
DefaultRoot: "0",
OnlyProxy: true,
OnlyLocal: true,
NoOverwriteUpload: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan115{}
})
}


@ -1,22 +0,0 @@
package _115
import (
"github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"time"
)
var _ model.Obj = (*FileObj)(nil)
type FileObj struct {
driver.File
}
func (f *FileObj) CreateTime() time.Time {
return f.File.CreateTime
}
func (f *FileObj) GetHash() utils.HashInfo {
return utils.NewHashInfo(utils.SHA1, f.Sha1)
}


@ -1,422 +0,0 @@
package _115
import (
"bytes"
"crypto/tls"
"encoding/json"
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
"github.com/orzogc/fake115uploader/cipher"
"io"
"net/url"
"path/filepath"
"strconv"
"strings"
"sync"
"time"
"github.com/SheltonZhu/115driver/pkg/driver"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/pkg/errors"
)
var UserAgent = driver.UA115Desktop
func (d *Pan115) login() error {
var err error
opts := []driver.Option{
driver.UA(UserAgent),
func(c *driver.Pan115Client) {
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
},
}
d.client = driver.New(opts...)
cr := &driver.Credential{}
if d.Addition.QRCodeToken != "" {
s := &driver.QRCodeSession{
UID: d.Addition.QRCodeToken,
}
if cr, err = d.client.QRCodeLogin(s); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
d.Addition.QRCodeToken = ""
} else if d.Addition.Cookie != "" {
if err = cr.FromCookie(d.Addition.Cookie); err != nil {
return errors.Wrap(err, "failed to login by cookies")
}
d.client.ImportCredential(cr)
} else {
return errors.New("missing cookie or qrcode account")
}
return d.client.LoginCheck()
}
func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
res := make([]FileObj, 0)
if d.PageSize <= 0 {
d.PageSize = driver.FileListLimit
}
files, err := d.client.ListWithLimit(fileId, d.PageSize)
if err != nil {
return nil, err
}
for _, file := range *files {
res = append(res, FileObj{file})
}
return res, nil
}
const (
appVer = "2.0.3.6"
)
func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
var (
ecdhCipher *cipher.EcdhCipher
encrypted []byte
decrypted []byte
encodedToken string
err error
target = "U_1_" + dirID
bodyBytes []byte
result = driver115.UploadInitResp{}
fileSizeStr = strconv.FormatInt(fileSize, 10)
)
if ecdhCipher, err = cipher.NewEcdhCipher(); err != nil {
return nil, err
}
userID := strconv.FormatInt(d.client.UserID, 10)
form := url.Values{}
form.Set("appid", "0")
form.Set("appversion", appVer)
form.Set("userid", userID)
form.Set("filename", fileName)
form.Set("filesize", fileSizeStr)
form.Set("fileid", fileID)
form.Set("target", target)
form.Set("sig", d.client.GenerateSignature(fileID, target))
signKey, signVal := "", ""
for retry := true; retry; {
t := driver115.Now()
if encodedToken, err = ecdhCipher.EncodeToken(t.ToInt64()); err != nil {
return nil, err
}
params := map[string]string{
"k_ec": encodedToken,
}
form.Set("t", t.String())
form.Set("token", d.client.GenerateToken(fileID, preID, t.String(), fileSizeStr, signKey, signVal))
if signKey != "" && signVal != "" {
form.Set("sign_key", signKey)
form.Set("sign_val", signVal)
}
if encrypted, err = ecdhCipher.Encrypt([]byte(form.Encode())); err != nil {
return nil, err
}
req := d.client.NewRequest().
SetQueryParams(params).
SetBody(encrypted).
SetHeaderVerbatim("Content-Type", "application/x-www-form-urlencoded").
SetDoNotParseResponse(true)
resp, err := req.Post(driver115.ApiUploadInit)
if err != nil {
return nil, err
}
data := resp.RawBody()
defer data.Close()
if bodyBytes, err = io.ReadAll(data); err != nil {
return nil, err
}
if decrypted, err = ecdhCipher.Decrypt(bodyBytes); err != nil {
return nil, err
}
if err = driver115.CheckErr(json.Unmarshal(decrypted, &result), &result, resp); err != nil {
return nil, err
}
if result.Status == 7 {
// Update signKey & signVal
signKey = result.SignKey
signVal, err = UploadDigestRange(stream, result.SignCheck)
if err != nil {
return nil, err
}
} else {
retry = false
}
result.SHA1 = fileID
}
return &result, nil
}
func UploadDigestRange(stream model.FileStreamer, rangeSpec string) (result string, err error) {
var start, end int64
if _, err = fmt.Sscanf(rangeSpec, "%d-%d", &start, &end); err != nil {
return
}
length := end - start + 1
reader, err := stream.RangeRead(http_range.Range{Start: start, Length: length})
if err != nil {
return "", err
}
hashStr, err := utils.HashReader(utils.SHA1, reader)
if err != nil {
return "", err
}
result = strings.ToUpper(hashStr)
return
}
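// Worked example (illustrative): a sign_check range of "0-131071" reads the
// first 128 KB of the stream and returns its upper-cased SHA1 hex digest.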
// UploadByMultipart uploads the file in multipart blocks
func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize int64, stream model.FileStreamer, dirID string, opts ...driver115.UploadMultipartOption) error {
var (
chunks []oss.FileChunk
parts []oss.UploadPart
imur oss.InitiateMultipartUploadResult
ossClient *oss.Client
bucket *oss.Bucket
ossToken *driver115.UploadOSSTokenResp
err error
)
tmpF, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
options := driver115.DefalutUploadMultipartOptions()
if len(opts) > 0 {
for _, f := range opts {
f(options)
}
}
if ossToken, err = d.client.GetOSSToken(); err != nil {
return err
}
if ossClient, err = oss.New(driver115.OSSEndpoint, ossToken.AccessKeyID, ossToken.AccessKeySecret); err != nil {
return err
}
if bucket, err = ossClient.Bucket(params.Bucket); err != nil {
return err
}
// the ossToken expires after one hour, so it is refreshed every 50 minutes
ticker := time.NewTicker(options.TokenRefreshTime)
defer ticker.Stop()
// overall upload timeout
timeout := time.NewTimer(options.Timeout)
if chunks, err = SplitFile(fileSize); err != nil {
return err
}
if imur, err = bucket.InitiateMultipartUpload(params.Object,
oss.SetHeader(driver115.OssSecurityTokenHeaderName, ossToken.SecurityToken),
oss.UserAgentHeader(driver115.OSSUserAgent),
); err != nil {
return err
}
wg := sync.WaitGroup{}
wg.Add(len(chunks))
chunksCh := make(chan oss.FileChunk)
errCh := make(chan error)
UploadedPartsCh := make(chan oss.UploadPart)
quit := make(chan struct{})
// producer
go chunksProducer(chunksCh, chunks)
go func() {
wg.Wait()
quit <- struct{}{}
}()
// consumers
for i := 0; i < options.ThreadsNum; i++ {
go func(threadId int) {
defer func() {
if r := recover(); r != nil {
errCh <- fmt.Errorf("Recovered in %v", r)
}
}()
for chunk := range chunksCh {
var part oss.UploadPart // retry on error, up to 3 attempts in total
for retry := 0; retry < 3; retry++ {
select {
case <-ticker.C:
if ossToken, err = d.client.GetOSSToken(); err != nil { // refresh the ossToken when the ticker fires
errCh <- errors.Wrap(err, "error while refreshing the OSS token")
}
default:
}
buf := make([]byte, chunk.Size)
if _, err = tmpF.ReadAt(buf, chunk.Offset); err != nil && !errors.Is(err, io.EOF) {
continue
}
b := bytes.NewBuffer(buf)
if part, err = bucket.UploadPart(imur, b, chunk.Size, chunk.Number, driver115.OssOption(params, ossToken)...); err == nil {
break
}
}
if err != nil {
errCh <- errors.Wrap(err, fmt.Sprintf("failed to upload %s, part %d: %v", stream.GetName(), chunk.Number, err))
}
UploadedPartsCh <- part
}
}(i)
}
go func() {
for part := range UploadedPartsCh {
parts = append(parts, part)
wg.Done()
}
}()
LOOP:
for {
select {
case <-ticker.C:
// refresh the ossToken when the ticker fires
if ossToken, err = d.client.GetOSSToken(); err != nil {
return err
}
case <-quit:
break LOOP
case <-errCh:
return err
case <-timeout.C:
return fmt.Errorf("multipart upload timed out")
}
}
// the EOF error comes from unmarshalling the response (which is actually JSON) as XML, so the upload has in fact succeeded
if _, err = bucket.CompleteMultipartUpload(imur, parts, driver115.OssOption(params, ossToken)...); err != nil && !errors.Is(err, io.EOF) {
// when the file name contains & or <, parsing the XML response fails even though the upload succeeded
if filename := filepath.Base(stream.GetName()); !strings.ContainsAny(filename, "&<") {
return err
}
}
return d.checkUploadStatus(dirID, params.SHA1)
}
func chunksProducer(ch chan oss.FileChunk, chunks []oss.FileChunk) {
for _, chunk := range chunks {
ch <- chunk
}
}
func (d *Pan115) checkUploadStatus(dirID, sha1 string) error {
// verify that the upload succeeded
req := d.client.NewRequest().ForceContentType("application/json;charset=UTF-8")
opts := []driver115.GetFileOptions{
driver115.WithOrder(driver115.FileOrderByTime),
driver115.WithShowDirEnable(false),
driver115.WithAsc(false),
driver115.WithLimit(500),
}
fResp, err := driver115.GetFiles(req, dirID, opts...)
if err != nil {
return err
}
for _, fileInfo := range fResp.Files {
if fileInfo.Sha1 == sha1 {
return nil
}
}
return driver115.ErrUploadFailed
}
func SplitFile(fileSize int64) (chunks []oss.FileChunk, err error) {
for i := int64(1); i < 10; i++ {
if fileSize < i*utils.GB { // files smaller than i GB are split into i*1000 parts
if chunks, err = SplitFileByPartNum(fileSize, int(i*1000)); err != nil {
return
}
break
}
}
if fileSize > 9*utils.GB { // files larger than 9 GB are split into 10000 parts
if chunks, err = SplitFileByPartNum(fileSize, 10000); err != nil {
return
}
}
// a single part must not be smaller than 100 KB
if chunks[0].Size < 100*utils.KB {
if chunks, err = SplitFileByPartSize(fileSize, 100*utils.KB); err != nil {
return
}
}
return
}
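// Worked examples of the rules above (sizes are illustrative):
//   - a 2.5 GB file falls under the i=3 branch and is split into 3000 parts of roughly 875 KB;
//   - a 20 GB file exceeds 9 GB and is split into 10000 parts of roughly 2 MB;
//   - a 10 MB file would get 1000 parts of about 10 KB, below the 100 KB floor, so it is
//     re-split by SplitFileByPartSize into 100 KB parts instead.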
// SplitFileByPartNum splits a big file into the given number of parts.
// It returns the split result when error is nil.
func SplitFileByPartNum(fileSize int64, chunkNum int) ([]oss.FileChunk, error) {
if chunkNum <= 0 || chunkNum > 10000 {
return nil, errors.New("chunkNum invalid")
}
if int64(chunkNum) > fileSize {
return nil, errors.New("oss: chunkNum invalid")
}
var chunks []oss.FileChunk
var chunk = oss.FileChunk{}
var chunkN = (int64)(chunkNum)
for i := int64(0); i < chunkN; i++ {
chunk.Number = int(i + 1)
chunk.Offset = i * (fileSize / chunkN)
if i == chunkN-1 {
chunk.Size = fileSize/chunkN + fileSize%chunkN
} else {
chunk.Size = fileSize / chunkN
}
chunks = append(chunks, chunk)
}
return chunks, nil
}
// SplitFileByPartSize splits a big file into parts of the given size.
// It returns the resulting FileChunk slice when error is nil.
func SplitFileByPartSize(fileSize int64, chunkSize int64) ([]oss.FileChunk, error) {
if chunkSize <= 0 {
return nil, errors.New("chunkSize invalid")
}
var chunkN = fileSize / chunkSize
if chunkN >= 10000 {
return nil, errors.New("Too many parts, please increase part size")
}
var chunks []oss.FileChunk
var chunk = oss.FileChunk{}
for i := int64(0); i < chunkN; i++ {
chunk.Number = int(i + 1)
chunk.Offset = i * chunkSize
chunk.Size = chunkSize
chunks = append(chunks, chunk)
}
if fileSize%chunkSize > 0 {
chunk.Number = len(chunks) + 1
chunk.Offset = int64(len(chunks)) * chunkSize
chunk.Size = fileSize % chunkSize
chunks = append(chunks, chunk)
}
return chunks, nil
}


@ -1,253 +0,0 @@
package _123
import (
"context"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"fmt"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"net/url"
)
type Pan123 struct {
model.Storage
Addition
}
func (d *Pan123) Config() driver.Config {
return config
}
func (d *Pan123) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan123) Init(ctx context.Context) error {
_, err := d.request(UserInfo, http.MethodGet, nil, nil)
return err
}
func (d *Pan123) Drop(ctx context.Context) error {
_, _ = d.request(Logout, http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{})
}, nil)
return nil
}
func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return src, nil
})
}
func (d *Pan123) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if f, ok := file.(File); ok {
//var resp DownResp
var headers map[string]string
if !utils.IsLocalIPAddr(args.IP) {
headers = map[string]string{
//"X-Real-IP": "1.1.1.1",
"X-Forwarded-For": args.IP,
}
}
data := base.Json{
"driveId": 0,
"etag": f.Etag,
"fileId": f.FileId,
"fileName": f.FileName,
"s3keyFlag": f.S3KeyFlag,
"size": f.Size,
"type": f.Type,
}
resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetHeaders(headers)
}, nil)
if err != nil {
return nil, err
}
downloadUrl := utils.Json.Get(resp, "data", "DownloadUrl").ToString()
u, err := url.Parse(downloadUrl)
if err != nil {
return nil, err
}
nu := u.Query().Get("params")
if nu != "" {
du, _ := base64.StdEncoding.DecodeString(nu)
u, err = url.Parse(string(du))
if err != nil {
return nil, err
}
}
u_ := u.String()
log.Debug("download url: ", u_)
res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
if err != nil {
return nil, err
}
log.Debug(res.String())
link := model.Link{
URL: u_,
}
log.Debugln("res code: ", res.StatusCode())
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
} else if res.StatusCode() < 300 {
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
}
link.Header = http.Header{
"Referer": []string{"https://www.123pan.com/"},
}
return &link, nil
} else {
return nil, fmt.Errorf("can't convert obj")
}
}
func (d *Pan123) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"driveId": 0,
"etag": "",
"fileName": dirName,
"parentFileId": parentDir.GetID(),
"size": 0,
"type": 1,
}
_, err := d.request(Mkdir, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Pan123) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"fileIdList": []base.Json{{"FileId": srcObj.GetID()}},
"parentFileId": dstDir.GetID(),
}
_, err := d.request(Move, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Pan123) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := base.Json{
"driveId": 0,
"fileId": srcObj.GetID(),
"fileName": newName,
}
_, err := d.request(Rename, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *Pan123) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Pan123) Remove(ctx context.Context, obj model.Obj) error {
if f, ok := obj.(File); ok {
data := base.Json{
"driveId": 0,
"operation": true,
"fileTrashInfoList": []File{f},
}
_, err := d.request(Trash, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
} else {
return fmt.Errorf("can't convert obj")
}
}
func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
// const DEFAULT int64 = 10485760
h := md5.New()
// need to calculate md5 of the full content
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
}()
if _, err = io.Copy(h, tempFile); err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return err
}
etag := hex.EncodeToString(h.Sum(nil))
data := base.Json{
"driveId": 0,
"duplicate": 2, // 2->覆盖 1->重命名 0->默认
"etag": etag,
"fileName": stream.GetName(),
"parentFileId": dstDir.GetID(),
"size": stream.GetSize(),
"type": 0,
}
var resp UploadResp
res, err := d.request(UploadRequest, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, &resp)
if err != nil {
return err
}
log.Debugln("upload request res: ", string(res))
if resp.Data.Reuse || resp.Data.Key == "" {
return nil
}
if resp.Data.AccessKeyId == "" || resp.Data.SecretAccessKey == "" || resp.Data.SessionToken == "" {
err = d.newUpload(ctx, &resp, stream, tempFile, up)
return err
} else {
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(resp.Data.AccessKeyId, resp.Data.SecretAccessKey, resp.Data.SessionToken),
Region: aws.String("123pan"),
Endpoint: aws.String(resp.Data.EndPoint),
S3ForcePathStyle: aws.Bool(true),
}
s, err := session.NewSession(cfg)
if err != nil {
return err
}
uploader := s3manager.NewUploader(s)
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
Body: tempFile,
}
if _, err = uploader.UploadWithContext(ctx, input); err != nil {
return err
}
}
if err != nil {
return err
}
_, err = d.request(UploadComplete, http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileId": resp.Data.FileId,
}).SetContext(ctx)
}, nil)
return err
}
var _ driver.Driver = (*Pan123)(nil)


@ -1,26 +0,0 @@
package _123
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
AccessToken string
}
var config = driver.Config{
Name: "123Pan",
DefaultRoot: "0",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan123{}
})
}


@ -1,122 +0,0 @@
package _123
import (
"github.com/alist-org/alist/v3/pkg/utils"
"net/url"
"path"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type File struct {
FileName string `json:"FileName"`
Size int64 `json:"Size"`
UpdateAt time.Time `json:"UpdateAt"`
FileId int64 `json:"FileId"`
Type int `json:"Type"`
Etag string `json:"Etag"`
S3KeyFlag string `json:"S3KeyFlag"`
DownloadUrl string `json:"DownloadUrl"`
}
func (f File) CreateTime() time.Time {
return f.UpdateAt
}
func (f File) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (f File) GetPath() string {
return ""
}
func (f File) GetSize() int64 {
return f.Size
}
func (f File) GetName() string {
return f.FileName
}
func (f File) ModTime() time.Time {
return f.UpdateAt
}
func (f File) IsDir() bool {
return f.Type == 1
}
func (f File) GetID() string {
return strconv.FormatInt(f.FileId, 10)
}
func (f File) Thumb() string {
if f.DownloadUrl == "" {
return ""
}
du, err := url.Parse(f.DownloadUrl)
if err != nil {
return ""
}
du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
query := du.Query()
query.Set("w", "70")
query.Set("h", "70")
if !query.Has("type") {
query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
}
if !query.Has("trade_key") {
query.Set("trade_key", "123pan-thumbnail")
}
du.RawQuery = query.Encode()
return du.String()
}
var _ model.Obj = (*File)(nil)
var _ model.Thumb = (*File)(nil)
//func (f File) Thumb() string {
//
//}
//var _ model.Thumb = (*File)(nil)
type Files struct {
//BaseResp
Data struct {
InfoList []File `json:"InfoList"`
Next string `json:"Next"`
} `json:"data"`
}
//type DownResp struct {
// //BaseResp
// Data struct {
// DownloadUrl string `json:"DownloadUrl"`
// } `json:"data"`
//}
type UploadResp struct {
//BaseResp
Data struct {
AccessKeyId string `json:"AccessKeyId"`
Bucket string `json:"Bucket"`
Key string `json:"Key"`
SecretAccessKey string `json:"SecretAccessKey"`
SessionToken string `json:"SessionToken"`
FileId int64 `json:"FileId"`
Reuse bool `json:"Reuse"`
EndPoint string `json:"EndPoint"`
StorageNode string `json:"StorageNode"`
UploadId string `json:"UploadId"`
} `json:"data"`
}
type S3PreSignedURLs struct {
Data struct {
PreSignedUrls map[string]string `json:"presignedUrls"`
} `json:"data"`
}


@ -1,155 +0,0 @@
package _123
import (
"context"
"fmt"
"io"
"math"
"net/http"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
func (d *Pan123) getS3PreSignedUrls(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
data := base.Json{
"bucket": upReq.Data.Bucket,
"key": upReq.Data.Key,
"partNumberEnd": end,
"partNumberStart": start,
"uploadId": upReq.Data.UploadId,
"StorageNode": upReq.Data.StorageNode,
}
var s3PreSignedUrls S3PreSignedURLs
_, err := d.request(S3PreSignedUrls, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, &s3PreSignedUrls)
if err != nil {
return nil, err
}
return &s3PreSignedUrls, nil
}
func (d *Pan123) getS3Auth(ctx context.Context, upReq *UploadResp, start, end int) (*S3PreSignedURLs, error) {
data := base.Json{
"StorageNode": upReq.Data.StorageNode,
"bucket": upReq.Data.Bucket,
"key": upReq.Data.Key,
"partNumberEnd": end,
"partNumberStart": start,
"uploadId": upReq.Data.UploadId,
}
var s3PreSignedUrls S3PreSignedURLs
_, err := d.request(S3Auth, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, &s3PreSignedUrls)
if err != nil {
return nil, err
}
return &s3PreSignedUrls, nil
}
func (d *Pan123) completeS3(ctx context.Context, upReq *UploadResp, file model.FileStreamer, isMultipart bool) error {
data := base.Json{
"StorageNode": upReq.Data.StorageNode,
"bucket": upReq.Data.Bucket,
"fileId": upReq.Data.FileId,
"fileSize": file.GetSize(),
"isMultipart": isMultipart,
"key": upReq.Data.Key,
"uploadId": upReq.Data.UploadId,
}
_, err := d.request(UploadCompleteV2, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetContext(ctx)
}, nil)
return err
}
func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.FileStreamer, reader io.Reader, up driver.UpdateProgress) error {
chunkSize := int64(1024 * 1024 * 16)
// fetch s3 pre signed urls
chunkCount := int(math.Ceil(float64(file.GetSize()) / float64(chunkSize)))
// only 1 batch is allowed
isMultipart := chunkCount > 1
batchSize := 1
getS3UploadUrl := d.getS3Auth
if isMultipart {
batchSize = 10
getS3UploadUrl = d.getS3PreSignedUrls
}
for i := 1; i <= chunkCount; i += batchSize {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
start := i
end := i + batchSize
if end > chunkCount+1 {
end = chunkCount + 1
}
s3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, start, end)
if err != nil {
return err
}
// upload each chunk
for j := start; j < end; j++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
curSize := chunkSize
if j == chunkCount {
curSize = file.GetSize() - (int64(chunkCount)-1)*chunkSize
}
err = d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, j, end, io.LimitReader(reader, chunkSize), curSize, false, getS3UploadUrl)
if err != nil {
return err
}
up(j * 100 / chunkCount)
}
}
// complete s3 upload
return d.completeS3(ctx, upReq, file, chunkCount > 1)
}
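// Worked example (illustrative): with the fixed 16 MiB chunkSize, a 100 MiB file
// gives chunkCount = ceil(100/16) = 7, so isMultipart is true, batchSize is 10,
// a single batch of pre-signed URLs covers parts 1..7, and the last part carries
// the remaining 100 - 6*16 = 4 MiB.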
func (d *Pan123) uploadS3Chunk(ctx context.Context, upReq *UploadResp, s3PreSignedUrls *S3PreSignedURLs, cur, end int, reader io.Reader, curSize int64, retry bool, getS3UploadUrl func(ctx context.Context, upReq *UploadResp, start int, end int) (*S3PreSignedURLs, error)) error {
uploadUrl := s3PreSignedUrls.Data.PreSignedUrls[strconv.Itoa(cur)]
if uploadUrl == "" {
return fmt.Errorf("upload url is empty, s3PreSignedUrls: %+v", s3PreSignedUrls)
}
req, err := http.NewRequest("PUT", uploadUrl, reader)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.ContentLength = curSize
//req.Header.Set("Content-Length", strconv.FormatInt(curSize, 10))
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode == http.StatusForbidden {
if retry {
return fmt.Errorf("upload s3 chunk %d failed, status code: %d", cur, res.StatusCode)
}
// refresh s3 pre signed urls
newS3PreSignedUrls, err := getS3UploadUrl(ctx, upReq, cur, end)
if err != nil {
return err
}
s3PreSignedUrls.Data.PreSignedUrls = newS3PreSignedUrls.Data.PreSignedUrls
// retry
return d.uploadS3Chunk(ctx, upReq, s3PreSignedUrls, cur, end, reader, curSize, true, getS3UploadUrl)
}
if res.StatusCode != http.StatusOK {
body, err := io.ReadAll(res.Body)
if err != nil {
return err
}
return fmt.Errorf("upload s3 chunk %d failed, status code: %d, body: %s", cur, res.StatusCode, body)
}
return nil
}


@ -1,159 +0,0 @@
package _123
import (
"errors"
"fmt"
"net/http"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
)
// do other things that are not defined in the Driver interface
const (
Api = "https://www.123pan.com/api"
AApi = "https://www.123pan.com/a/api"
BApi = "https://www.123pan.com/b/api"
MainApi = Api
SignIn = MainApi + "/user/sign_in"
Logout = MainApi + "/user/logout"
UserInfo = MainApi + "/user/info"
FileList = MainApi + "/file/list/new"
DownloadInfo = MainApi + "/file/download_info"
Mkdir = MainApi + "/file/upload_request"
Move = MainApi + "/file/mod_pid"
Rename = MainApi + "/file/rename"
Trash = MainApi + "/file/trash"
UploadRequest = MainApi + "/file/upload_request"
UploadComplete = MainApi + "/file/upload_complete"
S3PreSignedUrls = MainApi + "/file/s3_repare_upload_parts_batch"
S3Auth = MainApi + "/file/s3_upload_object/auth"
UploadCompleteV2 = MainApi + "/file/upload_complete/v2"
S3Complete = MainApi + "/file/s3_complete_multipart_upload"
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
)
func (d *Pan123) login() error {
var body base.Json
if utils.IsEmailFormat(d.Username) {
body = base.Json{
"mail": d.Username,
"password": d.Password,
"type": 2,
}
} else {
body = base.Json{
"passport": d.Username,
"password": d.Password,
"remember": true,
}
}
res, err := base.RestyClient.R().
SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
//"user-agent": base.UserAgent,
}).
SetBody(body).Post(SignIn)
if err != nil {
return err
}
if utils.Json.Get(res.Body(), "code").ToInt() != 200 {
err = errors.New(utils.Json.Get(res.Body(), "message").ToString())
} else {
d.AccessToken = utils.Json.Get(res.Body(), "data", "token").ToString()
}
return err
}
//func authKey(reqUrl string) (*string, error) {
// reqURL, err := url.Parse(reqUrl)
// if err != nil {
// return nil, err
// }
//
// nowUnix := time.Now().Unix()
// random := rand.Intn(0x989680)
//
// p4 := fmt.Sprintf("%d|%d|%s|%s|%s|%s", nowUnix, random, reqURL.Path, "web", "3", AuthKeySalt)
// authKey := fmt.Sprintf("%d-%d-%x", nowUnix, random, md5.Sum([]byte(p4)))
// return &authKey, nil
//}
func (d *Pan123) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"authorization": "Bearer " + d.AccessToken,
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
//"user-agent": base.UserAgent,
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
//authKey, err := authKey(url)
//if err != nil {
// return nil, err
//}
//req.SetQueryParam("auth-key", *authKey)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
body := res.Body()
code := utils.Json.Get(body, "code").ToInt()
if code != 0 {
if code == 401 {
err := d.login()
if err != nil {
return nil, err
}
return d.request(url, method, callback, resp)
}
return nil, errors.New(jsoniter.Get(body, "message").ToString())
}
return body, nil
}
func (d *Pan123) getFiles(parentId string) ([]File, error) {
page := 1
res := make([]File, 0)
for {
var resp Files
query := map[string]string{
"driveId": "0",
"limit": "100",
"next": "0",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"trashed": "false",
"Page": strconv.Itoa(page),
}
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
page++
res = append(res, resp.Data.InfoList...)
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
break
}
}
return res, nil
}


@@ -1,149 +0,0 @@
package _123Share
import (
"context"
"encoding/base64"
"fmt"
"net/http"
"net/url"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Pan123Share struct {
model.Storage
Addition
}
func (d *Pan123Share) Config() driver.Config {
return config
}
func (d *Pan123Share) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan123Share) Init(ctx context.Context) error {
// TODO login / refresh token
//op.MustSaveDriverStorage(d)
return nil
}
func (d *Pan123Share) Drop(ctx context.Context) error {
return nil
}
func (d *Pan123Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
// TODO return the files list, required
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return src, nil
})
}
func (d *Pan123Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
// TODO return link of file, required
if f, ok := file.(File); ok {
//var resp DownResp
var headers map[string]string
if !utils.IsLocalIPAddr(args.IP) {
headers = map[string]string{
//"X-Real-IP": "1.1.1.1",
"X-Forwarded-For": args.IP,
}
}
data := base.Json{
"shareKey": d.ShareKey,
"SharePwd": d.SharePwd,
"etag": f.Etag,
"fileId": f.FileId,
"s3keyFlag": f.S3KeyFlag,
"size": f.Size,
}
resp, err := d.request(DownloadInfo, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetHeaders(headers)
}, nil)
if err != nil {
return nil, err
}
downloadUrl := utils.Json.Get(resp, "data", "DownloadURL").ToString()
u, err := url.Parse(downloadUrl)
if err != nil {
return nil, err
}
nu := u.Query().Get("params")
if nu != "" {
du, _ := base64.StdEncoding.DecodeString(nu)
u, err = url.Parse(string(du))
if err != nil {
return nil, err
}
}
u_ := u.String()
log.Debug("download url: ", u_)
res, err := base.NoRedirectClient.R().SetHeader("Referer", "https://www.123pan.com/").Get(u_)
if err != nil {
return nil, err
}
log.Debug(res.String())
link := model.Link{
URL: u_,
}
log.Debugln("res code: ", res.StatusCode())
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
} else if res.StatusCode() < 300 {
link.URL = utils.Json.Get(res.Body(), "data", "redirect_url").ToString()
}
link.Header = http.Header{
"Referer": []string{"https://www.123pan.com/"},
}
return &link, nil
}
return nil, fmt.Errorf("can't convert obj")
}
func (d *Pan123Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
// TODO create folder, optional
return errs.NotSupport
}
func (d *Pan123Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
// TODO move obj, optional
return errs.NotSupport
}
func (d *Pan123Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
// TODO rename obj, optional
return errs.NotSupport
}
func (d *Pan123Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
// TODO copy obj, optional
return errs.NotSupport
}
func (d *Pan123Share) Remove(ctx context.Context, obj model.Obj) error {
// TODO remove obj, optional
return errs.NotSupport
}
func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
// TODO upload file, optional
return errs.NotSupport
}
//func (d *Pan123Share) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*Pan123Share)(nil)


@@ -1,34 +0,0 @@
package _123Share
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
ShareKey string `json:"sharekey" required:"true"`
SharePwd string `json:"sharepassword" required:"true"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
}
var config = driver.Config{
Name: "123PanShare",
LocalSort: true,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: true,
NeedMs: false,
DefaultRoot: "0",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: false,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan123Share{}
})
}


@@ -1,99 +0,0 @@
package _123Share
import (
"github.com/alist-org/alist/v3/pkg/utils"
"net/url"
"path"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type File struct {
FileName string `json:"FileName"`
Size int64 `json:"Size"`
UpdateAt time.Time `json:"UpdateAt"`
FileId int64 `json:"FileId"`
Type int `json:"Type"`
Etag string `json:"Etag"`
S3KeyFlag string `json:"S3KeyFlag"`
DownloadUrl string `json:"DownloadUrl"`
}
func (f File) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (f File) GetPath() string {
return ""
}
func (f File) GetSize() int64 {
return f.Size
}
func (f File) GetName() string {
return f.FileName
}
func (f File) ModTime() time.Time {
return f.UpdateAt
}
func (f File) CreateTime() time.Time {
return f.UpdateAt
}
func (f File) IsDir() bool {
return f.Type == 1
}
func (f File) GetID() string {
return strconv.FormatInt(f.FileId, 10)
}
func (f File) Thumb() string {
if f.DownloadUrl == "" {
return ""
}
du, err := url.Parse(f.DownloadUrl)
if err != nil {
return ""
}
du.Path = strings.TrimSuffix(du.Path, "_24_24") + "_70_70"
query := du.Query()
query.Set("w", "70")
query.Set("h", "70")
if !query.Has("type") {
query.Set("type", strings.TrimPrefix(path.Base(f.FileName), "."))
}
if !query.Has("trade_key") {
query.Set("trade_key", "123pan-thumbnail")
}
du.RawQuery = query.Encode()
return du.String()
}
var _ model.Obj = (*File)(nil)
var _ model.Thumb = (*File)(nil)
//func (f File) Thumb() string {
//
//}
//var _ model.Thumb = (*File)(nil)
type Files struct {
//BaseResp
Data struct {
InfoList []File `json:"InfoList"`
Next string `json:"Next"`
} `json:"data"`
}
//type DownResp struct {
// //BaseResp
// Data struct {
// DownloadUrl string `json:"DownloadUrl"`
// } `json:"data"`
//}


@@ -1,81 +0,0 @@
package _123Share
import (
"errors"
"net/http"
"strconv"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
)
const (
Api = "https://www.123pan.com/api"
AApi = "https://www.123pan.com/a/api"
BApi = "https://www.123pan.com/b/api"
MainApi = Api
FileList = MainApi + "/share/get"
DownloadInfo = MainApi + "/share/download/info"
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
)
func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
body := res.Body()
code := utils.Json.Get(body, "code").ToInt()
if code != 0 {
return nil, errors.New(jsoniter.Get(body, "message").ToString())
}
return body, nil
}
func (d *Pan123Share) getFiles(parentId string) ([]File, error) {
page := 1
res := make([]File, 0)
for {
var resp Files
query := map[string]string{
"limit": "100",
"next": "0",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"Page": strconv.Itoa(page),
"shareKey": d.ShareKey,
"SharePwd": d.SharePwd,
}
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
page++
res = append(res, resp.Data.InfoList...)
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
break
}
}
return res, nil
}
// do others that are not defined in the Driver interface


@@ -1,347 +0,0 @@
package _139
import (
"context"
"encoding/base64"
"fmt"
"io"
"net/http"
"strconv"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
type Yun139 struct {
model.Storage
Addition
Account string
}
func (d *Yun139) Config() driver.Config {
return config
}
func (d *Yun139) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Yun139) Init(ctx context.Context) error {
if d.Authorization == "" {
return fmt.Errorf("authorization is empty")
}
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
return err
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
if len(splits) < 2 {
return fmt.Errorf("authorization is invalid, splits < 2")
}
d.Account = splits[1]
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
"qryUserExternInfoReq": base.Json{
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}, nil)
return err
}
func (d *Yun139) Drop(ctx context.Context) error {
return nil
}
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.isFamily() {
return d.familyGetFiles(dir.GetID())
} else {
return d.getFiles(dir.GetID())
}
}
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
u, err := d.getLink(file.GetID())
if err != nil {
return nil, err
}
return &model.Link{URL: u}, nil
}
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"createCatalogExtReq": base.Json{
"parentCatalogID": parentDir.GetID(),
"newCatalogName": dirName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
if d.isFamily() {
data = base.Json{
"cloudID": d.CloudID,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"docLibName": dirName,
}
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": "304",
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if d.isFamily() {
return errs.NotImplement
}
var data base.Json
var pathname string
if srcObj.IsDir() {
data = base.Json{
"catalogID": srcObj.GetID(),
"catalogName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
} else {
data = base.Json{
"contentID": srcObj.GetID(),
"contentName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": 309,
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
var contentInfoList []string
var catalogInfoList []string
if obj.IsDir() {
catalogInfoList = append(catalogInfoList, obj.GetID())
} else {
contentInfoList = append(contentInfoList, obj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 2,
"actionType": 201,
"taskInfo": base.Json{
"newCatalogID": "",
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
if d.isFamily() {
data = base.Json{
"catalogList": catalogInfoList,
"contentList": contentInfoList,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"sourceCatalogType": 1002,
"taskType": 2,
}
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
}
_, err := d.post(pathname, data, nil)
return err
}
const (
_ = iota //ignore first value by assigning to blank identifier
KB = 1 << (10 * iota)
MB
GB
TB
)
func getPartSize(size int64) int64 {
// the cloud drive caps the number of parts per upload, so larger files use larger parts
if size/GB > 30 {
return 512 * MB
}
return 100 * MB
}
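
getPartSize exists because the remote drive caps how many parts an upload may have, so larger files get larger parts. A small illustrative helper (not a driver function) showing the resulting part counts:
package sketch

// partCount mirrors how the Put loop above derives the number of parts.
func partCount(size, partSize int64) int64 {
	n := (size + partSize - 1) / partSize // ceiling division
	if n == 0 {
		n = 1 // the driver still sends one (empty) part for zero-byte files
	}
	return n
}

// For example: a 20 GiB file stays below the 30 GiB threshold, so 100 MiB parts
// give 205 parts; a 40 GiB file switches to 512 MiB parts and needs only 80.
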
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
data := base.Json{
"manualRename": 2,
"operation": 0,
"fileCount": 1,
"totalSize": 0, // report 0 to bypass the upload size limit
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0, // report 0 to bypass the upload size limit
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
data = d.newJson(base.Json{
"fileCount": 1,
"manualRename": 2,
"operation": 0,
"path": "",
"seqNo": "",
"totalSize": 0,
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0,
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
})
pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
return errs.NotImplement
}
var resp UploadResp
_, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
var partSize = getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
}
for i := int64(0); i < part; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
start := i * partSize
byteSize := stream.GetSize() - start
if byteSize > partSize {
byteSize = partSize
}
limitReader := io.LimitReader(stream, byteSize)
// Update Progress
r := io.TeeReader(limitReader, p)
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
req.Header.Set("rangeType", "0")
req.ContentLength = byteSize
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()
log.Debugf("%+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
}
}
return nil
}
var _ driver.Driver = (*Yun139)(nil)


@@ -1,25 +0,0 @@
package _139
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
//Account string `json:"account" required:"true"`
Authorization string `json:"authorization" type:"text" required:"true"`
driver.RootID
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
CloudID string `json:"cloud_id"`
}
var config = driver.Config{
Name: "139Yun",
LocalSort: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Yun139{}
})
}


@@ -1,187 +0,0 @@
package _139
type BaseResp struct {
Success bool `json:"success"`
Code string `json:"code"`
Message string `json:"message"`
}
type Catalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CatalogType int `json:"catalogType"`
//CreateTime string `json:"createTime"`
UpdateTime string `json:"updateTime"`
//IsShared bool `json:"isShared"`
//CatalogLevel int `json:"catalogLevel"`
//ShareDoneeCount int `json:"shareDoneeCount"`
//OpenType int `json:"openType"`
//ParentCatalogID string `json:"parentCatalogId"`
//DirEtag int `json:"dirEtag"`
//Tombstoned int `json:"tombstoned"`
//ProxyID interface{} `json:"proxyID"`
//Moved int `json:"moved"`
//IsFixedDir int `json:"isFixedDir"`
//IsSynced interface{} `json:"isSynced"`
//Owner string `json:"owner"`
//Modifier interface{} `json:"modifier"`
//Path string `json:"path"`
//ShareType int `json:"shareType"`
//SoftLink interface{} `json:"softLink"`
//ExtProp1 interface{} `json:"extProp1"`
//ExtProp2 interface{} `json:"extProp2"`
//ExtProp3 interface{} `json:"extProp3"`
//ExtProp4 interface{} `json:"extProp4"`
//ExtProp5 interface{} `json:"extProp5"`
//ETagOprType int `json:"ETagOprType"`
}
type Content struct {
ContentID string `json:"contentID"`
ContentName string `json:"contentName"`
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//ContentType int `json:"contentType"`
//ContentOrigin int `json:"contentOrigin"`
UpdateTime string `json:"updateTime"`
//CommentCount int `json:"commentCount"`
ThumbnailURL string `json:"thumbnailURL"`
//BigthumbnailURL string `json:"bigthumbnailURL"`
//PresentURL string `json:"presentURL"`
//PresentLURL string `json:"presentLURL"`
//PresentHURL string `json:"presentHURL"`
//ContentTAGList interface{} `json:"contentTAGList"`
//ShareDoneeCount int `json:"shareDoneeCount"`
//Safestate int `json:"safestate"`
//Transferstate int `json:"transferstate"`
//IsFocusContent int `json:"isFocusContent"`
//UpdateShareTime interface{} `json:"updateShareTime"`
//UploadTime string `json:"uploadTime"`
//OpenType int `json:"openType"`
//AuditResult int `json:"auditResult"`
//ParentCatalogID string `json:"parentCatalogId"`
//Channel string `json:"channel"`
//GeoLocFlag string `json:"geoLocFlag"`
//Digest string `json:"digest"`
//Version string `json:"version"`
//FileEtag string `json:"fileEtag"`
//FileVersion string `json:"fileVersion"`
//Tombstoned int `json:"tombstoned"`
//ProxyID string `json:"proxyID"`
//Moved int `json:"moved"`
//MidthumbnailURL string `json:"midthumbnailURL"`
//Owner string `json:"owner"`
//Modifier string `json:"modifier"`
//ShareType int `json:"shareType"`
//ExtInfo struct {
// Uploader string `json:"uploader"`
// Address string `json:"address"`
//} `json:"extInfo"`
//Exif struct {
// CreateTime string `json:"createTime"`
// Longitude interface{} `json:"longitude"`
// Latitude interface{} `json:"latitude"`
// LocalSaveTime interface{} `json:"localSaveTime"`
//} `json:"exif"`
//CollectionFlag interface{} `json:"collectionFlag"`
//TreeInfo interface{} `json:"treeInfo"`
//IsShared bool `json:"isShared"`
//ETagOprType int `json:"ETagOprType"`
}
type GetDiskResp struct {
BaseResp
Data struct {
Result struct {
ResultCode string `json:"resultCode"`
ResultDesc interface{} `json:"resultDesc"`
} `json:"result"`
GetDiskResult struct {
ParentCatalogID string `json:"parentCatalogID"`
NodeCount int `json:"nodeCount"`
CatalogList []Catalog `json:"catalogList"`
ContentList []Content `json:"contentList"`
IsCompleted int `json:"isCompleted"`
} `json:"getDiskResult"`
} `json:"data"`
}
type UploadResp struct {
BaseResp
Data struct {
Result struct {
ResultCode string `json:"resultCode"`
ResultDesc interface{} `json:"resultDesc"`
} `json:"result"`
UploadResult struct {
UploadTaskID string `json:"uploadTaskID"`
RedirectionURL string `json:"redirectionUrl"`
NewContentIDList []struct {
ContentID string `json:"contentID"`
ContentName string `json:"contentName"`
IsNeedUpload string `json:"isNeedUpload"`
FileEtag int64 `json:"fileEtag"`
FileVersion int64 `json:"fileVersion"`
OverridenFlag int `json:"overridenFlag"`
} `json:"newContentIDList"`
CatalogIDList interface{} `json:"catalogIDList"`
IsSlice interface{} `json:"isSlice"`
} `json:"uploadResult"`
} `json:"data"`
}
type CloudContent struct {
ContentID string `json:"contentID"`
//Modifier string `json:"modifier"`
//Nickname string `json:"nickname"`
//CloudNickName string `json:"cloudNickName"`
ContentName string `json:"contentName"`
//ContentType int `json:"contentType"`
//ContentSuffix string `json:"contentSuffix"`
ContentSize int64 `json:"contentSize"`
//ContentDesc string `json:"contentDesc"`
//CreateTime string `json:"createTime"`
//Shottime interface{} `json:"shottime"`
LastUpdateTime string `json:"lastUpdateTime"`
ThumbnailURL string `json:"thumbnailURL"`
//MidthumbnailURL string `json:"midthumbnailURL"`
//BigthumbnailURL string `json:"bigthumbnailURL"`
//PresentURL string `json:"presentURL"`
//PresentLURL string `json:"presentLURL"`
//PresentHURL string `json:"presentHURL"`
//ParentCatalogID string `json:"parentCatalogID"`
//Uploader string `json:"uploader"`
//UploaderNickName string `json:"uploaderNickName"`
//TreeInfo interface{} `json:"treeInfo"`
//UpdateTime interface{} `json:"updateTime"`
//ExtInfo struct {
// Uploader string `json:"uploader"`
//} `json:"extInfo"`
//EtagOprType interface{} `json:"etagOprType"`
}
type CloudCatalog struct {
CatalogID string `json:"catalogID"`
CatalogName string `json:"catalogName"`
//CloudID string `json:"cloudID"`
//CreateTime string `json:"createTime"`
LastUpdateTime string `json:"lastUpdateTime"`
//Creator string `json:"creator"`
//CreatorNickname string `json:"creatorNickname"`
}
type QueryContentListResp struct {
BaseResp
Data struct {
Result struct {
ResultCode string `json:"resultCode"`
ResultDesc string `json:"resultDesc"`
} `json:"result"`
Path string `json:"path"`
CloudContentList []CloudContent `json:"cloudContentList"`
CloudCatalogList []CloudCatalog `json:"cloudCatalogList"`
TotalCount int `json:"totalCount"`
RecallContent interface{} `json:"recallContent"`
} `json:"data"`
}


@@ -1,250 +0,0 @@
package _139
import (
"encoding/base64"
"errors"
"fmt"
"net/http"
"net/url"
"sort"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
// do others that are not defined in the Driver interface
func (d *Yun139) isFamily() bool {
return d.Type == "family"
}
func encodeURIComponent(str string) string {
r := url.QueryEscape(str)
r = strings.Replace(r, "+", "%20", -1)
r = strings.Replace(r, "%21", "!", -1)
r = strings.Replace(r, "%27", "'", -1)
r = strings.Replace(r, "%28", "(", -1)
r = strings.Replace(r, "%29", ")", -1)
r = strings.Replace(r, "%2A", "*", -1)
return r
}
func calSign(body, ts, randStr string) string {
body = encodeURIComponent(body)
strs := strings.Split(body, "")
sort.Strings(strs)
body = strings.Join(strs, "")
body = base64.StdEncoding.EncodeToString([]byte(body))
res := utils.GetMD5EncodeStr(body) + utils.GetMD5EncodeStr(ts+":"+randStr)
res = strings.ToUpper(utils.GetMD5EncodeStr(res))
return res
}
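
calSign produces the value that request below places in the mcloud-sign header: the request body is URI-encoded, its characters are sorted and base64-encoded, and two MD5 digests are concatenated, hashed again, and upper-cased. A stand-alone sketch of the same steps with only the standard library, assuming utils.GetMD5EncodeStr returns the hex-encoded MD5 of its input:
package sketch

import (
	"crypto/md5"
	"encoding/base64"
	"encoding/hex"
	"net/url"
	"sort"
	"strings"
)

// md5Hex returns the lowercase hex MD5 of a string.
func md5Hex(s string) string {
	sum := md5.Sum([]byte(s))
	return hex.EncodeToString(sum[:])
}

// sign reproduces the calSign steps above.
func sign(body, ts, randStr string) string {
	b := url.QueryEscape(body)
	b = strings.NewReplacer("+", "%20", "%21", "!", "%27", "'", "%28", "(", "%29", ")", "%2A", "*").Replace(b)
	chars := strings.Split(b, "")
	sort.Strings(chars)
	b = base64.StdEncoding.EncodeToString([]byte(strings.Join(chars, "")))
	res := md5Hex(b) + md5Hex(ts+":"+randStr)
	return strings.ToUpper(md5Hex(res))
}

// The driver then sends: "mcloud-sign": ts + "," + randStr + "," + sign(body, ts, randStr).
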
func getTime(t string) time.Time {
stamp, _ := time.ParseInLocation("20060102150405", t, time.Local)
return stamp
}
func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
url := "https://yun.139.com" + pathname
req := base.RestyClient.R()
randStr := random.String(16)
ts := time.Now().Format("2006-01-02 15:04:05")
if callback != nil {
callback(req)
}
body, err := utils.Json.Marshal(req.Body)
if err != nil {
return nil, err
}
sign := calSign(string(body), ts, randStr)
svcType := "1"
if d.isFamily() {
svcType = "2"
}
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
"CMS-DEVICE": "default",
"Authorization": "Basic " + d.Authorization,
"mcloud-channel": "1000101",
"mcloud-client": "10701",
//"mcloud-route": "001",
"mcloud-sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
//"mcloud-skey":"",
"mcloud-version": "6.6.0",
"Origin": "https://yun.139.com",
"Referer": "https://yun.139.com/w/",
"x-DeviceInfo": "||9|6.6.0|chrome|95.0.4638.69|uwIy75obnsRPIwlJSd7D9GhUvFwG96ce||macos 10.15.2||zh-CN|||",
"x-huawei-channelSrc": "10000034",
"x-inner-ntwk": "2",
"x-m4c-caller": "PC",
"x-m4c-src": "10002",
"x-SvcType": svcType,
})
var e BaseResp
req.SetResult(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugln(res.String())
if !e.Success {
return nil, errors.New(e.Message)
}
if resp != nil {
err = utils.Json.Unmarshal(res.Body(), resp)
if err != nil {
return nil, err
}
}
return res.Body(), nil
}
func (d *Yun139) post(pathname string, data interface{}, resp interface{}) ([]byte, error) {
return d.request(pathname, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, resp)
}
func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
start := 0
limit := 100
files := make([]model.Obj, 0)
for {
data := base.Json{
"catalogID": catalogID,
"sortDirection": 1,
"startNumber": start + 1,
"endNumber": start + limit,
"filterType": 0,
"catalogSortType": 0,
"contentSortType": 0,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
var resp GetDiskResp
_, err := d.post("/orchestration/personalCloud/catalog/v1.0/getDisk", data, &resp)
if err != nil {
return nil, err
}
for _, catalog := range resp.Data.GetDiskResult.CatalogList {
f := model.Object{
ID: catalog.CatalogID,
Name: catalog.CatalogName,
Size: 0,
Modified: getTime(catalog.UpdateTime),
IsFolder: true,
}
files = append(files, &f)
}
for _, content := range resp.Data.GetDiskResult.ContentList {
f := model.ObjThumb{
Object: model.Object{
ID: content.ContentID,
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.UpdateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
}
files = append(files, &f)
}
if start+limit >= resp.Data.GetDiskResult.NodeCount {
break
}
start += limit
}
return files, nil
}
func (d *Yun139) newJson(data map[string]interface{}) base.Json {
common := map[string]interface{}{
"catalogType": 3,
"cloudID": d.CloudID,
"cloudType": 1,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
return utils.MergeMap(data, common)
}
func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
pageNum := 1
files := make([]model.Obj, 0)
for {
data := d.newJson(base.Json{
"catalogID": catalogID,
"contentSortType": 0,
"pageInfo": base.Json{
"pageNum": pageNum,
"pageSize": 100,
},
"sortDirection": 1,
})
var resp QueryContentListResp
_, err := d.post("/orchestration/familyCloud/content/v1.0/queryContentList", data, &resp)
if err != nil {
return nil, err
}
for _, catalog := range resp.Data.CloudCatalogList {
f := model.Object{
ID: catalog.CatalogID,
Name: catalog.CatalogName,
Size: 0,
IsFolder: true,
Modified: getTime(catalog.LastUpdateTime),
}
files = append(files, &f)
}
for _, content := range resp.Data.CloudContentList {
f := model.ObjThumb{
Object: model.Object{
ID: content.ContentID,
Name: content.ContentName,
Size: content.ContentSize,
Modified: getTime(content.LastUpdateTime),
},
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
//Thumbnail: content.BigthumbnailURL,
}
files = append(files, &f)
}
if 100*pageNum > resp.Data.TotalCount {
break
}
pageNum++
}
return files, nil
}
func (d *Yun139) getLink(contentId string) (string, error) {
data := base.Json{
"appName": "",
"contentID": contentId,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
res, err := d.post("/orchestration/personalCloud/uploadAndDownload/v1.0/downloadRequest",
data, nil)
if err != nil {
return "", err
}
return jsoniter.Get(res, "data", "downloadURL").ToString(), nil
}
func unicode(str string) string {
textQuoted := strconv.QuoteToASCII(str)
textUnquoted := textQuoted[1 : len(textQuoted)-1]
return textUnquoted
}


@@ -1,197 +0,0 @@
package _189
import (
"context"
"net/http"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Cloud189 struct {
model.Storage
Addition
client *resty.Client
rsa Rsa
sessionKey string
}
func (d *Cloud189) Config() driver.Config {
return config
}
func (d *Cloud189) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Cloud189) Init(ctx context.Context) error {
d.client = base.NewRestyClient().
SetHeader("Referer", "https://cloud.189.cn/")
return d.newLogin()
}
func (d *Cloud189) Drop(ctx context.Context) error {
return nil
}
func (d *Cloud189) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
return d.getFiles(dir.GetID())
}
func (d *Cloud189) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp DownResp
u := "https://cloud.189.cn/api/portal/getFileInfo.action"
_, err := d.request(u, http.MethodGet, func(req *resty.Request) {
req.SetQueryParam("fileId", file.GetID())
}, &resp)
if err != nil {
return nil, err
}
client := resty.NewWithClient(d.client.GetClient()).SetRedirectPolicy(
resty.RedirectPolicyFunc(func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
}))
res, err := client.R().SetHeader("User-Agent", base.UserAgent).Get("https:" + resp.FileDownloadUrl)
if err != nil {
return nil, err
}
log.Debugln(res.Status())
log.Debugln(res.String())
link := model.Link{}
log.Debugln("first url:", resp.FileDownloadUrl)
if res.StatusCode() == 302 {
link.URL = res.Header().Get("location")
log.Debugln("second url:", link.URL)
res2, err2 := client.R().Get(link.URL)
if err2 == nil && res2.StatusCode() == 302 {
link.URL = res2.Header().Get("location")
}
log.Debugln("third url:", link.URL)
} else {
link.URL = resp.FileDownloadUrl
}
link.URL = strings.Replace(link.URL, "http://", "https://", 1)
return &link, nil
}
func (d *Cloud189) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
form := map[string]string{
"parentFolderId": parentDir.GetID(),
"folderName": dirName,
}
_, err := d.request("https://cloud.189.cn/api/open/file/createFolder.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
isFolder := 0
if srcObj.IsDir() {
isFolder = 1
}
taskInfos := []base.Json{
{
"fileId": srcObj.GetID(),
"fileName": srcObj.GetName(),
"isFolder": isFolder,
},
}
taskInfosBytes, err := utils.Json.Marshal(taskInfos)
if err != nil {
return err
}
form := map[string]string{
"type": "MOVE",
"targetFolderId": dstDir.GetID(),
"taskInfos": string(taskInfosBytes),
}
_, err = d.request("https://cloud.189.cn/api/open/batch/createBatchTask.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
url := "https://cloud.189.cn/api/open/file/renameFile.action"
idKey := "fileId"
nameKey := "destFileName"
if srcObj.IsDir() {
url = "https://cloud.189.cn/api/open/file/renameFolder.action"
idKey = "folderId"
nameKey = "destFolderName"
}
form := map[string]string{
idKey: srcObj.GetID(),
nameKey: newName,
}
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
isFolder := 0
if srcObj.IsDir() {
isFolder = 1
}
taskInfos := []base.Json{
{
"fileId": srcObj.GetID(),
"fileName": srcObj.GetName(),
"isFolder": isFolder,
},
}
taskInfosBytes, err := utils.Json.Marshal(taskInfos)
if err != nil {
return err
}
form := map[string]string{
"type": "COPY",
"targetFolderId": dstDir.GetID(),
"taskInfos": string(taskInfosBytes),
}
_, err = d.request("https://cloud.189.cn/api/open/batch/createBatchTask.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Remove(ctx context.Context, obj model.Obj) error {
isFolder := 0
if obj.IsDir() {
isFolder = 1
}
taskInfos := []base.Json{
{
"fileId": obj.GetID(),
"fileName": obj.GetName(),
"isFolder": isFolder,
},
}
taskInfosBytes, err := utils.Json.Marshal(taskInfos)
if err != nil {
return err
}
form := map[string]string{
"type": "DELETE",
"targetFolderId": "",
"taskInfos": string(taskInfosBytes),
}
_, err = d.request("https://cloud.189.cn/api/open/batch/createBatchTask.action", http.MethodPost, func(req *resty.Request) {
req.SetFormData(form)
}, nil)
return err
}
func (d *Cloud189) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return d.newUpload(ctx, dstDir, stream, up)
}
var _ driver.Driver = (*Cloud189)(nil)


@@ -1,186 +0,0 @@
package _189
import (
"bytes"
"crypto/aes"
"crypto/hmac"
"crypto/md5"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/base64"
"encoding/hex"
"encoding/pem"
"fmt"
"net/url"
"regexp"
"strconv"
"strings"
myrand "github.com/alist-org/alist/v3/pkg/utils/random"
log "github.com/sirupsen/logrus"
)
func random() string {
return fmt.Sprintf("0.%17v", myrand.Rand.Int63n(100000000000000000))
}
func RsaEncode(origData []byte, j_rsakey string, hex bool) string {
publicKey := []byte("-----BEGIN PUBLIC KEY-----\n" + j_rsakey + "\n-----END PUBLIC KEY-----")
block, _ := pem.Decode(publicKey)
pubInterface, _ := x509.ParsePKIXPublicKey(block.Bytes)
pub := pubInterface.(*rsa.PublicKey)
b, err := rsa.EncryptPKCS1v15(rand.Reader, pub, origData)
if err != nil {
log.Errorf("err: %s", err.Error())
}
res := base64.StdEncoding.EncodeToString(b)
if hex {
return b64tohex(res)
}
return res
}
var b64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz"
func int2char(a int) string {
return strings.Split(BI_RM, "")[a]
}
func b64tohex(a string) string {
d := ""
e := 0
c := 0
for i := 0; i < len(a); i++ {
m := strings.Split(a, "")[i]
if m != "=" {
v := strings.Index(b64map, m)
if 0 == e {
e = 1
d += int2char(v >> 2)
c = 3 & v
} else if 1 == e {
e = 2
d += int2char(c<<2 | v>>4)
c = 15 & v
} else if 2 == e {
e = 3
d += int2char(c)
d += int2char(v >> 2)
c = 3 & v
} else {
e = 0
d += int2char(c<<2 | v>>4)
d += int2char(15 & v)
}
}
}
if e == 1 {
d += int2char(c << 2)
}
return d
}
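
b64tohex re-encodes a base64 string as the lowercase hex of its underlying bytes (RsaEncode uses it when hex is true), so its output should match decoding with encoding/base64 and re-encoding with encoding/hex. A quick property check, assuming it lives in the same package as the helper above:
package _189

import (
	"crypto/rand"
	"encoding/base64"
	"encoding/hex"
	"testing"
)

// TestB64ToHex checks b64tohex against the standard-library round trip.
func TestB64ToHex(t *testing.T) {
	buf := make([]byte, 37)
	if _, err := rand.Read(buf); err != nil {
		t.Fatal(err)
	}
	got := b64tohex(base64.StdEncoding.EncodeToString(buf))
	want := hex.EncodeToString(buf)
	if got != want {
		t.Fatalf("got %s, want %s", got, want)
	}
}
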
func qs(form map[string]string) string {
f := make(url.Values)
for k, v := range form {
f.Set(k, v)
}
return EncodeParam(f)
//strList := make([]string, 0)
//for k, v := range form {
// strList = append(strList, fmt.Sprintf("%s=%s", k, url.QueryEscape(v)))
//}
//return strings.Join(strList, "&")
}
func EncodeParam(v url.Values) string {
if v == nil {
return ""
}
var buf strings.Builder
keys := make([]string, 0, len(v))
for k := range v {
keys = append(keys, k)
}
for _, k := range keys {
vs := v[k]
for _, v := range vs {
if buf.Len() > 0 {
buf.WriteByte('&')
}
buf.WriteString(k)
buf.WriteByte('=')
//if k == "fileName" {
// buf.WriteString(encode(v))
//} else {
buf.WriteString(v)
//}
}
}
return buf.String()
}
func encode(str string) string {
//str = strings.ReplaceAll(str, "%", "%25")
//str = strings.ReplaceAll(str, "&", "%26")
//str = strings.ReplaceAll(str, "+", "%2B")
//return str
return url.QueryEscape(str)
}
func AesEncrypt(data, key []byte) []byte {
block, _ := aes.NewCipher(key)
if block == nil {
return []byte{}
}
data = PKCS7Padding(data, block.BlockSize())
decrypted := make([]byte, len(data))
size := block.BlockSize()
for bs, be := 0, size; bs < len(data); bs, be = bs+size, be+size {
block.Encrypt(decrypted[bs:be], data[bs:be])
}
return decrypted
}
func PKCS7Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
func hmacSha1(data string, secret string) string {
h := hmac.New(sha1.New, []byte(secret))
h.Write([]byte(data))
return hex.EncodeToString(h.Sum(nil))
}
func getMd5(data []byte) []byte {
h := md5.New()
h.Write(data)
return h.Sum(nil)
}
func decodeURIComponent(str string) string {
r, _ := url.PathUnescape(str)
//r = strings.ReplaceAll(r, " ", "+")
return r
}
func Random(v string) string {
reg := regexp.MustCompilePOSIX("[xy]")
data := reg.ReplaceAllFunc([]byte(v), func(msg []byte) []byte {
var i int64
t := int64(16 * myrand.Rand.Float32())
if msg[0] == 120 {
i = t
} else {
i = 3&t | 8
}
return []byte(strconv.FormatInt(i, 16))
})
return string(data)
}


@@ -1,126 +0,0 @@
package _189
import (
"errors"
"strconv"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)
type AppConf struct {
Data struct {
AccountType string `json:"accountType"`
AgreementCheck string `json:"agreementCheck"`
AppKey string `json:"appKey"`
ClientType int `json:"clientType"`
IsOauth2 bool `json:"isOauth2"`
LoginSort string `json:"loginSort"`
MailSuffix string `json:"mailSuffix"`
PageKey string `json:"pageKey"`
ParamId string `json:"paramId"`
RegReturnUrl string `json:"regReturnUrl"`
ReqId string `json:"reqId"`
ReturnUrl string `json:"returnUrl"`
ShowFeedback string `json:"showFeedback"`
ShowPwSaveName string `json:"showPwSaveName"`
ShowQrSaveName string `json:"showQrSaveName"`
ShowSmsSaveName string `json:"showSmsSaveName"`
Sso string `json:"sso"`
} `json:"data"`
Msg string `json:"msg"`
Result string `json:"result"`
}
type EncryptConf struct {
Result int `json:"result"`
Data struct {
UpSmsOn string `json:"upSmsOn"`
Pre string `json:"pre"`
PreDomain string `json:"preDomain"`
PubKey string `json:"pubKey"`
} `json:"data"`
}
func (d *Cloud189) newLogin() error {
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
res, err := d.client.R().Get(url)
if err != nil {
return err
}
// Is logged in
redirectURL := res.RawResponse.Request.URL
if redirectURL.String() == "https://cloud.189.cn/web/main" {
return nil
}
lt := redirectURL.Query().Get("lt")
reqId := redirectURL.Query().Get("reqId")
appId := redirectURL.Query().Get("appId")
headers := map[string]string{
"lt": lt,
"reqid": reqId,
"referer": redirectURL.String(),
"origin": "https://open.e.189.cn",
}
// get app Conf
var appConf AppConf
res, err = d.client.R().SetHeaders(headers).SetFormData(map[string]string{
"version": "2.0",
"appKey": appId,
}).SetResult(&appConf).Post("https://open.e.189.cn/api/logbox/oauth2/appConf.do")
if err != nil {
return err
}
log.Debugf("189 AppConf resp body: %s", res.String())
if appConf.Result != "0" {
return errors.New(appConf.Msg)
}
// get encrypt conf
var encryptConf EncryptConf
res, err = d.client.R().SetHeaders(headers).SetFormData(map[string]string{
"appId": appId,
}).Post("https://open.e.189.cn/api/logbox/config/encryptConf.do")
if err != nil {
return err
}
err = utils.Json.Unmarshal(res.Body(), &encryptConf)
if err != nil {
return err
}
log.Debugf("189 EncryptConf resp body: %s\n%+v", res.String(), encryptConf)
if encryptConf.Result != 0 {
return errors.New("get EncryptConf error:" + res.String())
}
// TODO: getUUID? needcaptcha
// login
loginData := map[string]string{
"version": "v2.0",
"apToken": "",
"appKey": appId,
"accountType": appConf.Data.AccountType,
"userName": encryptConf.Data.Pre + RsaEncode([]byte(d.Username), encryptConf.Data.PubKey, true),
"epd": encryptConf.Data.Pre + RsaEncode([]byte(d.Password), encryptConf.Data.PubKey, true),
"captchaType": "",
"validateCode": "",
"smsValidateCode": "",
"captchaToken": "",
"returnUrl": appConf.Data.ReturnUrl,
"mailSuffix": appConf.Data.MailSuffix,
"dynamicCheck": "FALSE",
"clientType": strconv.Itoa(appConf.Data.ClientType),
"cb_SaveName": "3",
"isOauth2": strconv.FormatBool(appConf.Data.IsOauth2),
"state": "",
"paramId": appConf.Data.ParamId,
}
res, err = d.client.R().SetHeaders(headers).SetFormData(loginData).Post("https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do")
if err != nil {
return err
}
log.Debugf("189 login resp body: %s", res.String())
loginResult := utils.Json.Get(res.Body(), "result").ToInt()
if loginResult != 0 {
return errors.New(utils.Json.Get(res.Body(), "msg").ToString())
}
return nil
}


@@ -1,26 +0,0 @@
package _189
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
Cookie string `json:"cookie" help:"Fill in the cookie if need captcha"`
driver.RootID
}
var config = driver.Config{
Name: "189Cloud",
LocalSort: true,
DefaultRoot: "-11",
Alert: `info|You can try to use 189PC driver if this driver does not work.`,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Cloud189{}
})
}


@@ -1,68 +0,0 @@
package _189
type LoginResp struct {
Msg string `json:"msg"`
Result int `json:"result"`
ToUrl string `json:"toUrl"`
}
type Error struct {
ErrorCode string `json:"errorCode"`
ErrorMsg string `json:"errorMsg"`
}
type File struct {
Id int64 `json:"id"`
LastOpTime string `json:"lastOpTime"`
Name string `json:"name"`
Size int64 `json:"size"`
Icon struct {
SmallUrl string `json:"smallUrl"`
//LargeUrl string `json:"largeUrl"`
} `json:"icon"`
Url string `json:"url"`
}
type Folder struct {
Id int64 `json:"id"`
LastOpTime string `json:"lastOpTime"`
Name string `json:"name"`
}
type Files struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
FileListAO struct {
Count int `json:"count"`
FileList []File `json:"fileList"`
FolderList []Folder `json:"folderList"`
} `json:"fileListAO"`
}
type UploadUrlsResp struct {
Code string `json:"code"`
UploadUrls map[string]Part `json:"uploadUrls"`
}
type Part struct {
RequestURL string `json:"requestURL"`
RequestHeader string `json:"requestHeader"`
}
type Rsa struct {
Expire int64 `json:"expire"`
PkId string `json:"pkId"`
PubKey string `json:"pubKey"`
}
type Down struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
FileDownloadUrl string `json:"fileDownloadUrl"`
}
type DownResp struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
FileDownloadUrl string `json:"downloadUrl"`
}


@@ -1,398 +0,0 @@
package _189
import (
"bytes"
"context"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"
"math"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
myrand "github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
// do others that are not defined in the Driver interface
//func (d *Cloud189) login() error {
// url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
// b := ""
// lt := ""
// ltText := regexp.MustCompile(`lt = "(.+?)"`)
// var res *resty.Response
// var err error
// for i := 0; i < 3; i++ {
// res, err = d.client.R().Get(url)
// if err != nil {
// return err
// }
// // already logged in
// if res.RawResponse.Request.URL.String() == "https://cloud.189.cn/web/main" {
// return nil
// }
// b = res.String()
// ltTextArr := ltText.FindStringSubmatch(b)
// if len(ltTextArr) > 0 {
// lt = ltTextArr[1]
// break
// } else {
// <-time.After(time.Second)
// }
// }
// if lt == "" {
// return fmt.Errorf("get page: %s \nstatus: %d \nrequest url: %s\nredirect url: %s",
// b, res.StatusCode(), res.RawResponse.Request.URL.String(), res.Header().Get("location"))
// }
// captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
// returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
// paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
// //reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
// jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
// vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
// vCodeRS := ""
// if vCodeID != "" {
// // need ValidateCode
// log.Debugf("try to identify verification codes")
// timeStamp := strconv.FormatInt(time.Now().UnixNano()/1e6, 10)
// u := "https://open.e.189.cn/api/logbox/oauth2/picCaptcha.do?token=" + vCodeID + timeStamp
// imgRes, err := d.client.R().SetHeaders(map[string]string{
// "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0",
// "Referer": "https://open.e.189.cn/api/logbox/oauth2/unifyAccountLogin.do",
// "Sec-Fetch-Dest": "image",
// "Sec-Fetch-Mode": "no-cors",
// "Sec-Fetch-Site": "same-origin",
// }).Get(u)
// if err != nil {
// return err
// }
// // Enter the verification code manually
// //err = message.GetMessenger().WaitSend(message.Message{
// // Type: "image",
// // Content: "data:image/png;base64," + base64.StdEncoding.EncodeToString(imgRes.Body()),
// //}, 10)
// //if err != nil {
// // return err
// //}
// //vCodeRS, err = message.GetMessenger().WaitReceive(30)
// // use ocr api
// vRes, err := base.RestyClient.R().SetMultipartField(
// "image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
// Post(setting.GetStr(conf.OcrApi))
// if err != nil {
// return err
// }
// if jsoniter.Get(vRes.Body(), "status").ToInt() != 200 {
// return errors.New("ocr error:" + jsoniter.Get(vRes.Body(), "msg").ToString())
// }
// vCodeRS = jsoniter.Get(vRes.Body(), "result").ToString()
// log.Debugln("code: ", vCodeRS)
// }
// userRsa := RsaEncode([]byte(d.Username), jRsakey, true)
// passwordRsa := RsaEncode([]byte(d.Password), jRsakey, true)
// url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
// var loginResp LoginResp
// res, err = d.client.R().
// SetHeaders(map[string]string{
// "lt": lt,
// "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
// "Referer": "https://open.e.189.cn/",
// "accept": "application/json;charset=UTF-8",
// }).SetFormData(map[string]string{
// "appKey": "cloud",
// "accountType": "01",
// "userName": "{RSA}" + userRsa,
// "password": "{RSA}" + passwordRsa,
// "validateCode": vCodeRS,
// "captchaToken": captchaToken,
// "returnUrl": returnUrl,
// "mailSuffix": "@pan.cn",
// "paramId": paramId,
// "clientType": "10010",
// "dynamicCheck": "FALSE",
// "cb_SaveName": "1",
// "isOauth2": "false",
// }).Post(url)
// if err != nil {
// return err
// }
// err = utils.Json.Unmarshal(res.Body(), &loginResp)
// if err != nil {
// log.Error(err.Error())
// return err
// }
// if loginResp.Result != 0 {
// return fmt.Errorf(loginResp.Msg)
// }
// _, err = d.client.R().Get(loginResp.ToUrl)
// return err
//}
func (d *Cloud189) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
var e Error
req := d.client.R().SetError(&e).
SetHeader("Accept", "application/json;charset=UTF-8").
SetQueryParams(map[string]string{
"noCache": random(),
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
//log.Debug(res.String())
if e.ErrorCode != "" {
if e.ErrorCode == "InvalidSessionKey" {
err = d.newLogin()
if err != nil {
return nil, err
}
return d.request(url, method, callback, resp)
}
}
if jsoniter.Get(res.Body(), "res_code").ToInt() != 0 {
err = errors.New(jsoniter.Get(res.Body(), "res_message").ToString())
}
return res.Body(), err
}
func (d *Cloud189) getFiles(fileId string) ([]model.Obj, error) {
res := make([]model.Obj, 0)
pageNum := 1
for {
var resp Files
_, err := d.request("https://cloud.189.cn/api/open/file/listFiles.action", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
//"noCache": random(),
"pageSize": "60",
"pageNum": strconv.Itoa(pageNum),
"mediaType": "0",
"folderId": fileId,
"iconOption": "5",
"orderBy": "lastOpTime", //account.OrderBy
"descending": "true", //account.OrderDirection
})
}, &resp)
if err != nil {
return nil, err
}
if resp.FileListAO.Count == 0 {
break
}
for _, folder := range resp.FileListAO.FolderList {
lastOpTime := utils.MustParseCNTime(folder.LastOpTime)
res = append(res, &model.Object{
ID: strconv.FormatInt(folder.Id, 10),
Name: folder.Name,
Modified: lastOpTime,
IsFolder: true,
})
}
for _, file := range resp.FileListAO.FileList {
lastOpTime := utils.MustParseCNTime(file.LastOpTime)
res = append(res, &model.ObjThumb{
Object: model.Object{
ID: strconv.FormatInt(file.Id, 10),
Name: file.Name,
Modified: lastOpTime,
Size: file.Size,
},
Thumbnail: model.Thumbnail{Thumbnail: file.Icon.SmallUrl},
})
}
pageNum++
}
return res, nil
}
func (d *Cloud189) oldUpload(dstDir model.Obj, file model.FileStreamer) error {
res, err := d.client.R().SetMultipartFormData(map[string]string{
"parentId": dstDir.GetID(),
"sessionKey": "??",
"opertype": "1",
"fname": file.GetName(),
}).SetMultipartField("Filedata", file.GetName(), file.GetMimetype(), file).Post("https://hb02.upload.cloud.189.cn/v1/DCIWebUploadAction")
if err != nil {
return err
}
if utils.Json.Get(res.Body(), "MD5").ToString() != "" {
return nil
}
log.Debug(res.String())
return errors.New(res.String())
}
func (d *Cloud189) getSessionKey() (string, error) {
resp, err := d.request("https://cloud.189.cn/v2/getUserBriefInfo.action", http.MethodGet, nil, nil)
if err != nil {
return "", err
}
sessionKey := utils.Json.Get(resp, "sessionKey").ToString()
return sessionKey, nil
}
func (d *Cloud189) getResKey() (string, string, error) {
now := time.Now().UnixMilli()
if d.rsa.Expire > now {
return d.rsa.PubKey, d.rsa.PkId, nil
}
resp, err := d.request("https://cloud.189.cn/api/security/generateRsaKey.action", http.MethodGet, nil, nil)
if err != nil {
return "", "", err
}
pubKey, pkId := utils.Json.Get(resp, "pubKey").ToString(), utils.Json.Get(resp, "pkId").ToString()
d.rsa.PubKey, d.rsa.PkId = pubKey, pkId
d.rsa.Expire = utils.Json.Get(resp, "expire").ToInt64()
return pubKey, pkId, nil
}
func (d *Cloud189) uploadRequest(uri string, form map[string]string, resp interface{}) ([]byte, error) {
c := strconv.FormatInt(time.Now().UnixMilli(), 10)
r := Random("xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx")
l := Random("xxxxxxxxxxxx4xxxyxxxxxxxxxxxxxxx")
l = l[0 : 16+int(16*myrand.Rand.Float32())]
e := qs(form)
data := AesEncrypt([]byte(e), []byte(l[0:16]))
h := hex.EncodeToString(data)
sessionKey := d.sessionKey
signature := hmacSha1(fmt.Sprintf("SessionKey=%s&Operate=GET&RequestURI=%s&Date=%s&params=%s", sessionKey, uri, c, h), l)
pubKey, pkId, err := d.getResKey()
if err != nil {
return nil, err
}
b := RsaEncode([]byte(l), pubKey, false)
req := d.client.R().SetHeaders(map[string]string{
"accept": "application/json;charset=UTF-8",
"SessionKey": sessionKey,
"Signature": signature,
"X-Request-Date": c,
"X-Request-ID": r,
"EncryptionText": b,
"PkId": pkId,
})
if resp != nil {
req.SetResult(resp)
}
res, err := req.Get("https://upload.cloud.189.cn" + uri + "?params=" + h)
if err != nil {
return nil, err
}
data = res.Body()
if utils.Json.Get(data, "code").ToString() != "SUCCESS" {
return nil, errors.New(uri + "---" + jsoniter.Get(data, "msg").ToString())
}
return data, nil
}
func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) error {
sessionKey, err := d.getSessionKey()
if err != nil {
return err
}
d.sessionKey = sessionKey
const DEFAULT int64 = 10485760
var count = int64(math.Ceil(float64(file.GetSize()) / float64(DEFAULT)))
res, err := d.uploadRequest("/person/initMultiUpload", map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": encode(file.GetName()),
"fileSize": strconv.FormatInt(file.GetSize(), 10),
"sliceSize": strconv.FormatInt(DEFAULT, 10),
"lazyCheck": "1",
}, nil)
if err != nil {
return err
}
uploadFileId := jsoniter.Get(res, "data", "uploadFileId").ToString()
//_, err = d.uploadRequest("/person/getUploadedPartsInfo", map[string]string{
// "uploadFileId": uploadFileId,
//}, nil)
var finish int64 = 0
var i int64
var byteSize int64
md5s := make([]string, 0)
md5Sum := md5.New()
for i = 1; i <= count; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
byteSize = file.GetSize() - finish
if DEFAULT < byteSize {
byteSize = DEFAULT
}
//log.Debugf("%d,%d", byteSize, finish)
byteData := make([]byte, byteSize)
n, err := io.ReadFull(file, byteData)
//log.Debug(err, n)
if err != nil {
return err
}
finish += int64(n)
md5Bytes := getMd5(byteData)
md5Hex := hex.EncodeToString(md5Bytes)
md5Base64 := base64.StdEncoding.EncodeToString(md5Bytes)
md5s = append(md5s, strings.ToUpper(md5Hex))
md5Sum.Write(byteData)
var resp UploadUrlsResp
res, err = d.uploadRequest("/person/getMultiUploadUrls", map[string]string{
"partInfo": fmt.Sprintf("%s-%s", strconv.FormatInt(i, 10), md5Base64),
"uploadFileId": uploadFileId,
}, &resp)
if err != nil {
return err
}
uploadData := resp.UploadUrls["partNumber_"+strconv.FormatInt(i, 10)]
log.Debugf("uploadData: %+v", uploadData)
requestURL := uploadData.RequestURL
uploadHeaders := strings.Split(decodeURIComponent(uploadData.RequestHeader), "&")
req, err := http.NewRequest(http.MethodPut, requestURL, bytes.NewReader(byteData))
if err != nil {
return err
}
req = req.WithContext(ctx)
for _, v := range uploadHeaders {
i := strings.Index(v, "=")
req.Header.Set(v[0:i], v[i+1:])
}
r, err := base.HttpClient.Do(req)
if err != nil {
return err
}
log.Debugf("%+v %+v", r, r.Request.Header)
_ = r.Body.Close()
up(int(i * 100 / count))
}
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
sliceMd5 := fileMd5
if file.GetSize() > DEFAULT {
sliceMd5 = utils.GetMD5EncodeStr(strings.Join(md5s, "\n"))
}
res, err = d.uploadRequest("/person/commitMultiUploadFile", map[string]string{
"uploadFileId": uploadFileId,
"fileMd5": fileMd5,
"sliceMd5": sliceMd5,
"lazyCheck": "1",
"opertype": "3",
}, nil)
return err
}
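
The commit step sends two digests: fileMd5 is the hex MD5 of the whole stream, and sliceMd5 equals fileMd5 for single-part uploads but is otherwise the MD5 of the newline-joined, upper-cased per-part MD5s. A stand-alone sketch of that checksum scheme (uploadChecksums is an illustrative name, not a driver function):
package sketch

import (
	"crypto/md5"
	"encoding/hex"
	"strings"
)

// md5HexOf returns the lowercase hex MD5 of a byte slice.
func md5HexOf(b []byte) string {
	sum := md5.Sum(b)
	return hex.EncodeToString(sum[:])
}

// uploadChecksums splits data into partSize chunks and returns (fileMd5, sliceMd5).
func uploadChecksums(data []byte, partSize int) (string, string) {
	fileMd5 := md5HexOf(data)
	if len(data) <= partSize {
		return fileMd5, fileMd5 // single part: both digests are the same
	}
	var partDigests []string
	for start := 0; start < len(data); start += partSize {
		end := start + partSize
		if end > len(data) {
			end = len(data)
		}
		partDigests = append(partDigests, strings.ToUpper(md5HexOf(data[start:end])))
	}
	sliceMd5 := md5HexOf([]byte(strings.Join(partDigests, "\n")))
	return fileMd5, sliceMd5
}
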


@@ -1,318 +0,0 @@
package _189pc
import (
"context"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type Cloud189PC struct {
model.Storage
Addition
identity string
client *resty.Client
loginParam *LoginParam
tokenInfo *AppSessionResp
uploadThread int
}
func (y *Cloud189PC) Config() driver.Config {
return config
}
func (y *Cloud189PC) GetAddition() driver.Additional {
return &y.Addition
}
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
// Handle personal-cloud vs family-cloud parameters
if y.isFamily() && y.RootFolderID == "-11" {
y.RootFolderID = ""
}
if !y.isFamily() && y.RootFolderID == "" {
y.RootFolderID = "-11"
y.FamilyID = ""
}
// Limit the number of upload threads
y.uploadThread, _ = strconv.Atoi(y.UploadThread)
if y.uploadThread < 1 || y.uploadThread > 32 {
y.uploadThread, y.UploadThread = 3, "3"
}
// Initialize the request client
if y.client == nil {
y.client = base.NewRestyClient().SetHeaders(map[string]string{
"Accept": "application/json;charset=UTF-8",
"Referer": WEB_URL,
})
}
// Avoid logging in repeatedly
identity := utils.GetMD5EncodeStr(y.Username + y.Password)
if !y.isLogin() || y.identity != identity {
y.identity = identity
if err = y.login(); err != nil {
return
}
}
// Resolve the family cloud ID
if y.isFamily() && y.FamilyID == "" {
if y.FamilyID, err = y.getFamilyID(); err != nil {
return err
}
}
return
}
func (y *Cloud189PC) Drop(ctx context.Context) error {
return nil
}
func (y *Cloud189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
return y.getFiles(ctx, dir.GetID())
}
func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var downloadUrl struct {
URL string `json:"fileDownloadUrl"`
}
fullUrl := API_URL
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/getFileDownloadUrl.action"
_, err := y.get(fullUrl, func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParam("fileId", file.GetID())
if y.isFamily() {
r.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
})
} else {
r.SetQueryParams(map[string]string{
"dt": "3",
"flag": "1",
})
}
}, &downloadUrl)
if err != nil {
return nil, err
}
// Follow the redirect to obtain the real download link
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&amp;", "&"), "http://", "https://", 1)
res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
downloadUrl.URL = res.Header().Get("location")
}
like := &model.Link{
URL: downloadUrl.URL,
Header: http.Header{
"User-Agent": []string{base.UserAgent},
},
}
/*
// Determine how long the link stays valid
strs := regexp.MustCompile(`(?i)expire[^=]*=([0-9]*)`).FindStringSubmatch(downloadUrl.URL)
if len(strs) == 2 {
timestamp, err := strconv.ParseInt(strs[1], 10, 64)
if err == nil {
expired := time.Duration(timestamp-time.Now().Unix()) * time.Second
like.Expiration = &expired
}
}
*/
return like, nil
}
func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
fullUrl := API_URL
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/createFolder.action"
var newFolder Cloud189Folder
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
req.SetQueryParams(map[string]string{
"folderName": dirName,
"relativePath": "",
})
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentDir.GetID(),
})
} else {
req.SetQueryParams(map[string]string{
"parentFolderId": parentDir.GetID(),
})
}
}, &newFolder)
if err != nil {
return nil, err
}
return &newFolder, nil
}
func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "MOVE",
"taskInfos": MustString(utils.Json.MarshalToString(
[]BatchTaskInfo{
{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
},
})),
"targetFolderId": dstDir.GetID(),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, &resp)
if err != nil {
return nil, err
}
if err = y.WaitBatchTask("MOVE", resp.TaskID, time.Millisecond*400); err != nil {
return nil, err
}
return srcObj, nil
}
func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
queryParam := make(map[string]string)
fullUrl := API_URL
method := http.MethodPost
if y.isFamily() {
fullUrl += "/family/file"
method = http.MethodGet
queryParam["familyId"] = y.FamilyID
}
var newObj model.Obj
switch f := srcObj.(type) {
case *Cloud189File:
fullUrl += "/renameFile.action"
queryParam["fileId"] = srcObj.GetID()
queryParam["destFileName"] = newName
newObj = &Cloud189File{Icon: f.Icon} // reuse the preview icon
case *Cloud189Folder:
fullUrl += "/renameFolder.action"
queryParam["folderId"] = srcObj.GetID()
queryParam["destFolderName"] = newName
newObj = &Cloud189Folder{}
default:
return nil, errs.NotSupport
}
_, err := y.request(fullUrl, method, func(req *resty.Request) {
req.SetContext(ctx).SetQueryParams(queryParam)
}, nil, newObj)
if err != nil {
return nil, err
}
return newObj, nil
}
func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "COPY",
"taskInfos": MustString(utils.Json.MarshalToString(
[]BatchTaskInfo{
{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
},
})),
"targetFolderId": dstDir.GetID(),
"targetFileName": dstDir.GetName(),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, &resp)
if err != nil {
return err
}
return y.WaitBatchTask("COPY", resp.TaskID, time.Second)
}
func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "DELETE",
"taskInfos": MustString(utils.Json.MarshalToString(
[]*BatchTaskInfo{
{
FileId: obj.GetID(),
FileName: obj.GetName(),
IsFolder: BoolToNumber(obj.IsDir()),
},
})),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, &resp)
if err != nil {
return err
}
// Batch tasks are rate limited; polling too fast makes deletion fail
return y.WaitBatchTask("DELETE", resp.TaskID, time.Millisecond*200)
}
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
switch y.UploadMethod {
case "old":
return y.OldUpload(ctx, dstDir, stream, up)
case "rapid":
return y.FastUpload(ctx, dstDir, stream, up)
case "stream":
if stream.GetSize() == 0 {
return y.FastUpload(ctx, dstDir, stream, up)
}
fallthrough
default:
return y.StreamUpload(ctx, dstDir, stream, up)
}
}

View File

@ -1,194 +0,0 @@
package _189pc
import (
"bytes"
"crypto/aes"
"crypto/hmac"
"crypto/rand"
"crypto/rsa"
"crypto/sha1"
"crypto/x509"
"encoding/hex"
"encoding/pem"
"encoding/xml"
"fmt"
"math"
"net/http"
"regexp"
"strings"
"time"
"github.com/alist-org/alist/v3/pkg/utils/random"
)
func clientSuffix() map[string]string {
rand := random.Rand
return map[string]string{
"clientType": PC,
"version": VERSION,
"channelId": CHANNEL_ID,
"rand": fmt.Sprintf("%d_%d", rand.Int63n(1e5), rand.Int63n(1e10)),
}
}
// HMAC signature (SignatureOfHmac) that also covers the params field
func signatureOfHmac(sessionSecret, sessionKey, operate, fullUrl, dateOfGmt, param string) string {
urlpath := regexp.MustCompile(`://[^/]+((/[^/\s?#]+)*)`).FindStringSubmatch(fullUrl)[1]
mac := hmac.New(sha1.New, []byte(sessionSecret))
data := fmt.Sprintf("SessionKey=%s&Operate=%s&RequestURI=%s&Date=%s", sessionKey, operate, urlpath, dateOfGmt)
if param != "" {
data += fmt.Sprintf("&params=%s", param)
}
mac.Write([]byte(data))
return strings.ToUpper(hex.EncodeToString(mac.Sum(nil)))
}
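// Illustrative sketch, not part of the original file (assumes a _test.go in this package
// with regexp and testing imported): a minimal check of the path-extraction regexp used
// by signatureOfHmac above, showing that only the URL path, without scheme, host or
// query, enters the signed string.
func TestSignaturePathExtraction(t *testing.T) {
	re := regexp.MustCompile(`://[^/]+((/[^/\s?#]+)*)`)
	got := re.FindStringSubmatch("https://api.cloud.189.cn/listFiles.action?folderId=-11")[1]
	if got != "/listFiles.action" {
		t.Fatalf("unexpected path: %q", got)
	}
}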
// RSA-encrypt the username and password
func RsaEncrypt(publicKey, origData string) string {
block, _ := pem.Decode([]byte(publicKey))
pubInterface, _ := x509.ParsePKIXPublicKey(block.Bytes)
data, _ := rsa.EncryptPKCS1v15(rand.Reader, pubInterface.(*rsa.PublicKey), []byte(origData))
return strings.ToUpper(hex.EncodeToString(data))
}
// AES-encrypt the params
func AesECBEncrypt(data, key string) string {
block, _ := aes.NewCipher([]byte(key))
paddingData := PKCS7Padding([]byte(data), block.BlockSize())
decrypted := make([]byte, len(paddingData))
size := block.BlockSize()
for src, dst := paddingData, decrypted; len(src) > 0; src, dst = src[size:], dst[size:] {
block.Encrypt(dst[:size], src[:size])
}
return strings.ToUpper(hex.EncodeToString(decrypted))
}
func PKCS7Padding(ciphertext []byte, blockSize int) []byte {
padding := blockSize - len(ciphertext)%blockSize
padtext := bytes.Repeat([]byte{byte(padding)}, padding)
return append(ciphertext, padtext...)
}
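// Illustrative sketch, not part of the original file (assumes a _test.go in this package
// with testing imported): PKCS7Padding pads up to the next multiple of the block size,
// with every pad byte holding the pad length; a 10-byte input and a 16-byte block gain
// six 0x06 bytes.
func TestPKCS7PaddingLength(t *testing.T) {
	padded := PKCS7Padding([]byte("0123456789"), 16)
	if len(padded) != 16 || padded[len(padded)-1] != 6 {
		t.Fatalf("unexpected padding: %v", padded)
	}
}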
// Current time formatted per the HTTP date specification
func getHttpDateStr() string {
return time.Now().UTC().Format(http.TimeFormat)
}
// Millisecond timestamp
func timestamp() int64 {
return time.Now().UTC().UnixNano() / 1e6
}
func MustParseTime(str string) *time.Time {
lastOpTime, _ := time.ParseInLocation("2006-01-02 15:04:05 -07", str+" +08", time.Local)
return &lastOpTime
}
type Time time.Time
func (t *Time) UnmarshalJSON(b []byte) error { return t.Unmarshal(b) }
func (t *Time) UnmarshalXML(e *xml.Decoder, ee xml.StartElement) error {
b, err := e.Token()
if err != nil {
return err
}
if b, ok := b.(xml.CharData); ok {
if err = t.Unmarshal(b); err != nil {
return err
}
}
return e.Skip()
}
func (t *Time) Unmarshal(b []byte) error {
bs := strings.Trim(string(b), "\"")
var v time.Time
var err error
for _, f := range []string{"2006-01-02 15:04:05 -07", "Jan 2, 2006 15:04:05 PM -07"} {
v, err = time.ParseInLocation(f, bs+" +08", time.Local)
if err == nil {
break
}
}
*t = Time(v)
return err
}
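// Illustrative sketch, not part of the original file (assumes a _test.go in this package
// with testing and time imported): Unmarshal appends " +08" before parsing, i.e. the
// driver treats the API's timestamps as UTC+8.
func TestTimeUnmarshalAssumesUTC8(t *testing.T) {
	var ts Time
	if err := ts.Unmarshal([]byte(`"2022-01-02 03:04:05"`)); err != nil {
		t.Fatal(err)
	}
	if time.Time(ts).UTC().Hour() != 19 { // 03:04 +08:00 is 19:04 UTC of the previous day
		t.Fatalf("unexpected time: %v", time.Time(ts))
	}
}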
type String string
func (t *String) UnmarshalJSON(b []byte) error { return t.Unmarshal(b) }
func (t *String) UnmarshalXML(e *xml.Decoder, ee xml.StartElement) error {
b, err := e.Token()
if err != nil {
return err
}
if b, ok := b.(xml.CharData); ok {
if err = t.Unmarshal(b); err != nil {
return err
}
}
return e.Skip()
}
func (s *String) Unmarshal(b []byte) error {
*s = String(bytes.Trim(b, "\""))
return nil
}
func toFamilyOrderBy(o string) string {
switch o {
case "filename":
return "1"
case "filesize":
return "2"
case "lastOpTime":
return "3"
default:
return "1"
}
}
func toDesc(o string) string {
switch o {
case "desc":
return "true"
case "asc":
fallthrough
default:
return "false"
}
}
func ParseHttpHeader(str string) map[string]string {
header := make(map[string]string)
for _, value := range strings.Split(str, "&") {
if k, v, found := strings.Cut(value, "="); found {
header[k] = v
}
}
return header
}
func MustString(str string, err error) string {
return str
}
func BoolToNumber(b bool) int {
if b {
return 1
}
return 0
}
// Calculate the slice size.
// The server limits the number of slices:
//   10 MiB / 20 MiB slices          -> at most 999 slices
//   50 / 60 / 70 / 80 / ... MiB     -> at most 1999 slices
func partSize(size int64) int64 {
const DEFAULT = 1024 * 1024 * 10 // 10MIB
if size > DEFAULT*2*999 {
return int64(math.Max(math.Ceil((float64(size)/1999) /*=单个切片大小*/ /float64(DEFAULT)) /*=倍率*/, 5) * DEFAULT)
}
if size > DEFAULT*999 {
return DEFAULT * 2 // 20MIB
}
return DEFAULT
}
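// Illustrative sketch, not part of the original file (assumes a _test.go in this package
// with testing imported): a few worked values for the slice-count limits described above —
// the 10 MiB default while it fits in 999 slices, then 20 MiB, then at least 50 MiB.
func TestPartSizeExamples(t *testing.T) {
	const MiB int64 = 1024 * 1024
	cases := []struct{ size, want int64 }{
		{1 * 1024 * MiB, 10 * MiB},  // ~103 slices at the 10 MiB default
		{15 * 1024 * MiB, 20 * MiB}, // would exceed 999 slices at 10 MiB
		{30 * 1024 * MiB, 50 * MiB}, // would exceed 1999 slices at 20 MiB; the 5x floor applies
	}
	for _, c := range cases {
		if got := partSize(c.size); got != c.want {
			t.Errorf("partSize(%d) = %d, want %d", c.size, got, c.want)
		}
	}
}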

View File

@ -1,32 +0,0 @@
package _189pc
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
VCode string `json:"validate_code"`
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"filename,filesize,lastOpTime" default:"filename"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
FamilyID string `json:"family_id"`
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
NoUseOcr bool `json:"no_use_ocr"`
}
var config = driver.Config{
Name: "189CloudPC",
DefaultRoot: "-11",
CheckStatus: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Cloud189PC{}
})
}

View File

@ -1,384 +0,0 @@
package _189pc
import (
"encoding/xml"
"fmt"
"github.com/alist-org/alist/v3/pkg/utils"
"sort"
"strings"
"time"
)
// The API returns errors in no fewer than four different formats
type RespErr struct {
ResCode any `json:"res_code"` // int or string
ResMessage string `json:"res_message"`
Error_ string `json:"error"`
XMLName xml.Name `xml:"error"`
Code string `json:"code" xml:"code"`
Message string `json:"message" xml:"message"`
Msg string `json:"msg"`
ErrorCode string `json:"errorCode"`
ErrorMsg string `json:"errorMsg"`
}
func (e *RespErr) HasError() bool {
switch v := e.ResCode.(type) {
case int, int64, int32:
return v != 0
case string:
return e.ResCode != ""
}
return (e.Code != "" && e.Code != "SUCCESS") || e.ErrorCode != "" || e.Error_ != ""
}
func (e *RespErr) Error() string {
switch v := e.ResCode.(type) {
case int, int64, int32:
if v != 0 {
return fmt.Sprintf("res_code: %d ,res_msg: %s", v, e.ResMessage)
}
case string:
if e.ResCode != "" {
return fmt.Sprintf("res_code: %s ,res_msg: %s", e.ResCode, e.ResMessage)
}
}
if e.Code != "" && e.Code != "SUCCESS" {
if e.Msg != "" {
return fmt.Sprintf("code: %s ,msg: %s", e.Code, e.Msg)
}
if e.Message != "" {
return fmt.Sprintf("code: %s ,msg: %s", e.Code, e.Message)
}
return "code: " + e.Code
}
if e.ErrorCode != "" {
return fmt.Sprintf("err_code: %s ,err_msg: %s", e.ErrorCode, e.ErrorMsg)
}
if e.Error_ != "" {
return fmt.Sprintf("error: %s ,message: %s", e.ErrorCode, e.Message)
}
return ""
}
// Parameters required for login
type LoginParam struct {
// RSA-encrypted username and password
RsaUsername string
RsaPassword string
// RSA public key
jRsaKey string
// Request header parameters
Lt string
ReqId string
// Form parameter
ParamId string
// Captcha
CaptchaToken string
}
// Login encryption configuration
type EncryptConfResp struct {
Result int `json:"result"`
Data struct {
UpSmsOn string `json:"upSmsOn"`
Pre string `json:"pre"`
PreDomain string `json:"preDomain"`
PubKey string `json:"pubKey"`
} `json:"data"`
}
type LoginResp struct {
Msg string `json:"msg"`
Result int `json:"result"`
ToUrl string `json:"toUrl"`
}
// Response returned when refreshing the session
type UserSessionResp struct {
ResCode int `json:"res_code"`
ResMessage string `json:"res_message"`
LoginName string `json:"loginName"`
KeepAlive int `json:"keepAlive"`
GetFileDiffSpan int `json:"getFileDiffSpan"`
GetUserInfoSpan int `json:"getUserInfoSpan"`
// Personal cloud
SessionKey string `json:"sessionKey"`
SessionSecret string `json:"sessionSecret"`
// Family cloud
FamilySessionKey string `json:"familySessionKey"`
FamilySessionSecret string `json:"familySessionSecret"`
}
// Login response
type AppSessionResp struct {
UserSessionResp
IsSaveName string `json:"isSaveName"`
// Token used to refresh the session
AccessToken string `json:"accessToken"`
// Token refresh
RefreshToken string `json:"refreshToken"`
}
// Family cloud accounts
type FamilyInfoListResp struct {
FamilyInfoResp []FamilyInfoResp `json:"familyInfoResp"`
}
type FamilyInfoResp struct {
Count int `json:"count"`
CreateTime string `json:"createTime"`
FamilyID int `json:"familyId"`
RemarkName string `json:"remarkName"`
Type int `json:"type"`
UseFlag int `json:"useFlag"`
UserRole int `json:"userRole"`
}
/* File section */
// File
type Cloud189File struct {
ID String `json:"id"`
Name string `json:"name"`
Size int64 `json:"size"`
Md5 string `json:"md5"`
LastOpTime Time `json:"lastOpTime"`
CreateDate Time `json:"createDate"`
Icon struct {
//iconOption 5
SmallUrl string `json:"smallUrl"`
LargeUrl string `json:"largeUrl"`
// iconOption 10
Max600 string `json:"max600"`
MediumURL string `json:"mediumUrl"`
} `json:"icon"`
// Orientation int64 `json:"orientation"`
// FileCata int64 `json:"fileCata"`
// MediaType int `json:"mediaType"`
// Rev string `json:"rev"`
// StarLabel int64 `json:"starLabel"`
}
func (c *Cloud189File) CreateTime() time.Time {
return time.Time(c.CreateDate)
}
func (c *Cloud189File) GetHash() utils.HashInfo {
return utils.NewHashInfo(utils.MD5, c.Md5)
}
func (c *Cloud189File) GetSize() int64 { return c.Size }
func (c *Cloud189File) GetName() string { return c.Name }
func (c *Cloud189File) ModTime() time.Time { return time.Time(c.LastOpTime) }
func (c *Cloud189File) IsDir() bool { return false }
func (c *Cloud189File) GetID() string { return string(c.ID) }
func (c *Cloud189File) GetPath() string { return "" }
func (c *Cloud189File) Thumb() string { return c.Icon.SmallUrl }
// Folder
type Cloud189Folder struct {
ID String `json:"id"`
ParentID int64 `json:"parentId"`
Name string `json:"name"`
LastOpTime Time `json:"lastOpTime"`
CreateDate Time `json:"createDate"`
// FileListSize int64 `json:"fileListSize"`
// FileCount int64 `json:"fileCount"`
// FileCata int64 `json:"fileCata"`
// Rev string `json:"rev"`
// StarLabel int64 `json:"starLabel"`
}
func (c *Cloud189Folder) CreateTime() time.Time {
return time.Time(c.CreateDate)
}
func (c *Cloud189Folder) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (c *Cloud189Folder) GetSize() int64 { return 0 }
func (c *Cloud189Folder) GetName() string { return c.Name }
func (c *Cloud189Folder) ModTime() time.Time { return time.Time(c.LastOpTime) }
func (c *Cloud189Folder) IsDir() bool { return true }
func (c *Cloud189Folder) GetID() string { return string(c.ID) }
func (c *Cloud189Folder) GetPath() string { return "" }
type Cloud189FilesResp struct {
//ResCode int `json:"res_code"`
//ResMessage string `json:"res_message"`
FileListAO struct {
Count int `json:"count"`
FileList []Cloud189File `json:"fileList"`
FolderList []Cloud189Folder `json:"folderList"`
} `json:"fileListAO"`
}
// BatchTaskInfo describes one item of a batch task
type BatchTaskInfo struct {
// FileId file ID
FileId string `json:"fileId"`
// FileName file name
FileName string `json:"fileName"`
// IsFolder whether this is a folder: 0 - no, 1 - yes
IsFolder int `json:"isFolder"`
// SrcParentId ID of the file's parent directory
//SrcParentId string `json:"srcParentId"`
}
/* Upload section */
type InitMultiUploadResp struct {
//Code string `json:"code"`
Data struct {
UploadType int `json:"uploadType"`
UploadHost string `json:"uploadHost"`
UploadFileID string `json:"uploadFileId"`
FileDataExists int `json:"fileDataExists"`
} `json:"data"`
}
type UploadUrlsResp struct {
Code string `json:"code"`
Data map[string]UploadUrlsData `json:"uploadUrls"`
}
type UploadUrlsData struct {
RequestURL string `json:"requestURL"`
RequestHeader string `json:"requestHeader"`
}
type UploadUrlInfo struct {
PartNumber int
Headers map[string]string
UploadUrlsData
}
type UploadProgress struct {
UploadInfo InitMultiUploadResp
UploadParts []string
}
/* Second (legacy) upload method */
type CreateUploadFileResp struct {
// Upload request ID
UploadFileId int64 `json:"uploadFileId"`
// URL to upload the file data to
FileUploadUrl string `json:"fileUploadUrl"`
// URL to confirm (commit) the upload once finished
FileCommitUrl string `json:"fileCommitUrl"`
// Whether the file already exists in the cloud drive: 0 - no, 1 - yes
FileDataExists int `json:"fileDataExists"`
}
type GetUploadFileStatusResp struct {
CreateUploadFileResp
// Size already uploaded
DataSize int64 `json:"dataSize"`
Size int64 `json:"size"`
}
func (r *GetUploadFileStatusResp) GetSize() int64 {
return r.DataSize + r.Size
}
type CommitMultiUploadFileResp struct {
File struct {
UserFileID String `json:"userFileId"`
FileName string `json:"fileName"`
FileSize int64 `json:"fileSize"`
FileMd5 string `json:"fileMd5"`
CreateDate Time `json:"createDate"`
} `json:"file"`
}
func (f *CommitMultiUploadFileResp) toFile() *Cloud189File {
return &Cloud189File{
ID: f.File.UserFileID,
Name: f.File.FileName,
Size: f.File.FileSize,
Md5: f.File.FileMd5,
LastOpTime: f.File.CreateDate,
CreateDate: f.File.CreateDate,
}
}
type OldCommitUploadFileResp struct {
XMLName xml.Name `xml:"file"`
ID String `xml:"id"`
Name string `xml:"name"`
Size int64 `xml:"size"`
Md5 string `xml:"md5"`
CreateDate Time `xml:"createDate"`
}
func (f *OldCommitUploadFileResp) toFile() *Cloud189File {
return &Cloud189File{
ID: f.ID,
Name: f.Name,
Size: f.Size,
Md5: f.Md5,
CreateDate: f.CreateDate,
LastOpTime: f.CreateDate,
}
}
type CreateBatchTaskResp struct {
TaskID string `json:"taskId"`
}
type BatchTaskStateResp struct {
FailedCount int `json:"failedCount"`
Process int `json:"process"`
SkipCount int `json:"skipCount"`
SubTaskCount int `json:"subTaskCount"`
SuccessedCount int `json:"successedCount"`
SuccessedFileIDList []int64 `json:"successedFileIdList"`
TaskID string `json:"taskId"`
TaskStatus int `json:"taskStatus"` //1 initializing, 2 conflict with target, 3 running, 4 finished
}
/* Encrypted query parameters */
type Params map[string]string
func (p Params) Set(k, v string) {
p[k] = v
}
func (p Params) Encode() string {
if p == nil {
return ""
}
var buf strings.Builder
keys := make([]string, 0, len(p))
for k := range p {
keys = append(keys, k)
}
sort.Strings(keys)
for i := range keys {
if buf.Len() > 0 {
buf.WriteByte('&')
}
buf.WriteString(keys[i])
buf.WriteByte('=')
buf.WriteString(p[keys[i]])
}
return buf.String()
}
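// Illustrative sketch, not part of the original file (assumes a _test.go in this package
// with testing imported): Encode emits keys in sorted order and performs no URL escaping,
// which keeps the string stable before it is fed to AesECBEncrypt.
func TestParamsEncodeOrder(t *testing.T) {
	p := Params{"sliceSize": "10485760", "fileName": "a.txt"}
	if got := p.Encode(); got != "fileName=a.txt&sliceSize=10485760" {
		t.Fatalf("unexpected encoding: %q", got)
	}
}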

View File

@ -1,941 +0,0 @@
package _189pc
import (
"bytes"
"context"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"encoding/xml"
"fmt"
"io"
"math"
"net/http"
"net/http/cookiejar"
"net/url"
"regexp"
"sort"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/internal/setting"
"github.com/alist-org/alist/v3/pkg/errgroup"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/avast/retry-go"
"github.com/go-resty/resty/v2"
"github.com/google/uuid"
jsoniter "github.com/json-iterator/go"
"github.com/pkg/errors"
)
const (
ACCOUNT_TYPE = "02"
APP_ID = "8025431004"
CLIENT_TYPE = "10020"
VERSION = "6.2"
WEB_URL = "https://cloud.189.cn"
AUTH_URL = "https://open.e.189.cn"
API_URL = "https://api.cloud.189.cn"
UPLOAD_URL = "https://upload.cloud.189.cn"
RETURN_URL = "https://m.cloud.189.cn/zhuanti/2020/loginErrorPc/index.html"
PC = "TELEPC"
MAC = "TELEMAC"
CHANNEL_ID = "web_cloud.189.cn"
)
func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]string {
dateOfGmt := getHttpDateStr()
sessionKey := y.tokenInfo.SessionKey
sessionSecret := y.tokenInfo.SessionSecret
if y.isFamily() {
sessionKey = y.tokenInfo.FamilySessionKey
sessionSecret = y.tokenInfo.FamilySessionSecret
}
header := map[string]string{
"Date": dateOfGmt,
"SessionKey": sessionKey,
"X-Request-ID": uuid.NewString(),
"Signature": signatureOfHmac(sessionSecret, sessionKey, method, url, dateOfGmt, params),
}
return header
}
func (y *Cloud189PC) EncryptParams(params Params) string {
sessionSecret := y.tokenInfo.SessionSecret
if y.isFamily() {
sessionSecret = y.tokenInfo.FamilySessionSecret
}
if params != nil {
return AesECBEncrypt(params.Encode(), sessionSecret[:16])
}
return ""
}
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
req := y.client.R().SetQueryParams(clientSuffix())
// Set params
paramsData := y.EncryptParams(params)
if paramsData != "" {
req.SetQueryParam("params", paramsData)
}
// Signature
req.SetHeaders(y.SignatureHeader(url, method, paramsData))
var erron RespErr
req.SetError(&erron)
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
if strings.Contains(res.String(), "userSessionBO is null") {
if err = y.refreshSession(); err != nil {
return nil, err
}
return y.request(url, method, callback, params, resp)
}
// Handle errors
if erron.HasError() {
if erron.ErrorCode == "InvalidSessionKey" {
if err = y.refreshSession(); err != nil {
return nil, err
}
return y.request(url, method, callback, params, resp)
}
return nil, &erron
}
return res.Body(), nil
}
func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return y.request(url, http.MethodGet, callback, nil, resp)
}
func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return y.request(url, http.MethodPost, callback, nil, resp)
}
func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader) ([]byte, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
if err != nil {
return nil, err
}
query := req.URL.Query()
for key, value := range clientSuffix() {
query.Add(key, value)
}
req.URL.RawQuery = query.Encode()
for key, value := range headers {
req.Header.Add(key, value)
}
if sign {
for key, value := range y.SignatureHeader(url, http.MethodPut, "") {
req.Header.Add(key, value)
}
}
resp, err := base.HttpClient.Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
var erron RespErr
jsoniter.Unmarshal(body, &erron)
xml.Unmarshal(body, &erron)
if erron.HasError() {
return nil, &erron
}
if resp.StatusCode != http.StatusOK {
return nil, errors.Errorf("put failed, err: %s", string(body))
}
return body, nil
}
func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
fullUrl := API_URL
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/listFiles.action"
res := make([]model.Obj, 0, 130)
for pageNum := 1; ; pageNum++ {
var resp Cloud189FilesResp
_, err := y.get(fullUrl, func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParams(map[string]string{
"folderId": fileId,
"fileType": "0",
"mediaAttr": "0",
"iconOption": "5",
"pageNum": fmt.Sprint(pageNum),
"pageSize": "130",
})
if y.isFamily() {
r.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"orderBy": toFamilyOrderBy(y.OrderBy),
"descending": toDesc(y.OrderDirection),
})
} else {
r.SetQueryParams(map[string]string{
"recursive": "0",
"orderBy": y.OrderBy,
"descending": toDesc(y.OrderDirection),
})
}
}, &resp)
if err != nil {
return nil, err
}
// All pages fetched, stop
if resp.FileListAO.Count == 0 {
break
}
for i := 0; i < len(resp.FileListAO.FolderList); i++ {
res = append(res, &resp.FileListAO.FolderList[i])
}
for i := 0; i < len(resp.FileListAO.FileList); i++ {
res = append(res, &resp.FileListAO.FileList[i])
}
}
return res, nil
}
func (y *Cloud189PC) login() (err error) {
// Initialize the parameters required for login
if y.loginParam == nil {
if err = y.initLoginParam(); err != nil {
// A captcha requirement is also surfaced through the returned error
return err
}
}
defer func() {
// Discard the captcha
y.VCode = ""
// Discard the login parameters
y.loginParam = nil
// On error, reload the login parameters (refreshes the captcha)
if err != nil && y.NoUseOcr {
if err1 := y.initLoginParam(); err1 != nil {
err = fmt.Errorf("err1: %s \nerr2: %s", err, err1)
}
}
}()
param := y.loginParam
var loginresp LoginResp
_, err = y.client.R().
ForceContentType("application/json;charset=UTF-8").SetResult(&loginresp).
SetHeaders(map[string]string{
"REQID": param.ReqId,
"lt": param.Lt,
}).
SetFormData(map[string]string{
"appKey": APP_ID,
"accountType": ACCOUNT_TYPE,
"userName": param.RsaUsername,
"password": param.RsaPassword,
"validateCode": y.VCode,
"captchaToken": param.CaptchaToken,
"returnUrl": RETURN_URL,
// "mailSuffix": "@189.cn",
"dynamicCheck": "FALSE",
"clientType": CLIENT_TYPE,
"cb_SaveName": "1",
"isOauth2": "false",
"state": "",
"paramId": param.ParamId,
}).
Post(AUTH_URL + "/api/logbox/oauth2/loginSubmit.do")
if err != nil {
return err
}
if loginresp.ToUrl == "" {
return fmt.Errorf("login failed,No toUrl obtained, msg: %s", loginresp.Msg)
}
// Obtain the session
var erron RespErr
var tokenInfo AppSessionResp
_, err = y.client.R().
SetResult(&tokenInfo).SetError(&erron).
SetQueryParams(clientSuffix()).
SetQueryParam("redirectURL", url.QueryEscape(loginresp.ToUrl)).
Post(API_URL + "/getSessionForPC.action")
if err != nil {
return
}
if erron.HasError() {
return &erron
}
if tokenInfo.ResCode != 0 {
err = fmt.Errorf(tokenInfo.ResMessage)
return
}
y.tokenInfo = &tokenInfo
return
}
/* Initialize the parameters required for login.
* If a captcha is required, it is returned via the error.
*/
func (y *Cloud189PC) initLoginParam() error {
// Clear cookies
jar, _ := cookiejar.New(nil)
y.client.SetCookieJar(jar)
res, err := y.client.R().
SetQueryParams(map[string]string{
"appId": APP_ID,
"clientType": CLIENT_TYPE,
"returnURL": RETURN_URL,
"timeStamp": fmt.Sprint(timestamp()),
}).
Get(WEB_URL + "/api/portal/unifyLoginForPC.action")
if err != nil {
return err
}
param := LoginParam{
CaptchaToken: regexp.MustCompile(`'captchaToken' value='(.+?)'`).FindStringSubmatch(res.String())[1],
Lt: regexp.MustCompile(`lt = "(.+?)"`).FindStringSubmatch(res.String())[1],
ParamId: regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(res.String())[1],
ReqId: regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(res.String())[1],
// jRsaKey: regexp.MustCompile(`"j_rsaKey" value="(.+?)"`).FindStringSubmatch(res.String())[1],
}
// Fetch the RSA public key
var encryptConf EncryptConfResp
_, err = y.client.R().
ForceContentType("application/json;charset=UTF-8").SetResult(&encryptConf).
SetFormData(map[string]string{"appId": APP_ID}).
Post(AUTH_URL + "/api/logbox/config/encryptConf.do")
if err != nil {
return err
}
param.jRsaKey = fmt.Sprintf("-----BEGIN PUBLIC KEY-----\n%s\n-----END PUBLIC KEY-----", encryptConf.Data.PubKey)
param.RsaUsername = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Username)
param.RsaPassword = encryptConf.Data.Pre + RsaEncrypt(param.jRsaKey, y.Password)
y.loginParam = &param
// Check whether a captcha is required
resp, err := y.client.R().
SetHeader("REQID", param.ReqId).
SetFormData(map[string]string{
"appKey": APP_ID,
"accountType": ACCOUNT_TYPE,
"userName": param.RsaUsername,
}).Post(AUTH_URL + "/api/logbox/oauth2/needcaptcha.do")
if err != nil {
return err
}
if resp.String() == "0" {
return nil
}
// Fetch the captcha image
imgRes, err := y.client.R().
SetQueryParams(map[string]string{
"token": param.CaptchaToken,
"REQID": param.ReqId,
"rnd": fmt.Sprint(timestamp()),
}).
Get(AUTH_URL + "/api/logbox/oauth2/picCaptcha.do")
if err != nil {
return fmt.Errorf("failed to obtain verification code")
}
if imgRes.Size() > 20 {
if setting.GetStr(conf.OcrApi) != "" && !y.NoUseOcr {
vRes, err := base.RestyClient.R().
SetMultipartField("image", "validateCode.png", "image/png", bytes.NewReader(imgRes.Body())).
Post(setting.GetStr(conf.OcrApi))
if err != nil {
return err
}
if jsoniter.Get(vRes.Body(), "status").ToInt() == 200 {
y.VCode = jsoniter.Get(vRes.Body(), "result").ToString()
return nil
}
}
// Return the captcha image to the frontend
return fmt.Errorf(`need img validate code: <img src="data:image/png;base64,%s"/>`, base64.StdEncoding.EncodeToString(imgRes.Body()))
}
return nil
}
// Refresh the session
func (y *Cloud189PC) refreshSession() (err error) {
var erron RespErr
var userSessionResp UserSessionResp
_, err = y.client.R().
SetResult(&userSessionResp).SetError(&erron).
SetQueryParams(clientSuffix()).
SetQueryParams(map[string]string{
"appId": APP_ID,
"accessToken": y.tokenInfo.AccessToken,
}).
SetHeader("X-Request-ID", uuid.NewString()).
Get(API_URL + "/getSessionForPC.action")
if err != nil {
return err
}
// Errors here would break normal access, so take this storage offline
defer func() {
if err != nil {
y.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
op.MustSaveDriverStorage(y)
}
}()
if erron.HasError() {
if erron.ResCode == "UserInvalidOpenToken" {
if err = y.login(); err != nil {
return err
}
}
return &erron
}
y.tokenInfo.UserSessionResp = userSessionResp
return
}
// Stream (normal) upload.
// Cannot upload files whose size is 0.
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
lastPartSize := file.GetSize() % sliceSize
if file.GetSize() > 0 && lastPartSize == 0 {
lastPartSize = sliceSize
}
params := Params{
"parentFolderId": dstDir.GetID(),
"fileName": url.QueryEscape(file.GetName()),
"fileSize": fmt.Sprint(file.GetSize()),
"sliceSize": fmt.Sprint(sliceSize),
"lazyCheck": "1",
}
fullUrl := UPLOAD_URL
if y.isFamily() {
params.Set("familyId", y.FamilyID)
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
fullUrl += "/person"
}
// Initialize the upload
var initMultiUpload InitMultiUploadResp
_, err := y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx)
}, params, &initMultiUpload)
if err != nil {
return nil, err
}
threadG, upCtx := errgroup.NewGroupWithContext(ctx, y.uploadThread,
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
for i := 1; i <= count; i++ {
if utils.IsCanceled(upCtx) {
break
}
byteData := make([]byte, sliceSize)
if i == count {
byteData = byteData[:lastPartSize]
}
// Read the chunk
silceMd5.Reset()
if _, err := io.ReadFull(io.TeeReader(file, io.MultiWriter(fileMd5, silceMd5)), byteData); err != io.EOF && err != nil {
return nil, err
}
// Compute the chunk MD5 and encode it as hex and base64
md5Bytes := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Bytes)))
partInfo := fmt.Sprintf("%d-%s", i, base64.StdEncoding.EncodeToString(md5Bytes))
threadG.Go(func(ctx context.Context) error {
uploadUrls, err := y.GetMultiUploadUrls(ctx, initMultiUpload.Data.UploadFileID, partInfo)
if err != nil {
return err
}
// step.4 upload the slice
uploadUrl := uploadUrls[0]
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData))
if err != nil {
return err
}
up(int(threadG.Success()) * 100 / count)
return nil
})
}
if err = threadG.Wait(); err != nil {
return nil, err
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if file.GetSize() > sliceSize {
sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
}
// Commit the upload
var resp CommitMultiUploadFileResp
_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"uploadFileId": initMultiUpload.Data.UploadFileID,
"fileMd5": fileMd5Hex,
"sliceMd5": sliceMd5Hex,
"lazyCheck": "1",
"isLog": "0",
"opertype": "3",
}, &resp)
if err != nil {
return nil, err
}
return resp.toFile(), nil
}
// Rapid (hash-based) upload
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// The full-file MD5 is required, so the source must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
lastSliceSize := file.GetSize() % sliceSize
if file.GetSize() > 0 && lastSliceSize == 0 {
lastSliceSize = sliceSize
}
// step.1 compute the required hashes up front
byteSize := sliceSize
fileMd5 := md5.New()
silceMd5 := md5.New()
silceMd5Hexs := make([]string, 0, count)
partInfos := make([]string, 0, count)
for i := 1; i <= count; i++ {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
if i == count {
byteSize = lastSliceSize
}
silceMd5.Reset()
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
return nil, err
}
md5Byte := silceMd5.Sum(nil)
silceMd5Hexs = append(silceMd5Hexs, strings.ToUpper(hex.EncodeToString(md5Byte)))
partInfos = append(partInfos, fmt.Sprint(i, "-", base64.StdEncoding.EncodeToString(md5Byte)))
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
sliceMd5Hex := fileMd5Hex
if file.GetSize() > sliceSize {
sliceMd5Hex = strings.ToUpper(utils.GetMD5EncodeStr(strings.Join(silceMd5Hexs, "\n")))
}
fullUrl := UPLOAD_URL
if y.isFamily() {
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
fullUrl += "/person"
}
// Try to resume previous progress
uploadProgress, ok := base.GetUploadProgress[*UploadProgress](y, y.tokenInfo.SessionKey, fileMd5Hex)
if !ok {
// step.2 pre-upload (init)
params := Params{
"parentFolderId": dstDir.GetID(),
"fileName": url.QueryEscape(file.GetName()),
"fileSize": fmt.Sprint(file.GetSize()),
"fileMd5": fileMd5Hex,
"sliceSize": fmt.Sprint(sliceSize),
"sliceMd5": sliceMd5Hex,
}
if y.isFamily() {
params.Set("familyId", y.FamilyID)
}
var uploadInfo InitMultiUploadResp
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx)
}, params, &uploadInfo)
if err != nil {
return nil, err
}
uploadProgress = &UploadProgress{
UploadInfo: uploadInfo,
UploadParts: partInfos,
}
}
uploadInfo := uploadProgress.UploadInfo.Data
// The file does not exist in the cloud drive yet, start uploading
if uploadInfo.FileDataExists != 1 {
threadG, upCtx := errgroup.NewGroupWithContext(ctx, y.uploadThread,
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
for i, uploadPart := range uploadProgress.UploadParts {
if utils.IsCanceled(upCtx) {
break
}
i, uploadPart := i, uploadPart
threadG.Go(func(ctx context.Context) error {
// step.3 get the upload URL
uploadUrls, err := y.GetMultiUploadUrls(ctx, uploadInfo.UploadFileID, uploadPart)
if err != nil {
return err
}
uploadUrl := uploadUrls[0]
byteSize, offset := sliceSize, int64(uploadUrl.PartNumber-1)*sliceSize
if uploadUrl.PartNumber == count {
byteSize = lastSliceSize
}
// step.4 upload the slice
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize))
if err != nil {
return err
}
up(int(threadG.Success()) * 100 / len(uploadProgress.UploadParts))
uploadProgress.UploadParts[i] = ""
return nil
})
}
if err = threadG.Wait(); err != nil {
if errors.Is(err, context.Canceled) {
uploadProgress.UploadParts = utils.SliceFilter(uploadProgress.UploadParts, func(s string) bool { return s != "" })
base.SaveUploadProgress(y, uploadProgress, y.tokenInfo.SessionKey, fileMd5Hex)
}
return nil, err
}
}
// step.5 commit
var resp CommitMultiUploadFileResp
_, err = y.request(fullUrl+"/commitMultiUploadFile", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"uploadFileId": uploadInfo.UploadFileID,
"isLog": "0",
"opertype": "3",
}, &resp)
if err != nil {
return nil, err
}
return resp.toFile(), nil
}
// Get upload slice (part) information.
// The HTTP body size is limited; requesting too many parts at once fails.
func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
fullUrl := UPLOAD_URL
if y.isFamily() {
fullUrl += "/family"
} else {
fullUrl += "/person"
}
var uploadUrlsResp UploadUrlsResp
_, err := y.request(fullUrl+"/getMultiUploadUrls", http.MethodGet,
func(req *resty.Request) {
req.SetContext(ctx)
}, Params{
"uploadFileId": uploadFileId,
"partInfo": strings.Join(partInfo, ","),
}, &uploadUrlsResp)
if err != nil {
return nil, err
}
uploadUrls := uploadUrlsResp.Data
if len(uploadUrls) != len(partInfo) {
return nil, fmt.Errorf("uploadUrls get error, due to get length %d, real length %d", len(partInfo), len(uploadUrls))
}
uploadUrlInfos := make([]UploadUrlInfo, 0, len(uploadUrls))
for k, uploadUrl := range uploadUrls {
partNumber, err := strconv.Atoi(strings.TrimPrefix(k, "partNumber_"))
if err != nil {
return nil, err
}
uploadUrlInfos = append(uploadUrlInfos, UploadUrlInfo{
PartNumber: partNumber,
Headers: ParseHttpHeader(uploadUrl.RequestHeader),
UploadUrlsData: uploadUrl,
})
}
sort.Slice(uploadUrlInfos, func(i, j int) bool {
return uploadUrlInfos[i].PartNumber < uploadUrlInfos[j].PartNumber
})
return uploadUrlInfos, nil
}
// Legacy upload; the family cloud does not support overwriting
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// The full-file MD5 is required, so the source must support io.Seek
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
// Compute the MD5
fileMd5 := md5.New()
if _, err := io.Copy(fileMd5, tempFile); err != nil {
return nil, err
}
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
return nil, err
}
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
// Create the upload session
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err = y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"fileMd5": fileMd5Hex,
"fileName": file.GetName(),
"fileSize": fmt.Sprint(file.GetSize()),
"parentId": dstDir.GetID(),
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": dstDir.GetID(),
"fileName": file.GetName(),
"size": fmt.Sprint(file.GetSize()),
"md5": fileMd5Hex,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
// "baseFileId": "",
// "lastWrite":"",
// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
// "fileExt": "",
})
}
}, &uploadInfo)
if err != nil {
return nil, err
}
// The file does not exist in the cloud drive yet, start uploading
status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
for status.Size < file.GetSize() && status.FileDataExists != 1 {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
header := map[string]string{
"ResumePolicy": "1",
"Expect": "100-continue",
}
if y.isFamily() {
header["FamilyId"] = fmt.Sprint(y.FamilyID)
header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
} else {
header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
}
_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile))
if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
return nil, err
}
// Query the resume (checkpoint) status
fullUrl := API_URL + "/getUploadFileStatus.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/getFamilyFileStatus.action"
}
_, err = y.get(fullUrl, func(req *resty.Request) {
req.SetContext(ctx).SetQueryParams(map[string]string{
"uploadFileId": fmt.Sprint(status.UploadFileId),
"resumePolicy": "1",
})
if y.isFamily() {
req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
}
}, &status)
if err != nil {
return nil, err
}
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
return nil, err
}
up(int(status.Size * 100 / file.GetSize()))
}
// Commit
var resp OldCommitUploadFileResp
_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
"UploadFileId": fmt.Sprint(status.UploadFileId),
"FamilyId": fmt.Sprint(y.FamilyID),
})
} else {
req.SetFormData(map[string]string{
"opertype": "3",
"resumePolicy": "1",
"uploadFileId": fmt.Sprint(status.UploadFileId),
"isLog": "0",
})
}
}, &resp)
if err != nil {
return nil, err
}
return resp.toFile(), nil
}
func (y *Cloud189PC) isFamily() bool {
return y.Type == "family"
}
func (y *Cloud189PC) isLogin() bool {
if y.tokenInfo == nil {
return false
}
_, err := y.get(API_URL+"/getUserInfo.action", nil, nil)
return err == nil
}
// Get information of all family cloud members
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
var resp FamilyInfoListResp
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
if err != nil {
return nil, err
}
return resp.FamilyInfoResp, nil
}
// Pick the family cloud ID
func (y *Cloud189PC) getFamilyID() (string, error) {
infos, err := y.getFamilyInfoList()
if err != nil {
return "", err
}
if len(infos) == 0 {
return "", fmt.Errorf("cannot get automatically,please input family_id")
}
for _, info := range infos {
if strings.Contains(y.tokenInfo.LoginName, info.RemarkName) {
return fmt.Sprint(info.FamilyID), nil
}
}
return fmt.Sprint(infos[0].FamilyID), nil
}
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
var resp BatchTaskStateResp
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
req.SetFormData(map[string]string{
"type": aType,
"taskId": taskID,
})
}, &resp)
if err != nil {
return nil, err
}
return &resp, nil
}
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
for {
state, err := y.CheckBatchTask(aType, taskID)
if err != nil {
return err
}
switch state.TaskStatus {
case 2:
return errors.New("there is a conflict with the target object")
case 4:
return nil
}
time.Sleep(t)
}
}

View File

@ -1,114 +0,0 @@
package alias
import (
"context"
"errors"
"strings"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
)
type Alias struct {
model.Storage
Addition
pathMap map[string][]string
autoFlatten bool
oneKey string
}
func (d *Alias) Config() driver.Config {
return config
}
func (d *Alias) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Alias) Init(ctx context.Context) error {
if d.Paths == "" {
return errors.New("paths is required")
}
d.pathMap = make(map[string][]string)
for _, path := range strings.Split(d.Paths, "\n") {
path = strings.TrimSpace(path)
if path == "" {
continue
}
k, v := getPair(path)
d.pathMap[k] = append(d.pathMap[k], v)
}
if len(d.pathMap) == 1 {
for k := range d.pathMap {
d.oneKey = k
}
d.autoFlatten = true
}
return nil
}
func (d *Alias) Drop(ctx context.Context) error {
d.pathMap = nil
return nil
}
func (d *Alias) Get(ctx context.Context, path string) (model.Obj, error) {
if utils.PathEqual(path, "/") {
return &model.Object{
Name: "Root",
IsFolder: true,
Path: "/",
}, nil
}
root, sub := d.getRootAndPath(path)
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
for _, dst := range dsts {
obj, err := d.get(ctx, path, dst, sub)
if err == nil {
return obj, nil
}
}
return nil, errs.ObjectNotFound
}
func (d *Alias) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
path := dir.GetPath()
if utils.PathEqual(path, "/") && !d.autoFlatten {
return d.listRoot(), nil
}
root, sub := d.getRootAndPath(path)
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
var objs []model.Obj
for _, dst := range dsts {
tmp, err := d.list(ctx, dst, sub)
if err == nil {
objs = append(objs, tmp...)
}
}
return objs, nil
}
func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
root, sub := d.getRootAndPath(file.GetPath())
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
for _, dst := range dsts {
link, err := d.link(ctx, dst, sub, args)
if err == nil {
return link, nil
}
}
return nil, errs.ObjectNotFound
}
var _ driver.Driver = (*Alias)(nil)

View File

@ -1,27 +0,0 @@
package alias
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
// Usually one of two
// driver.RootPath
// define other
Paths string `json:"paths" required:"true" type:"text"`
}
var config = driver.Config{
Name: "Alias",
LocalSort: true,
NoCache: true,
NoUpload: true,
DefaultRoot: "/",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Alias{}
})
}

View File

@ -1 +0,0 @@
package alias

View File

@ -1,114 +0,0 @@
package alias
import (
"context"
"fmt"
stdpath "path"
"strings"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/sign"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
)
func (d *Alias) listRoot() []model.Obj {
var objs []model.Obj
for k := range d.pathMap {
obj := model.Object{
Name: k,
IsFolder: true,
Modified: d.Modified,
}
objs = append(objs, &obj)
}
return objs
}
// helpers not defined in the Driver interface
func getPair(path string) (string, string) {
//path = strings.TrimSpace(path)
if strings.Contains(path, ":") {
pair := strings.SplitN(path, ":", 2)
if !strings.Contains(pair[0], "/") {
return pair[0], pair[1]
}
}
return stdpath.Base(path), path
}
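// Illustrative sketch, not part of the original file (assumes a _test.go in this package
// with testing imported): how getPair splits one line of Addition.Paths — "name:path"
// keeps the explicit name, while a bare path falls back to its base name as the key.
func TestGetPair(t *testing.T) {
	if k, v := getPair("docs:/local/documents"); k != "docs" || v != "/local/documents" {
		t.Fatalf("got %q %q", k, v)
	}
	if k, v := getPair("/mnt/media"); k != "media" || v != "/mnt/media" {
		t.Fatalf("got %q %q", k, v)
	}
}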
func (d *Alias) getRootAndPath(path string) (string, string) {
if d.autoFlatten {
return d.oneKey, path
}
path = strings.TrimPrefix(path, "/")
parts := strings.SplitN(path, "/", 2)
if len(parts) == 1 {
return parts[0], ""
}
return parts[0], parts[1]
}
func (d *Alias) get(ctx context.Context, path string, dst, sub string) (model.Obj, error) {
obj, err := fs.Get(ctx, stdpath.Join(dst, sub), &fs.GetArgs{NoLog: true})
if err != nil {
return nil, err
}
return &model.Object{
Path: path,
Name: obj.GetName(),
Size: obj.GetSize(),
Modified: obj.ModTime(),
IsFolder: obj.IsDir(),
}, nil
}
func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error) {
objs, err := fs.List(ctx, stdpath.Join(dst, sub), &fs.ListArgs{NoLog: true})
// the obj must implement the model.SetPath interface
// return objs, err
if err != nil {
return nil, err
}
return utils.SliceConvert(objs, func(obj model.Obj) (model.Obj, error) {
thumb, ok := model.GetThumb(obj)
objRes := model.Object{
Name: obj.GetName(),
Size: obj.GetSize(),
Modified: obj.ModTime(),
IsFolder: obj.IsDir(),
}
if !ok {
return &objRes, nil
}
return &model.ObjThumb{
Object: objRes,
Thumbnail: model.Thumbnail{
Thumbnail: thumb,
},
}, nil
})
}
func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs) (*model.Link, error) {
reqPath := stdpath.Join(dst, sub)
storage, err := fs.GetStorage(reqPath, &fs.GetStoragesArgs{})
if err != nil {
return nil, err
}
_, err = fs.Get(ctx, reqPath, &fs.GetArgs{NoLog: true})
if err != nil {
return nil, err
}
if common.ShouldProxy(storage, stdpath.Base(sub)) {
return &model.Link{
URL: fmt.Sprintf("%s/p%s?sign=%s",
common.GetApiUrl(args.HttpReq),
utils.EncodePath(reqPath, true),
sign.Sign(reqPath)),
}, nil
}
link, _, err := fs.Link(ctx, reqPath, args)
return link, err
}

View File

@ -1,118 +0,0 @@
package alist_v2
import (
"context"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/server/common"
)
type AListV2 struct {
model.Storage
Addition
}
func (d *AListV2) Config() driver.Config {
return config
}
func (d *AListV2) GetAddition() driver.Additional {
return &d.Addition
}
func (d *AListV2) Init(ctx context.Context) error {
if len(d.Addition.Address) > 0 && string(d.Addition.Address[len(d.Addition.Address)-1]) == "/" {
d.Addition.Address = d.Addition.Address[0 : len(d.Addition.Address)-1]
}
// TODO login / refresh token
//op.MustSaveDriverStorage(d)
return nil
}
func (d *AListV2) Drop(ctx context.Context) error {
return nil
}
func (d *AListV2) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
url := d.Address + "/api/public/path"
var resp common.Resp[PathResp]
_, err := base.RestyClient.R().
SetResult(&resp).
SetHeader("Authorization", d.AccessToken).
SetBody(PathReq{
PageNum: 0,
PageSize: 0,
Path: dir.GetPath(),
Password: d.Password,
}).Post(url)
if err != nil {
return nil, err
}
var files []model.Obj
for _, f := range resp.Data.Files {
file := model.ObjThumb{
Object: model.Object{
Name: f.Name,
Modified: *f.UpdatedAt,
Size: f.Size,
IsFolder: f.Type == 1,
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
}
files = append(files, &file)
}
return files, nil
}
func (d *AListV2) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
url := d.Address + "/api/public/path"
var resp common.Resp[PathResp]
_, err := base.RestyClient.R().
SetResult(&resp).
SetHeader("Authorization", d.AccessToken).
SetBody(PathReq{
PageNum: 0,
PageSize: 0,
Path: file.GetPath(),
Password: d.Password,
}).Post(url)
if err != nil {
return nil, err
}
return &model.Link{
URL: resp.Data.Files[0].Url,
}, nil
}
func (d *AListV2) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return errs.NotImplement
}
func (d *AListV2) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotImplement
}
func (d *AListV2) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return errs.NotImplement
}
func (d *AListV2) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotImplement
}
func (d *AListV2) Remove(ctx context.Context, obj model.Obj) error {
return errs.NotImplement
}
func (d *AListV2) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return errs.NotImplement
}
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*AListV2)(nil)

View File

@ -1,26 +0,0 @@
package alist_v2
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootPath
Address string `json:"url" required:"true"`
Password string `json:"password"`
AccessToken string `json:"access_token"`
}
var config = driver.Config{
Name: "AList V2",
LocalSort: true,
NoUpload: true,
DefaultRoot: "/",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &AListV2{}
})
}

View File

@ -1,31 +0,0 @@
package alist_v2
import (
"time"
)
type File struct {
Id string `json:"-"`
Name string `json:"name"`
Size int64 `json:"size"`
Type int `json:"type"`
Driver string `json:"driver"`
UpdatedAt *time.Time `json:"updated_at"`
Thumbnail string `json:"thumbnail"`
Url string `json:"url"`
SizeStr string `json:"size_str"`
TimeStr string `json:"time_str"`
}
type PathResp struct {
Type string `json:"type"`
//Meta Meta `json:"meta"`
Files []File `json:"files"`
}
type PathReq struct {
PageNum int `json:"page_num"`
PageSize int `json:"page_size"`
Password string `json:"password"`
Path string `json:"path"`
}

View File

@ -1 +0,0 @@
package alist_v2

View File

@ -1,191 +0,0 @@
package alist_v3
import (
"context"
"fmt"
"io"
"net/http"
"path"
"strconv"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
"github.com/go-resty/resty/v2"
)
type AListV3 struct {
model.Storage
Addition
}
func (d *AListV3) Config() driver.Config {
return config
}
func (d *AListV3) GetAddition() driver.Additional {
return &d.Addition
}
func (d *AListV3) Init(ctx context.Context) error {
d.Addition.Address = strings.TrimSuffix(d.Addition.Address, "/")
var resp common.Resp[MeResp]
_, err := d.request("/me", http.MethodGet, func(req *resty.Request) {
req.SetResult(&resp)
})
if err != nil {
return err
}
// if the username is not empty and the username is not the same as the current username, then login again
if d.Username != "" && d.Username != resp.Data.Username {
err = d.login()
if err != nil {
return err
}
}
// re-get the user info
_, err = d.request("/me", http.MethodGet, func(req *resty.Request) {
req.SetResult(&resp)
})
if err != nil {
return err
}
if resp.Data.Role == model.GUEST {
url := d.Address + "/api/public/settings"
res, err := base.RestyClient.R().Get(url)
if err != nil {
return err
}
allowMounted := utils.Json.Get(res.Body(), "data", conf.AllowMounted).ToString() == "true"
if !allowMounted {
return fmt.Errorf("the site does not allow mounted")
}
}
return err
}
func (d *AListV3) Drop(ctx context.Context) error {
return nil
}
func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var resp common.Resp[FsListResp]
_, err := d.request("/fs/list", http.MethodPost, func(req *resty.Request) {
req.SetResult(&resp).SetBody(ListReq{
PageReq: model.PageReq{
Page: 1,
PerPage: 0,
},
Path: dir.GetPath(),
Password: d.MetaPassword,
Refresh: false,
})
})
if err != nil {
return nil, err
}
var files []model.Obj
for _, f := range resp.Data.Content {
file := model.ObjThumb{
Object: model.Object{
Name: f.Name,
Modified: f.Modified,
Ctime: f.Created,
Size: f.Size,
IsFolder: f.IsDir,
HashInfo: utils.FromString(f.HashInfo),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumb},
}
files = append(files, &file)
}
return files, nil
}
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp common.Resp[FsGetResp]
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
req.SetResult(&resp).SetBody(FsGetReq{
Path: file.GetPath(),
Password: d.MetaPassword,
})
})
if err != nil {
return nil, err
}
return &model.Link{
URL: resp.Data.RawURL,
}, nil
}
func (d *AListV3) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err := d.request("/fs/mkdir", http.MethodPost, func(req *resty.Request) {
req.SetBody(MkdirOrLinkReq{
Path: path.Join(parentDir.GetPath(), dirName),
})
})
return err
}
func (d *AListV3) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := d.request("/fs/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(MoveCopyReq{
SrcDir: path.Dir(srcObj.GetPath()),
DstDir: dstDir.GetPath(),
Names: []string{srcObj.GetName()},
})
})
return err
}
func (d *AListV3) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
_, err := d.request("/fs/rename", http.MethodPost, func(req *resty.Request) {
req.SetBody(RenameReq{
Path: srcObj.GetPath(),
Name: newName,
})
})
return err
}
func (d *AListV3) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := d.request("/fs/copy", http.MethodPost, func(req *resty.Request) {
req.SetBody(MoveCopyReq{
SrcDir: path.Dir(srcObj.GetPath()),
DstDir: dstDir.GetPath(),
Names: []string{srcObj.GetName()},
})
})
return err
}
func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
_, err := d.request("/fs/remove", http.MethodPost, func(req *resty.Request) {
req.SetBody(RemoveReq{
Dir: path.Dir(obj.GetPath()),
Names: []string{obj.GetName()},
})
})
return err
}
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
SetHeader("Password", d.MetaPassword).
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
SetContentLength(true).
SetBody(io.ReadCloser(stream))
})
return err
}
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*AListV3)(nil)
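
For orientation, here is a minimal standalone sketch of what the Put method above sends to an AList v3 server: the destination path in the File-Path header, the meta password in Password, and the file streamed as the request body. The address, token, directory and file name below are placeholders, not values taken from this diff.

```go
package main

import (
	"fmt"
	"os"
	"path"
	"strconv"

	"github.com/go-resty/resty/v2"
)

func main() {
	// Hypothetical values; replace with a real AList v3 address, token and target directory.
	const (
		address  = "https://alist.example.com"
		token    = "alist-token"
		dstDir   = "/local/backup"
		password = ""
	)

	f, err := os.Open("report.pdf") // hypothetical local file
	if err != nil {
		panic(err)
	}
	defer f.Close()
	info, _ := f.Stat()

	// Same header layout as the driver's Put: path in File-Path, meta password
	// in Password, explicit Content-Length, and the file streamed as the body.
	res, err := resty.New().R().
		SetHeader("Authorization", token).
		SetHeader("File-Path", path.Join(dstDir, info.Name())).
		SetHeader("Password", password).
		SetHeader("Content-Length", strconv.FormatInt(info.Size(), 10)).
		SetBody(f).
		Put(address + "/api/fs/put")
	if err != nil {
		panic(err)
	}
	fmt.Println(res.Status())
}
```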

View File

@ -1,28 +0,0 @@
package alist_v3
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootPath
Address string `json:"url" required:"true"`
MetaPassword string `json:"meta_password"`
Username string `json:"username"`
Password string `json:"password"`
Token string `json:"token"`
}
var config = driver.Config{
Name: "AList V3",
LocalSort: true,
DefaultRoot: "/",
CheckStatus: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &AListV3{}
})
}

View File

@ -1,83 +0,0 @@
package alist_v3
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type ListReq struct {
model.PageReq
Path string `json:"path" form:"path"`
Password string `json:"password" form:"password"`
Refresh bool `json:"refresh"`
}
type ObjResp struct {
Name string `json:"name"`
Size int64 `json:"size"`
IsDir bool `json:"is_dir"`
Modified time.Time `json:"modified"`
Created time.Time `json:"created"`
Sign string `json:"sign"`
Thumb string `json:"thumb"`
Type int `json:"type"`
HashInfo string `json:"hashinfo"`
}
type FsListResp struct {
Content []ObjResp `json:"content"`
Total int64 `json:"total"`
Readme string `json:"readme"`
Write bool `json:"write"`
Provider string `json:"provider"`
}
type FsGetReq struct {
Path string `json:"path" form:"path"`
Password string `json:"password" form:"password"`
}
type FsGetResp struct {
ObjResp
RawURL string `json:"raw_url"`
Readme string `json:"readme"`
Provider string `json:"provider"`
Related []ObjResp `json:"related"`
}
type MkdirOrLinkReq struct {
Path string `json:"path" form:"path"`
}
type MoveCopyReq struct {
SrcDir string `json:"src_dir"`
DstDir string `json:"dst_dir"`
Names []string `json:"names"`
}
type RenameReq struct {
Path string `json:"path"`
Name string `json:"name"`
}
type RemoveReq struct {
Dir string `json:"dir"`
Names []string `json:"names"`
}
type LoginResp struct {
Token string `json:"token"`
}
type MeResp struct {
Id int `json:"id"`
Username string `json:"username"`
Password string `json:"password"`
BasePath string `json:"base_path"`
Role int `json:"role"`
Disabled bool `json:"disabled"`
Permission int `json:"permission"`
SsoId string `json:"sso_id"`
Otp bool `json:"otp"`
}

View File

@ -1,58 +0,0 @@
package alist_v3
import (
"fmt"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
func (d *AListV3) login() error {
var resp common.Resp[LoginResp]
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
req.SetResult(&resp).SetBody(base.Json{
"username": d.Username,
"password": d.Password,
})
})
if err != nil {
return err
}
d.Token = resp.Data.Token
op.MustSaveDriverStorage(d)
return nil
}
func (d *AListV3) request(api, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
url := d.Address + "/api" + api
req := base.RestyClient.R()
req.SetHeader("Authorization", d.Token)
if callback != nil {
callback(req)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugf("[alist_v3] response body: %s", res.String())
if res.StatusCode() >= 400 {
return nil, fmt.Errorf("request failed, status: %s", res.Status())
}
code := utils.Json.Get(res.Body(), "code").ToInt()
if code != 200 {
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
err = d.login()
if err != nil {
return nil, err
}
return d.request(api, method, callback, true)
}
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
}
return res.Body(), nil
}
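
The request helper above logs in again when the server answers 401/403 and then retries exactly once; the variadic retry flag is what stops it from looping. Below is a stripped-down sketch of that pattern, with call and login as hypothetical stand-ins for the resty round trip and d.login:

```go
package main

import "fmt"

// authRetry sketches the retry-once pattern used by request above: on 401/403
// it logs in again and retries a single time; the retried flag plays the role
// of the variadic retry parameter and prevents an endless loop.
func authRetry(call func() (int, error), login func() error, retried bool) error {
	code, err := call()
	if err != nil {
		return err
	}
	if (code == 401 || code == 403) && !retried {
		if err := login(); err != nil {
			return err
		}
		return authRetry(call, login, true) // retry exactly once
	}
	if code != 200 {
		return fmt.Errorf("request failed, code: %d", code)
	}
	return nil
}

func main() {
	token := "expired"
	// call and login are hypothetical stand-ins for the HTTP round trip and d.login.
	call := func() (int, error) {
		if token == "expired" {
			return 401, nil
		}
		return 200, nil
	}
	login := func() error { token = "fresh"; return nil }

	if err := authRetry(call, login, false); err != nil {
		panic(err)
	}
	fmt.Println("ok, token:", token)
}
```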

View File

@ -1,354 +0,0 @@
package aliyundrive
import (
"bytes"
"context"
"crypto/sha1"
"encoding/base64"
"encoding/hex"
"fmt"
"github.com/alist-org/alist/v3/internal/stream"
"io"
"math"
"math/big"
"net/http"
"os"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/cron"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type AliDrive struct {
model.Storage
Addition
AccessToken string
cron *cron.Cron
DriveId string
UserID string
}
func (d *AliDrive) Config() driver.Config {
return config
}
func (d *AliDrive) GetAddition() driver.Additional {
return &d.Addition
}
func (d *AliDrive) Init(ctx context.Context) error {
// TODO login / refresh token
//op.MustSaveDriverStorage(d)
err := d.refreshToken()
if err != nil {
return err
}
// get drive id and user id
res, err, _ := d.request("https://api.aliyundrive.com/v2/user/get", http.MethodPost, nil, nil)
if err != nil {
return err
}
d.DriveId = utils.Json.Get(res, "default_drive_id").ToString()
d.UserID = utils.Json.Get(res, "user_id").ToString()
d.cron = cron.NewCron(time.Hour * 2)
d.cron.Do(func() {
err := d.refreshToken()
if err != nil {
log.Errorf("%+v", err)
}
})
if global.Has(d.UserID) {
return nil
}
// init deviceID
deviceID := utils.HashData(utils.SHA256, []byte(d.UserID))
// init privateKey
privateKey, _ := NewPrivateKeyFromHex(deviceID)
state := State{
privateKey: privateKey,
deviceID: deviceID,
}
// store state
global.Store(d.UserID, &state)
// init signature
d.sign()
return nil
}
func (d *AliDrive) Drop(ctx context.Context) error {
if d.cron != nil {
d.cron.Stop()
}
return nil
}
func (d *AliDrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
data := base.Json{
"drive_id": d.DriveId,
"file_id": file.GetID(),
"expire_sec": 14400,
}
res, err, _ := d.request("https://api.aliyundrive.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
return nil, err
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.aliyundrive.com/"},
},
URL: utils.Json.Get(res, "url").ToString(),
}, nil
}
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err, _ := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
"name": dirName,
"parent_file_id": parentDir.GetID(),
"type": "folder",
})
}, nil)
return err
}
func (d *AliDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
err := d.batch(srcObj.GetID(), dstDir.GetID(), "/file/move")
return err
}
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
_, err, _ := d.request("https://api.aliyundrive.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
"file_id": srcObj.GetID(),
"name": newName,
})
}, nil)
return err
}
func (d *AliDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
err := d.batch(srcObj.GetID(), dstDir.GetID(), "/file/copy")
return err
}
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
_, err, _ := d.request("https://api.aliyundrive.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": obj.GetID(),
})
}, nil)
return err
}
func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.FileStreamer, up driver.UpdateProgress) error {
file := stream.FileStream{
Obj: streamer,
Reader: streamer,
Mimetype: streamer.GetMimetype(),
}
const DEFAULT int64 = 10485760
var count = int(math.Ceil(float64(streamer.GetSize()) / float64(DEFAULT)))
partInfoList := make([]base.Json, 0, count)
for i := 1; i <= count; i++ {
partInfoList = append(partInfoList, base.Json{"part_number": i})
}
reqBody := base.Json{
"check_name_mode": "overwrite",
"drive_id": d.DriveId,
"name": file.GetName(),
"parent_file_id": dstDir.GetID(),
"part_info_list": partInfoList,
"size": file.GetSize(),
"type": "file",
}
var localFile *os.File
if fileStream, ok := file.Reader.(*stream.FileStream); ok {
localFile, _ = fileStream.Reader.(*os.File)
}
if d.RapidUpload {
buf := bytes.NewBuffer(make([]byte, 0, 1024))
io.CopyN(buf, file, 1024)
reqBody["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
if localFile != nil {
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
return err
}
} else {
// stitch the already-read head back onto the reader
file.Reader = struct {
io.Reader
io.Closer
}{
Reader: io.MultiReader(buf, file),
Closer: &file,
}
}
} else {
reqBody["content_hash_name"] = "none"
reqBody["proof_version"] = "v1"
}
var resp UploadResp
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil && e.Code != "PreHashMatched" {
return err
}
if d.RapidUpload && e.Code == "PreHashMatched" {
delete(reqBody, "pre_hash")
h := sha1.New()
if localFile != nil {
if err = utils.CopyWithCtx(ctx, h, localFile, 0, nil); err != nil {
return err
}
if _, err = localFile.Seek(0, io.SeekStart); err != nil {
return err
}
} else {
tempFile, err := os.CreateTemp(conf.Conf.TempDir, "file-*")
if err != nil {
return err
}
defer func() {
_ = tempFile.Close()
_ = os.Remove(tempFile.Name())
}()
if err = utils.CopyWithCtx(ctx, io.MultiWriter(tempFile, h), file, 0, nil); err != nil {
return err
}
localFile = tempFile
}
reqBody["content_hash"] = hex.EncodeToString(h.Sum(nil))
reqBody["content_hash_name"] = "sha1"
reqBody["proof_version"] = "v1"
/*
JS does implicit conversions that are easy to get wrong; not sure whether this has bugs
var n = e.access_token
r = new BigNumber('0x'.concat(md5(n).slice(0, 16)))
i = new BigNumber(t.file.size)
o = i ? r.mod(i) : new gt.BigNumber(0);
(t.file.slice(o.toNumber(), Math.min(o.plus(8).toNumber(), t.file.size)))
*/
buf := make([]byte, 8)
r, _ := new(big.Int).SetString(utils.GetMD5EncodeStr(d.AccessToken)[:16], 16)
i := new(big.Int).SetInt64(file.GetSize())
o := new(big.Int).SetInt64(0)
if file.GetSize() > 0 {
o = r.Mod(r, i)
}
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil && e.Code != "PreHashMatched" {
return err
}
if resp.RapidUpload {
return nil
}
// rapid upload failed, fall back to a normal upload
if _, err = localFile.Seek(0, io.SeekStart); err != nil {
return err
}
file.Reader = localFile
}
for i, partInfo := range resp.PartInfoList {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
url := partInfo.UploadUrl
if d.InternalUpload {
url = partInfo.InternalUploadUrl
}
req, err := http.NewRequest("PUT", url, io.LimitReader(file, DEFAULT))
if err != nil {
return err
}
req = req.WithContext(ctx)
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
res.Body.Close()
if count > 0 {
up(i * 100 / count)
}
}
var resp2 base.Json
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": resp.FileId,
"upload_id": resp.UploadId,
})
}, &resp2)
if err != nil && e.Code != "PreHashMatched" {
return err
}
if resp2["file_id"] == resp.FileId {
return nil
}
return fmt.Errorf("%+v", resp2)
}
func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
var resp base.Json
var url string
data := base.Json{
"drive_id": d.DriveId,
"file_id": args.Obj.GetID(),
}
switch args.Method {
case "doc_preview":
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
data["access_token"] = d.AccessToken
case "video_preview":
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
data["url_expire_sec"] = 14400
default:
return nil, errs.NotSupport
}
_, err, _ := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
return resp, nil
}
var _ driver.Driver = (*AliDrive)(nil)
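
The rapid-upload branch of Put above hinges on two values: pre_hash, the SHA-1 of the first 1 KiB of the file, and proof_code, eight bytes of the file read at an offset derived from the MD5 of the access token, as described by the quoted JS snippet. The following is a minimal sketch of both computations against a local file; the file name and token are placeholders:

```go
package main

import (
	"crypto/md5"
	"crypto/sha1"
	"encoding/base64"
	"encoding/hex"
	"fmt"
	"io"
	"math/big"
	"os"
)

// preHash mirrors the rapid-upload probe above: SHA-1 of the first 1 KiB.
func preHash(f *os.File) (string, error) {
	h := sha1.New()
	if _, err := io.CopyN(h, f, 1024); err != nil && err != io.EOF {
		return "", err
	}
	return hex.EncodeToString(h.Sum(nil)), nil
}

// proofCode mirrors the quoted JS: take the first 16 hex chars of
// MD5(accessToken) as a big integer, reduce it modulo the file size,
// and base64-encode the 8 bytes at that offset.
func proofCode(f *os.File, accessToken string, size int64) (string, error) {
	sum := md5.Sum([]byte(accessToken))
	r, _ := new(big.Int).SetString(hex.EncodeToString(sum[:])[:16], 16)
	offset := int64(0)
	if size > 0 {
		offset = new(big.Int).Mod(r, big.NewInt(size)).Int64()
	}
	buf := make([]byte, 8)
	n, err := f.ReadAt(buf, offset)
	if err != nil && err != io.EOF {
		return "", err
	}
	return base64.StdEncoding.EncodeToString(buf[:n]), nil
}

func main() {
	f, err := os.Open("movie.mp4") // hypothetical local file
	if err != nil {
		panic(err)
	}
	defer f.Close()
	info, _ := f.Stat()

	ph, _ := preHash(f)
	pc, _ := proofCode(f, "dummy-access-token", info.Size())
	fmt.Println("pre_hash:", ph, "proof_code:", pc)
}
```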

View File

@ -1,16 +0,0 @@
package aliyundrive
import (
"crypto/ecdsa"
"github.com/alist-org/alist/v3/pkg/generic_sync"
)
type State struct {
deviceID string
signature string
retry int
privateKey *ecdsa.PrivateKey
}
var global = generic_sync.MapOf[string, *State]{}

View File

@ -1,66 +0,0 @@
package aliyundrive
import (
"crypto/ecdsa"
"crypto/rand"
"encoding/hex"
"math/big"
"github.com/dustinxie/ecc"
)
func NewPrivateKey() (*ecdsa.PrivateKey, error) {
p256k1 := ecc.P256k1()
return ecdsa.GenerateKey(p256k1, rand.Reader)
}
func NewPrivateKeyFromHex(hex_ string) (*ecdsa.PrivateKey, error) {
data, err := hex.DecodeString(hex_)
if err != nil {
return nil, err
}
return NewPrivateKeyFromBytes(data), nil
}
func NewPrivateKeyFromBytes(priv []byte) *ecdsa.PrivateKey {
p256k1 := ecc.P256k1()
x, y := p256k1.ScalarBaseMult(priv)
return &ecdsa.PrivateKey{
PublicKey: ecdsa.PublicKey{
Curve: p256k1,
X: x,
Y: y,
},
D: new(big.Int).SetBytes(priv),
}
}
func PrivateKeyToHex(private *ecdsa.PrivateKey) string {
return hex.EncodeToString(PrivateKeyToBytes(private))
}
func PrivateKeyToBytes(private *ecdsa.PrivateKey) []byte {
return private.D.Bytes()
}
func PublicKeyToHex(public *ecdsa.PublicKey) string {
return hex.EncodeToString(PublicKeyToBytes(public))
}
func PublicKeyToBytes(public *ecdsa.PublicKey) []byte {
x := public.X.Bytes()
if len(x) < 32 {
for i := 0; i < 32-len(x); i++ {
x = append([]byte{0}, x...)
}
}
y := public.Y.Bytes()
if len(y) < 32 {
for i := 0; i < 32-len(y); i++ {
y = append([]byte{0}, y...)
}
}
return append(x, y...)
}

View File

@ -1,30 +0,0 @@
package aliyundrive
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
RefreshToken string `json:"refresh_token" required:"true"`
//DeviceID string `json:"device_id" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
RapidUpload bool `json:"rapid_upload"`
InternalUpload bool `json:"internal_upload"`
}
var config = driver.Config{
Name: "Aliyundrive",
DefaultRoot: "root",
Alert: `warning|There may be an infinite loop bug in this driver.
Deprecated, no longer maintained and will be removed in a future version.
We recommend using the official driver AliyundriveOpen.`,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &AliDrive{}
})
}

View File

@ -1,56 +0,0 @@
package aliyundrive
import (
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type RespErr struct {
Code string `json:"code"`
Message string `json:"message"`
}
type Files struct {
Items []File `json:"items"`
NextMarker string `json:"next_marker"`
}
type File struct {
DriveId string `json:"drive_id"`
CreatedAt *time.Time `json:"created_at"`
FileExtension string `json:"file_extension"`
FileId string `json:"file_id"`
Type string `json:"type"`
Name string `json:"name"`
Category string `json:"category"`
ParentFileId string `json:"parent_file_id"`
UpdatedAt time.Time `json:"updated_at"`
Size int64 `json:"size"`
Thumbnail string `json:"thumbnail"`
Url string `json:"url"`
}
func fileToObj(f File) *model.ObjThumb {
return &model.ObjThumb{
Object: model.Object{
ID: f.FileId,
Name: f.Name,
Size: f.Size,
Modified: f.UpdatedAt,
IsFolder: f.Type == "folder",
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
}
}
type UploadResp struct {
FileId string `json:"file_id"`
UploadId string `json:"upload_id"`
PartInfoList []struct {
UploadUrl string `json:"upload_url"`
InternalUploadUrl string `json:"internal_upload_url"`
} `json:"part_info_list"`
RapidUpload bool `json:"rapid_upload"`
}

View File

@ -1,204 +0,0 @@
package aliyundrive
import (
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"net/http"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/dustinxie/ecc"
"github.com/go-resty/resty/v2"
"github.com/google/uuid"
)
func (d *AliDrive) createSession() error {
state, ok := global.Load(d.UserID)
if !ok {
return fmt.Errorf("can't load user state, user_id: %s", d.UserID)
}
d.sign()
state.retry++
if state.retry > 3 {
state.retry = 0
return fmt.Errorf("createSession failed after three retries")
}
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"deviceName": "samsung",
"modelName": "SM-G9810",
"nonce": 0,
"pubKey": PublicKeyToHex(&state.privateKey.PublicKey),
"refreshToken": d.RefreshToken,
})
}, nil)
if err == nil {
state.retry = 0
}
return err
}
// func (d *AliDrive) renewSession() error {
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
// return err
// }
func (d *AliDrive) sign() {
state, _ := global.Load(d.UserID)
secpAppID := "5dde4e1bdf9e4966b387ba58f4b3fdc3"
signData := fmt.Sprintf("%s:%s:%s:%d", secpAppID, state.deviceID, d.UserID, 0)
hash := sha256.Sum256([]byte(signData))
data, _ := ecc.SignBytes(state.privateKey, hash[:], ecc.RecID|ecc.LowerS)
state.signature = hex.EncodeToString(data) //strconv.Itoa(state.nonce)
}
// do other things not defined in the Driver interface
func (d *AliDrive) refreshToken() error {
url := "https://auth.aliyundrive.com/v2/account/token"
var resp base.TokenResp
var e RespErr
_, err := base.RestyClient.R().
//ForceContentType("application/json").
SetBody(base.Json{"refresh_token": d.RefreshToken, "grant_type": "refresh_token"}).
SetResult(&resp).
SetError(&e).
Post(url)
if err != nil {
return err
}
if e.Code != "" {
return fmt.Errorf("failed to refresh token: %s", e.Message)
}
if resp.RefreshToken == "" {
return errors.New("failed to refresh token: refresh token is empty")
}
d.RefreshToken, d.AccessToken = resp.RefreshToken, resp.AccessToken
op.MustSaveDriverStorage(d)
return nil
}
func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp interface{}) ([]byte, error, RespErr) {
req := base.RestyClient.R()
state, ok := global.Load(d.UserID)
if !ok {
if url == "https://api.aliyundrive.com/v2/user/get" {
state = &State{}
} else {
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
}
}
req.SetHeaders(map[string]string{
"Authorization": "Bearer\t" + d.AccessToken,
"content-type": "application/json",
"origin": "https://www.aliyundrive.com",
"Referer": "https://aliyundrive.com/",
"X-Signature": state.signature,
"x-request-id": uuid.NewString(),
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
"X-Device-Id": state.deviceID,
})
if callback != nil {
callback(req)
} else {
req.SetBody("{}")
}
if resp != nil {
req.SetResult(resp)
}
var e RespErr
req.SetError(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err, e
}
if e.Code != "" {
switch e.Code {
case "AccessTokenInvalid":
err = d.refreshToken()
if err != nil {
return nil, err, e
}
case "DeviceSessionSignatureInvalid":
err = d.createSession()
if err != nil {
return nil, err, e
}
default:
return nil, errors.New(e.Message), e
}
return d.request(url, method, callback, resp)
} else if res.IsError() {
return nil, errors.New("bad status code " + res.Status()), e
}
return res.Body(), nil, e
}
func (d *AliDrive) getFiles(fileId string) ([]File, error) {
marker := "first"
res := make([]File, 0)
for marker != "" {
if marker == "first" {
marker = ""
}
var resp Files
data := base.Json{
"drive_id": d.DriveId,
"fields": "*",
"image_thumbnail_process": "image/resize,w_400/format,jpeg",
"image_url_process": "image/resize,w_1920/format,jpeg",
"limit": 200,
"marker": marker,
"order_by": d.OrderBy,
"order_direction": d.OrderDirection,
"parent_file_id": fileId,
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
"url_expire_sec": 14400,
}
_, err, _ := d.request("https://api.aliyundrive.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
marker = resp.NextMarker
res = append(res, resp.Items...)
}
return res, nil
}
func (d *AliDrive) batch(srcId, dstId string, url string) error {
res, err, _ := d.request("https://api.aliyundrive.com/v3/batch", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"requests": []base.Json{
{
"headers": base.Json{
"Content-Type": "application/json",
},
"method": "POST",
"id": srcId,
"body": base.Json{
"drive_id": d.DriveId,
"file_id": srcId,
"to_drive_id": d.DriveId,
"to_parent_file_id": dstId,
},
"url": url,
},
},
"resource": "file",
})
}, nil)
if err != nil {
return err
}
status := utils.Json.Get(res, "responses", 0, "status").ToInt()
if status < 400 && status >= 100 {
return nil
}
return errors.New(string(res))
}
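
Putting the crypto.go helpers and sign together: the X-Signature header is a secp256k1 signature over "appID:deviceID:userID:nonce", where the device key is derived deterministically from the user id. Below is a self-contained sketch of that derivation using the same dustinxie/ecc calls as the code above; the user id is a placeholder and the nonce is fixed at 0, as in sign:

```go
package main

import (
	"crypto/ecdsa"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"math/big"

	"github.com/dustinxie/ecc"
)

func main() {
	const appID = "5dde4e1bdf9e4966b387ba58f4b3fdc3" // constant from sign()
	userID := "example-user-id"                      // hypothetical placeholder

	// deviceID is the SHA-256 hex of the user id, as Init does.
	sum := sha256.Sum256([]byte(userID))
	deviceID := hex.EncodeToString(sum[:])

	// The device private key is built directly from the deviceID bytes,
	// mirroring NewPrivateKeyFromHex / NewPrivateKeyFromBytes above.
	raw, _ := hex.DecodeString(deviceID)
	p256k1 := ecc.P256k1()
	x, y := p256k1.ScalarBaseMult(raw)
	priv := &ecdsa.PrivateKey{
		PublicKey: ecdsa.PublicKey{Curve: p256k1, X: x, Y: y},
		D:         new(big.Int).SetBytes(raw),
	}

	// Signature over "appID:deviceID:userID:nonce", nonce fixed at 0 here.
	payload := fmt.Sprintf("%s:%s:%s:%d", appID, deviceID, userID, 0)
	hash := sha256.Sum256([]byte(payload))
	sig, err := ecc.SignBytes(priv, hash[:], ecc.RecID|ecc.LowerS)
	if err != nil {
		panic(err)
	}
	fmt.Println("X-Signature:", hex.EncodeToString(sig))
}
```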

View File

@ -1,227 +0,0 @@
package aliyundrive_open
import (
"context"
"errors"
"fmt"
"net/http"
"time"
"github.com/Xhofe/rateg"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
)
type AliyundriveOpen struct {
model.Storage
Addition
base string
DriveId string
limitList func(ctx context.Context, data base.Json) (*Files, error)
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
}
func (d *AliyundriveOpen) Config() driver.Config {
return config
}
func (d *AliyundriveOpen) GetAddition() driver.Additional {
return &d.Addition
}
func (d *AliyundriveOpen) Init(ctx context.Context) error {
if d.LIVPDownloadFormat == "" {
d.LIVPDownloadFormat = "jpeg"
}
if d.DriveType == "" {
d.DriveType = "default"
}
res, err := d.request("/adrive/v1.0/user/getDriveInfo", http.MethodPost, nil)
if err != nil {
return err
}
d.DriveId = utils.Json.Get(res, d.DriveType+"_drive_id").ToString()
d.limitList = rateg.LimitFnCtx(d.list, rateg.LimitFnOption{
Limit: 4,
Bucket: 1,
})
d.limitLink = rateg.LimitFnCtx(d.link, rateg.LimitFnOption{
Limit: 1,
Bucket: 1,
})
return nil
}
func (d *AliyundriveOpen) Drop(ctx context.Context) error {
return nil
}
func (d *AliyundriveOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.limitList == nil {
return nil, fmt.Errorf("driver not init")
}
files, err := d.getFiles(ctx, dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link, error) {
res, err := d.request("/adrive/v1.0/openFile/getDownloadUrl", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": file.GetID(),
"expire_sec": 14400,
})
})
if err != nil {
return nil, err
}
url := utils.Json.Get(res, "url").ToString()
if url == "" {
if utils.Ext(file.GetName()) != "livp" {
return nil, errors.New("get download url failed: " + string(res))
}
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
}
exp := time.Hour
return &model.Link{
URL: url,
Expiration: &exp,
}, nil
}
func (d *AliyundriveOpen) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if d.limitLink == nil {
return nil, fmt.Errorf("driver not init")
}
return d.limitLink(ctx, file)
}
func (d *AliyundriveOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
nowTime, _ := getNowTime()
newDir := File{CreatedAt: nowTime, UpdatedAt: nowTime}
_, err := d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"parent_file_id": parentDir.GetID(),
"name": dirName,
"type": "folder",
"check_name_mode": "refuse",
}).SetResult(&newDir)
})
if err != nil {
return nil, err
}
return fileToObj(newDir), nil
}
func (d *AliyundriveOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var resp MoveOrCopyResp
_, err := d.request("/adrive/v1.0/openFile/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": srcObj.GetID(),
"to_parent_file_id": dstDir.GetID(),
"check_name_mode": "refuse", // optional:ignore,auto_rename,refuse
//"new_name": "newName", // The new name to use when a file of the same name exists
}).SetResult(&resp)
})
if err != nil {
return nil, err
}
if resp.Exist {
return nil, errors.New("existence of files with the same name")
}
if srcObj, ok := srcObj.(*model.ObjThumb); ok {
srcObj.ID = resp.FileID
srcObj.Modified = time.Now()
return srcObj, nil
}
return nil, nil
}
func (d *AliyundriveOpen) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
var newFile File
_, err := d.request("/adrive/v1.0/openFile/update", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": srcObj.GetID(),
"name": newName,
}).SetResult(&newFile)
})
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
func (d *AliyundriveOpen) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
_, err := d.request("/adrive/v1.0/openFile/copy", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": srcObj.GetID(),
"to_parent_file_id": dstDir.GetID(),
"auto_rename": true,
})
})
return err
}
func (d *AliyundriveOpen) Remove(ctx context.Context, obj model.Obj) error {
uri := "/adrive/v1.0/openFile/recyclebin/trash"
if d.RemoveWay == "delete" {
uri = "/adrive/v1.0/openFile/delete"
}
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": obj.GetID(),
})
})
return err
}
func (d *AliyundriveOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
return d.upload(ctx, dstDir, stream, up)
}
func (d *AliyundriveOpen) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
var resp base.Json
var uri string
data := base.Json{
"drive_id": d.DriveId,
"file_id": args.Obj.GetID(),
}
switch args.Method {
case "video_preview":
uri = "/adrive/v1.0/openFile/getVideoPreviewPlayInfo"
data["category"] = "live_transcoding"
data["url_expire_sec"] = 14400
default:
return nil, errs.NotSupport
}
_, err := d.request(uri, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetResult(&resp)
})
if err != nil {
return nil, err
}
return resp, nil
}
var _ driver.Driver = (*AliyundriveOpen)(nil)
var _ driver.MkdirResult = (*AliyundriveOpen)(nil)
var _ driver.MoveResult = (*AliyundriveOpen)(nil)
var _ driver.RenameResult = (*AliyundriveOpen)(nil)
var _ driver.PutResult = (*AliyundriveOpen)(nil)

View File

@ -1,42 +0,0 @@
package aliyundrive_open
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
DriveType string `json:"drive_type" type:"select" options:"default,resource,backup" default:"default"`
driver.RootID
RefreshToken string `json:"refresh_token" required:"true"`
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/ali_open/token"`
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
RapidUpload bool `json:"rapid_upload" help:"If you enable this option, the file will be uploaded to the server first, so the progress will be incorrect"`
InternalUpload bool `json:"internal_upload" help:"If you are using an Aliyun ECS located in Beijing, you can turn this on to boost the upload speed"`
LIVPDownloadFormat string `json:"livp_download_format" type:"select" options:"jpeg,mov" default:"jpeg"`
AccessToken string
}
var config = driver.Config{
Name: "AliyundriveOpen",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "root",
NoOverwriteUpload: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &AliyundriveOpen{
base: "https://openapi.aliyundrive.com",
}
})
}

View File

@ -1,84 +0,0 @@
package aliyundrive_open
import (
"github.com/alist-org/alist/v3/pkg/utils"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type ErrResp struct {
Code string `json:"code"`
Message string `json:"message"`
}
type Files struct {
Items []File `json:"items"`
NextMarker string `json:"next_marker"`
}
type File struct {
DriveId string `json:"drive_id"`
FileId string `json:"file_id"`
ParentFileId string `json:"parent_file_id"`
Name string `json:"name"`
Size int64 `json:"size"`
FileExtension string `json:"file_extension"`
ContentHash string `json:"content_hash"`
Category string `json:"category"`
Type string `json:"type"`
Thumbnail string `json:"thumbnail"`
Url string `json:"url"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
// create only
FileName string `json:"file_name"`
}
func fileToObj(f File) *model.ObjThumb {
if f.Name == "" {
f.Name = f.FileName
}
return &model.ObjThumb{
Object: model.Object{
ID: f.FileId,
Name: f.Name,
Size: f.Size,
Modified: f.UpdatedAt,
IsFolder: f.Type == "folder",
Ctime: f.CreatedAt,
HashInfo: utils.NewHashInfo(utils.SHA1, f.ContentHash),
},
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbnail},
}
}
type PartInfo struct {
Etag interface{} `json:"etag"`
PartNumber int `json:"part_number"`
PartSize interface{} `json:"part_size"`
UploadUrl string `json:"upload_url"`
ContentType string `json:"content_type"`
}
type CreateResp struct {
//Type string `json:"type"`
//ParentFileId string `json:"parent_file_id"`
//DriveId string `json:"drive_id"`
FileId string `json:"file_id"`
//RevisionId string `json:"revision_id"`
//EncryptMode string `json:"encrypt_mode"`
//DomainId string `json:"domain_id"`
//FileName string `json:"file_name"`
UploadId string `json:"upload_id"`
//Location string `json:"location"`
RapidUpload bool `json:"rapid_upload"`
PartInfoList []PartInfo `json:"part_info_list"`
}
type MoveOrCopyResp struct {
Exist bool `json:"exist"`
DriveID string `json:"drive_id"`
FileID string `json:"file_id"`
}

View File

@ -1,269 +0,0 @@
package aliyundrive_open
import (
"bytes"
"context"
"encoding/base64"
"fmt"
"github.com/alist-org/alist/v3/pkg/http_range"
"io"
"math"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/avast/retry-go"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
func makePartInfos(size int) []base.Json {
partInfoList := make([]base.Json, size)
for i := 0; i < size; i++ {
partInfoList[i] = base.Json{"part_number": 1 + i}
}
return partInfoList
}
func calPartSize(fileSize int64) int64 {
var partSize int64 = 20 * utils.MB
if fileSize > partSize {
if fileSize > 1*utils.TB { // file Size over 1TB
partSize = 5 * utils.GB // file part size 5GB
} else if fileSize > 768*utils.GB { // over 768GB
partSize = 109951163 // ≈ 104.8576MB, split 1TB into 10,000 parts
} else if fileSize > 512*utils.GB { // over 512GB
partSize = 82463373 // ≈ 78.6432MB
} else if fileSize > 384*utils.GB { // over 384GB
partSize = 54975582 // ≈ 52.4288MB
} else if fileSize > 256*utils.GB { // over 256GB
partSize = 41231687 // ≈ 39.3216MB
} else if fileSize > 128*utils.GB { // over 128GB
partSize = 27487791 // ≈ 26.2144MB
}
}
return partSize
}
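
calPartSize above picks a part size so that no file ever needs more than 10,000 slices. The small sketch below reproduces the same thresholds and prints the resulting part counts for a few file sizes, assuming the usual power-of-two definitions of MB/GB/TB:

```go
package main

import (
	"fmt"
	"math"
)

func main() {
	const (
		MB int64 = 1 << 20
		GB int64 = 1 << 30
		TB int64 = 1 << 40
	)
	for _, size := range []int64{500 * MB, 300 * GB, 900 * GB, 2 * TB} {
		// Same thresholds as calPartSize above.
		part := 20 * MB
		switch {
		case size > 1*TB:
			part = 5 * GB
		case size > 768*GB:
			part = 109951163
		case size > 512*GB:
			part = 82463373
		case size > 384*GB:
			part = 54975582
		case size > 256*GB:
			part = 41231687
		case size > 128*GB:
			part = 27487791
		}
		count := int(math.Ceil(float64(size) / float64(part)))
		fmt.Printf("size=%d bytes, part=%d bytes, parts=%d\n", size, part, count)
	}
}
```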
func (d *AliyundriveOpen) getUploadUrl(count int, fileId, uploadId string) ([]PartInfo, error) {
partInfoList := makePartInfos(count)
var resp CreateResp
_, err := d.request("/adrive/v1.0/openFile/getUploadUrl", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": fileId,
"part_info_list": partInfoList,
"upload_id": uploadId,
}).SetResult(&resp)
})
return resp.PartInfoList, err
}
func (d *AliyundriveOpen) uploadPart(ctx context.Context, r io.Reader, partInfo PartInfo) error {
uploadUrl := partInfo.UploadUrl
if d.InternalUpload {
uploadUrl = strings.ReplaceAll(uploadUrl, "https://cn-beijing-data.aliyundrive.net/", "http://ccp-bj29-bj-1592982087.oss-cn-beijing-internal.aliyuncs.com/")
}
req, err := http.NewRequestWithContext(ctx, "PUT", uploadUrl, r)
if err != nil {
return err
}
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode != http.StatusOK && res.StatusCode != http.StatusConflict {
return fmt.Errorf("upload status: %d", res.StatusCode)
}
return nil
}
func (d *AliyundriveOpen) completeUpload(fileId, uploadId string) (model.Obj, error) {
// 3. complete
var newFile File
_, err := d.request("/adrive/v1.0/openFile/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": fileId,
"upload_id": uploadId,
}).SetResult(&newFile)
})
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}
type ProofRange struct {
Start int64
End int64
}
func getProofRange(input string, size int64) (*ProofRange, error) {
if size == 0 {
return &ProofRange{}, nil
}
tmpStr := utils.GetMD5EncodeStr(input)[0:16]
tmpInt, err := strconv.ParseUint(tmpStr, 16, 64)
if err != nil {
return nil, err
}
index := tmpInt % uint64(size)
pr := &ProofRange{
Start: int64(index),
End: int64(index) + 8,
}
if pr.End >= size {
pr.End = size
}
return pr, nil
}
func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error) {
proofRange, err := getProofRange(d.AccessToken, stream.GetSize())
if err != nil {
return "", err
}
length := proofRange.End - proofRange.Start
buf := bytes.NewBuffer(make([]byte, 0, length))
reader, err := stream.RangeRead(http_range.Range{Start: proofRange.Start, Length: length})
if err != nil {
return "", err
}
_, err = io.CopyN(buf, reader, length)
if err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(buf.Bytes()), nil
}
func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// 1. create
// Part Size Unit: Bytes, Default: 20MB,
// Maximum number of slices is 10,000, ≈195.3125GB at the default part size
var partSize = calPartSize(stream.GetSize())
const dateFormat = "2006-01-02T15:04:05.000Z"
mtimeStr := stream.ModTime().UTC().Format(dateFormat)
ctimeStr := stream.CreateTime().UTC().Format(dateFormat)
createData := base.Json{
"drive_id": d.DriveId,
"parent_file_id": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"check_name_mode": "ignore",
"local_modified_at": mtimeStr,
"local_created_at": ctimeStr,
}
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
createData["part_info_list"] = makePartInfos(count)
// rapid upload
rapidUpload := stream.GetSize() > 100*utils.KB && d.RapidUpload
if rapidUpload {
log.Debugf("[aliyundrive_open] start cal pre_hash")
// read 1024 bytes to calculate pre hash
reader, err := stream.RangeRead(http_range.Range{Start: 0, Length: 1024})
if err != nil {
return nil, err
}
hash, err := utils.HashReader(utils.SHA1, reader)
if err != nil {
return nil, err
}
createData["size"] = stream.GetSize()
createData["pre_hash"] = hash
}
var createResp CreateResp
_, err, e := d.requestReturnErrResp("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
var tmpF model.File
if err != nil {
if e.Code != "PreHashMatched" || !rapidUpload {
return nil, err
}
log.Debugf("[aliyundrive_open] pre_hash matched, start rapid upload")
hi := stream.GetHash()
hash := hi.GetHash(utils.SHA1)
if len(hash) <= 0 {
tmpF, err = stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
hash, err = utils.HashFile(utils.SHA1, tmpF)
if err != nil {
return nil, err
}
}
delete(createData, "pre_hash")
createData["proof_version"] = "v1"
createData["content_hash_name"] = "sha1"
createData["content_hash"] = hash
createData["proof_code"], err = d.calProofCode(stream)
if err != nil {
return nil, fmt.Errorf("cal proof code error: %s", err.Error())
}
_, err = d.request("/adrive/v1.0/openFile/create", http.MethodPost, func(req *resty.Request) {
req.SetBody(createData).SetResult(&createResp)
})
if err != nil {
return nil, err
}
}
if !createResp.RapidUpload {
// 2. normal upload
log.Debugf("[aliyundive_open] normal upload")
preTime := time.Now()
var offset, length int64 = 0, partSize
//var length
for i := 0; i < len(createResp.PartInfoList); i++ {
if utils.IsCanceled(ctx) {
return nil, ctx.Err()
}
// refresh upload url if 50 minutes passed
if time.Since(preTime) > 50*time.Minute {
createResp.PartInfoList, err = d.getUploadUrl(count, createResp.FileId, createResp.UploadId)
if err != nil {
return nil, err
}
preTime = time.Now()
}
if remain := stream.GetSize() - offset; length > remain {
length = remain
}
//rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
rd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
if err != nil {
return nil, err
}
err = retry.Do(func() error {
//rd.Reset()
return d.uploadPart(ctx, rd, createResp.PartInfoList[i])
},
retry.Attempts(3),
retry.DelayType(retry.BackOffDelay),
retry.Delay(time.Second))
if err != nil {
return nil, err
}
offset += partSize
}
} else {
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
}
log.Debugf("[aliyundrive_open] create file success, resp: %+v", createResp)
// 3. complete
return d.completeUpload(createResp.FileId, createResp.UploadId)
}
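
Each part upload in the loop above is wrapped in avast/retry-go with three attempts and exponential backoff starting at one second. Here is a tiny standalone sketch of the same policy around a hypothetical doUpload that fails twice before succeeding:

```go
package main

import (
	"errors"
	"fmt"
	"time"

	"github.com/avast/retry-go"
)

func main() {
	attempt := 0
	// doUpload is a hypothetical stand-in for uploadPart above.
	doUpload := func() error {
		attempt++
		if attempt < 3 {
			return errors.New("transient network error")
		}
		return nil
	}

	// Same retry policy as the upload loop: 3 attempts, exponential backoff,
	// starting delay of one second.
	err := retry.Do(doUpload,
		retry.Attempts(3),
		retry.DelayType(retry.BackOffDelay),
		retry.Delay(time.Second))
	fmt.Println("attempts:", attempt, "err:", err)
}
```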

View File

@ -1,178 +0,0 @@
package aliyundrive_open
import (
"context"
"encoding/base64"
"errors"
"fmt"
"net/http"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
// do other things not defined in the Driver interface
func (d *AliyundriveOpen) _refreshToken() (string, string, error) {
url := d.base + "/oauth/access_token"
if d.OauthTokenURL != "" && d.ClientID == "" {
url = d.OauthTokenURL
}
//var resp base.TokenResp
var e ErrResp
res, err := base.RestyClient.R().
//ForceContentType("application/json").
SetBody(base.Json{
"client_id": d.ClientID,
"client_secret": d.ClientSecret,
"grant_type": "refresh_token",
"refresh_token": d.RefreshToken,
}).
//SetResult(&resp).
SetError(&e).
Post(url)
if err != nil {
return "", "", err
}
log.Debugf("[ali_open] refresh token response: %s", res.String())
if e.Code != "" {
return "", "", fmt.Errorf("failed to refresh token: %s", e.Message)
}
refresh, access := utils.Json.Get(res.Body(), "refresh_token").ToString(), utils.Json.Get(res.Body(), "access_token").ToString()
if refresh == "" {
return "", "", fmt.Errorf("failed to refresh token: refresh token is empty, resp: %s", res.String())
}
curSub, err := getSub(d.RefreshToken)
if err != nil {
return "", "", err
}
newSub, err := getSub(refresh)
if err != nil {
return "", "", err
}
if curSub != newSub {
return "", "", errors.New("failed to refresh token: sub not match")
}
return refresh, access, nil
}
func getSub(token string) (string, error) {
segments := strings.Split(token, ".")
if len(segments) != 3 {
return "", errors.New("not a jwt token because of invalid segments")
}
bs, err := base64.RawStdEncoding.DecodeString(segments[1])
if err != nil {
return "", errors.New("failed to decode jwt token")
}
return utils.Json.Get(bs, "sub").ToString(), nil
}
func (d *AliyundriveOpen) refreshToken() error {
refresh, access, err := d._refreshToken()
for i := 0; i < 3; i++ {
if err == nil {
break
} else {
log.Errorf("[ali_open] failed to refresh token: %s", err)
}
refresh, access, err = d._refreshToken()
}
if err != nil {
return err
}
log.Infof("[ali_open] toekn exchange: %s -> %s", d.RefreshToken, refresh)
d.RefreshToken, d.AccessToken = refresh, access
op.MustSaveDriverStorage(d)
return nil
}
func (d *AliyundriveOpen) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
b, err, _ := d.requestReturnErrResp(uri, method, callback, retry...)
return b, err
}
func (d *AliyundriveOpen) requestReturnErrResp(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error, *ErrResp) {
req := base.RestyClient.R()
// TODO check whether access_token is expired
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if method == http.MethodPost {
req.SetHeader("Content-Type", "application/json")
}
if callback != nil {
callback(req)
}
var e ErrResp
req.SetError(&e)
res, err := req.Execute(method, d.base+uri)
if err != nil {
if res != nil {
log.Errorf("[aliyundrive_open] request error: %s", res.String())
}
return nil, err, nil
}
isRetry := len(retry) > 0 && retry[0]
if e.Code != "" {
if !isRetry && (utils.SliceContains([]string{"AccessTokenInvalid", "AccessTokenExpired", "I400JD"}, e.Code) || d.AccessToken == "") {
err = d.refreshToken()
if err != nil {
return nil, err, nil
}
return d.requestReturnErrResp(uri, method, callback, true)
}
return nil, fmt.Errorf("%s:%s", e.Code, e.Message), &e
}
return res.Body(), nil, nil
}
func (d *AliyundriveOpen) list(ctx context.Context, data base.Json) (*Files, error) {
var resp Files
_, err := d.request("/adrive/v1.0/openFile/list", http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetResult(&resp)
})
if err != nil {
return nil, err
}
return &resp, nil
}
func (d *AliyundriveOpen) getFiles(ctx context.Context, fileId string) ([]File, error) {
marker := "first"
res := make([]File, 0)
for marker != "" {
if marker == "first" {
marker = ""
}
data := base.Json{
"drive_id": d.DriveId,
"limit": 200,
"marker": marker,
"order_by": d.OrderBy,
"order_direction": d.OrderDirection,
"parent_file_id": fileId,
//"category": "",
//"type": "",
//"video_thumbnail_time": 120000,
//"video_thumbnail_width": 480,
//"image_thumbnail_width": 480,
}
resp, err := d.limitList(ctx, data)
if err != nil {
return nil, err
}
marker = resp.NextMarker
res = append(res, resp.Items...)
}
return res, nil
}
func getNowTime() (time.Time, string) {
nowTime := time.Now()
nowTimeStr := nowTime.Format("2006-01-02T15:04:05.000Z")
return nowTime, nowTimeStr
}
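
getFiles above (and its twin in the aliyundrive driver) pages through results with a marker sentinel: "first" forces one pass with an empty marker, and the loop ends when next_marker comes back empty. A generic sketch of the same loop against a hypothetical fetchPage:

```go
package main

import "fmt"

// page is a hypothetical response shape matching Files above.
type page struct {
	Items      []string
	NextMarker string
}

// fetchPage stands in for the paged list request; it returns two pages.
func fetchPage(marker string) page {
	if marker == "" {
		return page{Items: []string{"a", "b"}, NextMarker: "m2"}
	}
	return page{Items: []string{"c"}, NextMarker: ""}
}

func main() {
	// Same sentinel trick as getFiles: "first" forces one iteration with an
	// empty marker, then the loop continues until NextMarker comes back empty.
	marker := "first"
	var all []string
	for marker != "" {
		if marker == "first" {
			marker = ""
		}
		resp := fetchPage(marker)
		all = append(all, resp.Items...)
		marker = resp.NextMarker
	}
	fmt.Println(all)
}
```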

View File

@ -1,147 +0,0 @@
package aliyundrive_share
import (
"context"
"fmt"
"net/http"
"time"
"github.com/Xhofe/rateg"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/cron"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type AliyundriveShare struct {
model.Storage
Addition
AccessToken string
ShareToken string
DriveId string
cron *cron.Cron
limitList func(ctx context.Context, dir model.Obj) ([]model.Obj, error)
limitLink func(ctx context.Context, file model.Obj) (*model.Link, error)
}
func (d *AliyundriveShare) Config() driver.Config {
return config
}
func (d *AliyundriveShare) GetAddition() driver.Additional {
return &d.Addition
}
func (d *AliyundriveShare) Init(ctx context.Context) error {
err := d.refreshToken()
if err != nil {
return err
}
err = d.getShareToken()
if err != nil {
return err
}
d.cron = cron.NewCron(time.Hour * 2)
d.cron.Do(func() {
err := d.refreshToken()
if err != nil {
log.Errorf("%+v", err)
}
})
d.limitList = rateg.LimitFnCtx(d.list, rateg.LimitFnOption{
Limit: 4,
Bucket: 1,
})
d.limitLink = rateg.LimitFnCtx(d.link, rateg.LimitFnOption{
Limit: 1,
Bucket: 1,
})
return nil
}
func (d *AliyundriveShare) Drop(ctx context.Context) error {
if d.cron != nil {
d.cron.Stop()
}
d.DriveId = ""
return nil
}
func (d *AliyundriveShare) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if d.limitList == nil {
return nil, fmt.Errorf("driver not init")
}
return d.limitList(ctx, dir)
}
func (d *AliyundriveShare) list(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *AliyundriveShare) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if d.limitLink == nil {
return nil, fmt.Errorf("driver not init")
}
return d.limitLink(ctx, file)
}
func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Link, error) {
data := base.Json{
"drive_id": d.DriveId,
"file_id": file.GetID(),
// Only a ten-minute lifetime
"expire_sec": 600,
"share_id": d.ShareId,
}
var resp ShareLinkResp
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
})
if err != nil {
return nil, err
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.aliyundrive.com/"},
},
URL: resp.DownloadUrl,
}, nil
}
func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
var resp base.Json
var url string
data := base.Json{
"share_id": d.ShareId,
"file_id": args.Obj.GetID(),
}
switch args.Method {
case "doc_preview":
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
case "video_preview":
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
default:
return nil, errs.NotSupport
}
_, err := d.request(url, http.MethodPost, func(req *resty.Request) {
req.SetBody(data).SetResult(&resp)
})
if err != nil {
return nil, err
}
return resp, nil
}
var _ driver.Driver = (*AliyundriveShare)(nil)

Some files were not shown because too many files have changed in this diff.