Compare commits


11 Commits

Author SHA1 Message Date
3b90f591b5 Squashed commit of the following:
commit 65c5ec0c34
Author: itsHenry <2671230065@qq.com>
Date:   Sat Nov 4 13:35:09 2023 +0800

    feat(cloudreve): folder size count and switch (#5457 close #5395)

commit a6325967d0
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Mon Oct 30 15:11:20 2023 +0800

    fix(deps): update module github.com/charmbracelet/lipgloss to v0.9.1 (#5234)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit 4dff49470a
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Mon Oct 30 15:10:36 2023 +0800

    fix(deps): update golang.org/x/exp digest to 7918f67 (#5366)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit cc86d6f3d1
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Sun Oct 29 14:45:55 2023 +0800

    fix(deps): update module golang.org/x/net to v0.17.0 [security] (#5370)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit c0f9c8ebaf
Author: Andy Hsu <i@nn.ci>
Date:   Thu Oct 26 19:21:09 2023 +0800

    feat: add ignore direct link params (close #5434)
2023-11-05 22:28:31 +08:00
5657b12b20 Merge branch 'main' into refactor/offline-download 2023-10-20 21:17:32 +08:00
aba8bc0ec2 fix: adapt update progress type 2023-10-20 21:15:37 +08:00
ce6e486666 Squashed commit of the following:
commit 4fc0a77565
Author: Andy Hsu <i@nn.ci>
Date:   Fri Oct 20 21:06:25 2023 +0800

    fix(baidu_netdisk): upload file > 4GB (close #5392)

commit aaffaee2b5
Author: gmugu <94156510@qq.com>
Date:   Thu Oct 19 19:17:53 2023 +0800

    perf(webdav): support request with cookies (#5391)

commit 8ef8023c20
Author: NewbieOrange <NewbieOrange@users.noreply.github.com>
Date:   Thu Oct 19 19:17:09 2023 +0800

    fix(aliyundrive_open): upload progress for normal upload (#5398)

commit cdfbe6dcf2
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Wed Oct 18 16:27:07 2023 +0800

    fix: hash gcid empty file (#5394)

commit 94d028743a
Author: Andy Hsu <i@nn.ci>
Date:   Sat Oct 14 13:17:51 2023 +0800

    ci: remove `pr-welcome` label when close issue [skip ci]

commit 7f7335435c
Author: itsHenry <2671230065@qq.com>
Date:   Sat Oct 14 13:12:46 2023 +0800

    feat(cloudreve): support thumbnail (#5373 close #5348)

    * feat(cloudreve): support thumbnail

    * chore: remove unnecessary code

commit b9e192b29c
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Thu Oct 12 20:57:12 2023 +0800

    fix(115): limit request rate (#5367 close #5275)

    * fix(115):limit request rate

    * chore(115): fix unit of `limit_rate`

    ---------

    Co-authored-by: Andy Hsu <i@nn.ci>

commit 69a98eaef6
Author: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Date:   Wed Oct 11 22:01:55 2023 +0800

    fix(deps): update module github.com/aliyun/aliyun-oss-go-sdk to v2.2.9+incompatible (#5141)

    Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>

commit 1ebc96a4e5
Author: Andy Hsu <i@nn.ci>
Date:   Tue Oct 10 18:32:00 2023 +0800

    fix(wopan): fatal error concurrent map writes (close #5352)

commit 66e2324cac
Author: Andy Hsu <i@nn.ci>
Date:   Tue Oct 10 18:23:11 2023 +0800

    chore(deps): upgrade dependencies

commit 7600dc28df
Author: Andy Hsu <i@nn.ci>
Date:   Tue Oct 10 18:13:58 2023 +0800

    fix(aliyundrive_open): change default api to raw server (close #5358)

commit 8ef89ad0a4
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Tue Oct 10 18:08:27 2023 +0800

    fix(baidu_netdisk): hash and `error 2` (#5356)

    * fix(baidu):hash and error:2

    * fix:invalid memory address

commit 35d672217d
Author: jeffmingup <1960588251@qq.com>
Date:   Sun Oct 8 19:29:45 2023 +0800

    fix(onedrive_app): incorrect api on `_accessToken` (#5346)

commit 1a283bb272
Author: foxxorcat <95907542+foxxorcat@users.noreply.github.com>
Date:   Fri Oct 6 16:04:39 2023 +0800

    feat(google_drive): add `hash_info`, `ctime`, `thumbnail` (#5334)

commit a008f54f4d
Author: nkh0472 <67589323+nkh0472@users.noreply.github.com>
Date:   Thu Oct 5 13:10:51 2023 +0800

    docs: minor language improvements (#5329) [skip ci]
2023-10-20 21:14:15 +08:00
9fb9efb704 chore: fix typo 2023-10-06 22:32:05 +08:00
1490da8b53 wip: adapt qBittorrent 2023-10-06 16:02:29 +08:00
12dfb60a66 wip: use tool manager 2023-10-05 22:13:02 +08:00
0380d7fff9 wip: use items in offline_download 2023-10-05 13:38:35 +08:00
0acb2d6073 wip: adapt aria2 2023-10-04 22:23:45 +08:00
ea9a3432ab refactor: change type of percentage to float64 2023-10-04 20:59:11 +08:00
7db3975b18 wip: refactor offline download (#5331)
* base tool

* working: aria2
2023-10-04 16:27:08 +08:00
259 changed files with 3111 additions and 15644 deletions

View File

@ -1,44 +0,0 @@
root = "."
testdata_dir = "testdata"
tmp_dir = "tmp"
[build]
args_bin = ["server"]
bin = "./tmp/main"
cmd = "go build -o ./tmp/main ."
delay = 0
exclude_dir = ["assets", "tmp", "vendor", "testdata"]
exclude_file = []
exclude_regex = ["_test.go"]
exclude_unchanged = false
follow_symlink = false
full_bin = ""
include_dir = []
include_ext = ["go", "tpl", "tmpl", "html"]
include_file = []
kill_delay = "0s"
log = "build-errors.log"
poll = false
poll_interval = 0
rerun = false
rerun_delay = 500
send_interrupt = false
stop_on_error = false
[color]
app = ""
build = "yellow"
main = "magenta"
runner = "green"
watcher = "cyan"
[log]
main_only = false
time = false
[misc]
clean_on_exit = false
[screen]
clear_on_rebuild = false
keep_scroll = true

View File

@ -1,5 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: Questions & Discussions
url: https://github.com/alist-org/alist/discussions
url: https://github.com/Xhofe/alist/discussions
about: Use GitHub discussions for message-board style questions and discussions.

View File

@ -20,22 +20,22 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
go-version: [ '1.20' ]
name: auto generate lang.json
runs-on: ${{ matrix.platform }}
steps:
- name: Setup go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout alist
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
path: alist
- name: Checkout alist-web
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: 'alist-org/alist-web'
ref: main

View File

@ -15,20 +15,17 @@ jobs:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [ '1.21' ]
go-version: [ '1.20' ]
name: Build
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v4
- uses: benjlevesque/short-sha@v3.0
id: short-sha
uses: actions/checkout@v3
- name: Install dependencies
run: |
@ -42,7 +39,7 @@ jobs:
bash build.sh dev
- name: Upload artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: alist_${{ env.SHA }}
name: alist
path: dist

View File

@ -3,8 +3,6 @@ name: build_docker
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
@ -12,91 +10,45 @@ concurrency:
jobs:
build_docker:
name: Build Docker
name: Build docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: xhofe/alist
- name: Docker meta with ffmpeg
id: meta-ffmpeg
uses: docker/metadata-action@v5
with:
images: xhofe/alist
flavor: |
suffix=-ffmpeg,onlatest=true
- uses: actions/setup-go@v5
with:
go-version: 'stable'
- name: Cache Musl
id: cache-musl
uses: actions/cache@v4
with:
path: build/musl-libs
key: docker-musl-libs
- name: Download Musl Library
if: steps.cache-musl.outputs.cache-hit != 'true'
run: bash build.sh prepare docker-multiplatform
- name: Build go binary
run: bash build.sh dev docker-multiplatform
- name: Replace release with dev
run: |
sed -i 's/release/dev/g' Dockerfile
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
if: github.event_name == 'push'
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v6
uses: docker/build-push-action@v4
with:
context: .
file: Dockerfile.ci
push: ${{ github.event_name == 'push' }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
- name: Replace dockerfile tag
run: |
sed -i -e "s/latest/main/g" Dockerfile.ffmpeg
- name: Build and push with ffmpeg
id: docker_build_ffmpeg
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile.ffmpeg
push: ${{ github.event_name == 'push' }}
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
platforms: linux/amd64,linux/arm64
build_docker_with_aria2:
needs: build_docker
name: Build docker with aria2
runs-on: ubuntu-latest
if: github.event_name == 'push'
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: alist-org/with_aria2
ref: main
@ -114,4 +66,4 @@ jobs:
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/with_aria2
repository: alist-org/with_aria2

View File

@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 0
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result

View File

@ -10,7 +10,7 @@ jobs:
if: github.event.label.name == 'question'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3.6.0
uses: actions-cool/issues-helper@v3.5.2
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -9,7 +9,7 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
@ -21,12 +21,12 @@ jobs:
prerelease: true
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 0
@ -42,7 +42,7 @@ jobs:
bash build.sh release
- name: Upload assets
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
files: build/compress/*
prerelease: false
@ -53,7 +53,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: alist-org/desktop-release
ref: main

View File

@ -1,34 +0,0 @@
name: release_android
on:
release:
types: [ published ]
jobs:
release_android:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build
run: |
bash build.sh release android
- name: Upload assets
uses: softprops/action-gh-release@v2
with:
files: build/compress/*

View File

@ -11,82 +11,43 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- uses: actions/setup-go@v5
with:
go-version: 'stable'
- name: Cache Musl
id: cache-musl
uses: actions/cache@v4
with:
path: build/musl-libs
key: docker-musl-libs
- name: Download Musl Library
if: steps.cache-musl.outputs.cache-hit != 'true'
run: bash build.sh prepare docker-multiplatform
- name: Build go binary
run: bash build.sh release docker-multiplatform
uses: actions/checkout@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
uses: docker/metadata-action@v4
with:
images: xhofe/alist
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
uses: docker/setup-qemu-action@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v2
- name: Login to DockerHub
uses: docker/login-action@v3
uses: docker/login-action@v2
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v6
uses: docker/build-push-action@v4
with:
context: .
file: Dockerfile.ci
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
- name: Docker meta with ffmpeg
id: meta-ffmpeg
uses: docker/metadata-action@v5
with:
images: xhofe/alist
flavor: |
latest=true
suffix=-ffmpeg,onlatest=true
- name: Build and push with ffmpeg
id: docker_build_ffmpeg
uses: docker/build-push-action@v6
with:
context: .
file: Dockerfile.ffmpeg
push: true
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
release_docker_with_aria2:
needs: release_docker
name: Release docker with aria2
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
repository: alist-org/with_aria2
ref: main

View File

@ -9,18 +9,18 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 0
@ -29,6 +29,6 @@ jobs:
bash build.sh release linux_musl
- name: Upload assets
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
files: build/compress/*

View File

@ -9,18 +9,18 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
go-version: [ '1.20' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v5
uses: actions/setup-go@v4
with:
go-version: ${{ matrix.go-version }}
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@v3
with:
fetch-depth: 0
@ -29,6 +29,6 @@ jobs:
bash build.sh release linux_musl_arm
- name: Upload assets
uses: softprops/action-gh-release@v2
uses: softprops/action-gh-release@v1
with:
files: build/compress/*

.gitignore vendored
View File

@ -24,7 +24,6 @@ output/
*.json
/build
/data/
/tmp/
/log/
/lang/
/daemon/

View File

@ -1,23 +1,18 @@
FROM alpine:edge as builder
FROM alpine:3.18 as builder
LABEL stage=go-builder
WORKDIR /app/
RUN apk add --no-cache bash curl gcc git go musl-dev
COPY go.mod go.sum ./
RUN go mod download
COPY ./ ./
RUN bash build.sh release docker
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker
FROM alpine:edge
FROM alpine:3.18
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk update && \
apk upgrade --no-cache && \
apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh && \
rm -rf /var/cache/apk/*
RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
CMD [ "/entrypoint.sh" ]

View File

@ -1,16 +0,0 @@
FROM alpine:edge
ARG TARGETPLATFORM
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY /build/${TARGETPLATFORM}/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk update && \
apk upgrade --no-cache && \
apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh && \
rm -rf /var/cache/apk/* && \
/entrypoint.sh version
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]

View File

@ -1,4 +0,0 @@
FROM xhofe/alist:latest
RUN apk update && \
apk add --no-cache ffmpeg \
rm -rf /var/cache/apk/*

View File

@ -1,17 +1,17 @@
<div align="center">
<a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂A file list program that supports multiple storages, powered by Gin and Solidjs.</em></p>
<div>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/alist-org/alist/blob/main/LICENSE">
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
</a>
<a href="https://github.com/alist-org/alist/releases">
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@ -19,13 +19,13 @@
</a>
</div>
<div>
<a href="https://github.com/alist-org/alist/discussions">
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://discord.gg/F4ymsH4xv2">
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
</a>
<a href="https://github.com/alist-org/alist/releases">
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
</a>
<a href="https://hub.docker.com/r/xhofe/alist">
@ -45,7 +45,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] Multiple storages
- [x] Local storage
- [x] [Aliyundrive](https://www.alipan.com/)
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
@ -66,8 +66,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [ILanzou](https://www.ilanzou.com/)
- [x] [Aliyundrive share](https://www.alipan.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)
@ -75,8 +74,6 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [FeijiPan](https://www.feijipan.com/)
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode
@ -106,7 +103,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
## Discussion
Please go to our [discussion forum](https://github.com/alist-org/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**
## Sponsor
@ -115,9 +112,9 @@ https://alist.nn.ci/guide/sponsor.html
### Special sponsors
- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## Contributors
@ -138,4 +135,4 @@ The `AList` is open-source software licensed under the AGPL-3.0 license.
---
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)

View File

@ -1,17 +1,17 @@
<div align="center">
<a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂一个支持多存储的文件列表程序,使用 Gin 和 Solidjs。</em></p>
<div>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/alist-org/alist/blob/main/LICENSE">
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
</a>
<a href="https://github.com/alist-org/alist/releases">
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@ -19,13 +19,13 @@
</a>
</div>
<div>
<a href="https://github.com/alist-org/alist/discussions">
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://discord.gg/F4ymsH4xv2">
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
</a>
<a href="https://github.com/alist-org/alist/releases">
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
</a>
<a href="https://hub.docker.com/r/xhofe/alist">
@ -45,7 +45,7 @@
- [x] 多种存储
- [x] 本地存储
- [x] [阿里云盘](https://www.alipan.com/)
- [x] [阿里云盘](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint[国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
- [x] [GoogleDrive](https://drive.google.com/)
@ -65,8 +65,7 @@
- [x] [夸克网盘](https://pan.quark.cn)
- [x] [迅雷网盘](https://pan.xunlei.com)
- [x] [蓝奏云](https://www.lanzou.com/)
- [x] [蓝奏云优享版](https://www.ilanzou.com/)
- [x] [阿里云盘分享](https://www.alipan.com/)
- [x] [阿里云盘分享](https://www.aliyundrive.com/)
- [x] [谷歌相册](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [一刻相册](https://photo.baidu.com/)
@ -74,8 +73,6 @@
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [飞机盘](https://www.feijipan.com/)
- [x] [多吉云](https://www.dogecloud.com/product/oss)
- [x] 部署方便,开箱即用
- [x] 文件预览PDF、markdown、代码、纯文本……
- [x] 画廊模式下的图像预览
@ -105,7 +102,7 @@
## 讨论
一般问题请到[讨论论坛](https://github.com/alist-org/alist/discussions) **issue仅针对错误报告和功能请求。**
一般问题请到[讨论论坛](https://github.com/Xhofe/alist/discussions) **issue仅针对错误报告和功能请求。**
## 赞助
@ -113,9 +110,9 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我
### 特别赞助
- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - 苹果生态下优雅的网盘视频播放器iPhoneiPadMacApple TV全平台支持。
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## 贡献者
@ -136,4 +133,4 @@ Thanks goes to these wonderful people:
---
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@博客](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@Telegram群](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)

View File

@ -1,17 +1,17 @@
<div align="center">
<a href="https://alist.nn.ci"><img width="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<a href="https://alist.nn.ci"><img height="100px" alt="logo" src="https://cdn.jsdelivr.net/gh/alist-org/logo@main/logo.svg"/></a>
<p><em>🗂Gin と Solidjs による、複数のストレージをサポートするファイルリストプログラム。</em></p>
<div>
<a href="https://goreportcard.com/report/github.com/alist-org/alist/v3">
<img src="https://goreportcard.com/badge/github.com/alist-org/alist/v3" alt="latest version" />
</a>
<a href="https://github.com/alist-org/alist/blob/main/LICENSE">
<a href="https://github.com/Xhofe/alist/blob/main/LICENSE">
<img src="https://img.shields.io/github/license/Xhofe/alist" alt="License" />
</a>
<a href="https://github.com/alist-org/alist/actions?query=workflow%3ABuild">
<a href="https://github.com/Xhofe/alist/actions?query=workflow%3ABuild">
<img src="https://img.shields.io/github/actions/workflow/status/Xhofe/alist/build.yml?branch=main" alt="Build status" />
</a>
<a href="https://github.com/alist-org/alist/releases">
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/release/Xhofe/alist" alt="latest version" />
</a>
<a title="Crowdin" target="_blank" href="https://crwd.in/alist">
@ -19,13 +19,13 @@
</a>
</div>
<div>
<a href="https://github.com/alist-org/alist/discussions">
<a href="https://github.com/Xhofe/alist/discussions">
<img src="https://img.shields.io/github/discussions/Xhofe/alist?color=%23ED8936" alt="discussions" />
</a>
<a href="https://discord.gg/F4ymsH4xv2">
<img src="https://img.shields.io/discord/1018870125102895134?logo=discord" alt="discussions" />
</a>
<a href="https://github.com/alist-org/alist/releases">
<a href="https://github.com/Xhofe/alist/releases">
<img src="https://img.shields.io/github/downloads/Xhofe/alist/total?color=%239F7AEA&logo=github" alt="Downloads" />
</a>
<a href="https://hub.docker.com/r/xhofe/alist">
@ -45,7 +45,7 @@
- [x] マルチストレージ
- [x] ローカルストレージ
- [x] [Aliyundrive](https://www.alipan.com/)
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)
@ -66,8 +66,7 @@
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [ILanzou](https://www.ilanzou.com/)
- [x] [Aliyundrive share](https://www.alipan.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)
@ -75,8 +74,6 @@
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [FeijiPan](https://www.feijipan.com/)
- [x] [dogecloud](https://www.dogecloud.com/product/oss)
- [x] デプロイが簡単で、すぐに使える
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
- [x] ギャラリーモードでの画像プレビュー
@ -106,7 +103,7 @@
## ディスカッション
一般的なご質問は[ディスカッションフォーラム](https://github.com/alist-org/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
一般的なご質問は[ディスカッションフォーラム](https://github.com/Xhofe/alist/discussions)をご利用ください。**問題はバグレポートと機能リクエストのみです。**
## スポンサー
@ -115,9 +112,9 @@ https://alist.nn.ci/guide/sponsor.html
### スペシャルスポンサー
- [VidHub](https://apps.apple.com/app/apple-store/id1659622164?pt=118612019&ct=alist&mt=8) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎
- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
## コントリビューター
@ -138,4 +135,4 @@ https://alist.nn.ci/guide/sponsor.html
---
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/alist-org) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)
> [@Blog](https://nn.ci/) · [@GitHub](https://github.com/Xhofe) · [@TelegramGroup](https://t.me/alist_chat) · [@Discord](https://discord.gg/F4ymsH4xv2)

View File

@ -49,7 +49,6 @@ BuildWinArm64() {
export GOARCH=arm64
export CC=$(pwd)/wrapper/zcc-arm64
export CXX=$(pwd)/wrapper/zcxx-arm64
export CGO_ENABLED=1
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
}
@ -76,7 +75,7 @@ BuildDev() {
export CGO_ENABLED=1
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
xgo -targets=windows/amd64,darwin/amd64,darwin/arm64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
mv alist-* dist
cd dist
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe
@ -89,57 +88,6 @@ BuildDocker() {
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
}
PrepareBuildDockerMusl() {
mkdir -p build/musl-libs
BASE="https://musl.cc/"
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
lib_tgz="build/${i}.tgz"
curl -L -o "${lib_tgz}" "${url}"
tar xf "${lib_tgz}" --strip-components 1 -C build/musl-libs
rm -f "${lib_tgz}"
done
}
BuildDockerMultiplatform() {
go mod download
# run PrepareBuildDockerMusl before build
export PATH=$PATH:$PWD/build/musl-libs/bin
docker_lflags="--extldflags '-static -fpic' $ldflags"
export CGO_ENABLED=1
OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
os=${os_arch%%-*}
arch=${os_arch##*-}
export GOOS=$os
export GOARCH=$arch
export CC=${cgo_cc}
echo "building for $os_arch"
go build -o build/$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
done
DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
CGO_ARGS=(armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc)
GO_ARM=(6 7)
export GOOS=linux
export GOARCH=arm
for i in "${!DOCKER_ARM_ARCHES[@]}"; do
docker_arch=${DOCKER_ARM_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
export GOARM=${GO_ARM[$i]}
export CC=${cgo_cc}
echo "building for $docker_arch"
go build -o build/${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
done
}
BuildRelease() {
rm -rf .git/
mkdir -p "build"
@ -211,27 +159,6 @@ BuildReleaseLinuxMuslArm() {
done
}
BuildReleaseAndroid() {
rm -rf .git/
mkdir -p "build"
wget https://dl.google.com/android/repository/android-ndk-r26b-linux.zip
unzip android-ndk-r26b-linux.zip
rm android-ndk-r26b-linux.zip
OS_ARCHES=(amd64 arm64 386 arm)
CGO_ARGS=(x86_64-linux-android24-clang aarch64-linux-android24-clang i686-linux-android24-clang armv7a-linux-androideabi24-clang)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=$(realpath android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/${CGO_ARGS[$i]})
echo building for android-${os_arch}
export GOOS=android
export GOARCH=${os_arch##*-}
export CC=${cgo_cc}
export CGO_ENABLED=1
go build -o ./build/$appName-android-$os_arch -ldflags="$ldflags" -tags=jsoniter .
android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip ./build/$appName-android-$os_arch
done
}
MakeRelease() {
cd build
mkdir compress
@ -239,11 +166,6 @@ MakeRelease() {
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-android-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-darwin-*"); do
cp "$i" alist
@ -265,8 +187,6 @@ if [ "$1" = "dev" ]; then
FetchWebDev
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "docker-multiplatform" ]; then
BuildDockerMultiplatform
else
BuildDev
fi
@ -274,25 +194,16 @@ elif [ "$1" = "release" ]; then
FetchWebRelease
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "docker-multiplatform" ]; then
BuildDockerMultiplatform
elif [ "$2" = "linux_musl_arm" ]; then
BuildReleaseLinuxMuslArm
MakeRelease "md5-linux-musl-arm.txt"
elif [ "$2" = "linux_musl" ]; then
BuildReleaseLinuxMusl
MakeRelease "md5-linux-musl.txt"
elif [ "$2" = "android" ]; then
BuildReleaseAndroid
MakeRelease "md5-android.txt"
else
BuildRelease
MakeRelease "md5.txt"
fi
elif [ "$1" = "prepare" ]; then
if [ "$2" = "docker-multiplatform" ]; then
PrepareBuildDockerMusl
fi
else
echo -e "Parameter error"
fi

View File

@ -2,7 +2,6 @@ package cmd
import (
"context"
"errors"
"fmt"
"net"
"net/http"
@ -37,7 +36,6 @@ the address is defined in config file`,
}
bootstrap.InitOfflineDownloadTools()
bootstrap.LoadStorages()
bootstrap.InitTaskManager()
if !flags.Debug && !flags.Dev {
gin.SetMode(gin.ReleaseMode)
}
@ -51,7 +49,7 @@ the address is defined in config file`,
httpSrv = &http.Server{Addr: httpBase, Handler: r}
go func() {
err := httpSrv.ListenAndServe()
if err != nil && !errors.Is(err, http.ErrServerClosed) {
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start http: %s", err.Error())
}
}()
@ -62,7 +60,7 @@ the address is defined in config file`,
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
go func() {
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
if err != nil && !errors.Is(err, http.ErrServerClosed) {
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start https: %s", err.Error())
}
}()
@ -86,32 +84,11 @@ the address is defined in config file`,
}
}
err = unixSrv.Serve(listener)
if err != nil && !errors.Is(err, http.ErrServerClosed) {
if err != nil && err != http.ErrServerClosed {
utils.Log.Fatalf("failed to start unix: %s", err.Error())
}
}()
}
if conf.Conf.S3.Port != -1 && conf.Conf.S3.Enable {
s3r := gin.New()
s3r.Use(gin.LoggerWithWriter(log.StandardLogger().Out), gin.RecoveryWithWriter(log.StandardLogger().Out))
server.InitS3(s3r)
s3Base := fmt.Sprintf("%s:%d", conf.Conf.Scheme.Address, conf.Conf.S3.Port)
utils.Log.Infof("start S3 server @ %s", s3Base)
go func() {
var err error
if conf.Conf.S3.SSL {
httpsSrv = &http.Server{Addr: s3Base, Handler: s3r}
err = httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
}
if !conf.Conf.S3.SSL {
httpSrv = &http.Server{Addr: s3Base, Handler: s3r}
err = httpSrv.ListenAndServe()
}
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start s3 server: %s", err.Error())
}
}()
}
// Wait for interrupt signal to gracefully shutdown the server with
// a timeout of 1 second.
quit := make(chan os.Signal, 1)

View File

@ -63,9 +63,8 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
var userAgent = args.Header.Get("User-Agent")
downloadInfo, err := d.
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
downloadInfo, err := d.client.
DownloadWithUA(file.(*FileObj).PickCode, driver115.UA115Browser)
if err != nil {
return nil, err
}

View File

@ -6,20 +6,19 @@ import (
)
type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
driver.RootID
}
var config = driver.Config{
Name: "115 Cloud",
DefaultRoot: "0",
//OnlyProxy: true,
OnlyProxy: true,
//OnlyLocal: true,
//NoOverwriteUpload: true,
NoOverwriteUpload: true,
}
func init() {

View File

@ -5,8 +5,12 @@ import (
"crypto/tls"
"encoding/json"
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
"github.com/orzogc/fake115uploader/cipher"
"io"
"net/http"
"net/url"
"path/filepath"
"strconv"
@ -14,35 +18,29 @@ import (
"sync"
"time"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
"github.com/SheltonZhu/115driver/pkg/driver"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
crypto "github.com/gaoyb7/115drive-webdav/115"
"github.com/orzogc/fake115uploader/cipher"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/pkg/errors"
)
var UserAgent = driver115.UA115Desktop
var UserAgent = driver.UA115Desktop
func (d *Pan115) login() error {
var err error
opts := []driver115.Option{
driver115.UA(UserAgent),
func(c *driver115.Pan115Client) {
opts := []driver.Option{
driver.UA(UserAgent),
func(c *driver.Pan115Client) {
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
},
}
d.client = driver115.New(opts...)
cr := &driver115.Credential{}
d.client = driver.New(opts...)
cr := &driver.Credential{}
if d.Addition.QRCodeToken != "" {
s := &driver115.QRCodeSession{
s := &driver.QRCodeSession{
UID: d.Addition.QRCodeToken,
}
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
if cr, err = d.client.QRCodeLogin(s); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
@ -61,7 +59,7 @@ func (d *Pan115) login() error {
func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
res := make([]FileObj, 0)
if d.PageSize <= 0 {
d.PageSize = driver115.FileListLimit
d.PageSize = driver.FileListLimit
}
files, err := d.client.ListWithLimit(fileId, d.PageSize)
if err != nil {
@ -77,61 +75,6 @@ const (
appVer = "2.0.3.6"
)
func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
key := crypto.GenerateKey()
result := driver115.DownloadResp{}
params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
if err != nil {
return nil, err
}
data := crypto.Encode(params, key)
bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Cookie", c.Cookie)
req.Header.Set("User-Agent", ua)
resp, err := c.client.Client.GetClient().Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
if err := utils.Json.Unmarshal(body, &result); err != nil {
return nil, err
}
if err = result.Err(string(body)); err != nil {
return nil, err
}
bytes, err := crypto.Decode(string(result.EncodedData), key)
if err != nil {
return nil, err
}
downloadInfo := driver115.DownloadData{}
if err := utils.Json.Unmarshal(bytes, &downloadInfo); err != nil {
return nil, err
}
for _, info := range downloadInfo {
if info.FileSize < 0 {
return nil, driver115.ErrDownloadEmpty
}
info.Header = resp.Request.Header
return info, nil
}
return nil, driver115.ErrUnexpected
}
func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
var (
ecdhCipher *cipher.EcdhCipher
@ -306,7 +249,7 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
go func(threadId int) {
defer func() {
if r := recover(); r != nil {
errCh <- fmt.Errorf("recovered in %v", r)
errCh <- fmt.Errorf("Recovered in %v", r)
}
}()
for chunk := range chunksCh {

View File

@ -1,112 +0,0 @@
package _115_share
import (
"context"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"golang.org/x/time/rate"
)
type Pan115Share struct {
model.Storage
Addition
client *driver115.Pan115Client
limiter *rate.Limiter
}
func (d *Pan115Share) Config() driver.Config {
return config
}
func (d *Pan115Share) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Pan115Share) Init(ctx context.Context) error {
if d.LimitRate > 0 {
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
}
return d.login()
}
func (d *Pan115Share) WaitLimit(ctx context.Context) error {
if d.limiter != nil {
return d.limiter.Wait(ctx)
}
return nil
}
func (d *Pan115Share) Drop(ctx context.Context) error {
return nil
}
func (d *Pan115Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
files := make([]driver115.ShareFile, 0)
fileResp, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, dir.GetID(), driver115.QueryLimit(int(d.PageSize)))
if err != nil {
return nil, err
}
files = append(files, fileResp.Data.List...)
total := fileResp.Data.Count
count := len(fileResp.Data.List)
for total > count {
fileResp, err := d.client.GetShareSnap(
d.ShareCode, d.ReceiveCode, dir.GetID(),
driver115.QueryLimit(int(d.PageSize)), driver115.QueryOffset(count),
)
if err != nil {
return nil, err
}
files = append(files, fileResp.Data.List...)
count += len(fileResp.Data.List)
}
return utils.SliceConvert(files, transFunc)
}
func (d *Pan115Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
downloadInfo, err := d.client.DownloadByShareCode(d.ShareCode, d.ReceiveCode, file.GetID())
if err != nil {
return nil, err
}
return &model.Link{URL: downloadInfo.URL.URL}, nil
}
func (d *Pan115Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return errs.NotSupport
}
func (d *Pan115Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Pan115Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return errs.NotSupport
}
func (d *Pan115Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *Pan115Share) Remove(ctx context.Context, obj model.Obj) error {
return errs.NotSupport
}
func (d *Pan115Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return errs.NotSupport
}
var _ driver.Driver = (*Pan115Share)(nil)

View File

@ -1,34 +0,0 @@
package _115_share
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
PageSize int64 `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
ShareCode string `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
ReceiveCode string `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
driver.RootID
}
var config = driver.Config{
Name: "115 Share",
DefaultRoot: "",
// OnlyProxy: true,
// OnlyLocal: true,
CheckStatus: false,
Alert: "",
NoOverwriteUpload: true,
NoUpload: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan115Share{}
})
}
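
The 115 share driver above builds a golang.org/x/time/rate limiter from the user-configured `limit_rate` option in Init and awaits it in WaitLimit before each List/Link call. Below is a minimal, self-contained sketch of that pattern; the limitedClient type and the main function are illustrative and not part of the repository, and rate.Limit is expressed in events per second with a burst of 1.

package main

import (
    "context"
    "fmt"
    "time"

    "golang.org/x/time/rate"
)

// limitedClient mirrors the driver pattern above: a nil limiter means "no limit".
type limitedClient struct {
    limiter *rate.Limiter
}

func newLimitedClient(limitRate float64) *limitedClient {
    c := &limitedClient{}
    if limitRate > 0 {
        // Same constructor as the driver's Init: limitRate events per second, burst 1.
        c.limiter = rate.NewLimiter(rate.Limit(limitRate), 1)
    }
    return c
}

// waitLimit blocks until the limiter releases a token, as the drivers' WaitLimit
// does before every List/Link call.
func (c *limitedClient) waitLimit(ctx context.Context) error {
    if c.limiter != nil {
        return c.limiter.Wait(ctx)
    }
    return nil
}

func main() {
    c := newLimitedClient(2) // the drivers default limit_rate to 2
    start := time.Now()
    for i := 0; i < 5; i++ {
        if err := c.waitLimit(context.Background()); err != nil {
            fmt.Println("wait:", err)
            return
        }
        fmt.Printf("request %d after %s\n", i, time.Since(start).Round(10*time.Millisecond))
    }
}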

View File

@ -1,111 +0,0 @@
package _115_share
import (
"fmt"
"strconv"
"time"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
)
var _ model.Obj = (*FileObj)(nil)
type FileObj struct {
Size int64
Sha1 string
Utm time.Time
FileName string
isDir bool
FileID string
}
func (f *FileObj) CreateTime() time.Time {
return f.Utm
}
func (f *FileObj) GetHash() utils.HashInfo {
return utils.NewHashInfo(utils.SHA1, f.Sha1)
}
func (f *FileObj) GetSize() int64 {
return f.Size
}
func (f *FileObj) GetName() string {
return f.FileName
}
func (f *FileObj) ModTime() time.Time {
return f.Utm
}
func (f *FileObj) IsDir() bool {
return f.isDir
}
func (f *FileObj) GetID() string {
return f.FileID
}
func (f *FileObj) GetPath() string {
return ""
}
func transFunc(sf driver115.ShareFile) (model.Obj, error) {
timeInt, err := strconv.ParseInt(sf.UpdateTime, 10, 64)
if err != nil {
return nil, err
}
var (
utm = time.Unix(timeInt, 0)
isDir = (sf.IsFile == 0)
fileID = string(sf.FileID)
)
if isDir {
fileID = string(sf.CategoryID)
}
return &FileObj{
Size: int64(sf.Size),
Sha1: sf.Sha1,
Utm: utm,
FileName: string(sf.FileName),
isDir: isDir,
FileID: fileID,
}, nil
}
var UserAgent = driver115.UA115Browser
func (d *Pan115Share) login() error {
var err error
opts := []driver115.Option{
driver115.UA(UserAgent),
}
d.client = driver115.New(opts...)
if _, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, ""); err != nil {
return errors.Wrap(err, "failed to get share snap")
}
cr := &driver115.Credential{}
if d.QRCodeToken != "" {
s := &driver115.QRCodeSession{
UID: d.QRCodeToken,
}
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
d.QRCodeToken = ""
} else if d.Cookie != "" {
if err = cr.FromCookie(d.Cookie); err != nil {
return errors.Wrap(err, "failed to login by cookies")
}
d.client.ImportCredential(cr)
} else {
return errors.New("missing cookie or qrcode account")
}
return d.client.LoginCheck()
}

View File

@ -6,13 +6,6 @@ import (
"encoding/base64"
"encoding/hex"
"fmt"
"golang.org/x/time/rate"
"io"
"net/http"
"net/url"
"sync"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
@ -24,12 +17,14 @@ import (
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"net/url"
)
type Pan123 struct {
model.Storage
Addition
apiRateLimit sync.Map
}
func (d *Pan123) Config() driver.Config {
@ -53,7 +48,7 @@ func (d *Pan123) Drop(ctx context.Context) error {
}
func (d *Pan123) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.getFiles(dir.GetID(), dir.GetName())
files, err := d.getFiles(dir.GetID())
if err != nil {
return nil, err
}
@ -194,7 +189,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
defer func() {
_ = tempFile.Close()
}()
if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
if _, err = io.Copy(h, tempFile); err != nil {
return err
}
_, err = tempFile.Seek(0, io.SeekStart)
@ -237,9 +232,6 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
uploader := s3manager.NewUploader(s)
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
}
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,
@ -258,11 +250,4 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
func (d *Pan123) APIRateLimit(api string) bool {
limiter, _ := d.apiRateLimit.LoadOrStore(api,
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
ins := limiter.(*rate.Limiter)
return ins.Allow()
}
var _ driver.Driver = (*Pan123)(nil)
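
The Put hunk above enlarges the multipart part size for very large streams: the AWS SDK's s3manager uploader uses at most s3manager.MaxUploadParts (10,000) parts of s3manager.DefaultUploadPartSize (5 MiB) by default, so streams beyond roughly 48.8 GiB need a bigger PartSize. A small sketch of that calculation follows; the partSizeFor helper and the sizes in main are illustrative, only the formula is taken from the hunk.

package main

import (
    "fmt"

    "github.com/aws/aws-sdk-go/service/s3/s3manager"
)

// partSizeFor reproduces the adjustment from the hunk above: keep the default
// 5 MiB part size unless the stream would need more than MaxUploadParts parts.
func partSizeFor(size int64) int64 {
    if size > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
        // Spread the stream over MaxUploadParts-1 full parts so the final,
        // smaller part still keeps the total within the 10,000-part limit.
        return size / (s3manager.MaxUploadParts - 1)
    }
    return s3manager.DefaultUploadPartSize
}

func main() {
    const gib = int64(1) << 30
    for _, size := range []int64{4 * gib, 100 * gib} {
        fmt.Printf("%3d GiB stream -> part size %d bytes\n", size/gib, partSizeFor(size))
    }
}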

View File

@ -9,15 +9,14 @@ type Addition struct {
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootID
//OrderBy string `json:"order_by" type:"select" options:"file_id,file_name,size,update_at" default:"file_name"`
//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
AccessToken string
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
AccessToken string
}
var config = driver.Config{
Name: "123Pan",
DefaultRoot: "0",
LocalSort: true,
}
func init() {

View File

@ -87,9 +87,8 @@ var _ model.Thumb = (*File)(nil)
type Files struct {
//BaseResp
Data struct {
Next string `json:"Next"`
Total int `json:"Total"`
InfoList []File `json:"InfoList"`
Next string `json:"Next"`
} `json:"data"`
}

View File

@ -3,20 +3,13 @@ package _123
import (
"errors"
"fmt"
"hash/crc32"
"math"
"math/rand"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
resty "github.com/go-resty/resty/v2"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
)
// do others that not defined in Driver interface
@ -25,7 +18,7 @@ const (
Api = "https://www.123pan.com/api"
AApi = "https://www.123pan.com/a/api"
BApi = "https://www.123pan.com/b/api"
MainApi = BApi
MainApi = Api
SignIn = MainApi + "/user/sign_in"
Logout = MainApi + "/user/logout"
UserInfo = MainApi + "/user/info"
@ -44,104 +37,6 @@ const (
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
)
func signPath(path string, os string, version string) (k string, v string) {
table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
now := time.Now().In(time.FixedZone("CST", 8*3600))
timestamp := fmt.Sprint(now.Unix())
nowStr := []byte(now.Format("200601021504"))
for i := 0; i < len(nowStr); i++ {
nowStr[i] = table[nowStr[i]-48]
}
timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
}
func GetApi(rawUrl string) string {
u, _ := url.Parse(rawUrl)
query := u.Query()
query.Add(signPath(u.Path, "web", "3"))
u.RawQuery = query.Encode()
return u.String()
}
//func GetApi(url string) string {
// vm := js.New()
// vm.Set("url", url[22:])
// r, err := vm.RunString(`
// (function(e){
// function A(t, e) {
// e = 1 < arguments.length && void 0 !== e ? e : 10;
// for (var n = function() {
// for (var t = [], e = 0; e < 256; e++) {
// for (var n = e, r = 0; r < 8; r++)
// n = 1 & n ? 3988292384 ^ n >>> 1 : n >>> 1;
// t[e] = n
// }
// return t
// }(), r = function(t) {
// t = t.replace(/\\r\\n/g, "\\n");
// for (var e = "", n = 0; n < t.length; n++) {
// var r = t.charCodeAt(n);
// r < 128 ? e += String.fromCharCode(r) : e = 127 < r && r < 2048 ? (e += String.fromCharCode(r >> 6 | 192)) + String.fromCharCode(63 & r | 128) : (e = (e += String.fromCharCode(r >> 12 | 224)) + String.fromCharCode(r >> 6 & 63 | 128)) + String.fromCharCode(63 & r | 128)
// }
// return e
// }(t), a = -1, i = 0; i < r.length; i++)
// a = a >>> 8 ^ n[255 & (a ^ r.charCodeAt(i))];
// return (a = (-1 ^ a) >>> 0).toString(e)
// }
//
// function v(t) {
// return (v = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(t) {
// return typeof t
// }
// : function(t) {
// return t && "function" == typeof Symbol && t.constructor === Symbol && t !== Symbol.prototype ? "symbol" : typeof t
// }
// )(t)
// }
//
// for (p in a = Math.round(1e7 * Math.random()),
// o = Math.round(((new Date).getTime() + 60 * (new Date).getTimezoneOffset() * 1e3 + 288e5) / 1e3).toString(),
// m = ["a", "d", "e", "f", "g", "h", "l", "m", "y", "i", "j", "n", "o", "p", "k", "q", "r", "s", "t", "u", "b", "c", "v", "w", "s", "z"],
// u = function(t, e, n) {
// var r;
// n = 2 < arguments.length && void 0 !== n ? n : 8;
// return 0 === arguments.length ? null : (r = "object" === v(t) ? t : (10 === "".concat(t).length && (t = 1e3 * Number.parseInt(t)),
// new Date(t)),
// t += 6e4 * new Date(t).getTimezoneOffset(),
// {
// y: (r = new Date(t + 36e5 * n)).getFullYear(),
// m: r.getMonth() + 1 < 10 ? "0".concat(r.getMonth() + 1) : r.getMonth() + 1,
// d: r.getDate() < 10 ? "0".concat(r.getDate()) : r.getDate(),
// h: r.getHours() < 10 ? "0".concat(r.getHours()) : r.getHours(),
// f: r.getMinutes() < 10 ? "0".concat(r.getMinutes()) : r.getMinutes()
// })
// }(o),
// h = u.y,
// g = u.m,
// l = u.d,
// c = u.h,
// u = u.f,
// d = [h, g, l, c, u].join(""),
// f = [],
// d)
// f.push(m[Number(d[p])]);
// return h = A(f.join("")),
// g = A("".concat(o, "|").concat(a, "|").concat(e, "|").concat("web", "|").concat("3", "|").concat(h)),
// "".concat(h, "=").concat(o, "-").concat(a, "-").concat(g);
// })(url)
// `)
// if err != nil {
// fmt.Println(err)
// return url
// }
// v, _ := r.Export().(string)
// return url + "?" + v
//}
func (d *Pan123) login() error {
var body base.Json
if utils.IsEmailFormat(d.Username) {
@ -161,9 +56,9 @@ func (d *Pan123) login() error {
SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"user-agent": "Dart/2.19(dart:io)-alist",
"platform": "web",
"app-version": "3",
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
//"user-agent": base.UserAgent,
}).
SetBody(body).Post(SignIn)
@ -198,9 +93,9 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"authorization": "Bearer " + d.AccessToken,
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
"platform": "web",
"app-version": "3",
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
//"user-agent": base.UserAgent,
})
if callback != nil {
@ -214,7 +109,7 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
// return nil, err
//}
//req.SetQueryParam("auth-key", *authKey)
res, err := req.Execute(method, GetApi(url))
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
@ -233,48 +128,32 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
return body, nil
}
func (d *Pan123) getFiles(parentId string, name string) ([]File, error) {
func (d *Pan123) getFiles(parentId string) ([]File, error) {
page := 1
total := 0
res := make([]File, 0)
// 2024-02-06 fix concurrency by 123pan
for {
if !d.APIRateLimit(FileList) {
time.Sleep(time.Millisecond * 200)
continue
}
var resp Files
query := map[string]string{
"driveId": "0",
"limit": "100",
"next": "0",
"orderBy": "file_id",
"orderDirection": "desc",
"parentFileId": parentId,
"trashed": "false",
"SearchData": "",
"Page": strconv.Itoa(page),
"OnlyLookAbnormalFile": "0",
"event": "homeListFile",
"operateType": "4",
"inDirectSpace": "false",
"driveId": "0",
"limit": "100",
"next": "0",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"trashed": "false",
"Page": strconv.Itoa(page),
}
_res, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
log.Debug(string(_res))
page++
res = append(res, resp.Data.InfoList...)
total = resp.Data.Total
if len(resp.Data.InfoList) == 0 || resp.Data.Next == "-1" {
break
}
}
if len(res) != total {
log.Warnf("incorrect file count from remote at %s: expected %d, got %d", name, total, len(res))
}
return res, nil
}

View File

@ -4,11 +4,8 @@ import (
"context"
"encoding/base64"
"fmt"
"golang.org/x/time/rate"
"net/http"
"net/url"
"sync"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
@ -22,7 +19,6 @@ import (
type Pan123Share struct {
model.Storage
Addition
apiRateLimit sync.Map
}
func (d *Pan123Share) Config() driver.Config {
@ -150,11 +146,4 @@ func (d *Pan123Share) Put(ctx context.Context, dstDir model.Obj, stream model.Fi
// return nil, errs.NotSupport
//}
func (d *Pan123Share) APIRateLimit(api string) bool {
limiter, _ := d.apiRateLimit.LoadOrStore(api,
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
ins := limiter.(*rate.Limiter)
return ins.Allow()
}
var _ driver.Driver = (*Pan123Share)(nil)
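
The hunk above removes the per-endpoint throttle that Pan123Share used before calling FileList. For readers tracing the change, a minimal standalone sketch of the same pattern follows (one rate.Limiter per endpoint kept in a sync.Map); it is an illustration only, not the driver's code, and it blocks with Wait instead of polling Allow and sleeping. The endpoint name and interval are placeholders.

package main

import (
	"context"
	"fmt"
	"sync"
	"time"

	"golang.org/x/time/rate"
)

// apiLimiter keeps one token-bucket limiter per endpoint, mirroring the
// LoadOrStore pattern deleted from the share driver above.
type apiLimiter struct {
	limiters sync.Map // endpoint -> *rate.Limiter
}

// wait blocks until a request to the endpoint is allowed.
func (a *apiLimiter) wait(ctx context.Context, endpoint string) error {
	v, _ := a.limiters.LoadOrStore(endpoint,
		rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
	return v.(*rate.Limiter).Wait(ctx)
}

func main() {
	var l apiLimiter
	for i := 0; i < 3; i++ {
		if err := l.wait(context.Background(), "/share/get"); err != nil {
			fmt.Println(err)
			return
		}
		fmt.Println("request", i, "allowed at", time.Now().Format("15:04:05.000"))
	}
}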

View File

@ -7,11 +7,10 @@ import (
type Addition struct {
ShareKey string `json:"sharekey" required:"true"`
SharePwd string `json:"sharepassword"`
SharePwd string `json:"sharepassword" required:"true"`
driver.RootID
//OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
//OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
AccessToken string `json:"accesstoken" type:"text"`
OrderBy string `json:"order_by" type:"select" options:"file_name,size,update_at" default:"file_name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
}
var config = driver.Config{

View File

@ -2,15 +2,8 @@ package _123Share
import (
"errors"
"fmt"
"hash/crc32"
"math"
"math/rand"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
@ -22,45 +15,20 @@ const (
Api = "https://www.123pan.com/api"
AApi = "https://www.123pan.com/a/api"
BApi = "https://www.123pan.com/b/api"
MainApi = BApi
MainApi = Api
FileList = MainApi + "/share/get"
DownloadInfo = MainApi + "/share/download/info"
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
)
func signPath(path string, os string, version string) (k string, v string) {
table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
now := time.Now().In(time.FixedZone("CST", 8*3600))
timestamp := fmt.Sprint(now.Unix())
nowStr := []byte(now.Format("200601021504"))
for i := 0; i < len(nowStr); i++ {
nowStr[i] = table[nowStr[i]-48]
}
timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
}
func GetApi(rawUrl string) string {
u, _ := url.Parse(rawUrl)
query := u.Query()
query.Add(signPath(u.Path, "web", "3"))
u.RawQuery = query.Encode()
return u.String()
}
func (d *Pan123Share) request(url string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"authorization": "Bearer " + d.AccessToken,
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
"platform": "web",
"app-version": "3",
//"user-agent": base.UserAgent,
"origin": "https://www.123pan.com",
"referer": "https://www.123pan.com/",
"user-agent": "Dart/2.19(dart:io)",
"platform": "android",
"app-version": "36",
})
if callback != nil {
callback(req)
@ -68,7 +36,7 @@ func (d *Pan123Share) request(url string, method string, callback base.ReqCallba
if resp != nil {
req.SetResult(resp)
}
res, err := req.Execute(method, GetApi(url))
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
@ -84,16 +52,12 @@ func (d *Pan123Share) getFiles(parentId string) ([]File, error) {
page := 1
res := make([]File, 0)
for {
if !d.APIRateLimit(FileList) {
time.Sleep(time.Millisecond * 200)
continue
}
var resp Files
query := map[string]string{
"limit": "100",
"next": "0",
"orderBy": "file_id",
"orderDirection": "desc",
"orderBy": d.OrderBy,
"orderDirection": d.OrderDirection,
"parentFileId": parentId,
"Page": strconv.Itoa(page),
"shareKey": d.ShareKey,

View File

@ -8,21 +8,18 @@ import (
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/cron"
log "github.com/sirupsen/logrus"
)
type Yun139 struct {
model.Storage
Addition
cron *cron.Cron
Account string
}
@ -38,116 +35,61 @@ func (d *Yun139) Init(ctx context.Context) error {
if d.Authorization == "" {
return fmt.Errorf("authorization is empty")
}
d.cron = cron.NewCron(time.Hour * 24 * 7)
d.cron.Do(func() {
err := d.refreshToken()
if err != nil {
log.Errorf("%+v", err)
}
})
switch d.Addition.Type {
case MetaPersonalNew:
if len(d.Addition.RootFolderID) == 0 {
d.RootFolderID = "/"
}
return nil
case MetaPersonal:
if len(d.Addition.RootFolderID) == 0 {
d.RootFolderID = "root"
}
fallthrough
case MetaFamily:
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
return err
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
if len(splits) < 2 {
return fmt.Errorf("authorization is invalid, splits < 2")
}
d.Account = splits[1]
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
"qryUserExternInfoReq": base.Json{
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}, nil)
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
return err
default:
return errs.NotImplement
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
if len(splits) < 2 {
return fmt.Errorf("authorization is invalid, splits < 2")
}
d.Account = splits[1]
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
"qryUserExternInfoReq": base.Json{
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}, nil)
return err
}
func (d *Yun139) Drop(ctx context.Context) error {
if d.cron != nil {
d.cron.Stop()
}
return nil
}
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
switch d.Addition.Type {
case MetaPersonalNew:
return d.personalGetFiles(dir.GetID())
case MetaPersonal:
return d.getFiles(dir.GetID())
case MetaFamily:
if d.isFamily() {
return d.familyGetFiles(dir.GetID())
default:
return nil, errs.NotImplement
} else {
return d.getFiles(dir.GetID())
}
}
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var url string
var err error
switch d.Addition.Type {
case MetaPersonalNew:
url, err = d.personalGetLink(file.GetID())
case MetaPersonal:
fallthrough
case MetaFamily:
url, err = d.getLink(file.GetID())
default:
return nil, errs.NotImplement
}
u, err := d.getLink(file.GetID())
if err != nil {
return nil, err
}
return &model.Link{URL: url}, nil
return &model.Link{URL: u}, nil
}
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
var err error
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"parentFileId": parentDir.GetID(),
"name": dirName,
"description": "",
"type": "folder",
"fileRenameMode": "force_rename",
}
pathname := "/hcy/file/create"
_, err = d.personalPost(pathname, data, nil)
case MetaPersonal:
data := base.Json{
"createCatalogExtReq": base.Json{
"parentCatalogID": parentDir.GetID(),
"newCatalogName": dirName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
data := base.Json{
"createCatalogExtReq": base.Json{
"parentCatalogID": parentDir.GetID(),
"newCatalogName": dirName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
_, err = d.post(pathname, data, nil)
case MetaFamily:
data := base.Json{
},
}
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
if d.isFamily() {
data = base.Json{
"cloudID": d.CloudID,
"commonAccountInfo": base.Json{
"account": d.Account,
@ -155,198 +97,147 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
},
"docLibName": dirName,
}
pathname := "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
_, err = d.post(pathname, data, nil)
default:
err = errs.NotImplement
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileIds": []string{srcObj.GetID()},
"toParentFileId": dstDir.GetID(),
}
pathname := "/hcy/file/batchMove"
_, err := d.personalPost(pathname, data, nil)
if err != nil {
return nil, err
}
return srcObj, nil
case MetaPersonal:
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": "304",
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
if err != nil {
return nil, err
}
return srcObj, nil
default:
if d.isFamily() {
return nil, errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": "304",
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
if err != nil {
return nil, err
}
return srcObj, nil
}
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
var err error
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileId": srcObj.GetID(),
"name": newName,
"description": "",
}
pathname := "/hcy/file/update"
_, err = d.personalPost(pathname, data, nil)
case MetaPersonal:
var data base.Json
var pathname string
if srcObj.IsDir() {
data = base.Json{
"catalogID": srcObj.GetID(),
"catalogName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
} else {
data = base.Json{
"contentID": srcObj.GetID(),
"contentName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
}
_, err = d.post(pathname, data, nil)
default:
err = errs.NotImplement
if d.isFamily() {
return errs.NotImplement
}
var data base.Json
var pathname string
if srcObj.IsDir() {
data = base.Json{
"catalogID": srcObj.GetID(),
"catalogName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
} else {
data = base.Json{
"contentID": srcObj.GetID(),
"contentName": newName,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
}
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
var err error
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileIds": []string{srcObj.GetID()},
"toParentFileId": dstDir.GetID(),
}
pathname := "/hcy/file/batchCopy"
_, err := d.personalPost(pathname, data, nil)
return err
case MetaPersonal:
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": 309,
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err = d.post(pathname, data, nil)
default:
err = errs.NotImplement
if d.isFamily() {
return errs.NotImplement
}
var contentInfoList []string
var catalogInfoList []string
if srcObj.IsDir() {
catalogInfoList = append(catalogInfoList, srcObj.GetID())
} else {
contentInfoList = append(contentInfoList, srcObj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 3,
"actionType": 309,
"taskInfo": base.Json{
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
"newCatalogID": dstDir.GetID(),
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
_, err := d.post(pathname, data, nil)
return err
}
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
switch d.Addition.Type {
case MetaPersonalNew:
data := base.Json{
"fileIds": []string{obj.GetID()},
}
pathname := "/hcy/recyclebin/batchTrash"
_, err := d.personalPost(pathname, data, nil)
return err
case MetaPersonal:
fallthrough
case MetaFamily:
var contentInfoList []string
var catalogInfoList []string
if obj.IsDir() {
catalogInfoList = append(catalogInfoList, obj.GetID())
} else {
contentInfoList = append(contentInfoList, obj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 2,
"actionType": 201,
"taskInfo": base.Json{
"newCatalogID": "",
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
if d.isFamily() {
data = base.Json{
"catalogList": catalogInfoList,
"contentList": contentInfoList,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"sourceCatalogType": 1002,
"taskType": 2,
}
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
}
_, err := d.post(pathname, data, nil)
return err
default:
return errs.NotImplement
var contentInfoList []string
var catalogInfoList []string
if obj.IsDir() {
catalogInfoList = append(catalogInfoList, obj.GetID())
} else {
contentInfoList = append(contentInfoList, obj.GetID())
}
data := base.Json{
"createBatchOprTaskReq": base.Json{
"taskType": 2,
"actionType": 201,
"taskInfo": base.Json{
"newCatalogID": "",
"contentInfoList": contentInfoList,
"catalogInfoList": catalogInfoList,
},
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
},
}
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
if d.isFamily() {
data = base.Json{
"catalogList": catalogInfoList,
"contentList": contentInfoList,
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
"sourceCatalogType": 1002,
"taskType": 2,
}
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
}
_, err := d.post(pathname, data, nil)
return err
}
const (
@ -366,208 +257,94 @@ func getPartSize(size int64) int64 {
}
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
switch d.Addition.Type {
case MetaPersonalNew:
var err error
fullHash := stream.GetHash().GetHash(utils.SHA256)
if len(fullHash) <= 0 {
tmpF, err := stream.CacheFullInTempFile()
if err != nil {
return err
}
fullHash, err = utils.HashFile(utils.SHA256, tmpF)
if err != nil {
return err
}
}
// return errs.NotImplement
data := base.Json{
"contentHash": fullHash,
"contentHashAlgorithm": "SHA256",
"contentType": "application/octet-stream",
"parallelUpload": false,
"partInfos": []base.Json{{
"parallelHashCtx": base.Json{
"partOffset": 0,
},
"partNumber": 1,
"partSize": stream.GetSize(),
data := base.Json{
"manualRename": 2,
"operation": 0,
"fileCount": 1,
"totalSize": 0, // 去除上传大小限制
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0, // 去除上传大小限制
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
data = d.newJson(base.Json{
"fileCount": 1,
"manualRename": 2,
"operation": 0,
"path": "",
"seqNo": "",
"totalSize": 0,
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0,
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"size": stream.GetSize(),
"parentFileId": dstDir.GetID(),
"name": stream.GetName(),
"type": "file",
"fileRenameMode": "auto_rename",
}
pathname := "/hcy/file/create"
var resp PersonalUploadResp
_, err = d.personalPost(pathname, data, &resp)
if err != nil {
return err
})
pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
return errs.NotImplement
}
var resp UploadResp
_, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
var partSize = getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
}
for i := int64(0); i < part; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
if resp.Data.Exist || resp.Data.RapidUpload {
return nil
start := i * partSize
byteSize := stream.GetSize() - start
if byteSize > partSize {
byteSize = partSize
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
limitReader := io.LimitReader(stream, byteSize)
// Update Progress
r := io.TeeReader(stream, p)
req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
r := io.TeeReader(limitReader, p)
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "application/octet-stream")
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
req.Header.Set("Origin", "https://yun.139.com")
req.Header.Set("Referer", "https://yun.139.com/")
req.ContentLength = stream.GetSize()
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
req.Header.Set("rangeType", "0")
req.ContentLength = byteSize
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()
log.Debugf("%+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
}
data = base.Json{
"contentHash": fullHash,
"contentHashAlgorithm": "SHA256",
"fileId": resp.Data.FileId,
"uploadId": resp.Data.UploadId,
}
_, err = d.personalPost("/hcy/file/complete", data, nil)
if err != nil {
return err
}
return nil
case MetaPersonal:
fallthrough
case MetaFamily:
data := base.Json{
"manualRename": 2,
"operation": 0,
"fileCount": 1,
"totalSize": 0, // 去除上传大小限制
"uploadContentList": []base.Json{{
"contentName": stream.GetName(),
"contentSize": 0, // 去除上传大小限制
// "digest": "5a3231986ce7a6b46e408612d385bafa"
}},
"parentCatalogID": dstDir.GetID(),
"newCatalogName": "",
"commonAccountInfo": base.Json{
"account": d.Account,
"accountType": 1,
},
}
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
if d.isFamily() {
// data = d.newJson(base.Json{
// "fileCount": 1,
// "manualRename": 2,
// "operation": 0,
// "path": "",
// "seqNo": "",
// "totalSize": 0,
// "uploadContentList": []base.Json{{
// "contentName": stream.GetName(),
// "contentSize": 0,
// // "digest": "5a3231986ce7a6b46e408612d385bafa"
// }},
// })
// pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
return errs.NotImplement
}
var resp UploadResp
_, err := d.post(pathname, data, &resp)
if err != nil {
return err
}
// Progress
p := driver.NewProgress(stream.GetSize(), up)
var partSize = getPartSize(stream.GetSize())
part := (stream.GetSize() + partSize - 1) / partSize
if part == 0 {
part = 1
}
for i := int64(0); i < part; i++ {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
start := i * partSize
byteSize := stream.GetSize() - start
if byteSize > partSize {
byteSize = partSize
}
limitReader := io.LimitReader(stream, byteSize)
// Update Progress
r := io.TeeReader(limitReader, p)
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
if err != nil {
return err
}
req = req.WithContext(ctx)
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
req.Header.Set("rangeType", "0")
req.ContentLength = byteSize
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
_ = res.Body.Close()
log.Debugf("%+v", res)
if res.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
}
}
return nil
default:
return errs.NotImplement
}
}
func (d *Yun139) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
switch d.Addition.Type {
case MetaPersonalNew:
var resp base.Json
var uri string
data := base.Json{
"category": "video",
"fileId": args.Obj.GetID(),
}
switch args.Method {
case "video_preview":
uri = "/hcy/videoPreview/getPreviewInfo"
default:
return nil, errs.NotSupport
}
_, err := d.personalPost(uri, data, &resp)
if err != nil {
return nil, err
}
return resp["data"], nil
default:
return nil, errs.NotImplement
}
return nil
}
var _ driver.Driver = (*Yun139)(nil)
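
The rewritten Put above streams the file in fixed-size parts, tagging each request with a "range: bytes=start-end" header and the upload task ID. The arithmetic for the part windows is easy to lose in the diff, so here is a small self-contained sketch of just that slicing; the 100-byte part size in main is an arbitrary stand-in, since the driver's real getPartSize body is not part of this hunk.

package main

import "fmt"

// partWindows splits a payload into byte ranges of at most partSize bytes,
// matching the upload loop shown above.
func partWindows(size, partSize int64) [][2]int64 {
	parts := (size + partSize - 1) / partSize
	if parts == 0 {
		parts = 1 // a zero-byte file still issues one request
	}
	windows := make([][2]int64, 0, parts)
	for i := int64(0); i < parts; i++ {
		start := i * partSize
		length := size - start
		if length > partSize {
			length = partSize
		}
		// corresponding header: range: bytes=start-(start+length-1)
		windows = append(windows, [2]int64{start, start + length - 1})
	}
	return windows
}

func main() {
	for _, w := range partWindows(250, 100) {
		fmt.Printf("range: bytes=%d-%d\n", w[0], w[1])
	}
}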

View File

@ -9,20 +9,17 @@ type Addition struct {
//Account string `json:"account" required:"true"`
Authorization string `json:"authorization" type:"text" required:"true"`
driver.RootID
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
CloudID string `json:"cloud_id"`
}
var config = driver.Config{
Name: "139Yun",
LocalSort: true,
ProxyRangeOption: true,
Name: "139Yun",
LocalSort: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
d := &Yun139{}
d.ProxyRange = true
return d
return &Yun139{}
})
}

View File

@ -1,15 +1,5 @@
package _139
import (
"encoding/xml"
)
const (
MetaPersonal string = "personal"
MetaFamily string = "family"
MetaPersonalNew string = "personal_new"
)
type BaseResp struct {
Success bool `json:"success"`
Code string `json:"code"`
@ -195,51 +185,3 @@ type QueryContentListResp struct {
RecallContent interface{} `json:"recallContent"`
} `json:"data"`
}
type PersonalThumbnail struct {
Style string `json:"style"`
Url string `json:"url"`
}
type PersonalFileItem struct {
FileId string `json:"fileId"`
Name string `json:"name"`
Size int64 `json:"size"`
Type string `json:"type"`
CreatedAt string `json:"createdAt"`
UpdatedAt string `json:"updatedAt"`
Thumbnails []PersonalThumbnail `json:"thumbnailUrls"`
}
type PersonalListResp struct {
BaseResp
Data struct {
Items []PersonalFileItem `json:"items"`
NextPageCursor string `json:"nextPageCursor"`
}
}
type PersonalPartInfo struct {
PartNumber int `json:"partNumber"`
UploadUrl string `json:"uploadUrl"`
}
type PersonalUploadResp struct {
BaseResp
Data struct {
FileId string `json:"fileId"`
PartInfos []PersonalPartInfo `json:"partInfos"`
Exist bool `json:"exist"`
RapidUpload bool `json:"rapidUpload"`
UploadId string `json:"uploadId"`
}
}
type RefreshTokenResp struct {
XMLName xml.Name `xml:"root"`
Return string `xml:"return"`
Token string `xml:"token"`
Expiretime int32 `xml:"expiretime"`
AccessToken string `xml:"accessToken"`
Desc string `xml:"desc"`
}

View File

@ -15,7 +15,6 @@ import (
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/pkg/utils/random"
"github.com/alist-org/alist/v3/internal/op"
"github.com/go-resty/resty/v2"
jsoniter "github.com/json-iterator/go"
log "github.com/sirupsen/logrus"
@ -53,32 +52,6 @@ func getTime(t string) time.Time {
return stamp
}
func (d *Yun139) refreshToken() error {
url := "https://aas.caiyun.feixin.10086.cn:443/tellin/authTokenRefresh.do"
var resp RefreshTokenResp
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
if err != nil {
return err
}
decodeStr := string(decode)
splits := strings.Split(decodeStr, ":")
reqBody := "<root><token>" + splits[2] + "</token><account>" + splits[1] + "</account><clienttype>656</clienttype></root>"
_, err = base.RestyClient.R().
ForceContentType("application/xml").
SetBody(reqBody).
SetResult(&resp).
Post(url)
if err != nil {
return err
}
if resp.Return != "0" {
return fmt.Errorf("failed to refresh token: %s", resp.Desc)
}
d.Authorization = base64.StdEncoding.EncodeToString([]byte(splits[0] + ":" + splits[1] + ":" + resp.Token))
op.MustSaveDriverStorage(d)
return nil
}
func (d *Yun139) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
url := "https://yun.139.com" + pathname
req := base.RestyClient.R()
@ -279,154 +252,3 @@ func unicode(str string) string {
textUnquoted := textQuoted[1 : len(textQuoted)-1]
return textUnquoted
}
func (d *Yun139) personalRequest(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
url := "https://personal-kd-njs.yun.139.com" + pathname
req := base.RestyClient.R()
randStr := random.String(16)
ts := time.Now().Format("2006-01-02 15:04:05")
if callback != nil {
callback(req)
}
body, err := utils.Json.Marshal(req.Body)
if err != nil {
return nil, err
}
sign := calSign(string(body), ts, randStr)
svcType := "1"
if d.isFamily() {
svcType = "2"
}
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
"Authorization": "Basic " + d.Authorization,
"Caller": "web",
"Cms-Device": "default",
"Mcloud-Channel": "1000101",
"Mcloud-Client": "10701",
"Mcloud-Route": "001",
"Mcloud-Sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
"Mcloud-Version": "7.13.0",
"Origin": "https://yun.139.com",
"Referer": "https://yun.139.com/w/",
"x-DeviceInfo": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
"x-huawei-channelSrc": "10000034",
"x-inner-ntwk": "2",
"x-m4c-caller": "PC",
"x-m4c-src": "10002",
"x-SvcType": svcType,
"X-Yun-Api-Version": "v1",
"X-Yun-App-Channel": "10000034",
"X-Yun-Channel-Source": "10000034",
"X-Yun-Client-Info": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
"X-Yun-Module-Type": "100",
"X-Yun-Svc-Type": "1",
})
var e BaseResp
req.SetResult(&e)
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugln(res.String())
if !e.Success {
return nil, errors.New(e.Message)
}
if resp != nil {
err = utils.Json.Unmarshal(res.Body(), resp)
if err != nil {
return nil, err
}
}
return res.Body(), nil
}
func (d *Yun139) personalPost(pathname string, data interface{}, resp interface{}) ([]byte, error) {
return d.personalRequest(pathname, http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, resp)
}
func getPersonalTime(t string) time.Time {
stamp, err := time.ParseInLocation("2006-01-02T15:04:05.999-07:00", t, utils.CNLoc)
if err != nil {
panic(err)
}
return stamp
}
func (d *Yun139) personalGetFiles(fileId string) ([]model.Obj, error) {
files := make([]model.Obj, 0)
nextPageCursor := ""
for {
data := base.Json{
"imageThumbnailStyleList": []string{"Small", "Large"},
"orderBy": "updated_at",
"orderDirection": "DESC",
"pageInfo": base.Json{
"pageCursor": nextPageCursor,
"pageSize": 100,
},
"parentFileId": fileId,
}
var resp PersonalListResp
_, err := d.personalPost("/hcy/file/list", data, &resp)
if err != nil {
return nil, err
}
nextPageCursor = resp.Data.NextPageCursor
for _, item := range resp.Data.Items {
var isFolder = (item.Type == "folder")
var f model.Obj
if isFolder {
f = &model.Object{
ID: item.FileId,
Name: item.Name,
Size: 0,
Modified: getPersonalTime(item.UpdatedAt),
Ctime: getPersonalTime(item.CreatedAt),
IsFolder: isFolder,
}
} else {
var Thumbnails = item.Thumbnails
var ThumbnailUrl string
if len(Thumbnails) > 0 {
ThumbnailUrl = Thumbnails[len(Thumbnails)-1].Url
}
f = &model.ObjThumb{
Object: model.Object{
ID: item.FileId,
Name: item.Name,
Size: item.Size,
Modified: getPersonalTime(item.UpdatedAt),
Ctime: getPersonalTime(item.CreatedAt),
IsFolder: isFolder,
},
Thumbnail: model.Thumbnail{Thumbnail: ThumbnailUrl},
}
}
files = append(files, f)
}
if len(nextPageCursor) == 0 {
break
}
}
return files, nil
}
func (d *Yun139) personalGetLink(fileId string) (string, error) {
data := base.Json{
"fileId": fileId,
}
res, err := d.personalPost("/hcy/file/getDownloadUrl",
data, nil)
if err != nil {
return "", err
}
var cdnUrl = jsoniter.Get(res, "data", "cdnUrl").ToString()
if cdnUrl != "" {
return cdnUrl, nil
} else {
return jsoniter.Get(res, "data", "url").ToString(), nil
}
}
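
Among the util.go code deleted above is refreshToken, which renewed the 139 token by posting a small XML body to authTokenRefresh.do and re-packing the base64 "prefix:account:token" authorization. As a reference while reading the removal, here is a hedged standalone sketch of just the body construction and re-encoding; the credentials are dummies and no request is sent.

package main

import (
	"encoding/base64"
	"fmt"
	"strings"
)

// buildRefreshBody mirrors the XML body the removed refreshToken helper posted.
func buildRefreshBody(authorization string) (string, error) {
	decoded, err := base64.StdEncoding.DecodeString(authorization)
	if err != nil {
		return "", err
	}
	splits := strings.Split(string(decoded), ":")
	if len(splits) < 3 {
		return "", fmt.Errorf("authorization is invalid, splits < 3")
	}
	return "<root><token>" + splits[2] + "</token><account>" + splits[1] +
		"</account><clienttype>656</clienttype></root>", nil
}

// repackAuthorization stores a refreshed token the same way the helper did:
// base64("prefix:account:newToken").
func repackAuthorization(authorization, newToken string) (string, error) {
	decoded, err := base64.StdEncoding.DecodeString(authorization)
	if err != nil {
		return "", err
	}
	splits := strings.Split(string(decoded), ":")
	if len(splits) < 2 {
		return "", fmt.Errorf("authorization is invalid, splits < 2")
	}
	return base64.StdEncoding.EncodeToString([]byte(splits[0] + ":" + splits[1] + ":" + newToken)), nil
}

func main() {
	auth := base64.StdEncoding.EncodeToString([]byte("app:13800000000:old-token")) // dummy values
	body, err := buildRefreshBody(auth)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(body)
	fmt.Println(repackAuthorization(auth, "new-token"))
}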

View File

@ -1,7 +1,6 @@
package _189pc
import (
"container/ring"
"context"
"net/http"
"strconv"
@ -29,9 +28,6 @@ type Cloud189PC struct {
uploadThread int
familyTransferFolder *ring.Ring
cleanFamilyTransferFile func()
storageConfig driver.Config
}
@ -56,6 +52,7 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
}
if !y.isFamily() && y.RootFolderID == "" {
y.RootFolderID = "-11"
y.FamilyID = ""
}
// Limit the number of upload threads
@ -82,24 +79,11 @@ func (y *Cloud189PC) Init(ctx context.Context) (err error) {
}
// Resolve the family cloud ID
if y.FamilyID == "" {
if y.isFamily() && y.FamilyID == "" {
if y.FamilyID, err = y.getFamilyID(); err != nil {
return err
}
}
// Create transfer folders to avoid file name collisions
if y.FamilyTransfer {
if y.familyTransferFolder, err = y.createFamilyTransferFolder(32); err != nil {
return err
}
}
y.cleanFamilyTransferFile = utils.NewThrottle2(time.Minute, func() {
if err := y.cleanFamilyTransfer(context.TODO()); err != nil {
utils.Log.Errorf("cleanFamilyTransferFolderError:%s", err)
}
})
return
}
@ -108,7 +92,7 @@ func (y *Cloud189PC) Drop(ctx context.Context) error {
}
func (y *Cloud189PC) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
return y.getFiles(ctx, dir.GetID(), y.isFamily())
return y.getFiles(ctx, dir.GetID())
}
func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
@ -116,9 +100,8 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
URL string `json:"fileDownloadUrl"`
}
isFamily := y.isFamily()
fullUrl := API_URL
if isFamily {
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/getFileDownloadUrl.action"
@ -126,7 +109,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
_, err := y.get(fullUrl, func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParam("fileId", file.GetID())
if isFamily {
if y.isFamily() {
r.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
})
@ -136,7 +119,7 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
"flag": "1",
})
}
}, &downloadUrl, isFamily)
}, &downloadUrl)
if err != nil {
return nil, err
}
@ -173,9 +156,8 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
}
func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
isFamily := y.isFamily()
fullUrl := API_URL
if isFamily {
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/createFolder.action"
@ -187,7 +169,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
"folderName": dirName,
"relativePath": "",
})
if isFamily {
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentDir.GetID(),
@ -197,7 +179,7 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
"parentFolderId": parentDir.GetID(),
})
}
}, &newFolder, isFamily)
}, &newFolder)
if err != nil {
return nil, err
}
@ -205,14 +187,27 @@ func (y *Cloud189PC) MakeDir(ctx context.Context, parentDir model.Obj, dirName s
}
func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
isFamily := y.isFamily()
other := map[string]string{"targetFileName": dstDir.GetName()}
resp, err := y.CreateBatchTask("MOVE", IF(isFamily, y.FamilyID, ""), dstDir.GetID(), other, BatchTaskInfo{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
})
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "MOVE",
"taskInfos": MustString(utils.Json.MarshalToString(
[]BatchTaskInfo{
{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
},
})),
"targetFolderId": dstDir.GetID(),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, &resp)
if err != nil {
return nil, err
}
@ -223,11 +218,10 @@ func (y *Cloud189PC) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.
}
func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
isFamily := y.isFamily()
queryParam := make(map[string]string)
fullUrl := API_URL
method := http.MethodPost
if isFamily {
if y.isFamily() {
fullUrl += "/family/file"
method = http.MethodGet
queryParam["familyId"] = y.FamilyID
@ -251,7 +245,7 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin
_, err := y.request(fullUrl, method, func(req *resty.Request) {
req.SetContext(ctx).SetQueryParams(queryParam)
}, nil, newObj, isFamily)
}, nil, newObj)
if err != nil {
return nil, err
}
@ -259,15 +253,28 @@ func (y *Cloud189PC) Rename(ctx context.Context, srcObj model.Obj, newName strin
}
func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
isFamily := y.isFamily()
other := map[string]string{"targetFileName": dstDir.GetName()}
resp, err := y.CreateBatchTask("COPY", IF(isFamily, y.FamilyID, ""), dstDir.GetID(), other, BatchTaskInfo{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
})
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "COPY",
"taskInfos": MustString(utils.Json.MarshalToString(
[]BatchTaskInfo{
{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
},
})),
"targetFolderId": dstDir.GetID(),
"targetFileName": dstDir.GetName(),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, &resp)
if err != nil {
return err
}
@ -275,13 +282,27 @@ func (y *Cloud189PC) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
}
func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
isFamily := y.isFamily()
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetContext(ctx)
req.SetFormData(map[string]string{
"type": "DELETE",
"taskInfos": MustString(utils.Json.MarshalToString(
[]*BatchTaskInfo{
{
FileId: obj.GetID(),
FileName: obj.GetName(),
IsFolder: BoolToNumber(obj.IsDir()),
},
})),
})
resp, err := y.CreateBatchTask("DELETE", IF(isFamily, y.FamilyID, ""), "", nil, BatchTaskInfo{
FileId: obj.GetID(),
FileName: obj.GetName(),
IsFolder: BoolToNumber(obj.IsDir()),
})
if y.isFamily() {
req.SetFormData(map[string]string{
"familyId": y.FamilyID,
})
}
}, &resp)
if err != nil {
return err
}
@ -289,73 +310,25 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
return y.WaitBatchTask("DELETE", resp.TaskID, time.Millisecond*200)
}
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (newObj model.Obj, err error) {
overwrite := true
isFamily := y.isFamily()
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// Slow to respond; enable only when needed
if y.Addition.RapidUpload && !stream.IsForceStreamUpload() {
if newObj, err := y.RapidUpload(ctx, dstDir, stream, isFamily, overwrite); err == nil {
if y.Addition.RapidUpload {
if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
return newObj, nil
}
}
uploadMethod := y.UploadMethod
if stream.IsForceStreamUpload() {
uploadMethod = "stream"
}
// The legacy upload is also restricted on the family cloud
if uploadMethod == "old" {
return y.OldUpload(ctx, dstDir, stream, up, isFamily, overwrite)
}
// Family cloud transfer enabled
if !isFamily && y.FamilyTransfer {
// Redirect the upload target to a family cloud folder
transferDstDir := dstDir
dstDir = (y.familyTransferFolder.Value).(*Cloud189Folder)
y.familyTransferFolder = y.familyTransferFolder.Next()
isFamily = true
overwrite = false
defer func() {
if newObj != nil {
// Batch tasks occasionally fail to delete the file
y.cleanFamilyTransferFile()
// Transfer the family cloud file to the personal cloud
err = y.SaveFamilyFileToPersonCloud(context.TODO(), y.FamilyID, newObj, transferDstDir, true)
task := BatchTaskInfo{
FileId: newObj.GetID(),
FileName: newObj.GetName(),
IsFolder: BoolToNumber(newObj.IsDir()),
}
// Delete the source file
if resp, err := y.CreateBatchTask("DELETE", y.FamilyID, "", nil, task); err == nil {
y.WaitBatchTask("DELETE", resp.TaskID, time.Second)
// Permanently delete it
if resp, err := y.CreateBatchTask("CLEAR_RECYCLE", y.FamilyID, "", nil, task); err == nil {
y.WaitBatchTask("CLEAR_RECYCLE", resp.TaskID, time.Second)
}
}
newObj = nil
}
}()
}
switch uploadMethod {
switch y.UploadMethod {
case "old":
return y.OldUpload(ctx, dstDir, stream, up)
case "rapid":
return y.FastUpload(ctx, dstDir, stream, up, isFamily, overwrite)
return y.FastUpload(ctx, dstDir, stream, up)
case "stream":
if stream.GetSize() == 0 {
return y.FastUpload(ctx, dstDir, stream, up, isFamily, overwrite)
return y.FastUpload(ctx, dstDir, stream, up)
}
fallthrough
default:
return y.StreamUpload(ctx, dstDir, stream, up, isFamily, overwrite)
return y.StreamUpload(ctx, dstDir, stream, up)
}
}

View File

@ -192,19 +192,3 @@ func partSize(size int64) int64 {
}
return DEFAULT
}
func isBool(bs ...bool) bool {
for _, b := range bs {
if b {
return true
}
}
return false
}
func IF[V any](o bool, t V, f V) V {
if o {
return t
}
return f
}
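
The two helpers deleted here existed only for the family-transfer path that this change reverts: isBool collapses a variadic ...bool into a single flag, and IF is a generic ternary. A short standalone sketch shows how call sites such as "opertype": IF(overwrite, "3", "1") read before the revert; the values in main are made up for illustration.

package main

import "fmt"

// IF is the generic ternary helper removed above.
func IF[V any](cond bool, t, f V) V {
	if cond {
		return t
	}
	return f
}

// isBool reports whether any optional flag is set, as the removed helper did
// for variadic isFamily parameters.
func isBool(bs ...bool) bool {
	for _, b := range bs {
		if b {
			return true
		}
	}
	return false
}

func main() {
	overwrite := false
	fmt.Println("opertype:", IF(overwrite, "3", "1")) // "3" when overwriting, "1" otherwise
	fmt.Println("family request:", isBool())          // no flag passed -> personal cloud
	fmt.Println("family request:", isBool(true))      // explicit family flag
}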

View File

@ -16,7 +16,6 @@ type Addition struct {
FamilyID string `json:"family_id"`
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
FamilyTransfer bool `json:"family_transfer"`
RapidUpload bool `json:"rapid_upload"`
NoUseOcr bool `json:"no_use_ocr"`
}

View File

@ -3,11 +3,10 @@ package _189pc
import (
"encoding/xml"
"fmt"
"github.com/alist-org/alist/v3/pkg/utils"
"sort"
"strings"
"time"
"github.com/alist-org/alist/v3/pkg/utils"
)
// There are, surprisingly, four different response formats
@ -143,7 +142,7 @@ type FamilyInfoListResp struct {
type FamilyInfoResp struct {
Count int `json:"count"`
CreateTime string `json:"createTime"`
FamilyID int64 `json:"familyId"`
FamilyID int `json:"familyId"`
RemarkName string `json:"remarkName"`
Type int `json:"type"`
UseFlag int `json:"useFlag"`
@ -243,12 +242,7 @@ type BatchTaskInfo struct {
// IsFolder whether the item is a folder: 0 = no, 1 = yes
IsFolder int `json:"isFolder"`
// SrcParentId ID of the file's parent directory
SrcParentId string `json:"srcParentId,omitempty"`
/* conflict handling */
// 1 -> skip, 2 -> keep, 3 -> overwrite
DealWay int `json:"dealWay,omitempty"`
IsConflict int `json:"isConflict,omitempty"`
//SrcParentId string `json:"srcParentId"`
}
/* upload section */
@ -361,14 +355,6 @@ type BatchTaskStateResp struct {
TaskStatus int `json:"taskStatus"` // 1 initializing, 2 conflict, 3 running, 4 done
}
type BatchTaskConflictTaskInfoResp struct {
SessionKey string `json:"sessionKey"`
TargetFolderID int `json:"targetFolderId"`
TaskID string `json:"taskId"`
TaskInfos []BatchTaskInfo
TaskType int `json:"taskType"`
}
/* encrypted query params */
type Params map[string]string
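
Several hunks in this change replace the shared CreateBatchTask helper with inline createBatchTask.action posts. The request shape is the same either way: a type (MOVE/COPY/DELETE), a JSON-encoded taskInfos array of BatchTaskInfo, and optional targetFolderId/familyId form fields. Below is a minimal sketch of building that form; the IDs are illustrative, no HTTP call is made, and the JSON tags for FileId/FileName are assumed camelCase since they are not visible in this hunk.

package main

import (
	"encoding/json"
	"fmt"
	"net/url"
)

// BatchTaskInfo mirrors the struct above, without the conflict-handling
// fields this change removes.
type BatchTaskInfo struct {
	FileId   string `json:"fileId"`
	FileName string `json:"fileName"`
	IsFolder int    `json:"isFolder"` // 0 = file, 1 = folder
}

// batchTaskForm builds the form posted to /batch/createBatchTask.action.
func batchTaskForm(taskType, targetFolderId, familyId string, infos ...BatchTaskInfo) (url.Values, error) {
	raw, err := json.Marshal(infos)
	if err != nil {
		return nil, err
	}
	form := url.Values{}
	form.Set("type", taskType)
	form.Set("taskInfos", string(raw))
	if targetFolderId != "" {
		form.Set("targetFolderId", targetFolderId)
	}
	if familyId != "" {
		form.Set("familyId", familyId)
	}
	return form, nil
}

func main() {
	form, _ := batchTaskForm("MOVE", "12345", "", BatchTaskInfo{
		FileId:   "100",
		FileName: "demo.txt",
		IsFolder: 0,
	})
	fmt.Println(form.Encode())
}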

View File

@ -2,7 +2,6 @@ package _189pc
import (
"bytes"
"container/ring"
"context"
"crypto/md5"
"encoding/base64"
@ -55,11 +54,11 @@ const (
CHANNEL_ID = "web_cloud.189.cn"
)
func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool) map[string]string {
func (y *Cloud189PC) SignatureHeader(url, method, params string) map[string]string {
dateOfGmt := getHttpDateStr()
sessionKey := y.tokenInfo.SessionKey
sessionSecret := y.tokenInfo.SessionSecret
if isFamily {
if y.isFamily() {
sessionKey = y.tokenInfo.FamilySessionKey
sessionSecret = y.tokenInfo.FamilySessionSecret
}
@ -73,9 +72,9 @@ func (y *Cloud189PC) SignatureHeader(url, method, params string, isFamily bool)
return header
}
func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
func (y *Cloud189PC) EncryptParams(params Params) string {
sessionSecret := y.tokenInfo.SessionSecret
if isFamily {
if y.isFamily() {
sessionSecret = y.tokenInfo.FamilySessionSecret
}
if params != nil {
@ -84,17 +83,17 @@ func (y *Cloud189PC) EncryptParams(params Params, isFamily bool) string {
return ""
}
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}, isFamily ...bool) ([]byte, error) {
func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, params Params, resp interface{}) ([]byte, error) {
req := y.client.R().SetQueryParams(clientSuffix())
// Set params
paramsData := y.EncryptParams(params, isBool(isFamily...))
paramsData := y.EncryptParams(params)
if paramsData != "" {
req.SetQueryParam("params", paramsData)
}
// Signature
req.SetHeaders(y.SignatureHeader(url, method, paramsData, isBool(isFamily...)))
req.SetHeaders(y.SignatureHeader(url, method, paramsData))
var erron RespErr
req.SetError(&erron)
@ -130,15 +129,15 @@ func (y *Cloud189PC) request(url, method string, callback base.ReqCallback, para
return res.Body(), nil
}
func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}, isFamily ...bool) ([]byte, error) {
return y.request(url, http.MethodGet, callback, nil, resp, isFamily...)
func (y *Cloud189PC) get(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return y.request(url, http.MethodGet, callback, nil, resp)
}
func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}, isFamily ...bool) ([]byte, error) {
return y.request(url, http.MethodPost, callback, nil, resp, isFamily...)
func (y *Cloud189PC) post(url string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
return y.request(url, http.MethodPost, callback, nil, resp)
}
func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader, isFamily bool) ([]byte, error) {
func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]string, sign bool, file io.Reader) ([]byte, error) {
req, err := http.NewRequestWithContext(ctx, http.MethodPut, url, file)
if err != nil {
return nil, err
@ -155,7 +154,7 @@ func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]str
}
if sign {
for key, value := range y.SignatureHeader(url, http.MethodPut, "", isFamily) {
for key, value := range y.SignatureHeader(url, http.MethodPut, "") {
req.Header.Add(key, value)
}
}
@ -182,9 +181,9 @@ func (y *Cloud189PC) put(ctx context.Context, url string, headers map[string]str
}
return body, nil
}
func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool) ([]model.Obj, error) {
func (y *Cloud189PC) getFiles(ctx context.Context, fileId string) ([]model.Obj, error) {
fullUrl := API_URL
if isFamily {
if y.isFamily() {
fullUrl += "/family/file"
}
fullUrl += "/listFiles.action"
@ -202,7 +201,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool)
"pageNum": fmt.Sprint(pageNum),
"pageSize": "130",
})
if isFamily {
if y.isFamily() {
r.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"orderBy": toFamilyOrderBy(y.OrderBy),
@ -215,7 +214,7 @@ func (y *Cloud189PC) getFiles(ctx context.Context, fileId string, isFamily bool)
"descending": toDesc(y.OrderDirection),
})
}
}, &resp, isFamily)
}, &resp)
if err != nil {
return nil, err
}
@ -438,7 +437,7 @@ func (y *Cloud189PC) refreshSession() (err error) {
// Regular upload
// Cannot upload zero-byte files
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
var sliceSize = partSize(file.GetSize())
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
lastPartSize := file.GetSize() % sliceSize
@ -455,7 +454,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
}
fullUrl := UPLOAD_URL
if isFamily {
if y.isFamily() {
params.Set("familyId", y.FamilyID)
fullUrl += "/family"
} else {
@ -467,7 +466,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
var initMultiUpload InitMultiUploadResp
_, err := y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx)
}, params, &initMultiUpload, isFamily)
}, params, &initMultiUpload)
if err != nil {
return nil, err
}
@ -503,14 +502,14 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
partInfo := fmt.Sprintf("%d-%s", i, base64.StdEncoding.EncodeToString(md5Bytes))
threadG.Go(func(ctx context.Context) error {
uploadUrls, err := y.GetMultiUploadUrls(ctx, isFamily, initMultiUpload.Data.UploadFileID, partInfo)
uploadUrls, err := y.GetMultiUploadUrls(ctx, initMultiUpload.Data.UploadFileID, partInfo)
if err != nil {
return err
}
// step.4 upload the slice
uploadUrl := uploadUrls[0]
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData), isFamily)
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, bytes.NewReader(byteData))
if err != nil {
return err
}
@ -539,21 +538,21 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
"sliceMd5": sliceMd5Hex,
"lazyCheck": "1",
"isLog": "0",
"opertype": IF(overwrite, "3", "1"),
}, &resp, isFamily)
"opertype": "3",
}, &resp)
if err != nil {
return nil, err
}
return resp.toFile(), nil
}
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, isFamily bool, overwrite bool) (model.Obj, error) {
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
fileMd5 := stream.GetHash().GetHash(utils.MD5)
if len(fileMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()), isFamily)
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
if err != nil {
return nil, err
}
@ -562,11 +561,11 @@ func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream m
return nil, errors.New("rapid upload fail")
}
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId, isFamily, overwrite)
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
}
// Fast upload
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
@ -595,7 +594,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
}
silceMd5.Reset()
if _, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
if _, err := io.CopyN(io.MultiWriter(fileMd5, silceMd5), tempFile, byteSize); err != nil && err != io.EOF {
return nil, err
}
md5Byte := silceMd5.Sum(nil)
@ -610,7 +609,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
}
fullUrl := UPLOAD_URL
if isFamily {
if y.isFamily() {
fullUrl += "/family"
} else {
//params.Set("extend", `{"opScene":"1","relativepath":"","rootfolderid":""}`)
@ -629,13 +628,13 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
"sliceSize": fmt.Sprint(sliceSize),
"sliceMd5": sliceMd5Hex,
}
if isFamily {
if y.isFamily() {
params.Set("familyId", y.FamilyID)
}
var uploadInfo InitMultiUploadResp
_, err = y.request(fullUrl+"/initMultiUpload", http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx)
}, params, &uploadInfo, isFamily)
}, params, &uploadInfo)
if err != nil {
return nil, err
}
@ -660,7 +659,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
i, uploadPart := i, uploadPart
threadG.Go(func(ctx context.Context) error {
// step.3 get the upload URLs
uploadUrls, err := y.GetMultiUploadUrls(ctx, isFamily, uploadInfo.UploadFileID, uploadPart)
uploadUrls, err := y.GetMultiUploadUrls(ctx, uploadInfo.UploadFileID, uploadPart)
if err != nil {
return err
}
@ -672,7 +671,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
}
// step.4 upload the slice
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize), isFamily)
_, err = y.put(ctx, uploadUrl.RequestURL, uploadUrl.Headers, false, io.NewSectionReader(tempFile, offset, byteSize))
if err != nil {
return err
}
@ -699,8 +698,8 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
}, Params{
"uploadFileId": uploadInfo.UploadFileID,
"isLog": "0",
"opertype": IF(overwrite, "3", "1"),
}, &resp, isFamily)
"opertype": "3",
}, &resp)
if err != nil {
return nil, err
}
@ -709,9 +708,9 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
// Get upload slice info
// The HTTP body has a size limit; passing too many part entries will fail
func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string, partInfo ...string) ([]UploadUrlInfo, error) {
fullUrl := UPLOAD_URL
if isFamily {
if y.isFamily() {
fullUrl += "/family"
} else {
fullUrl += "/person"
@ -724,7 +723,7 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uplo
}, Params{
"uploadFileId": uploadFileId,
"partInfo": strings.Join(partInfo, ","),
}, &uploadUrlsResp, isFamily)
}, &uploadUrlsResp)
if err != nil {
return nil, err
}
@ -753,7 +752,7 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, isFamily bool, uplo
}
// Legacy upload; the family cloud does not support overwriting
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress, isFamily bool, overwrite bool) (model.Obj, error) {
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
tempFile, err := file.CacheFullInTempFile()
if err != nil {
return nil, err
@ -764,7 +763,7 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
}
// Create an upload session
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()), isFamily)
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
if err != nil {
return nil, err
}
@ -781,14 +780,14 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
"Expect": "100-continue",
}
if isFamily {
if y.isFamily() {
header["FamilyId"] = fmt.Sprint(y.FamilyID)
header["UploadFileId"] = fmt.Sprint(status.UploadFileId)
} else {
header["Edrive-UploadFileId"] = fmt.Sprint(status.UploadFileId)
}
_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile), isFamily)
_, err := y.put(ctx, status.FileUploadUrl, header, true, io.NopCloser(tempFile))
if err, ok := err.(*RespErr); ok && err.Code != "InputStreamReadError" {
return nil, err
}
@ -803,10 +802,10 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
"uploadFileId": fmt.Sprint(status.UploadFileId),
"resumePolicy": "1",
})
if isFamily {
if y.isFamily() {
req.SetQueryParam("familyId", fmt.Sprint(y.FamilyID))
}
}, &status, isFamily)
}, &status)
if err != nil {
return nil, err
}
@ -816,20 +815,20 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
}
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId, isFamily, overwrite)
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
}
// Create an upload session
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string, isFamily bool) (*CreateUploadFileResp, error) {
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
var uploadInfo CreateUploadFileResp
fullUrl := API_URL + "/createUploadFile.action"
if isFamily {
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if isFamily {
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentID,
@ -850,7 +849,7 @@ func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileM
"isLog": "0",
})
}
}, &uploadInfo, isFamily)
}, &uploadInfo)
if err != nil {
return nil, err
@ -859,11 +858,11 @@ func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileM
}
// Commit the uploaded file
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64, isFamily bool, overwrite bool) (model.Obj, error) {
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
var resp OldCommitUploadFileResp
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if isFamily {
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
"UploadFileId": fmt.Sprint(uploadFileID),
@ -871,13 +870,13 @@ func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string,
})
} else {
req.SetFormData(map[string]string{
"opertype": IF(overwrite, "3", "1"),
"opertype": "3",
"resumePolicy": "1",
"uploadFileId": fmt.Sprint(uploadFileID),
"isLog": "0",
})
}
}, &resp, isFamily)
}, &resp)
if err != nil {
return nil, err
}
@ -896,100 +895,10 @@ func (y *Cloud189PC) isLogin() bool {
return err == nil
}
// Create family cloud transfer folders
func (y *Cloud189PC) createFamilyTransferFolder(count int) (*ring.Ring, error) {
folders := ring.New(count)
var rootFolder Cloud189Folder
_, err := y.post(API_URL+"/family/file/createFolder.action", func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"folderName": "FamilyTransferFolder",
"familyId": y.FamilyID,
})
}, &rootFolder, true)
if err != nil {
return nil, err
}
folderCount := 0
// Fetch existing folders
files, err := y.getFiles(context.TODO(), rootFolder.GetID(), true)
if err != nil {
return nil, err
}
for _, file := range files {
if folder, ok := file.(*Cloud189Folder); ok {
folders.Value = folder
folders = folders.Next()
folderCount++
}
}
// Create new folders
for folderCount < count {
var newFolder Cloud189Folder
_, err := y.post(API_URL+"/family/file/createFolder.action", func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"folderName": uuid.NewString(),
"familyId": y.FamilyID,
"parentId": rootFolder.GetID(),
})
}, &newFolder, true)
if err != nil {
return nil, err
}
folders.Value = &newFolder
folders = folders.Next()
folderCount++
}
return folders, nil
}
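For context, createFamilyTransferFolder above keeps a fixed pool of pre-created transfer folders and hands them out round-robin via container/ring. A minimal standalone sketch of that cycling pattern (the folder names are placeholders, not the driver's real ones):

package main

import (
	"container/ring"
	"fmt"
)

func main() {
	// Fixed-size ring; each slot holds one pre-created transfer folder.
	r := ring.New(3)
	for _, name := range []string{"transfer-a", "transfer-b", "transfer-c"} {
		r.Value = name
		r = r.Next()
	}
	// Hand out folders round-robin: Next() wraps back to the first slot.
	for i := 0; i < 5; i++ {
		fmt.Println(r.Value)
		r = r.Next()
	}
}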
// Clean up the transfer folders
func (y *Cloud189PC) cleanFamilyTransfer(ctx context.Context) error {
var tasks []BatchTaskInfo
r := y.familyTransferFolder
for p := r.Next(); p != r; p = p.Next() {
folder := p.Value.(*Cloud189Folder)
files, err := y.getFiles(ctx, folder.GetID(), true)
if err != nil {
return err
}
for _, file := range files {
tasks = append(tasks, BatchTaskInfo{
FileId: file.GetID(),
FileName: file.GetName(),
IsFolder: BoolToNumber(file.IsDir()),
})
}
}
if len(tasks) > 0 {
// Delete
resp, err := y.CreateBatchTask("DELETE", y.FamilyID, "", nil, tasks...)
if err != nil {
return err
}
err = y.WaitBatchTask("DELETE", resp.TaskID, time.Second)
if err != nil {
return err
}
// Permanently delete (clear the recycle bin)
resp, err = y.CreateBatchTask("CLEAR_RECYCLE", y.FamilyID, "", nil, tasks...)
if err != nil {
return err
}
err = y.WaitBatchTask("CLEAR_RECYCLE", resp.TaskID, time.Second)
return err
}
return nil
}
// Get the info of all family-cloud users
func (y *Cloud189PC) getFamilyInfoList() ([]FamilyInfoResp, error) {
var resp FamilyInfoListResp
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp, true)
_, err := y.get(API_URL+"/family/manage/getFamilyList.action", nil, &resp)
if err != nil {
return nil, err
}
@ -1013,73 +922,6 @@ func (y *Cloud189PC) getFamilyID() (string, error) {
return fmt.Sprint(infos[0].FamilyID), nil
}
// Save a file from the family cloud to the personal cloud
func (y *Cloud189PC) SaveFamilyFileToPersonCloud(ctx context.Context, familyId string, srcObj, dstDir model.Obj, overwrite bool) error {
// _, err := y.post(API_URL+"/family/file/saveFileToMember.action", func(req *resty.Request) {
// req.SetQueryParams(map[string]string{
// "channelId": "home",
// "familyId": familyId,
// "destParentId": destParentId,
// "fileIdList": familyFileId,
// })
// }, nil)
// return err
task := BatchTaskInfo{
FileId: srcObj.GetID(),
FileName: srcObj.GetName(),
IsFolder: BoolToNumber(srcObj.IsDir()),
}
resp, err := y.CreateBatchTask("COPY", familyId, dstDir.GetID(), map[string]string{
"groupId": "null",
"copyType": "2",
"shareId": "null",
}, task)
if err != nil {
return err
}
for {
state, err := y.CheckBatchTask("COPY", resp.TaskID)
if err != nil {
return err
}
switch state.TaskStatus {
case 2:
task.DealWay = IF(overwrite, 3, 2)
// Overwrite the file on conflict
if err := y.ManageBatchTask("COPY", resp.TaskID, dstDir.GetID(), task); err != nil {
return err
}
case 4:
return nil
}
time.Sleep(time.Millisecond * 400)
}
}
func (y *Cloud189PC) CreateBatchTask(aType string, familyID string, targetFolderId string, other map[string]string, taskInfos ...BatchTaskInfo) (*CreateBatchTaskResp, error) {
var resp CreateBatchTaskResp
_, err := y.post(API_URL+"/batch/createBatchTask.action", func(req *resty.Request) {
req.SetFormData(map[string]string{
"type": aType,
"taskInfos": MustString(utils.Json.MarshalToString(taskInfos)),
})
if targetFolderId != "" {
req.SetFormData(map[string]string{"targetFolderId": targetFolderId})
}
if familyID != "" {
req.SetFormData(map[string]string{"familyId": familyID})
}
req.SetFormData(other)
}, &resp, familyID != "")
if err != nil {
return nil, err
}
return &resp, nil
}
// Check the task status
func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStateResp, error) {
var resp BatchTaskStateResp
_, err := y.post(API_URL+"/batch/checkBatchTask.action", func(req *resty.Request) {
@ -1094,37 +936,6 @@ func (y *Cloud189PC) CheckBatchTask(aType string, taskID string) (*BatchTaskStat
return &resp, nil
}
// Get info about conflicting tasks
func (y *Cloud189PC) GetConflictTaskInfo(aType string, taskID string) (*BatchTaskConflictTaskInfoResp, error) {
var resp BatchTaskConflictTaskInfoResp
_, err := y.post(API_URL+"/batch/getConflictTaskInfo.action", func(req *resty.Request) {
req.SetFormData(map[string]string{
"type": aType,
"taskId": taskID,
})
}, &resp)
if err != nil {
return nil, err
}
return &resp, nil
}
// Handle conflicts
func (y *Cloud189PC) ManageBatchTask(aType string, taskID string, targetFolderId string, taskInfos ...BatchTaskInfo) error {
_, err := y.post(API_URL+"/batch/manageBatchTask.action", func(req *resty.Request) {
req.SetFormData(map[string]string{
"targetFolderId": targetFolderId,
"type": aType,
"taskId": taskID,
"taskInfos": MustString(utils.Json.MarshalToString(taskInfos)),
})
}, nil)
return err
}
var ErrIsConflict = errors.New("there is a conflict with the target object")
// Wait for the task to complete
func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration) error {
for {
state, err := y.CheckBatchTask(aType, taskID)
@ -1133,7 +944,7 @@ func (y *Cloud189PC) WaitBatchTask(aType string, taskID string, t time.Duration)
}
switch state.TaskStatus {
case 2:
return ErrIsConflict
return errors.New("there is a conflict with the target object")
case 4:
return nil
}

View File

@ -7,7 +7,6 @@ import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
)
@ -46,9 +45,6 @@ func (d *Alias) Init(ctx context.Context) error {
d.oneKey = k
}
d.autoFlatten = true
} else {
d.oneKey = ""
d.autoFlatten = false
}
return nil
}
@ -91,9 +87,8 @@ func (d *Alias) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
return nil, errs.ObjectNotFound
}
var objs []model.Obj
fsArgs := &fs.ListArgs{NoLog: true, Refresh: args.Refresh}
for _, dst := range dsts {
tmp, err := d.list(ctx, dst, sub, fsArgs)
tmp, err := d.list(ctx, dst, sub)
if err == nil {
objs = append(objs, tmp...)
}
@ -116,26 +111,4 @@ func (d *Alias) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
return nil, errs.ObjectNotFound
}
func (d *Alias) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
reqPath, err := d.getReqPath(ctx, srcObj)
if err == nil {
return fs.Rename(ctx, *reqPath, newName)
}
if errs.IsNotImplement(err) {
return errors.New("same-name files cannot be Rename")
}
return err
}
func (d *Alias) Remove(ctx context.Context, obj model.Obj) error {
reqPath, err := d.getReqPath(ctx, obj)
if err == nil {
return fs.Remove(ctx, *reqPath)
}
if errs.IsNotImplement(err) {
return errors.New("same-name files cannot be Delete")
}
return err
}
var _ driver.Driver = (*Alias)(nil)

View File

@ -9,25 +9,19 @@ type Addition struct {
// Usually one of two
// driver.RootPath
// define other
Paths string `json:"paths" required:"true" type:"text"`
ProtectSameName bool `json:"protect_same_name" default:"true" required:"false" help:"Protects same-name files from Delete or Rename"`
Paths string `json:"paths" required:"true" type:"text"`
}
var config = driver.Config{
Name: "Alias",
LocalSort: true,
NoCache: true,
NoUpload: true,
DefaultRoot: "/",
ProxyRangeOption: true,
Name: "Alias",
LocalSort: true,
NoCache: true,
NoUpload: true,
DefaultRoot: "/",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Alias{
Addition: Addition{
ProtectSameName: true,
},
}
return &Alias{}
})
}

View File

@ -6,7 +6,6 @@ import (
stdpath "path"
"strings"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/sign"
@ -16,7 +15,7 @@ import (
func (d *Alias) listRoot() []model.Obj {
var objs []model.Obj
for k := range d.pathMap {
for k, _ := range d.pathMap {
obj := model.Object{
Name: k,
IsFolder: true,
@ -65,8 +64,8 @@ func (d *Alias) get(ctx context.Context, path string, dst, sub string) (model.Ob
}, nil
}
func (d *Alias) list(ctx context.Context, dst, sub string, args *fs.ListArgs) ([]model.Obj, error) {
objs, err := fs.List(ctx, stdpath.Join(dst, sub), args)
func (d *Alias) list(ctx context.Context, dst, sub string) ([]model.Obj, error) {
objs, err := fs.List(ctx, stdpath.Join(dst, sub), &fs.ListArgs{NoLog: true})
// the obj must implement the model.SetPath interface
// return objs, err
if err != nil {
@ -103,49 +102,13 @@ func (d *Alias) link(ctx context.Context, dst, sub string, args model.LinkArgs)
return nil, err
}
if common.ShouldProxy(storage, stdpath.Base(sub)) {
link := &model.Link{
return &model.Link{
URL: fmt.Sprintf("%s/p%s?sign=%s",
common.GetApiUrl(args.HttpReq),
utils.EncodePath(reqPath, true),
sign.Sign(reqPath)),
}
if args.HttpReq != nil && d.ProxyRange {
link.RangeReadCloser = common.NoProxyRange
}
return link, nil
}, nil
}
link, _, err := fs.Link(ctx, reqPath, args)
return link, err
}
func (d *Alias) getReqPath(ctx context.Context, obj model.Obj) (*string, error) {
root, sub := d.getRootAndPath(obj.GetPath())
if sub == "" {
return nil, errs.NotSupport
}
dsts, ok := d.pathMap[root]
if !ok {
return nil, errs.ObjectNotFound
}
var reqPath *string
for _, dst := range dsts {
path := stdpath.Join(dst, sub)
_, err := fs.Get(ctx, path, &fs.GetArgs{NoLog: true})
if err != nil {
continue
}
if !d.ProtectSameName {
return &path, nil
}
if ok {
ok = false
} else {
return nil, errs.NotImplement
}
reqPath = &path
}
if reqPath == nil {
return nil, errs.ObjectNotFound
}
return reqPath, nil
}
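The removed getReqPath above reuses the ok flag from the pathMap lookup as a "seen a match already" marker, which is easy to misread. An equivalent, more explicit sketch of the same-name protection check, assuming the same imports as the surrounding file (context, stdpath, internal/fs, internal/errs); the helper name is made up:

// Resolve the object's sub-path against every destination; with protection
// enabled, refuse to act when it exists under more than one of them.
func resolveUnique(ctx context.Context, dsts []string, sub string, protect bool) (*string, error) {
	var matches []string
	for _, dst := range dsts {
		p := stdpath.Join(dst, sub)
		if _, err := fs.Get(ctx, p, &fs.GetArgs{NoLog: true}); err == nil {
			matches = append(matches, p)
		}
	}
	switch {
	case len(matches) == 0:
		return nil, errs.ObjectNotFound
	case !protect || len(matches) == 1:
		return &matches[0], nil
	default:
		return nil, errs.NotImplement // a same-name object exists under several destinations
	}
}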

View File

@ -6,6 +6,7 @@ import (
"io"
"net/http"
"path"
"strconv"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
@ -15,7 +16,6 @@ import (
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type AListV3 struct {
@ -41,7 +41,7 @@ func (d *AListV3) Init(ctx context.Context) error {
return err
}
// if the username is not empty and the username is not the same as the current username, then login again
if d.Username != resp.Data.Username {
if d.Username != "" && d.Username != resp.Data.Username {
err = d.login()
if err != nil {
return err
@ -108,19 +108,11 @@ func (d *AListV3) List(ctx context.Context, dir model.Obj, args model.ListArgs)
func (d *AListV3) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp common.Resp[FsGetResp]
// if PassUAToUpsteam is true, then pass the user-agent to the upstream
userAgent := base.UserAgent
if d.PassUAToUpsteam {
userAgent = args.Header.Get("user-agent")
if userAgent == "" {
userAgent = base.UserAgent
}
}
_, err := d.request("/fs/get", http.MethodPost, func(req *resty.Request) {
req.SetResult(&resp).SetBody(FsGetReq{
Path: file.GetPath(),
Password: d.MetaPassword,
}).SetHeader("user-agent", userAgent)
})
})
if err != nil {
return nil, err
@ -182,41 +174,14 @@ func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
}
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
req, err := http.NewRequestWithContext(ctx, http.MethodPut, d.Address+"/api/fs/put", stream)
if err != nil {
return err
}
req.Header.Set("Authorization", d.Token)
req.Header.Set("File-Path", path.Join(dstDir.GetPath(), stream.GetName()))
req.Header.Set("Password", d.MetaPassword)
req.ContentLength = stream.GetSize()
// client := base.NewHttpClient()
// client.Timeout = time.Hour * 6
res, err := base.HttpClient.Do(req)
if err != nil {
return err
}
bytes, err := io.ReadAll(res.Body)
if err != nil {
return err
}
log.Debugf("[alist_v3] response body: %s", string(bytes))
if res.StatusCode >= 400 {
return fmt.Errorf("request failed, status: %s", res.Status)
}
code := utils.Json.Get(bytes, "code").ToInt()
if code != 200 {
if code == 401 || code == 403 {
err = d.login()
if err != nil {
return err
}
}
return fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(bytes, "message").ToString())
}
return nil
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
SetHeader("Password", d.MetaPassword).
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
SetContentLength(true).
SetBody(io.ReadCloser(stream))
})
return err
}
//func (d *AList) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {

View File

@ -7,20 +7,18 @@ import (
type Addition struct {
driver.RootPath
Address string `json:"url" required:"true"`
MetaPassword string `json:"meta_password"`
Username string `json:"username"`
Password string `json:"password"`
Token string `json:"token"`
PassUAToUpsteam bool `json:"pass_ua_to_upsteam" default:"true"`
Address string `json:"url" required:"true"`
MetaPassword string `json:"meta_password"`
Username string `json:"username"`
Password string `json:"password"`
Token string `json:"token"`
}
var config = driver.Config{
Name: "AList V3",
LocalSort: true,
DefaultRoot: "/",
CheckStatus: true,
ProxyRangeOption: true,
Name: "AList V3",
LocalSort: true,
DefaultRoot: "/",
CheckStatus: true,
}
func init() {

View File

@ -13,9 +13,6 @@ import (
)
func (d *AListV3) login() error {
if d.Username == "" {
return nil
}
var resp common.Resp[LoginResp]
_, err := d.request("/auth/login", http.MethodPost, func(req *resty.Request) {
req.SetResult(&resp).SetBody(base.Json{

View File

@ -52,7 +52,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
return err
}
// get driver id
res, err, _ := d.request("https://api.alipan.com/v2/user/get", http.MethodPost, nil, nil)
res, err, _ := d.request("https://api.aliyundrive.com/v2/user/get", http.MethodPost, nil, nil)
if err != nil {
return err
}
@ -106,7 +106,7 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
"file_id": file.GetID(),
"expire_sec": 14400,
}
res, err, _ := d.request("https://api.alipan.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
res, err, _ := d.request("https://api.aliyundrive.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
if err != nil {
@ -114,14 +114,14 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.alipan.com/"},
"Referer": []string{"https://www.aliyundrive.com/"},
},
URL: utils.Json.Get(res, "url").ToString(),
}, nil
}
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
_, err, _ := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
@ -139,7 +139,7 @@ func (d *AliDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
}
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
_, err, _ := d.request("https://api.alipan.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.aliyundrive.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"check_name_mode": "refuse",
"drive_id": d.DriveId,
@ -156,7 +156,7 @@ func (d *AliDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
}
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
_, err, _ := d.request("https://api.alipan.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.aliyundrive.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": obj.GetID(),
@ -194,7 +194,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
}
if d.RapidUpload {
buf := bytes.NewBuffer(make([]byte, 0, 1024))
utils.CopyWithBufferN(buf, file, 1024)
io.CopyN(buf, file, 1024)
reqBody["pre_hash"] = utils.HashData(utils.SHA1, buf.Bytes())
if localFile != nil {
if _, err := localFile.Seek(0, io.SeekStart); err != nil {
@ -216,7 +216,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
}
var resp UploadResp
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
@ -270,7 +270,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil && e.Code != "PreHashMatched" {
@ -309,7 +309,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
}
}
var resp2 base.Json
_, err, e = d.request("https://api.alipan.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"drive_id": d.DriveId,
"file_id": resp.FileId,
@ -334,10 +334,10 @@ func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}
}
switch args.Method {
case "doc_preview":
url = "https://api.alipan.com/v2/file/get_office_preview_url"
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
data["access_token"] = d.AccessToken
case "video_preview":
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
data["url_expire_sec"] = 14400
default:

View File

@ -26,7 +26,7 @@ func (d *AliDrive) createSession() error {
state.retry = 0
return fmt.Errorf("createSession failed after three retries")
}
_, err, _ := d.request("https://api.alipan.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"deviceName": "samsung",
"modelName": "SM-G9810",
@ -42,7 +42,7 @@ func (d *AliDrive) createSession() error {
}
// func (d *AliDrive) renewSession() error {
// _, err, _ := d.request("https://api.alipan.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
// return err
// }
@ -58,7 +58,7 @@ func (d *AliDrive) sign() {
// do others that not defined in Driver interface
func (d *AliDrive) refreshToken() error {
url := "https://auth.alipan.com/v2/account/token"
url := "https://auth.aliyundrive.com/v2/account/token"
var resp base.TokenResp
var e RespErr
_, err := base.RestyClient.R().
@ -85,7 +85,7 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
req := base.RestyClient.R()
state, ok := global.Load(d.UserID)
if !ok {
if url == "https://api.alipan.com/v2/user/get" {
if url == "https://api.aliyundrive.com/v2/user/get" {
state = &State{}
} else {
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
@ -94,8 +94,8 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
req.SetHeaders(map[string]string{
"Authorization": "Bearer\t" + d.AccessToken,
"content-type": "application/json",
"origin": "https://www.alipan.com",
"Referer": "https://alipan.com/",
"origin": "https://www.aliyundrive.com",
"Referer": "https://aliyundrive.com/",
"X-Signature": state.signature,
"x-request-id": uuid.NewString(),
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
@ -158,7 +158,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
"url_expire_sec": 14400,
}
_, err, _ := d.request("https://api.alipan.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
_, err, _ := d.request("https://api.aliyundrive.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
@ -172,7 +172,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
}
func (d *AliDrive) batch(srcId, dstId string, url string) error {
res, err, _ := d.request("https://api.alipan.com/v3/batch", http.MethodPost, func(req *resty.Request) {
res, err, _ := d.request("https://api.aliyundrive.com/v3/batch", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"requests": []base.Json{
{

View File

@ -93,7 +93,7 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
}
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
}
exp := time.Minute
exp := time.Hour
return &model.Link{
URL: url,
Expiration: &exp,

View File

@ -36,7 +36,7 @@ var config = driver.Config{
func init() {
op.RegisterDriver(func() driver.Driver {
return &AliyundriveOpen{
base: "https://openapi.alipan.com",
base: "https://openapi.aliyundrive.com",
}
})
}

View File

@ -136,7 +136,7 @@ func (d *AliyundriveOpen) calProofCode(stream model.FileStreamer) (string, error
if err != nil {
return "", err
}
_, err = utils.CopyWithBufferN(buf, reader, length)
_, err = io.CopyN(buf, reader, length)
if err != nil {
return "", err
}
@ -164,7 +164,7 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
count := int(math.Ceil(float64(stream.GetSize()) / float64(partSize)))
createData["part_info_list"] = makePartInfos(count)
// rapid upload
rapidUpload := !stream.IsForceStreamUpload() && stream.GetSize() > 100*utils.KB && d.RapidUpload
rapidUpload := stream.GetSize() > 100*utils.KB && d.RapidUpload
if rapidUpload {
log.Debugf("[aliyundrive_open] start cal pre_hash")
// read 1024 bytes to calculate pre hash
@ -242,16 +242,13 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
if remain := stream.GetSize() - offset; length > remain {
length = remain
}
rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
if rapidUpload {
srd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
if err != nil {
return nil, err
}
rd = utils.NewMultiReadable(srd)
//rd := utils.NewMultiReadable(io.LimitReader(stream, partSize))
rd, err := stream.RangeRead(http_range.Range{Start: offset, Length: length})
if err != nil {
return nil, err
}
err = retry.Do(func() error {
rd.Reset()
//rd.Reset()
return d.uploadPart(ctx, rd, createResp.PartInfoList[i])
},
retry.Attempts(3),

View File

@ -105,7 +105,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
"share_id": d.ShareId,
}
var resp ShareLinkResp
_, err := d.request("https://api.alipan.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
})
if err != nil {
@ -113,7 +113,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
}
return &model.Link{
Header: http.Header{
"Referer": []string{"https://www.alipan.com/"},
"Referer": []string{"https://www.aliyundrive.com/"},
},
URL: resp.DownloadUrl,
}, nil
@ -128,9 +128,9 @@ func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (int
}
switch args.Method {
case "doc_preview":
url = "https://api.alipan.com/v2/file/get_office_preview_url"
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
case "video_preview":
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
data["category"] = "live_transcoding"
default:
return nil, errs.NotSupport

View File

@ -16,7 +16,7 @@ const (
)
func (d *AliyundriveShare) refreshToken() error {
url := "https://auth.alipan.com/v2/account/token"
url := "https://auth.aliyundrive.com/v2/account/token"
var resp base.TokenResp
var e ErrorResp
_, err := base.RestyClient.R().
@ -47,7 +47,7 @@ func (d *AliyundriveShare) getShareToken() error {
var resp ShareTokenResp
_, err := base.RestyClient.R().
SetResult(&resp).SetError(&e).SetBody(data).
Post("https://api.alipan.com/v2/share_link/get_share_token")
Post("https://api.aliyundrive.com/v2/share_link/get_share_token")
if err != nil {
return err
}
@ -116,7 +116,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
SetHeader("x-share-token", d.ShareToken).
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
SetResult(&resp).SetError(&e).SetBody(data).
Post("https://api.alipan.com/adrive/v3/file/list")
Post("https://api.aliyundrive.com/adrive/v3/file/list")
if err != nil {
return nil, err
}

View File

@ -2,7 +2,6 @@ package drivers
import (
_ "github.com/alist-org/alist/v3/drivers/115"
_ "github.com/alist-org/alist/v3/drivers/115_share"
_ "github.com/alist-org/alist/v3/drivers/123"
_ "github.com/alist-org/alist/v3/drivers/123_link"
_ "github.com/alist-org/alist/v3/drivers/123_share"
@ -18,29 +17,23 @@ import (
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
_ "github.com/alist-org/alist/v3/drivers/chaoxing"
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
_ "github.com/alist-org/alist/v3/drivers/crypt"
_ "github.com/alist-org/alist/v3/drivers/dropbox"
_ "github.com/alist-org/alist/v3/drivers/ftp"
_ "github.com/alist-org/alist/v3/drivers/google_drive"
_ "github.com/alist-org/alist/v3/drivers/google_photo"
_ "github.com/alist-org/alist/v3/drivers/halalcloud"
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
_ "github.com/alist-org/alist/v3/drivers/lanzou"
_ "github.com/alist-org/alist/v3/drivers/local"
_ "github.com/alist-org/alist/v3/drivers/mediatrack"
_ "github.com/alist-org/alist/v3/drivers/mega"
_ "github.com/alist-org/alist/v3/drivers/mopan"
_ "github.com/alist-org/alist/v3/drivers/netease_music"
_ "github.com/alist-org/alist/v3/drivers/onedrive"
_ "github.com/alist-org/alist/v3/drivers/onedrive_app"
_ "github.com/alist-org/alist/v3/drivers/onedrive_sharelink"
_ "github.com/alist-org/alist/v3/drivers/pikpak"
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
_ "github.com/alist-org/alist/v3/drivers/quqi"
_ "github.com/alist-org/alist/v3/drivers/s3"
_ "github.com/alist-org/alist/v3/drivers/seafile"
_ "github.com/alist-org/alist/v3/drivers/sftp"
@ -48,13 +41,10 @@ import (
_ "github.com/alist-org/alist/v3/drivers/teambition"
_ "github.com/alist-org/alist/v3/drivers/terabox"
_ "github.com/alist-org/alist/v3/drivers/thunder"
_ "github.com/alist-org/alist/v3/drivers/thunder_browser"
_ "github.com/alist-org/alist/v3/drivers/thunderx"
_ "github.com/alist-org/alist/v3/drivers/trainbit"
_ "github.com/alist-org/alist/v3/drivers/url_tree"
_ "github.com/alist-org/alist/v3/drivers/uss"
_ "github.com/alist-org/alist/v3/drivers/virtual"
_ "github.com/alist-org/alist/v3/drivers/vtencent"
_ "github.com/alist-org/alist/v3/drivers/webdav"
_ "github.com/alist-org/alist/v3/drivers/weiyun"
_ "github.com/alist-org/alist/v3/drivers/wopan"

View File

@ -165,16 +165,9 @@ func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream mo
if err != nil {
return nil, err
}
// Fix the timestamps; see the **Note** on the Put method for the reason
newFile.Ctime = stream.CreateTime().Unix()
newFile.Mtime = stream.ModTime().Unix()
return fileToObj(newFile), nil
}
// Put
//
// **Note**: as of 2024/04/20 the Baidu Netdisk API always returns the current
// time instead of the file's own time, while the drive itself stores the real
// file time, so the timestamps are overwritten here to keep the cache
// consistent with the drive (a minimal sketch of this pattern follows at the
// end of this file's diff).
func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// rapid upload
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
@ -211,7 +204,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
if i == count {
byteSize = lastBlockSize
}
_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
if err != nil && err != io.EOF {
return nil, err
}
@ -252,9 +245,9 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
log.Debugf("%+v", precreateResp)
if precreateResp.ReturnType == 2 {
//rapid upload, since got md5 match from baidu server
// Fix the timestamps; see the **Note** on the Put method for the reason
precreateResp.File.Ctime = ctime
precreateResp.File.Mtime = mtime
if err != nil {
return nil, err
}
return fileToObj(precreateResp.File), nil
}
}
@ -305,9 +298,6 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
if err != nil {
return nil, err
}
// Fix the timestamps; see the **Note** on the Put method for the reason
newFile.Ctime = ctime
newFile.Mtime = mtime
return fileToObj(newFile), nil
}
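A minimal sketch of the timestamp fix described in the Note above; the field and parameter names mirror the driver's code but are illustrative only:

// Prefer the times carried by the upload stream (the file's real times)
// over whatever the create/precreate response reports.
func fixTimes(f *File, s model.FileStreamer) {
	f.Ctime = s.CreateTime().Unix()
	f.Mtime = s.ModTime().Unix()
}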

View File

@ -8,16 +8,15 @@ import (
type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
driver.RootPath
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
CustomCrackUA string `json:"custom_crack_ua" required:"true" default:"netdisk"`
AccessToken string
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
UploadAPI string `json:"upload_api" default:"https://d.pcs.baidu.com"`
CustomUploadPartSize int64 `json:"custom_upload_part_size" type:"number" default:"0" help:"0 for auto"`
OrderBy string `json:"order_by" type:"select" options:"name,time,size" default:"name"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
DownloadAPI string `json:"download_api" type:"select" options:"official,crack" default:"official"`
ClientID string `json:"client_id" required:"true" default:"iYCeC9g08h5vuP9UqvPHKKSVrKFXGa1v"`
ClientSecret string `json:"client_secret" required:"true" default:"jXiFMOPVPCWlO2M5CwWQzffpNPaGTRBG"`
CustomCrackUA string `json:"custom_crack_ua" required:"true" default:"netdisk"`
AccessToken string
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
UploadAPI string `json:"upload_api" default:"https://d.pcs.baidu.com"`
}
var config = driver.Config{

View File

@ -249,9 +249,6 @@ const (
)
func (d *BaiduNetdisk) getSliceSize() int64 {
if d.CustomUploadPartSize != 0 {
return d.CustomUploadPartSize
}
switch d.vipType {
case 1:
return VipSliceSize

View File

@ -261,7 +261,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
if i == count {
byteSize = lastBlockSize
}
_, err := utils.CopyWithBufferN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
_, err := io.CopyN(io.MultiWriter(fileMd5H, sliceMd5H, slicemd5H2Write), tempFile, byteSize)
if err != nil && err != io.EOF {
return nil, err
}

View File

@ -33,7 +33,6 @@ func NewRestyClient() *resty.Client {
client := resty.New().
SetHeader("user-agent", UserAgent).
SetRetryCount(3).
SetRetryResetReaders(true).
SetTimeout(DefaultTimeout).
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
return client

View File

@ -1,297 +0,0 @@
package chaoxing
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"mime/multipart"
"net/http"
"net/url"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/cron"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/go-resty/resty/v2"
"google.golang.org/appengine/log"
)
type ChaoXing struct {
model.Storage
Addition
cron *cron.Cron
config driver.Config
conf Conf
}
func (d *ChaoXing) Config() driver.Config {
return d.config
}
func (d *ChaoXing) GetAddition() driver.Additional {
return &d.Addition
}
func (d *ChaoXing) refreshCookie() error {
cookie, err := d.Login()
if err != nil {
d.Status = err.Error()
op.MustSaveDriverStorage(d)
return nil
}
d.Addition.Cookie = cookie
op.MustSaveDriverStorage(d)
return nil
}
func (d *ChaoXing) Init(ctx context.Context) error {
err := d.refreshCookie()
if err != nil {
log.Errorf(ctx, err.Error())
}
d.cron = cron.NewCron(time.Hour * 12)
d.cron.Do(func() {
err = d.refreshCookie()
if err != nil {
log.Errorf(ctx, err.Error())
}
})
return nil
}
func (d *ChaoXing) Drop(ctx context.Context) error {
d.cron.Stop()
return nil
}
func (d *ChaoXing) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.GetFiles(dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *ChaoXing) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
var resp DownResp
ua := d.conf.ua
fileId := strings.Split(file.GetID(), "$")[1]
_, err := d.requestDownload("/screen/note_note/files/status/"+fileId, http.MethodPost, func(req *resty.Request) {
req.SetHeader("User-Agent", ua)
}, &resp)
if err != nil {
return nil, err
}
u := resp.Download
return &model.Link{
URL: u,
Header: http.Header{
"Cookie": []string{d.Cookie},
"Referer": []string{d.conf.referer},
"User-Agent": []string{ua},
},
Concurrency: 2,
PartSize: 10 * utils.MB,
}, nil
}
func (d *ChaoXing) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"name": dirName,
"pid": parentDir.GetID(),
}
var resp ListFileResp
_, err := d.request("/pc/resource/addResourceFolder", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderIds": srcObj.GetID(),
"targetId": dstDir.GetID(),
}
if !srcObj.IsDir() {
query = map[string]string{
"bbsid": d.Addition.Bbsid,
"recIds": strings.Split(srcObj.GetID(), "$")[0],
"targetId": dstDir.GetID(),
}
}
var resp ListFileResp
_, err := d.request("/pc/resource/moveResource", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if !resp.Status {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderId": srcObj.GetID(),
"name": newName,
}
path := "/pc/resource/updateResourceFolderName"
if !srcObj.IsDir() {
// path = "/pc/resource/updateResourceFileName"
// query = map[string]string{
// "bbsid": d.Addition.Bbsid,
// "recIds": strings.Split(srcObj.GetID(), "$")[0],
// "name": newName,
// }
return errors.New("此网盘不支持修改文件名")
}
var resp ListFileResp
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
// TODO copy obj, optional
return errs.NotImplement
}
func (d *ChaoXing) Remove(ctx context.Context, obj model.Obj) error {
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderIds": obj.GetID(),
}
path := "/pc/resource/deleteResourceFolder"
var resp ListFileResp
if !obj.IsDir() {
path = "/pc/resource/deleteResourceFile"
query = map[string]string{
"bbsid": d.Addition.Bbsid,
"recIds": strings.Split(obj.GetID(), "$")[0],
}
}
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error:%s", resp.Msg)
return errors.New(msg)
}
return nil
}
func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
var resp UploadDataRsp
_, err := d.request("https://noteyd.chaoxing.com/pc/files/getUploadConfig", http.MethodGet, func(req *resty.Request) {
}, &resp)
if err != nil {
return err
}
if resp.Result != 1 {
return errors.New("get upload data error")
}
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
filePart, err := writer.CreateFormFile("file", stream.GetName())
if err != nil {
return err
}
_, err = utils.CopyWithBuffer(filePart, stream)
if err != nil {
return err
}
err = writer.WriteField("_token", resp.Msg.Token)
if err != nil {
return err
}
err = writer.WriteField("puid", fmt.Sprintf("%d", resp.Msg.Puid))
if err != nil {
fmt.Println("Error writing param2 to request body:", err)
return err
}
err = writer.Close()
if err != nil {
return err
}
req, err := http.NewRequest("POST", "https://pan-yz.chaoxing.com/upload", body)
if err != nil {
return err
}
req.Header.Set("Content-Type", writer.FormDataContentType())
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
resps, err := http.DefaultClient.Do(req)
if err != nil {
return err
}
defer resps.Body.Close()
bodys, err := io.ReadAll(resps.Body)
if err != nil {
return err
}
var fileRsp UploadFileDataRsp
err = json.Unmarshal(bodys, &fileRsp)
if err != nil {
return err
}
if fileRsp.Msg != "success" {
return errors.New(fileRsp.Msg)
}
uploadDoneParam := UploadDoneParam{Key: fileRsp.ObjectID, Cataid: "100000019", Param: fileRsp.Data}
params, err := json.Marshal(uploadDoneParam)
if err != nil {
return err
}
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"pid": dstDir.GetID(),
"type": "yunpan",
"params": url.QueryEscape("[" + string(params) + "]"),
}
var respd ListFileResp
_, err = d.request("/pc/resource/addResource", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &respd)
if err != nil {
return err
}
if respd.Result != 1 {
msg := fmt.Sprintf("error:%v", resp.Msg)
return errors.New(msg)
}
return nil
}
var _ driver.Driver = (*ChaoXing)(nil)

View File

@ -1,47 +0,0 @@
package chaoxing
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
// This driver mounts the ChaoXing (Xuexitong) group drive and requires a proxy to use.
// After logging in to ChaoXing, open your personal space, go to Groups, create a new group and open it;
// the bbsid parameter is right in that URL (a bbsid-extraction sketch follows the Addition struct below).
// The system limits a single file to 2 GB; there is no total-capacity limit.
type Addition struct {
// ChaoXing username and password
UserName string `json:"user_name" required:"true"`
Password string `json:"password" required:"true"`
// Taken from the URL of the group you created
Bbsid string `json:"bbsid" required:"true"`
driver.RootID
// Optional; the driver logs in and fetches it automatically
Cookie string `json:"cookie"`
}
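A hypothetical helper showing how the bbsid mentioned in the note above can be pulled out of the group page URL; the URL below is made up for illustration:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	// Paste your own group URL here; bbsid is one of its query parameters.
	u, _ := url.Parse("https://groupweb.chaoxing.com/pc/topic/index?bbsid=1234567")
	fmt.Println(u.Query().Get("bbsid")) // 1234567
}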
type Conf struct {
ua string
referer string
api string
DowloadApi string
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &ChaoXing{
config: driver.Config{
Name: "ChaoXingGroupDrive",
OnlyProxy: true,
OnlyLocal: false,
DefaultRoot: "-1",
NoOverwriteUpload: true,
},
conf: Conf{
ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
referer: "https://chaoxing.com/",
api: "https://groupweb.chaoxing.com",
DowloadApi: "https://noteyd.chaoxing.com",
},
}
})
}

View File

@ -1,276 +0,0 @@
package chaoxing
import (
"bytes"
"fmt"
"strconv"
"time"
"github.com/alist-org/alist/v3/internal/model"
)
type Resp struct {
Result int `json:"result"`
}
type UserAuth struct {
GroupAuth struct {
AddData int `json:"addData"`
AddDataFolder int `json:"addDataFolder"`
AddLebel int `json:"addLebel"`
AddManager int `json:"addManager"`
AddMem int `json:"addMem"`
AddTopicFolder int `json:"addTopicFolder"`
AnonymousAddReply int `json:"anonymousAddReply"`
AnonymousAddTopic int `json:"anonymousAddTopic"`
BatchOperation int `json:"batchOperation"`
DelData int `json:"delData"`
DelDataFolder int `json:"delDataFolder"`
DelMem int `json:"delMem"`
DelTopicFolder int `json:"delTopicFolder"`
Dismiss int `json:"dismiss"`
ExamEnc string `json:"examEnc"`
GroupChat int `json:"groupChat"`
IsShowCircleChatButton int `json:"isShowCircleChatButton"`
IsShowCircleCloudButton int `json:"isShowCircleCloudButton"`
IsShowCompanyButton int `json:"isShowCompanyButton"`
Join int `json:"join"`
MemberShowRankSet int `json:"memberShowRankSet"`
ModifyDataFolder int `json:"modifyDataFolder"`
ModifyExpose int `json:"modifyExpose"`
ModifyName int `json:"modifyName"`
ModifyShowPic int `json:"modifyShowPic"`
ModifyTopicFolder int `json:"modifyTopicFolder"`
ModifyVisibleState int `json:"modifyVisibleState"`
OnlyMgrScoreSet int `json:"onlyMgrScoreSet"`
Quit int `json:"quit"`
SendNotice int `json:"sendNotice"`
ShowActivityManage int `json:"showActivityManage"`
ShowActivitySet int `json:"showActivitySet"`
ShowAttentionSet int `json:"showAttentionSet"`
ShowAutoClearStatus int `json:"showAutoClearStatus"`
ShowBarcode int `json:"showBarcode"`
ShowChatRoomSet int `json:"showChatRoomSet"`
ShowCircleActivitySet int `json:"showCircleActivitySet"`
ShowCircleSet int `json:"showCircleSet"`
ShowCmem int `json:"showCmem"`
ShowDataFolder int `json:"showDataFolder"`
ShowDelReason int `json:"showDelReason"`
ShowForward int `json:"showForward"`
ShowGroupChat int `json:"showGroupChat"`
ShowGroupChatSet int `json:"showGroupChatSet"`
ShowGroupSquareSet int `json:"showGroupSquareSet"`
ShowLockAddSet int `json:"showLockAddSet"`
ShowManager int `json:"showManager"`
ShowManagerIdentitySet int `json:"showManagerIdentitySet"`
ShowNeedDelReasonSet int `json:"showNeedDelReasonSet"`
ShowNotice int `json:"showNotice"`
ShowOnlyManagerReplySet int `json:"showOnlyManagerReplySet"`
ShowRank int `json:"showRank"`
ShowRank2 int `json:"showRank2"`
ShowRecycleBin int `json:"showRecycleBin"`
ShowReplyByClass int `json:"showReplyByClass"`
ShowReplyNeedCheck int `json:"showReplyNeedCheck"`
ShowSignbanSet int `json:"showSignbanSet"`
ShowSpeechSet int `json:"showSpeechSet"`
ShowTopicCheck int `json:"showTopicCheck"`
ShowTopicNeedCheck int `json:"showTopicNeedCheck"`
ShowTransferSet int `json:"showTransferSet"`
} `json:"groupAuth"`
OperationAuth struct {
Add int `json:"add"`
AddTopicToFolder int `json:"addTopicToFolder"`
ChoiceSet int `json:"choiceSet"`
DelTopicFromFolder int `json:"delTopicFromFolder"`
Delete int `json:"delete"`
Reply int `json:"reply"`
ScoreSet int `json:"scoreSet"`
TopSet int `json:"topSet"`
Update int `json:"update"`
} `json:"operationAuth"`
}
// The JSON content (the "content" field) of files uploaded from the Xuexitong mobile app
// differs from files uploaded through the web client:
// web client JSON:    "puid": 54321, "size": 12345
// mobile client JSON: "puid": "54321", "size": "12345"
// (a standalone sketch of the parsing trick follows the UnmarshalJSON method below)
type int_str int
// Parses JSON numbers given either as bare numbers or as quoted strings
func (ios *int_str) UnmarshalJSON(data []byte) error {
intValue, err := strconv.Atoi(string(bytes.Trim(data, "\"")))
if err != nil {
return err
}
*ios = int_str(intValue)
return nil
}
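A standalone sketch of the same decoding technique: a numeric type whose UnmarshalJSON accepts both the web payload ("size": 12345) and the mobile payload ("size": "12345"); the type name flexInt is made up for this example:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"strconv"
)

type flexInt int

// Strip surrounding quotes, if any, then parse the digits.
func (f *flexInt) UnmarshalJSON(data []byte) error {
	n, err := strconv.Atoi(string(bytes.Trim(data, "\"")))
	if err != nil {
		return err
	}
	*f = flexInt(n)
	return nil
}

func main() {
	var web, mobile struct {
		Size flexInt `json:"size"`
	}
	_ = json.Unmarshal([]byte(`{"size": 12345}`), &web)      // web client form
	_ = json.Unmarshal([]byte(`{"size": "12345"}`), &mobile) // mobile client form
	fmt.Println(web.Size, mobile.Size)                       // 12345 12345
}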
type File struct {
Cataid int `json:"cataid"`
Cfid int `json:"cfid"`
Content struct {
Cfid int `json:"cfid"`
Pid int `json:"pid"`
FolderName string `json:"folderName"`
ShareType int `json:"shareType"`
Preview string `json:"preview"`
Filetype string `json:"filetype"`
PreviewURL string `json:"previewUrl"`
IsImg bool `json:"isImg"`
ParentPath string `json:"parentPath"`
Icon string `json:"icon"`
Suffix string `json:"suffix"`
Duration int `json:"duration"`
Pantype string `json:"pantype"`
Puid int_str `json:"puid"`
Filepath string `json:"filepath"`
Crc string `json:"crc"`
Isfile bool `json:"isfile"`
Residstr string `json:"residstr"`
ObjectID string `json:"objectId"`
Extinfo string `json:"extinfo"`
Thumbnail string `json:"thumbnail"`
Creator int `json:"creator"`
ResTypeValue int `json:"resTypeValue"`
UploadDateFormat string `json:"uploadDateFormat"`
DisableOpt bool `json:"disableOpt"`
DownPath string `json:"downPath"`
Sort int `json:"sort"`
Topsort int `json:"topsort"`
Restype string `json:"restype"`
Size int_str `json:"size"`
UploadDate int64 `json:"uploadDate"`
FileSize string `json:"fileSize"`
Name string `json:"name"`
FileID string `json:"fileId"`
} `json:"content"`
CreatorID int `json:"creatorId"`
DesID string `json:"des_id"`
ID int `json:"id"`
Inserttime int64 `json:"inserttime"`
Key string `json:"key"`
Norder int `json:"norder"`
OwnerID int `json:"ownerId"`
OwnerType int `json:"ownerType"`
Path string `json:"path"`
Rid int `json:"rid"`
Status int `json:"status"`
Topsign int `json:"topsign"`
}
type ListFileResp struct {
Msg string `json:"msg"`
Result int `json:"result"`
Status bool `json:"status"`
UserAuth UserAuth `json:"userAuth"`
List []File `json:"list"`
}
type DownResp struct {
Msg string `json:"msg"`
Duration int `json:"duration"`
Download string `json:"download"`
FileStatus string `json:"fileStatus"`
URL string `json:"url"`
Status bool `json:"status"`
}
type UploadDataRsp struct {
Result int `json:"result"`
Msg struct {
Puid int `json:"puid"`
Token string `json:"token"`
} `json:"msg"`
}
type UploadFileDataRsp struct {
Result bool `json:"result"`
Msg string `json:"msg"`
Crc string `json:"crc"`
ObjectID string `json:"objectId"`
Resid int64 `json:"resid"`
Puid int `json:"puid"`
Data struct {
DisableOpt bool `json:"disableOpt"`
Resid int64 `json:"resid"`
Crc string `json:"crc"`
Puid int `json:"puid"`
Isfile bool `json:"isfile"`
Pantype string `json:"pantype"`
Size int `json:"size"`
Name string `json:"name"`
ObjectID string `json:"objectId"`
Restype string `json:"restype"`
UploadDate time.Time `json:"uploadDate"`
ModifyDate time.Time `json:"modifyDate"`
UploadDateFormat string `json:"uploadDateFormat"`
Residstr string `json:"residstr"`
Suffix string `json:"suffix"`
Preview string `json:"preview"`
Thumbnail string `json:"thumbnail"`
Creator int `json:"creator"`
Duration int `json:"duration"`
IsImg bool `json:"isImg"`
PreviewURL string `json:"previewUrl"`
Filetype string `json:"filetype"`
Filepath string `json:"filepath"`
Sort int `json:"sort"`
Topsort int `json:"topsort"`
ResTypeValue int `json:"resTypeValue"`
Extinfo string `json:"extinfo"`
} `json:"data"`
}
type UploadDoneParam struct {
Cataid string `json:"cataid"`
Key string `json:"key"`
Param struct {
DisableOpt bool `json:"disableOpt"`
Resid int64 `json:"resid"`
Crc string `json:"crc"`
Puid int `json:"puid"`
Isfile bool `json:"isfile"`
Pantype string `json:"pantype"`
Size int `json:"size"`
Name string `json:"name"`
ObjectID string `json:"objectId"`
Restype string `json:"restype"`
UploadDate time.Time `json:"uploadDate"`
ModifyDate time.Time `json:"modifyDate"`
UploadDateFormat string `json:"uploadDateFormat"`
Residstr string `json:"residstr"`
Suffix string `json:"suffix"`
Preview string `json:"preview"`
Thumbnail string `json:"thumbnail"`
Creator int `json:"creator"`
Duration int `json:"duration"`
IsImg bool `json:"isImg"`
PreviewURL string `json:"previewUrl"`
Filetype string `json:"filetype"`
Filepath string `json:"filepath"`
Sort int `json:"sort"`
Topsort int `json:"topsort"`
ResTypeValue int `json:"resTypeValue"`
Extinfo string `json:"extinfo"`
} `json:"param"`
}
func fileToObj(f File) *model.Object {
if len(f.Content.FolderName) > 0 {
return &model.Object{
ID: fmt.Sprintf("%d", f.ID),
Name: f.Content.FolderName,
Size: 0,
Modified: time.UnixMilli(f.Inserttime),
IsFolder: true,
}
}
paserTime := time.UnixMilli(f.Content.UploadDate)
return &model.Object{
ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
Name: f.Content.Name,
Size: int64(f.Content.Size),
Modified: paserTime,
IsFolder: false,
}
}

View File

@ -1,183 +0,0 @@
package chaoxing
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"encoding/base64"
"errors"
"fmt"
"mime/multipart"
"net/http"
"strings"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/go-resty/resty/v2"
)
func (d *ChaoXing) requestDownload(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
u := d.conf.DowloadApi + pathname
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Accept": "application/json, text/plain, */*",
"Referer": d.conf.referer,
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
return res.Body(), nil
}
func (d *ChaoXing) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
u := d.conf.api + pathname
if strings.Contains(pathname, "getUploadConfig") {
u = pathname
}
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Cookie": d.Cookie,
"Accept": "application/json, text/plain, */*",
"Referer": d.conf.referer,
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
return res.Body(), nil
}
func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
files := make([]File, 0)
query := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderId": parent,
"recType": "1",
}
var resp ListFileResp
_, err := d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)
}, &resp)
if err != nil {
return nil, err
}
if resp.Result != 1 {
msg := fmt.Sprintf("error code is:%d", resp.Result)
return nil, errors.New(msg)
}
if len(resp.List) > 0 {
files = append(files, resp.List...)
}
querys := map[string]string{
"bbsid": d.Addition.Bbsid,
"folderId": parent,
"recType": "2",
}
var resps ListFileResp
_, err = d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(querys)
}, &resps)
if err != nil {
return nil, err
}
for _, file := range resps.List {
// Files uploaded from the ChaoXing mobile app lack the fileID field, but ObjectID equals fileID and can be used instead
if file.Content.FileID == "" {
file.Content.FileID = file.Content.ObjectID
}
files = append(files, file)
}
return files, nil
}
func EncryptByAES(message, key string) (string, error) {
aesKey := []byte(key)
plainText := []byte(message)
block, err := aes.NewCipher(aesKey)
if err != nil {
return "", err
}
iv := aesKey[:aes.BlockSize]
mode := cipher.NewCBCEncrypter(block, iv)
padding := aes.BlockSize - len(plainText)%aes.BlockSize
paddedText := append(plainText, byte(padding))
for i := 0; i < padding-1; i++ {
paddedText = append(paddedText, byte(padding))
}
ciphertext := make([]byte, len(paddedText))
mode.CryptBlocks(ciphertext, paddedText)
encrypted := base64.StdEncoding.EncodeToString(ciphertext)
return encrypted, nil
}
func CookiesToString(cookies []*http.Cookie) string {
var cookieStr string
for _, cookie := range cookies {
cookieStr += cookie.Name + "=" + cookie.Value + "; "
}
if len(cookieStr) > 2 {
cookieStr = cookieStr[:len(cookieStr)-2]
}
return cookieStr
}
func (d *ChaoXing) Login() (string, error) {
transferKey := "u2oh6Vu^HWe4_AES"
body := &bytes.Buffer{}
writer := multipart.NewWriter(body)
uname, err := EncryptByAES(d.Addition.UserName, transferKey)
if err != nil {
return "", err
}
password, err := EncryptByAES(d.Addition.Password, transferKey)
if err != nil {
return "", err
}
err = writer.WriteField("uname", uname)
if err != nil {
return "", err
}
err = writer.WriteField("password", password)
if err != nil {
return "", err
}
err = writer.WriteField("t", "true")
if err != nil {
return "", err
}
err = writer.Close()
if err != nil {
return "", err
}
// Create the request
req, err := http.NewRequest("POST", "https://passport2.chaoxing.com/fanyalogin", body)
if err != nil {
return "", err
}
req.Header.Set("Content-Type", writer.FormDataContentType())
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
resp, err := http.DefaultClient.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
return CookiesToString(resp.Cookies()), nil
}

View File

@ -71,9 +71,6 @@ func (d *Cloudreve) Link(ctx context.Context, file model.Obj, args model.LinkArg
if err != nil {
return nil, err
}
if strings.HasPrefix(dUrl, "/api") {
dUrl = d.Address + dUrl
}
return &model.Link{
URL: dUrl,
}, nil

View File

@ -3,6 +3,7 @@ package crypt
import (
"context"
"fmt"
"github.com/alist-org/alist/v3/internal/stream"
"io"
stdpath "path"
"regexp"
@ -13,7 +14,6 @@ import (
"github.com/alist-org/alist/v3/internal/fs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/internal/stream"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
@ -124,9 +124,6 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
//filter illegal files
continue
}
if !d.ShowHidden && strings.HasPrefix(name, ".") {
continue
}
objRes := model.Object{
Name: name,
Size: 0,
@ -148,9 +145,6 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
//filter illegal files
continue
}
if !d.ShowHidden && strings.HasPrefix(name, ".") {
continue
}
objRes := model.Object{
Name: name,
Size: size,
@ -160,7 +154,7 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
// discarding hash as it's encrypted
}
if d.Thumbnail && thumb == "" {
thumb = utils.EncodePath(common.GetApiUrl(nil)+stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
thumb = utils.EncodePath(common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
}
if !ok && !d.Thumbnail {
result = append(result, &objRes)
@ -389,11 +383,10 @@ func (d *Crypt) Put(ctx context.Context, dstDir model.Obj, streamer model.FileSt
Modified: streamer.ModTime(),
IsFolder: streamer.IsDir(),
},
Reader: wrappedIn,
Mimetype: "application/octet-stream",
WebPutAsTask: streamer.NeedStore(),
ForceStreamUpload: true,
Exist: streamer.GetExist(),
Reader: wrappedIn,
Mimetype: "application/octet-stream",
WebPutAsTask: streamer.NeedStore(),
Exist: streamer.GetExist(),
}
err = op.Put(ctx, d.remoteStorage, dstDirActualPath, streamOut, up, false)
if err != nil {

View File

@ -21,8 +21,6 @@ type Addition struct {
FileNameEncoding string `json:"filename_encoding" type:"select" required:"true" options:"base64,base32,base32768" default:"base64" help:"for advanced user only!"`
Thumbnail bool `json:"thumbnail" required:"true" default:"false" help:"enable thumbnail which pre-generated under .thumbnails folder"`
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
}
var config = driver.Config{

View File

@ -45,25 +45,7 @@ func (d *Dropbox) Init(ctx context.Context) error {
if result != query {
return fmt.Errorf("failed to check user: %s", string(res))
}
d.RootNamespaceId, err = d.GetRootNamespaceId(ctx)
return err
}
func (d *Dropbox) GetRootNamespaceId(ctx context.Context) (string, error) {
res, err := d.request("/2/users/get_current_account", http.MethodPost, func(req *resty.Request) {
req.SetBody(nil)
})
if err != nil {
return "", err
}
var currentAccountResp CurrentAccountResp
err = utils.Json.Unmarshal(res, &currentAccountResp)
if err != nil {
return "", err
}
rootNamespaceId := currentAccountResp.RootInfo.RootNamespaceId
return rootNamespaceId, nil
return nil
}
func (d *Dropbox) Drop(ctx context.Context) error {

View File

@ -17,8 +17,7 @@ type Addition struct {
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
AccessToken string
RootNamespaceId string
AccessToken string
}
var config = driver.Config{

View File

@ -23,13 +23,6 @@ type RefreshTokenErrorResp struct {
ErrorDescription string `json:"error_description"`
}
type CurrentAccountResp struct {
RootInfo struct {
RootNamespaceId string `json:"root_namespace_id"`
HomeNamespaceId string `json:"home_namespace_id"`
} `json:"root_info"`
}
type File struct {
Tag string `json:".tag"`
Name string `json:"name"`

View File

@ -46,22 +46,12 @@ func (d *Dropbox) refreshToken() error {
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
req := base.RestyClient.R()
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
if d.RootNamespaceId != "" {
apiPathRootJson, err := utils.Json.MarshalToString(map[string]interface{}{
".tag": "root",
"root": d.RootNamespaceId,
})
if err != nil {
return nil, err
}
req.SetHeader("Dropbox-API-Path-Root", apiPathRootJson)
if method == http.MethodPost {
req.SetHeader("Content-Type", "application/json")
}
if callback != nil {
callback(req)
}
if method == http.MethodPost && req.Body != nil {
req.SetHeader("Content-Type", "application/json")
}
var e ErrorResp
req.SetError(&e)
res, err := req.Execute(method, d.base+uri)

View File

@ -39,7 +39,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
if err := d.login(); err != nil {
return nil, err
}
entries, err := d.conn.List(encode(dir.GetPath(), d.Encoding))
entries, err := d.conn.List(dir.GetPath())
if err != nil {
return nil, err
}
@ -49,7 +49,7 @@ func (d *FTP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]m
continue
}
f := model.Object{
Name: decode(entry.Name, d.Encoding),
Name: entry.Name,
Size: int64(entry.Size),
Modified: entry.Time,
IsFolder: entry.Type == ftp.EntryTypeFolder,
@ -64,7 +64,7 @@ func (d *FTP) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*m
return nil, err
}
r := NewFileReader(d.conn, encode(file.GetPath(), d.Encoding), file.GetSize())
r := NewFileReader(d.conn, file.GetPath(), file.GetSize())
link := &model.Link{
MFile: r,
}
@ -75,27 +75,21 @@ func (d *FTP) MakeDir(ctx context.Context, parentDir model.Obj, dirName string)
if err := d.login(); err != nil {
return err
}
return d.conn.MakeDir(encode(stdpath.Join(parentDir.GetPath(), dirName), d.Encoding))
return d.conn.MakeDir(stdpath.Join(parentDir.GetPath(), dirName))
}
func (d *FTP) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if err := d.login(); err != nil {
return err
}
return d.conn.Rename(
encode(srcObj.GetPath(), d.Encoding),
encode(stdpath.Join(dstDir.GetPath(), srcObj.GetName()), d.Encoding),
)
return d.conn.Rename(srcObj.GetPath(), stdpath.Join(dstDir.GetPath(), srcObj.GetName()))
}
func (d *FTP) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if err := d.login(); err != nil {
return err
}
return d.conn.Rename(
encode(srcObj.GetPath(), d.Encoding),
encode(stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName), d.Encoding),
)
return d.conn.Rename(srcObj.GetPath(), stdpath.Join(stdpath.Dir(srcObj.GetPath()), newName))
}
func (d *FTP) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
@ -106,11 +100,10 @@ func (d *FTP) Remove(ctx context.Context, obj model.Obj) error {
if err := d.login(); err != nil {
return err
}
path := encode(obj.GetPath(), d.Encoding)
if obj.IsDir() {
return d.conn.RemoveDirRecur(path)
return d.conn.RemoveDirRecur(obj.GetPath())
} else {
return d.conn.Delete(path)
return d.conn.Delete(obj.GetPath())
}
}
@ -119,8 +112,7 @@ func (d *FTP) Put(ctx context.Context, dstDir model.Obj, stream model.FileStream
return err
}
// TODO: support cancel
path := stdpath.Join(dstDir.GetPath(), stream.GetName())
return d.conn.Stor(encode(path, d.Encoding), stream)
return d.conn.Stor(stdpath.Join(dstDir.GetPath(), stream.GetName()), stream)
}
var _ driver.Driver = (*FTP)(nil)

View File

@ -3,28 +3,10 @@ package ftp
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
"github.com/axgle/mahonia"
)
func encode(str string, encoding string) string {
if encoding == "" {
return str
}
encoder := mahonia.NewEncoder(encoding)
return encoder.ConvertString(str)
}
func decode(str string, encoding string) string {
if encoding == "" {
return str
}
decoder := mahonia.NewDecoder(encoding)
return decoder.ConvertString(str)
}
type Addition struct {
Address string `json:"address" required:"true"`
Encoding string `json:"encoding" required:"true"`
Username string `json:"username" required:"true"`
Password string `json:"password" required:"true"`
driver.RootPath

View File

@ -5,6 +5,7 @@ import (
"crypto/x509"
"encoding/pem"
"fmt"
"io/ioutil"
"net/http"
"os"
"regexp"
@ -43,7 +44,7 @@ func (d *GoogleDrive) refreshToken() error {
gdsaFileThis := d.RefreshToken
if gdsaFile.IsDir() {
if len(d.ServiceAccountFileList) <= 0 {
gdsaReadDir, gdsaDirErr := os.ReadDir(d.RefreshToken)
gdsaReadDir, gdsaDirErr := ioutil.ReadDir(d.RefreshToken)
if gdsaDirErr != nil {
log.Error("read dir fail")
return gdsaDirErr
@ -75,7 +76,7 @@ func (d *GoogleDrive) refreshToken() error {
}
}
gdsaFileThisContent, err := os.ReadFile(gdsaFileThis)
gdsaFileThisContent, err := ioutil.ReadFile(gdsaFileThis)
if err != nil {
return err
}

View File

@ -58,33 +58,9 @@ func (d *GooglePhoto) Link(ctx context.Context, file model.Obj, args model.LinkA
URL: f.BaseURL + "=d",
}, nil
} else if strings.Contains(f.MimeType, "video/") {
var width, height int
fmt.Sscanf(f.MediaMetadata.Width, "%d", &width)
fmt.Sscanf(f.MediaMetadata.Height, "%d", &height)
switch {
// 1080P
case width == 1920 && height == 1080:
return &model.Link{
URL: f.BaseURL + "=m37",
}, nil
// 720P
case width == 1280 && height == 720:
return &model.Link{
URL: f.BaseURL + "=m22",
}, nil
// 360P
case width == 640 && height == 360:
return &model.Link{
URL: f.BaseURL + "=m18",
}, nil
default:
return &model.Link{
URL: f.BaseURL + "=dv",
}, nil
}
return &model.Link{
URL: f.BaseURL + "=dv",
}, nil
}
return &model.Link{}, nil
}

View File

@ -151,7 +151,7 @@ func (d *GooglePhoto) getMedia(id string) (MediaItem, error) {
var resp MediaItem
query := map[string]string{
"fields": "mediaMetadata,baseUrl,mimeType",
"fields": "baseUrl,mimeType",
}
_, err := d.request(fmt.Sprintf("https://photoslibrary.googleapis.com/v1/mediaItems/%s", id), http.MethodGet, func(req *resty.Request) {
req.SetQueryParams(query)

View File

@ -1,406 +0,0 @@
package halalcloud
import (
"context"
"crypto/sha1"
"fmt"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/internal/op"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/city404/v6-public-rpc-proto/go/v6/common"
pbPublicUser "github.com/city404/v6-public-rpc-proto/go/v6/user"
pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
"github.com/rclone/rclone/lib/readers"
"github.com/zzzhr1990/go-common-entity/userfile"
"io"
"net/url"
"path"
"strconv"
"time"
)
type HalalCloud struct {
*HalalCommon
model.Storage
Addition
uploadThread int
}
func (d *HalalCloud) Config() driver.Config {
return config
}
func (d *HalalCloud) GetAddition() driver.Additional {
return &d.Addition
}
func (d *HalalCloud) Init(ctx context.Context) error {
d.uploadThread, _ = strconv.Atoi(d.UploadThread)
if d.uploadThread < 1 || d.uploadThread > 32 {
d.uploadThread, d.UploadThread = 3, "3"
}
if d.HalalCommon == nil {
d.HalalCommon = &HalalCommon{
Common: &Common{},
AuthService: &AuthService{
appID: func() string {
if d.Addition.AppID != "" {
return d.Addition.AppID
}
return AppID
}(),
appVersion: func() string {
if d.Addition.AppVersion != "" {
return d.Addition.AppVersion
}
return AppVersion
}(),
appSecret: func() string {
if d.Addition.AppSecret != "" {
return d.Addition.AppSecret
}
return AppSecret
}(),
tr: &TokenResp{
RefreshToken: d.Addition.RefreshToken,
},
},
UserInfo: &UserInfo{},
refreshTokenFunc: func(token string) error {
d.Addition.RefreshToken = token
op.MustSaveDriverStorage(d)
return nil
},
}
}
// prevent duplicate logins
if d.Addition.RefreshToken == "" || !d.IsLogin() {
as, err := d.NewAuthServiceWithOauth()
if err != nil {
d.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
return err
}
d.HalalCommon.AuthService = as
d.SetTokenResp(as.tr)
op.MustSaveDriverStorage(d)
}
var err error
d.HalalCommon.serv, err = d.NewAuthService(d.Addition.RefreshToken)
if err != nil {
return err
}
return nil
}
func (d *HalalCloud) Drop(ctx context.Context) error {
return nil
}
func (d *HalalCloud) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
return d.getFiles(ctx, dir)
}
func (d *HalalCloud) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
return d.getLink(ctx, file, args)
}
func (d *HalalCloud) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
return d.makeDir(ctx, parentDir, dirName)
}
func (d *HalalCloud) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
return d.move(ctx, srcObj, dstDir)
}
func (d *HalalCloud) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
return d.rename(ctx, srcObj, newName)
}
func (d *HalalCloud) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
return d.copy(ctx, srcObj, dstDir)
}
func (d *HalalCloud) Remove(ctx context.Context, obj model.Obj) error {
return d.remove(ctx, obj)
}
func (d *HalalCloud) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
return d.put(ctx, dstDir, stream, up)
}
func (d *HalalCloud) IsLogin() bool {
if d.AuthService.tr == nil {
return false
}
serv, err := d.NewAuthService(d.Addition.RefreshToken)
if err != nil {
return false
}
ctx, cancel := context.WithTimeout(context.Background(), time.Second*5)
defer cancel()
result, err := pbPublicUser.NewPubUserClient(serv.GetGrpcConnection()).Get(ctx, &pbPublicUser.User{
Identity: "",
})
if result == nil || err != nil {
return false
}
d.UserInfo.Identity = result.Identity
d.UserInfo.CreateTs = result.CreateTs
d.UserInfo.Name = result.Name
d.UserInfo.UpdateTs = result.UpdateTs
return true
}
type HalalCommon struct {
*Common
*AuthService // login info
*UserInfo    // user info
refreshTokenFunc func(token string) error
serv *AuthService
}
func (d *HalalCloud) SetTokenResp(tr *TokenResp) {
d.Addition.RefreshToken = tr.RefreshToken
}
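// getFiles pages through the directory listing, following the continuation token until the server stops returning one.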
func (d *HalalCloud) getFiles(ctx context.Context, dir model.Obj) ([]model.Obj, error) {
files := make([]model.Obj, 0)
limit := int64(100)
token := ""
client := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection())
opDir := d.GetCurrentDir(dir)
for {
result, err := client.List(ctx, &pubUserFile.FileListRequest{
Parent: &pubUserFile.File{Path: opDir},
ListInfo: &common.ScanListRequest{
Limit: limit,
Token: token,
},
})
if err != nil {
return nil, err
}
for i := 0; len(result.Files) > i; i++ {
files = append(files, (*Files)(result.Files[i]))
}
if result.ListInfo == nil || result.ListInfo.Token == "" {
break
}
token = result.ListInfo.Token
}
return files, nil
}
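// getLink resolves the file's download slices in batches, then exposes them through a range reader whose expiration tracks the signed slice addresses.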
func (d *HalalCloud) getLink(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
client := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection())
ctx1, cancelFunc := context.WithCancel(context.Background())
defer cancelFunc()
result, err := client.ParseFileSlice(ctx1, (*pubUserFile.File)(file.(*Files)))
if err != nil {
return nil, err
}
fileAddrs := []*pubUserFile.SliceDownloadInfo{}
var addressDuration int64
nodesNumber := len(result.RawNodes)
nodesIndex := nodesNumber - 1
startIndex, endIndex := 0, nodesIndex
for nodesIndex >= 0 {
if nodesIndex >= 200 {
endIndex = 200
} else {
endIndex = nodesNumber
}
for ; endIndex <= nodesNumber; endIndex += 200 {
if endIndex == 0 {
endIndex = 1
}
sliceAddress, err := client.GetSliceDownloadAddress(ctx, &pubUserFile.SliceDownloadAddressRequest{
Identity: result.RawNodes[startIndex:endIndex],
Version: 1,
})
if err != nil {
return nil, err
}
addressDuration = sliceAddress.ExpireAt
fileAddrs = append(fileAddrs, sliceAddress.Addresses...)
startIndex = endIndex
nodesIndex -= 200
}
}
size := result.FileSize
chunks := getChunkSizes(result.Sizes)
var finalClosers utils.Closers
resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
length := httpRange.Length
if httpRange.Length >= 0 && httpRange.Start+httpRange.Length >= size {
length = -1
}
if err != nil {
return nil, fmt.Errorf("open download file failed: %w", err)
}
oo := &openObject{
ctx: ctx,
d: fileAddrs,
chunk: &[]byte{},
chunks: &chunks,
skip: httpRange.Start,
sha: result.Sha1,
shaTemp: sha1.New(),
}
finalClosers.Add(oo)
return readers.NewLimitedReadCloser(oo, length), nil
}
var duration time.Duration
if addressDuration != 0 {
duration = time.Until(time.UnixMilli(addressDuration))
} else {
duration = time.Until(time.Now().Add(time.Hour))
}
resultRangeReadCloser := &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: finalClosers}
return &model.Link{
RangeReadCloser: resultRangeReadCloser,
Expiration: &duration,
}, nil
}
func (d *HalalCloud) makeDir(ctx context.Context, dir model.Obj, name string) (model.Obj, error) {
newDir := userfile.NewFormattedPath(d.GetCurrentOpDir(dir, []string{name}, 0)).GetPath()
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Create(ctx, &pubUserFile.File{
Path: newDir,
})
return nil, err
}
func (d *HalalCloud) move(ctx context.Context, obj model.Obj, dir model.Obj) (model.Obj, error) {
oldDir := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
newDir := userfile.NewFormattedPath(d.GetCurrentDir(dir)).GetPath()
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Move(ctx, &pubUserFile.BatchOperationRequest{
Source: []*pubUserFile.File{
{
Identity: obj.GetID(),
Path: oldDir,
},
},
Dest: &pubUserFile.File{
Identity: dir.GetID(),
Path: newDir,
},
})
return nil, err
}
func (d *HalalCloud) rename(ctx context.Context, obj model.Obj, name string) (model.Obj, error) {
id := obj.GetID()
newPath := userfile.NewFormattedPath(d.GetCurrentOpDir(obj, []string{name}, 0)).GetPath()
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Rename(ctx, &pubUserFile.File{
Path: newPath,
Identity: id,
Name: name,
})
return nil, err
}
func (d *HalalCloud) copy(ctx context.Context, obj model.Obj, dir model.Obj) (model.Obj, error) {
id := obj.GetID()
sourcePath := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
if len(id) > 0 {
sourcePath = ""
}
dest := &pubUserFile.File{
Identity: dir.GetID(),
Path: userfile.NewFormattedPath(d.GetCurrentDir(dir)).GetPath(),
}
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Copy(ctx, &pubUserFile.BatchOperationRequest{
Source: []*pubUserFile.File{
{
Path: sourcePath,
Identity: id,
},
},
Dest: dest,
})
return nil, err
}
func (d *HalalCloud) remove(ctx context.Context, obj model.Obj) error {
id := obj.GetID()
newPath := userfile.NewFormattedPath(d.GetCurrentDir(obj)).GetPath()
//if len(id) > 0 {
// newPath = ""
//}
_, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).Delete(ctx, &pubUserFile.BatchOperationRequest{
Source: []*pubUserFile.File{
{
Path: newPath,
Identity: id,
},
},
})
return err
}
func (d *HalalCloud) put(ctx context.Context, dstDir model.Obj, fileStream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
newDir := path.Join(dstDir.GetPath(), fileStream.GetName())
result, err := pubUserFile.NewPubUserFileClient(d.HalalCommon.serv.GetGrpcConnection()).CreateUploadToken(ctx, &pubUserFile.File{
Path: newDir,
})
if err != nil {
return nil, err
}
u, _ := url.Parse(result.Endpoint)
u.Host = "s3." + u.Host
result.Endpoint = u.String()
s, err := session.NewSession(&aws.Config{
HTTPClient: base.HttpClient,
Credentials: credentials.NewStaticCredentials(result.AccessKey, result.SecretKey, result.Token),
Region: aws.String(result.Region),
Endpoint: aws.String(result.Endpoint),
S3ForcePathStyle: aws.Bool(true),
})
if err != nil {
return nil, err
}
uploader := s3manager.NewUploader(s, func(u *s3manager.Uploader) {
u.Concurrency = d.uploadThread
})
if fileStream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
uploader.PartSize = fileStream.GetSize() / (s3manager.MaxUploadParts - 1)
}
_, err = uploader.UploadWithContext(ctx, &s3manager.UploadInput{
Bucket: aws.String(result.Bucket),
Key: aws.String(result.Key),
Body: io.TeeReader(fileStream, driver.NewProgress(fileStream.GetSize(), up)),
})
return nil, err
}
var _ driver.Driver = (*HalalCloud)(nil)

View File

@ -1,38 +0,0 @@
package halalcloud
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
// Usually one of two
driver.RootPath
// define other
RefreshToken string `json:"refresh_token" required:"true" help:"login type is refresh_token,this is required"`
UploadThread string `json:"upload_thread" default:"3" help:"1 <= thread <= 32"`
AppID string `json:"app_id" required:"true" default:"devDebugger/1.0"`
AppVersion string `json:"app_version" required:"true" default:"1.0.0"`
AppSecret string `json:"app_secret" required:"true" default:"Nkx3Y2xvZ2luLmNu"`
}
var config = driver.Config{
Name: "HalalCloud",
LocalSort: false,
OnlyLocal: true,
OnlyProxy: true,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "/",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: false,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &HalalCloud{}
})
}

View File

@ -1,52 +0,0 @@
package halalcloud
import "google.golang.org/grpc"
func defaultOptions() halalOptions {
return halalOptions{
// onRefreshTokenRefreshed: func(string) {},
grpcOptions: []grpc.DialOption{
grpc.WithDefaultCallOptions(grpc.MaxCallRecvMsgSize(1024 * 1024 * 32)),
// grpc.WithMaxMsgSize(1024 * 1024 * 1024),
},
}
}
type HalalOption interface {
apply(*halalOptions)
}
// halalOptions configure an RPC call. halalOptions are set by the HalalOption
// values passed to Dial.
type halalOptions struct {
onTokenRefreshed func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64)
grpcOptions []grpc.DialOption
}
// funcDialOption wraps a function that modifies halalOptions into an
// implementation of the HalalOption interface.
type funcDialOption struct {
f func(*halalOptions)
}
func (fdo *funcDialOption) apply(do *halalOptions) {
fdo.f(do)
}
func newFuncDialOption(f func(*halalOptions)) *funcDialOption {
return &funcDialOption{
f: f,
}
}
func WithRefreshTokenRefreshedCallback(s func(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64)) HalalOption {
return newFuncDialOption(func(o *halalOptions) {
o.onTokenRefreshed = s
})
}
func WithGrpcDialOptions(opts ...grpc.DialOption) HalalOption {
return newFuncDialOption(func(o *halalOptions) {
o.grpcOptions = opts
})
}

View File

@ -1,101 +0,0 @@
package halalcloud
import (
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/city404/v6-public-rpc-proto/go/v6/common"
pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
"google.golang.org/grpc"
"time"
)
type AuthService struct {
appID string
appVersion string
appSecret string
grpcConnection *grpc.ClientConn
dopts halalOptions
tr *TokenResp
}
type TokenResp struct {
AccessToken string `json:"accessToken,omitempty"`
AccessTokenExpiredAt int64 `json:"accessTokenExpiredAt,omitempty"`
RefreshToken string `json:"refreshToken,omitempty"`
RefreshTokenExpiredAt int64 `json:"refreshTokenExpiredAt,omitempty"`
}
type UserInfo struct {
Identity string `json:"identity,omitempty"`
UpdateTs int64 `json:"updateTs,omitempty"`
Name string `json:"name,omitempty"`
CreateTs int64 `json:"createTs,omitempty"`
}
type OrderByInfo struct {
Field string `json:"field,omitempty"`
Asc bool `json:"asc,omitempty"`
}
type ListInfo struct {
Token string `json:"token,omitempty"`
Limit int64 `json:"limit,omitempty"`
OrderBy []*OrderByInfo `json:"order_by,omitempty"`
Version int32 `json:"version,omitempty"`
}
type FilesList struct {
Files []*Files `json:"files,omitempty"`
ListInfo *common.ScanListRequest `json:"list_info,omitempty"`
}
var _ model.Obj = (*Files)(nil)
type Files pubUserFile.File
func (f *Files) GetSize() int64 {
return f.Size
}
func (f *Files) GetName() string {
return f.Name
}
func (f *Files) ModTime() time.Time {
return time.UnixMilli(f.UpdateTs)
}
func (f *Files) CreateTime() time.Time {
return time.UnixMilli(f.UpdateTs)
}
func (f *Files) IsDir() bool {
return f.Dir
}
func (f *Files) GetHash() utils.HashInfo {
return utils.HashInfo{}
}
func (f *Files) GetID() string {
if len(f.Identity) == 0 {
f.Identity = "/"
}
return f.Identity
}
func (f *Files) GetPath() string {
return f.Path
}
type SteamFile struct {
file model.File
}
func (s *SteamFile) Read(p []byte) (n int, err error) {
return s.file.Read(p)
}
func (s *SteamFile) Close() error {
return s.file.Close()
}

View File

@ -1,385 +0,0 @@
package halalcloud
import (
"bytes"
"context"
"crypto/md5"
"crypto/tls"
"encoding/hex"
"errors"
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
pbPublicUser "github.com/city404/v6-public-rpc-proto/go/v6/user"
pubUserFile "github.com/city404/v6-public-rpc-proto/go/v6/userfile"
"github.com/google/uuid"
"github.com/ipfs/go-cid"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/credentials"
"google.golang.org/grpc/metadata"
"google.golang.org/grpc/status"
"hash"
"io"
"net/http"
"strconv"
"strings"
"sync"
"time"
)
const (
AppID = "devDebugger/1.0"
AppVersion = "1.0.0"
AppSecret = "Nkx3Y2xvZ2luLmNu"
)
const (
grpcServer = "grpcuserapi.2dland.cn:443"
grpcServerAuth = "grpcuserapi.2dland.cn"
)
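// NewAuthServiceWithOauth starts the OAuth flow: it requests an auth token from the public user API and, if a verification URL is returned, surfaces it to the user as a clickable error message.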
func (d *HalalCloud) NewAuthServiceWithOauth(options ...HalalOption) (*AuthService, error) {
aService := &AuthService{}
err2 := errors.New("")
svc := d.HalalCommon.AuthService
for _, opt := range options {
opt.apply(&svc.dopts)
}
grpcOptions := svc.dopts.grpcOptions
grpcOptions = append(grpcOptions, grpc.WithAuthority(grpcServerAuth), grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})), grpc.WithUnaryInterceptor(func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
ctxx := svc.signContext(method, ctx)
err := invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
return err
}))
grpcConnection, err := grpc.NewClient(grpcServer, grpcOptions...)
if err != nil {
return nil, err
}
defer grpcConnection.Close()
userClient := pbPublicUser.NewPubUserClient(grpcConnection)
ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
defer cancel()
stateString := uuid.New().String()
// queryValues.Add("callback", oauthToken.Callback)
oauthToken, err := userClient.CreateAuthToken(ctx, &pbPublicUser.LoginRequest{
ReturnType: 2,
State: stateString,
ReturnUrl: "",
})
if err != nil {
return nil, err
}
if len(oauthToken.State) < 1 {
oauthToken.State = stateString
}
if oauthToken.Url != "" {
return nil, fmt.Errorf(`need verify: <a target="_blank" href="%s">Click Here</a>`, oauthToken.Url)
}
return aService, err2
}
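// NewAuthService dials the gRPC endpoint with an interceptor that signs every call and, on "invalid accesstoken" errors, refreshes the access token with the refresh token and retries the call once.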
func (d *HalalCloud) NewAuthService(refreshToken string, options ...HalalOption) (*AuthService, error) {
svc := d.HalalCommon.AuthService
if len(refreshToken) < 1 {
refreshToken = d.Addition.RefreshToken
}
if len(d.tr.AccessToken) > 0 {
accessTokenExpiredAt := d.tr.AccessTokenExpiredAt
current := time.Now().UnixMilli()
if accessTokenExpiredAt < current {
// access token expired
d.tr.AccessToken = ""
d.tr.AccessTokenExpiredAt = 0
} else {
svc.tr.AccessTokenExpiredAt = accessTokenExpiredAt
svc.tr.AccessToken = d.tr.AccessToken
}
}
for _, opt := range options {
opt.apply(&svc.dopts)
}
grpcOptions := svc.dopts.grpcOptions
grpcOptions = append(grpcOptions, grpc.WithDefaultCallOptions(grpc.MaxCallSendMsgSize(10*1024*1024), grpc.MaxCallRecvMsgSize(10*1024*1024)), grpc.WithAuthority(grpcServerAuth), grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{})), grpc.WithUnaryInterceptor(func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
ctxx := svc.signContext(method, ctx)
err := invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
if err != nil {
grpcStatus, ok := status.FromError(err)
if ok && grpcStatus.Code() == codes.Unauthenticated && strings.Contains(grpcStatus.Err().Error(), "invalid accesstoken") && len(refreshToken) > 0 {
// refresh token
refreshResponse, err := pbPublicUser.NewPubUserClient(cc).Refresh(ctx, &pbPublicUser.Token{
RefreshToken: refreshToken,
})
if err != nil {
return err
}
if len(refreshResponse.AccessToken) > 0 {
svc.tr.AccessToken = refreshResponse.AccessToken
svc.tr.AccessTokenExpiredAt = refreshResponse.AccessTokenExpireTs
svc.OnAccessTokenRefreshed(refreshResponse.AccessToken, refreshResponse.AccessTokenExpireTs, refreshResponse.RefreshToken, refreshResponse.RefreshTokenExpireTs)
}
// retry
ctxx := svc.signContext(method, ctx)
err = invoker(ctxx, method, req, reply, cc, opts...) // invoking RPC method
if err != nil {
return err
} else {
return nil
}
}
}
return err
}))
grpcConnection, err := grpc.NewClient(grpcServer, grpcOptions...)
if err != nil {
return nil, err
}
svc.grpcConnection = grpcConnection
return svc, err
}
func (s *AuthService) OnAccessTokenRefreshed(accessToken string, accessTokenExpiredAt int64, refreshToken string, refreshTokenExpiredAt int64) {
s.tr.AccessToken = accessToken
s.tr.AccessTokenExpiredAt = accessTokenExpiredAt
s.tr.RefreshToken = refreshToken
s.tr.RefreshTokenExpiredAt = refreshTokenExpiredAt
if s.dopts.onTokenRefreshed != nil {
s.dopts.onTokenRefreshed(accessToken, accessTokenExpiredAt, refreshToken, refreshTokenExpiredAt)
}
}
func (s *AuthService) GetGrpcConnection() *grpc.ClientConn {
return s.grpcConnection
}
func (s *AuthService) Close() {
_ = s.grpcConnection.Close()
}
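// signContext appends timestamp, app id/version and (if set) the bearer token to the outgoing gRPC metadata, together with an MD5 signature over the method name, those values and the app secret.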
func (s *AuthService) signContext(method string, ctx context.Context) context.Context {
var kvString []string
currentTimeStamp := strconv.FormatInt(time.Now().UnixMilli(), 10)
bufferedString := bytes.NewBufferString(method)
kvString = append(kvString, "timestamp", currentTimeStamp)
bufferedString.WriteString(currentTimeStamp)
kvString = append(kvString, "appid", AppID)
bufferedString.WriteString(AppID)
kvString = append(kvString, "appversion", AppVersion)
bufferedString.WriteString(AppVersion)
if s.tr != nil && len(s.tr.AccessToken) > 0 {
authorization := "Bearer " + s.tr.AccessToken
kvString = append(kvString, "authorization", authorization)
bufferedString.WriteString(authorization)
}
bufferedString.WriteString(AppSecret)
sign := GetMD5Hash(bufferedString.String())
kvString = append(kvString, "sign", sign)
return metadata.AppendToOutgoingContext(ctx, kvString...)
}
func (d *HalalCloud) GetCurrentOpDir(dir model.Obj, args []string, index int) string {
currentDir := dir.GetPath()
if len(currentDir) == 0 {
currentDir = "/"
}
opPath := currentDir + "/" + args[index]
if strings.HasPrefix(args[index], "/") {
opPath = args[index]
}
return opPath
}
func (d *HalalCloud) GetCurrentDir(dir model.Obj) string {
currentDir := dir.GetPath()
if len(currentDir) == 0 {
currentDir = "/"
}
return currentDir
}
type Common struct {
}
func getRawFiles(addr *pubUserFile.SliceDownloadInfo) ([]byte, error) {
if addr == nil {
return nil, errors.New("addr is nil")
}
client := http.Client{
Timeout: time.Duration(60 * time.Second), // Set timeout to 60 seconds
}
resp, err := client.Get(addr.DownloadAddress)
if err != nil {
return nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("bad status: %s, body: %s", resp.Status, body)
}
if addr.Encrypt > 0 {
cd := uint8(addr.Encrypt)
for idx := 0; idx < len(body); idx++ {
body[idx] = body[idx] ^ cd
}
}
if addr.StoreType != 10 {
sourceCid, err := cid.Decode(addr.Identity)
if err != nil {
return nil, err
}
checkCid, err := sourceCid.Prefix().Sum(body)
if err != nil {
return nil, err
}
if !checkCid.Equals(sourceCid) {
return nil, fmt.Errorf("bad cid: %s, body: %s", checkCid.String(), body)
}
}
return body, nil
}
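// openObject streams a sliced download: chunks are fetched lazily via getRawFiles, leading bytes can be skipped, and a running SHA1 is kept for the check in Close.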
type openObject struct {
ctx context.Context
mu sync.Mutex
d []*pubUserFile.SliceDownloadInfo
id int
skip int64
chunk *[]byte
chunks *[]chunkSize
closed bool
sha string
shaTemp hash.Hash
}
// get the next chunk
func (oo *openObject) getChunk(ctx context.Context) (err error) {
if oo.id >= len(*oo.chunks) {
return io.EOF
}
var chunk []byte
err = utils.Retry(3, time.Second, func() (err error) {
chunk, err = getRawFiles(oo.d[oo.id])
return err
})
if err != nil {
return err
}
oo.id++
oo.chunk = &chunk
return nil
}
// Read reads up to len(p) bytes into p.
func (oo *openObject) Read(p []byte) (n int, err error) {
oo.mu.Lock()
defer oo.mu.Unlock()
if oo.closed {
return 0, fmt.Errorf("read on closed file")
}
// Skip data at the start if requested
for oo.skip > 0 {
//size := 1024 * 1024
_, size, err := oo.ChunkLocation(oo.id)
if err != nil {
return 0, err
}
if oo.skip < int64(size) {
break
}
oo.id++
oo.skip -= int64(size)
}
if len(*oo.chunk) == 0 {
err = oo.getChunk(oo.ctx)
if err != nil {
return 0, err
}
if oo.skip > 0 {
*oo.chunk = (*oo.chunk)[oo.skip:]
oo.skip = 0
}
}
n = copy(p, *oo.chunk)
*oo.chunk = (*oo.chunk)[n:]
oo.shaTemp.Write(*oo.chunk)
return n, nil
}
// Close closes the file - checksum errors are reported here
func (oo *openObject) Close() (err error) {
oo.mu.Lock()
defer oo.mu.Unlock()
if oo.closed {
return nil
}
// verify the SHA1 checksum
if string(oo.shaTemp.Sum(nil)) != oo.sha {
return fmt.Errorf("failed to finish download: %w", err)
}
oo.closed = true
return nil
}
func GetMD5Hash(text string) string {
tHash := md5.Sum([]byte(text))
return hex.EncodeToString(tHash[:])
}
// chunkSize describes a size and position of chunk
type chunkSize struct {
position int64
size int
}
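// getChunkSizes expands each SliceSize range into one chunkSize entry per slice index.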
func getChunkSizes(sliceSize []*pubUserFile.SliceSize) (chunks []chunkSize) {
chunks = make([]chunkSize, 0)
for _, s := range sliceSize {
// special-case the last slice
if s.EndIndex == 0 {
s.EndIndex = s.StartIndex
}
for j := s.StartIndex; j <= s.EndIndex; j++ {
chunks = append(chunks, chunkSize{position: j, size: int(s.Size)})
}
}
return chunks
}
func (oo *openObject) ChunkLocation(id int) (position int64, size int, err error) {
if id < 0 || id >= len(*oo.chunks) {
return 0, 0, errors.New("invalid arguments")
}
return (*oo.chunks)[id].position, (*oo.chunks)[id].size, nil
}

View File

@ -1,385 +0,0 @@
package template
import (
"context"
"crypto/md5"
"encoding/base64"
"encoding/hex"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type ILanZou struct {
model.Storage
Addition
userID string
account string
upClient *resty.Client
conf Conf
config driver.Config
}
func (d *ILanZou) Config() driver.Config {
return d.config
}
func (d *ILanZou) GetAddition() driver.Additional {
return &d.Addition
}
func (d *ILanZou) Init(ctx context.Context) error {
d.upClient = base.NewRestyClient().SetTimeout(time.Minute * 10)
if d.UUID == "" {
res, err := d.unproved("/getUuid", http.MethodGet, nil)
if err != nil {
return err
}
d.UUID = utils.Json.Get(res, "uuid").ToString()
}
res, err := d.proved("/user/account/map", http.MethodGet, nil)
if err != nil {
return err
}
d.userID = utils.Json.Get(res, "map", "userId").ToString()
d.account = utils.Json.Get(res, "map", "account").ToString()
log.Debugf("[ilanzou] init response: %s", res)
return nil
}
func (d *ILanZou) Drop(ctx context.Context) error {
return nil
}
func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var res []ListItem
for {
var resp ListResp
_, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
params := []string{
"offset=1",
"limit=60",
"folderId=" + dir.GetID(),
"type=0",
}
queryString := strings.Join(params, "&")
req.SetQueryString(queryString).SetResult(&resp)
})
if err != nil {
return nil, err
}
res = append(res, resp.List...)
if resp.TotalPage <= resp.Offset {
break
}
}
return utils.SliceConvert(res, func(f ListItem) (model.Obj, error) {
updTime, err := time.ParseInLocation("2006-01-02 15:04:05", f.UpdTime, time.Local)
if err != nil {
return nil, err
}
obj := model.Object{
ID: strconv.FormatInt(f.FileId, 10),
//Path: "",
Name: f.FileName,
Size: f.FileSize * 1024,
Modified: updTime,
Ctime: updTime,
IsFolder: false,
//HashInfo: utils.HashInfo{},
}
if f.FileType == 2 {
obj.IsFolder = true
obj.Size = 0
obj.ID = strconv.FormatInt(f.FolderId, 10)
obj.Name = f.FolderName
}
return &obj, nil
})
}
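// Link builds the signed redirect URL (with AES-encrypted downloadId and auth parameters) and follows the expected 302 to obtain the real download address.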
func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
u, err := url.Parse(d.conf.base + "/" + d.conf.unproved + "/file/redirect")
if err != nil {
return nil, err
}
ts, ts_str, err := getTimestamp(d.conf.secret)
params := []string{
"uuid=" + url.QueryEscape(d.UUID),
"devType=6",
"devCode=" + url.QueryEscape(d.UUID),
"devModel=chrome",
"devVersion=" + url.QueryEscape(d.conf.devVersion),
"appVersion=",
"timestamp=" + ts_str,
"appToken=" + url.QueryEscape(d.Token),
"enable=0",
}
downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), d.conf.secret)
if err != nil {
return nil, err
}
params = append(params, "downloadId="+url.QueryEscape(hex.EncodeToString(downloadId)))
auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), ts)), d.conf.secret)
if err != nil {
return nil, err
}
params = append(params, "auth="+url.QueryEscape(hex.EncodeToString(auth)))
u.RawQuery = strings.Join(params, "&")
realURL := u.String()
// get the url after redirect
res, err := base.NoRedirectClient.R().SetHeaders(map[string]string{
//"Origin": d.conf.site,
"Referer": d.conf.site + "/",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
}).Get(realURL)
if err != nil {
return nil, err
}
if res.StatusCode() == 302 {
realURL = res.Header().Get("location")
} else {
return nil, fmt.Errorf("redirect failed, status: %d, msg: %s", res.StatusCode(), utils.Json.Get(res.Body(), "msg").ToString())
}
link := model.Link{URL: realURL}
return &link, nil
}
func (d *ILanZou) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
res, err := d.proved("/file/folder/save", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderDesc": "",
"folderId": parentDir.GetID(),
"folderName": dirName,
})
})
if err != nil {
return nil, err
}
return &model.Object{
ID: utils.Json.Get(res, "list", 0, "id").ToString(),
//Path: "",
Name: dirName,
Size: 0,
Modified: time.Now(),
Ctime: time.Now(),
IsFolder: true,
//HashInfo: utils.HashInfo{},
}, nil
}
func (d *ILanZou) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var fileIds, folderIds []string
if srcObj.IsDir() {
folderIds = []string{srcObj.GetID()}
} else {
fileIds = []string{srcObj.GetID()}
}
_, err := d.proved("/file/folder/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderIds": strings.Join(folderIds, ","),
"fileIds": strings.Join(fileIds, ","),
"targetId": dstDir.GetID(),
})
})
if err != nil {
return nil, err
}
return srcObj, nil
}
func (d *ILanZou) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
var err error
if srcObj.IsDir() {
_, err = d.proved("/file/folder/edit", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderDesc": "",
"folderId": srcObj.GetID(),
"folderName": newName,
})
})
} else {
_, err = d.proved("/file/edit", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileDesc": "",
"fileId": srcObj.GetID(),
"fileName": newName,
})
})
}
if err != nil {
return nil, err
}
return &model.Object{
ID: srcObj.GetID(),
//Path: "",
Name: newName,
Size: srcObj.GetSize(),
Modified: time.Now(),
Ctime: srcObj.CreateTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *ILanZou) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// TODO copy obj, optional
return nil, errs.NotImplement
}
func (d *ILanZou) Remove(ctx context.Context, obj model.Obj) error {
var fileIds, folderIds []string
if obj.IsDir() {
folderIds = []string{obj.GetID()}
} else {
fileIds = []string{obj.GetID()}
}
_, err := d.proved("/file/delete", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"folderIds": strings.Join(folderIds, ","),
"fileIds": strings.Join(fileIds, ","),
"status": 0,
})
})
return err
}
const DefaultPartSize = 1024 * 1024 * 8
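// Put caches the stream in a temp file to compute its MD5, fetches a Qiniu upload token, uploads either in a single form post or in 8 MiB multipart chunks, then polls /7n/results until the upload is confirmed.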
func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
h := md5.New()
// need to calculate md5 of the full content
tempFile, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
defer func() {
_ = tempFile.Close()
}()
if _, err = utils.CopyWithBuffer(h, tempFile); err != nil {
return nil, err
}
_, err = tempFile.Seek(0, io.SeekStart)
if err != nil {
return nil, err
}
etag := hex.EncodeToString(h.Sum(nil))
// get upToken
res, err := d.proved("/7n/getUpToken", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"fileId": "",
"fileName": stream.GetName(),
"fileSize": stream.GetSize() / 1024,
"folderId": dstDir.GetID(),
"md5": etag,
"type": 1,
})
})
if err != nil {
return nil, err
}
upToken := utils.Json.Get(res, "upToken").ToString()
now := time.Now()
key := fmt.Sprintf("disk/%d/%d/%d/%s/%016d", now.Year(), now.Month(), now.Day(), d.account, now.UnixMilli())
var token string
if stream.GetSize() <= DefaultPartSize {
res, err := d.upClient.R().SetMultipartFormData(map[string]string{
"token": upToken,
"key": key,
"fname": stream.GetName(),
}).SetMultipartField("file", stream.GetName(), stream.GetMimetype(), tempFile).
Post("https://upload.qiniup.com/")
if err != nil {
return nil, err
}
token = utils.Json.Get(res.Body(), "token").ToString()
} else {
keyBase64 := base64.URLEncoding.EncodeToString([]byte(key))
res, err := d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads", d.conf.bucket, keyBase64))
if err != nil {
return nil, err
}
uploadId := utils.Json.Get(res.Body(), "uploadId").ToString()
parts := make([]Part, 0)
partNum := (stream.GetSize() + DefaultPartSize - 1) / DefaultPartSize
for i := 1; i <= int(partNum); i++ {
u := fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s/%d", d.conf.bucket, keyBase64, uploadId, i)
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(io.LimitReader(tempFile, DefaultPartSize)).Put(u)
if err != nil {
return nil, err
}
etag := utils.Json.Get(res.Body(), "etag").ToString()
parts = append(parts, Part{
PartNumber: i,
ETag: etag,
})
}
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(base.Json{
"fnmae": stream.GetName(),
"parts": parts,
}).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s", d.conf.bucket, keyBase64, uploadId))
if err != nil {
return nil, err
}
token = utils.Json.Get(res.Body(), "token").ToString()
}
// commit upload
var resp UploadResultResp
for i := 0; i < 10; i++ {
_, err = d.unproved("/7n/results", http.MethodPost, func(req *resty.Request) {
params := []string{
"tokenList=" + token,
"tokenTime=" + time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
}
queryString := strings.Join(params, "&")
req.SetQueryString(queryString).SetResult(&resp)
})
if err != nil {
return nil, err
}
if len(resp.List) == 0 {
return nil, fmt.Errorf("upload failed, empty response")
}
if resp.List[0].Status == 1 {
break
}
time.Sleep(time.Second * 1)
}
file := resp.List[0]
if file.Status != 1 {
return nil, fmt.Errorf("upload failed, status: %d", resp.List[0].Status)
}
return &model.Object{
ID: strconv.FormatInt(file.FileId, 10),
//Path: ,
Name: file.FileName,
Size: stream.GetSize(),
Modified: stream.ModTime(),
Ctime: stream.CreateTime(),
IsFolder: false,
HashInfo: utils.NewHashInfo(utils.MD5, etag),
}, nil
}
//func (d *ILanZou) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*ILanZou)(nil)

View File

@ -1,80 +0,0 @@
package template
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
driver.RootID
Username string `json:"username" type:"string" required:"true"`
Password string `json:"password" type:"string" required:"true"`
Token string
UUID string
}
type Conf struct {
base string
secret []byte
bucket string
unproved string
proved string
devVersion string
site string
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &ILanZou{
config: driver.Config{
Name: "ILanZou",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "0",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: false,
},
conf: Conf{
base: "https://api.ilanzou.com",
secret: []byte("lanZouY-disk-app"),
bucket: "wpanstore-lanzou",
unproved: "unproved",
proved: "proved",
devVersion: "125",
site: "https://www.ilanzou.com",
},
}
})
op.RegisterDriver(func() driver.Driver {
return &ILanZou{
config: driver.Config{
Name: "FeijiPan",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "0",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: false,
},
conf: Conf{
base: "https://api.feijipan.com",
secret: []byte("dingHao-disk-app"),
bucket: "wpanstore",
unproved: "ws",
proved: "app",
devVersion: "125",
site: "https://www.feijipan.com",
},
}
})
}

View File

@ -1,57 +0,0 @@
package template
type ListResp struct {
Msg string `json:"msg"`
Total int `json:"total"`
Code int `json:"code"`
Offset int `json:"offset"`
TotalPage int `json:"totalPage"`
Limit int `json:"limit"`
List []ListItem `json:"list"`
}
type ListItem struct {
IconId int `json:"iconId"`
IsAmt int `json:"isAmt"`
FolderDesc string `json:"folderDesc,omitempty"`
AddTime string `json:"addTime"`
FolderId int64 `json:"folderId"`
ParentId int64 `json:"parentId"`
ParentName string `json:"parentName"`
NoteType int `json:"noteType,omitempty"`
UpdTime string `json:"updTime"`
IsShare int `json:"isShare"`
FolderIcon string `json:"folderIcon,omitempty"`
FolderName string `json:"folderName,omitempty"`
FileType int `json:"fileType"`
Status int `json:"status"`
IsFileShare int `json:"isFileShare,omitempty"`
FileName string `json:"fileName,omitempty"`
FileStars float64 `json:"fileStars,omitempty"`
IsFileDownload int `json:"isFileDownload,omitempty"`
FileComments int `json:"fileComments,omitempty"`
FileSize int64 `json:"fileSize,omitempty"`
FileIcon string `json:"fileIcon,omitempty"`
FileDownloads int `json:"fileDownloads,omitempty"`
FileUrl interface{} `json:"fileUrl"`
FileLikes int `json:"fileLikes,omitempty"`
FileId int64 `json:"fileId,omitempty"`
}
type Part struct {
PartNumber int `json:"partNumber"`
ETag string `json:"etag"`
}
type UploadResultResp struct {
Msg string `json:"msg"`
Code int `json:"code"`
List []struct {
FileIconId int `json:"fileIconId"`
FileName string `json:"fileName"`
FileIcon string `json:"fileIcon"`
FileId int64 `json:"fileId"`
Status int `json:"status"`
Token string `json:"token"`
} `json:"list"`
}

View File

@ -1,111 +0,0 @@
package template
import (
"encoding/hex"
"fmt"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/foxxorcat/mopan-sdk-go"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
func (d *ILanZou) login() error {
res, err := d.unproved("/login", http.MethodPost, func(req *resty.Request) {
req.SetBody(base.Json{
"loginName": d.Username,
"loginPwd": d.Password,
})
})
if err != nil {
return err
}
d.Token = utils.Json.Get(res, "data", "appToken").ToString()
if d.Token == "" {
return fmt.Errorf("failed to login: token is empty, resp: %s", res)
}
return nil
}
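// getTimestamp returns the current millisecond timestamp together with its AES-encrypted hex form, which is sent as the timestamp query parameter.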
func getTimestamp(secret []byte) (int64, string, error) {
ts := time.Now().UnixMilli()
tsStr := strconv.FormatInt(ts, 10)
res, err := mopan.AesEncrypt([]byte(tsStr), secret)
if err != nil {
return 0, "", err
}
return ts, hex.EncodeToString(res), nil
}
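// request signs every call with the device/UUID query parameters and the encrypted timestamp; on auth failures (code -1/-2) of proved requests it logs in again and retries once.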
func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, proved bool, retry ...bool) ([]byte, error) {
_, ts_str, err := getTimestamp(d.conf.secret)
if err != nil {
return nil, err
}
params := []string{
"uuid=" + url.QueryEscape(d.UUID),
"devType=6",
"devCode=" + url.QueryEscape(d.UUID),
"devModel=chrome",
"devVersion=" + url.QueryEscape(d.conf.devVersion),
"appVersion=",
"timestamp=" + ts_str,
}
if proved {
params = append(params, "appToken="+url.QueryEscape(d.Token))
}
params = append(params, "extra=2")
queryString := strings.Join(params, "&")
req := base.RestyClient.R()
req.SetHeaders(map[string]string{
"Origin": d.conf.site,
"Referer": d.conf.site + "/",
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/125.0.0.0 Safari/537.36 Edg/125.0.0.0",
})
if callback != nil {
callback(req)
}
res, err := req.Execute(method, d.conf.base+pathname+"?"+queryString)
if err != nil {
if res != nil {
log.Errorf("[iLanZou] request error: %s", res.String())
}
return nil, err
}
isRetry := len(retry) > 0 && retry[0]
body := res.Body()
code := utils.Json.Get(body, "code").ToInt()
msg := utils.Json.Get(body, "msg").ToString()
if code != 200 {
if !isRetry && proved && (utils.SliceContains([]int{-1, -2}, code) || d.Token == "") {
err = d.login()
if err != nil {
return nil, err
}
return d.request(pathname, method, callback, proved, true)
}
return nil, fmt.Errorf("%d: %s", code, msg)
}
return body, nil
}
func (d *ILanZou) unproved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
return d.request("/"+d.conf.unproved+pathname, method, callback, false)
}
func (d *ILanZou) proved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
return d.request("/"+d.conf.proved+pathname, method, callback, true)
}

View File

@ -62,7 +62,7 @@ func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
for _, file := range dirs {
gateurl := *d.gateURL
gateurl.Path = "ipfs/" + file.Hash
gateurl.RawQuery = "filename=" + url.PathEscape(file.Name)
gateurl.RawQuery = "filename=" + file.Name
objlist = append(objlist, &model.ObjectURL{
Object: model.Object{ID: file.Hash, Name: file.Name, Size: int64(file.Size), IsFolder: file.Type == 1},
Url: model.Url{Url: gateurl.String()},
@ -73,7 +73,7 @@ func (d *IPFS) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]
}
func (d *IPFS) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + url.PathEscape(file.GetName())
link := d.Gateway + "/ipfs/" + file.GetID() + "/?filename=" + file.GetName()
return &model.Link{URL: link}, nil
}

View File

@ -1,8 +0,0 @@
// +build linux darwin windows
// +build amd64 arm64
package drivers
import (
_ "github.com/alist-org/alist/v3/drivers/lark"
)

View File

@ -1,397 +0,0 @@
package lark
import (
"context"
"errors"
"fmt"
"io"
"net/http"
"strconv"
"strings"
"time"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
lark "github.com/larksuite/oapi-sdk-go/v3"
larkcore "github.com/larksuite/oapi-sdk-go/v3/core"
larkdrive "github.com/larksuite/oapi-sdk-go/v3/service/drive/v1"
"golang.org/x/time/rate"
)
type Lark struct {
model.Storage
Addition
client *lark.Client
rootFolderToken string
}
func (c *Lark) Config() driver.Config {
return config
}
func (c *Lark) GetAddition() driver.Additional {
return &c.Addition
}
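// Init resolves the configured RootFolderPath segment by segment into its Lark folder token.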
func (c *Lark) Init(ctx context.Context) error {
c.client = lark.NewClient(c.AppId, c.AppSecret, lark.WithTokenCache(newTokenCache()))
paths := strings.Split(c.RootFolderPath, "/")
token := ""
var ok bool
var file *larkdrive.File
for _, p := range paths {
if p == "" {
token = ""
continue
}
resp, err := c.client.Drive.File.ListByIterator(ctx, larkdrive.NewListFileReqBuilder().FolderToken(token).Build())
if err != nil {
return err
}
for {
ok, file, err = resp.Next()
if !ok {
return errs.ObjectNotFound
}
if err != nil {
return err
}
if *file.Type == "folder" && *file.Name == p {
token = *file.Token
break
}
}
}
c.rootFolderToken = token
return nil
}
func (c *Lark) Drop(ctx context.Context) error {
return nil
}
func (c *Lark) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
token, ok := c.getObjToken(ctx, dir.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
if token == emptyFolderToken {
return nil, nil
}
resp, err := c.client.Drive.File.ListByIterator(ctx, larkdrive.NewListFileReqBuilder().FolderToken(token).Build())
if err != nil {
return nil, err
}
ok = false
var file *larkdrive.File
var res []model.Obj
for {
ok, file, err = resp.Next()
if !ok {
break
}
if err != nil {
return nil, err
}
modifiedUnix, _ := strconv.ParseInt(*file.ModifiedTime, 10, 64)
createdUnix, _ := strconv.ParseInt(*file.CreatedTime, 10, 64)
f := model.Object{
ID: *file.Token,
Path: strings.Join([]string{c.RootFolderPath, dir.GetPath(), *file.Name}, "/"),
Name: *file.Name,
Size: 0,
Modified: time.Unix(modifiedUnix, 0),
Ctime: time.Unix(createdUnix, 0),
IsFolder: *file.Type == "folder",
}
res = append(res, &f)
}
return res, nil
}
func (c *Lark) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
token, ok := c.getObjToken(ctx, file.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
resp, err := c.client.GetTenantAccessTokenBySelfBuiltApp(ctx, &larkcore.SelfBuiltTenantAccessTokenReq{
AppID: c.AppId,
AppSecret: c.AppSecret,
})
if err != nil {
return nil, err
}
if !c.ExternalMode {
accessToken := resp.TenantAccessToken
url := fmt.Sprintf("https://open.feishu.cn/open-apis/drive/v1/files/%s/download", token)
req, err := http.NewRequest(http.MethodGet, url, nil)
if err != nil {
return nil, err
}
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", accessToken))
req.Header.Set("Range", "bytes=0-1")
ar, err := http.DefaultClient.Do(req)
if err != nil {
return nil, err
}
if ar.StatusCode != http.StatusPartialContent {
return nil, errors.New("failed to get download link")
}
return &model.Link{
URL: url,
Header: http.Header{
"Authorization": []string{fmt.Sprintf("Bearer %s", accessToken)},
},
}, nil
} else {
url := strings.Join([]string{c.TenantUrlPrefix, "file", token}, "/")
return &model.Link{
URL: url,
}, nil
}
}
func (c *Lark) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
token, ok := c.getObjToken(ctx, parentDir.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
body, err := larkdrive.NewCreateFolderFilePathReqBodyBuilder().FolderToken(token).Name(dirName).Build()
if err != nil {
return nil, err
}
resp, err := c.client.Drive.File.CreateFolder(ctx,
larkdrive.NewCreateFolderFileReqBuilder().Body(body).Build())
if err != nil {
return nil, err
}
if !resp.Success() {
return nil, errors.New(resp.Error())
}
return &model.Object{
ID: *resp.Data.Token,
Path: strings.Join([]string{c.RootFolderPath, parentDir.GetPath(), dirName}, "/"),
Name: dirName,
Size: 0,
IsFolder: true,
}, nil
}
func (c *Lark) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
srcToken, ok := c.getObjToken(ctx, srcObj.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
dstDirToken, ok := c.getObjToken(ctx, dstDir.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
req := larkdrive.NewMoveFileReqBuilder().
Body(larkdrive.NewMoveFileReqBodyBuilder().
Type("file").
FolderToken(dstDirToken).
Build()).FileToken(srcToken).
Build()
// send the request
resp, err := c.client.Drive.File.Move(ctx, req)
if err != nil {
return nil, err
}
if !resp.Success() {
return nil, errors.New(resp.Error())
}
return nil, nil
}
func (c *Lark) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
// TODO rename obj, optional
return nil, errs.NotImplement
}
func (c *Lark) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
srcToken, ok := c.getObjToken(ctx, srcObj.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
dstDirToken, ok := c.getObjToken(ctx, dstDir.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
req := larkdrive.NewCopyFileReqBuilder().
Body(larkdrive.NewCopyFileReqBodyBuilder().
Name(srcObj.GetName()).
Type("file").
FolderToken(dstDirToken).
Build()).FileToken(srcToken).
Build()
// send the request
resp, err := c.client.Drive.File.Copy(ctx, req)
if err != nil {
return nil, err
}
if !resp.Success() {
return nil, errors.New(resp.Error())
}
return nil, nil
}
func (c *Lark) Remove(ctx context.Context, obj model.Obj) error {
token, ok := c.getObjToken(ctx, obj.GetPath())
if !ok {
return errs.ObjectNotFound
}
req := larkdrive.NewDeleteFileReqBuilder().
FileToken(token).
Type("file").
Build()
// send the request
resp, err := c.client.Drive.File.Delete(ctx, req)
if err != nil {
return err
}
if !resp.Success() {
return errors.New(resp.Error())
}
return nil
}
var uploadLimit = rate.NewLimiter(rate.Every(time.Second), 5)
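// Put drives Lark's prepare/part/finish upload flow, throttling each API call with uploadLimit and reporting progress after every block.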
func (c *Lark) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
token, ok := c.getObjToken(ctx, dstDir.GetPath())
if !ok {
return nil, errs.ObjectNotFound
}
// prepare
req := larkdrive.NewUploadPrepareFileReqBuilder().
FileUploadInfo(larkdrive.NewFileUploadInfoBuilder().
FileName(stream.GetName()).
ParentType(`explorer`).
ParentNode(token).
Size(int(stream.GetSize())).
Build()).
Build()
// send the request
uploadLimit.Wait(ctx)
resp, err := c.client.Drive.File.UploadPrepare(ctx, req)
if err != nil {
return nil, err
}
if !resp.Success() {
return nil, errors.New(resp.Error())
}
uploadId := *resp.Data.UploadId
blockSize := *resp.Data.BlockSize
blockCount := *resp.Data.BlockNum
// upload
for i := 0; i < blockCount; i++ {
length := int64(blockSize)
if i == blockCount-1 {
length = stream.GetSize() - int64(i*blockSize)
}
reader := io.LimitReader(stream, length)
req := larkdrive.NewUploadPartFileReqBuilder().
Body(larkdrive.NewUploadPartFileReqBodyBuilder().
UploadId(uploadId).
Seq(i).
Size(int(length)).
File(reader).
Build()).
Build()
// send the request
uploadLimit.Wait(ctx)
resp, err := c.client.Drive.File.UploadPart(ctx, req)
if err != nil {
return nil, err
}
if !resp.Success() {
return nil, errors.New(resp.Error())
}
up(float64(i) / float64(blockCount))
}
//close
closeReq := larkdrive.NewUploadFinishFileReqBuilder().
Body(larkdrive.NewUploadFinishFileReqBodyBuilder().
UploadId(uploadId).
BlockNum(blockCount).
Build()).
Build()
// send the request
closeResp, err := c.client.Drive.File.UploadFinish(ctx, closeReq)
if err != nil {
return nil, err
}
if !closeResp.Success() {
return nil, errors.New(closeResp.Error())
}
return &model.Object{
ID: *closeResp.Data.FileToken,
}, nil
}
//func (d *Lark) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*Lark)(nil)

View File

@ -1,36 +0,0 @@
package lark
import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)
type Addition struct {
// Usually one of two
driver.RootPath
// define other
AppId string `json:"app_id" type:"text" help:"app id"`
AppSecret string `json:"app_secret" type:"text" help:"app secret"`
ExternalMode bool `json:"external_mode" type:"bool" help:"external mode"`
TenantUrlPrefix string `json:"tenant_url_prefix" type:"text" help:"tenant url prefix"`
}
var config = driver.Config{
Name: "Lark",
LocalSort: false,
OnlyLocal: false,
OnlyProxy: false,
NoCache: false,
NoUpload: false,
NeedMs: false,
DefaultRoot: "/",
CheckStatus: false,
Alert: "",
NoOverwriteUpload: true,
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Lark{}
})
}

View File

@ -1,32 +0,0 @@
package lark
import (
"context"
"github.com/Xhofe/go-cache"
"time"
)
type TokenCache struct {
cache.ICache[string]
}
func (t *TokenCache) Set(_ context.Context, key string, value string, expireTime time.Duration) error {
t.ICache.Set(key, value, cache.WithEx[string](expireTime))
return nil
}
func (t *TokenCache) Get(_ context.Context, key string) (string, error) {
v, ok := t.ICache.Get(key)
if ok {
return v, nil
}
return "", nil
}
func newTokenCache() *TokenCache {
c := cache.NewMemCache[string]()
return &TokenCache{c}
}

View File

@ -1,66 +0,0 @@
package lark
import (
"context"
"github.com/Xhofe/go-cache"
larkdrive "github.com/larksuite/oapi-sdk-go/v3/service/drive/v1"
log "github.com/sirupsen/logrus"
"path"
"time"
)
const objTokenCacheDuration = 5 * time.Minute
const emptyFolderToken = "empty"
var objTokenCache = cache.NewMemCache[string]()
var exOpts = cache.WithEx[string](objTokenCacheDuration)
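// getObjToken maps a folder path to its Lark file token by recursively resolving the parent folder and scanning its listing; results are cached for objTokenCacheDuration.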
func (c *Lark) getObjToken(ctx context.Context, folderPath string) (string, bool) {
if token, ok := objTokenCache.Get(folderPath); ok {
return token, true
}
dir, name := path.Split(folderPath)
// strip the last slash of dir if it exists
if len(dir) > 0 && dir[len(dir)-1] == '/' {
dir = dir[:len(dir)-1]
}
if name == "" {
return c.rootFolderToken, true
}
var parentToken string
var found bool
parentToken, found = c.getObjToken(ctx, dir)
if !found {
return emptyFolderToken, false
}
req := larkdrive.NewListFileReqBuilder().FolderToken(parentToken).Build()
resp, err := c.client.Drive.File.ListByIterator(ctx, req)
if err != nil {
log.WithError(err).Error("failed to list files")
return emptyFolderToken, false
}
var file *larkdrive.File
for {
found, file, err = resp.Next()
if !found {
break
}
if err != nil {
log.WithError(err).Error("failed to get next file")
break
}
if *file.Name == name {
objTokenCache.Set(folderPath, *file.Token, exOpts)
return *file.Token, true
}
}
return emptyFolderToken, false
}
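getObjToken resolves each uncached path segment by listing its parent, recursing toward the mount root and caching what it finds along the way. For reference, a standalone sketch of how path.Split behaves on the inputs it sees (example paths are invented):

package main

import (
	"fmt"
	"path"
)

func main() {
	// path.Split keeps the trailing slash on the directory part,
	// which is why the function strips it before recursing.
	dir, name := path.Split("/docs/reports")
	fmt.Println(dir, name) // "/docs/" "reports"

	dir, name = path.Split("/")
	fmt.Println(dir, name) // "/" "" -> an empty name stops the recursion at the root token
}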

View File

@ -21,7 +21,7 @@ import (
"github.com/alist-org/alist/v3/internal/sign"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/alist-org/alist/v3/server/common"
"github.com/alist-org/times"
"github.com/djherbis/times"
log "github.com/sirupsen/logrus"
_ "golang.org/x/image/webp"
)
@ -257,18 +257,10 @@ func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
 func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
 	var err error
-	if utils.SliceContains([]string{"", "delete permanently"}, d.RecycleBinPath) {
-		if obj.IsDir() {
-			err = os.RemoveAll(obj.GetPath())
-		} else {
-			err = os.Remove(obj.GetPath())
-		}
+	if obj.IsDir() {
+		err = os.RemoveAll(obj.GetPath())
 	} else {
-		dstPath := filepath.Join(d.RecycleBinPath, obj.GetName())
-		if utils.Exists(dstPath) {
-			dstPath = filepath.Join(d.RecycleBinPath, obj.GetName()+"_"+time.Now().Format("20060102150405"))
-		}
-		err = os.Rename(obj.GetPath(), dstPath)
+		err = os.Remove(obj.GetPath())
 	}
 	if err != nil {
 		return err

View File

@ -11,7 +11,6 @@ type Addition struct {
 	ThumbCacheFolder string `json:"thumb_cache_folder"`
 	ShowHidden       bool   `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
 	MkdirPerm        string `json:"mkdir_perm" default:"777"`
-	RecycleBinPath   string `json:"recycle_bin_path" default:"delete permanently" help:"path to recycle bin, delete permanently if empty or keep 'delete permanently'"`
 }
 var config = driver.Config{

View File

@ -188,9 +188,6 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
 		_ = tempFile.Close()
 	}()
 	uploader := s3manager.NewUploader(s)
-	if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
-		uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
-	}
 	input := &s3manager.UploadInput{
 		Bucket: &resp.Data.Bucket,
 		Key:    &resp.Data.Object,
@ -206,7 +203,7 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
 		return err
 	}
 	h := md5.New()
-	_, err = utils.CopyWithBuffer(h, tempFile)
+	_, err = io.Copy(h, tempFile)
 	if err != nil {
 		return err
 	}
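The removed size check above exists because an S3 multipart upload allows at most 10,000 parts; with the 5 MiB default part size that caps out near 48.8 GiB, so larger streams need a bigger part size. A rough sketch of that arithmetic with the values written out by hand (the real code uses s3manager.MaxUploadParts and s3manager.DefaultUploadPartSize):

package main

import "fmt"

const (
	maxUploadParts        = 10000           // s3manager.MaxUploadParts
	defaultUploadPartSize = 5 * 1024 * 1024 // s3manager.DefaultUploadPartSize (5 MiB)
)

// partSizeFor mirrors the removed check: grow the part size only when the
// default would need more than the allowed number of parts.
func partSizeFor(total int64) int64 {
	if total > maxUploadParts*defaultUploadPartSize {
		return total / (maxUploadParts - 1)
	}
	return defaultUploadPartSize
}

func main() {
	fmt.Println(partSizeFor(100 << 30)) // ~10.7 MB parts for a 100 GiB stream
}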

View File

@ -8,7 +8,6 @@ import (
"time"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/pquerna/otp/totp"
"github.com/rclone/rclone/lib/readers"
"github.com/alist-org/alist/v3/internal/driver"
@ -34,16 +33,8 @@ func (d *Mega) GetAddition() driver.Additional {
 }
 func (d *Mega) Init(ctx context.Context) error {
-	var twoFACode = d.TwoFACode
 	d.c = mega.New()
-	if d.TwoFASecret != "" {
-		code, err := totp.GenerateCode(d.TwoFASecret, time.Now())
-		if err != nil {
-			return fmt.Errorf("generate totp code failed: %w", err)
-		}
-		twoFACode = code
-	}
-	return d.c.MultiFactorLogin(d.Email, d.Password, twoFACode)
+	return d.c.Login(d.Email, d.Password)
 }
 func (d *Mega) Drop(ctx context.Context) error {
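In the removed branch above, a stored TOTP secret is turned into the current 6-digit code at login time, so the driver can re-authenticate without user input, and MultiFactorLogin passes that code to Mega. A standalone sketch of the same library call (the secret is a placeholder, not a real credential):

package main

import (
	"fmt"
	"time"

	"github.com/pquerna/otp/totp"
)

func main() {
	// A base32 TOTP secret; this value is only an example.
	secret := "JBSWY3DPEHPK3PXP"
	code, err := totp.GenerateCode(secret, time.Now())
	if err != nil {
		panic(err)
	}
	fmt.Println(code) // a 6-digit code that changes every 30 seconds
}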

View File

@ -9,10 +9,8 @@ type Addition struct {
 	// Usually one of two
 	//driver.RootPath
 	//driver.RootID
-	Email       string `json:"email" required:"true"`
-	Password    string `json:"password" required:"true"`
-	TwoFACode   string `json:"two_fa_code" required:"false" help:"2FA 6-digit code, filling in the 2FA code alone will not support reloading driver"`
-	TwoFASecret string `json:"two_fa_secret" required:"false" help:"2FA secret"`
+	Email    string `json:"email" required:"true"`
+	Password string `json:"password" required:"true"`
 }
 var config = driver.Config{

View File

@ -43,31 +43,23 @@ func (d *MoPan) Init(ctx context.Context) error {
 	if d.uploadThread < 1 || d.uploadThread > 32 {
 		d.uploadThread, d.UploadThread = 3, "3"
 	}
-	defer func() { d.SMSCode = "" }()
-	login := func() (err error) {
-		var loginData *mopan.LoginResp
-		if d.SMSCode != "" {
-			loginData, err = d.client.LoginBySmsStep2(d.Phone, d.SMSCode)
-		} else {
-			loginData, err = d.client.Login(d.Phone, d.Password)
-		}
+	login := func() error {
+		data, err := d.client.Login(d.Phone, d.Password)
 		if err != nil {
 			return err
 		}
-		d.client.SetAuthorization(loginData.Token)
+		d.client.SetAuthorization(data.Token)
 		info, err := d.client.GetUserInfo()
 		if err != nil {
 			return err
 		}
 		d.userID = info.UserID
-		log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, loginData.UserCloudStorageRelations)
+		log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, data.UserCloudStorageRelations)
 		cloudCircleApp, _ := d.client.QueryAllCloudCircleApp()
 		log.Debugf("[mopan] Phone: %s CloudCircleApp: %+v", d.Phone, cloudCircleApp)
 		if d.RootFolderID == "" {
-			for _, userCloudStorage := range loginData.UserCloudStorageRelations {
+			for _, userCloudStorage := range data.UserCloudStorageRelations {
 				if userCloudStorage.Path == "/文件" {
 					d.RootFolderID = userCloudStorage.FolderID
@ -84,20 +76,8 @@ func (d *MoPan) Init(ctx context.Context) error {
 				op.MustSaveDriverStorage(d)
 			}
 			return err
-		})
-	var deviceInfo mopan.DeviceInfo
-	if strings.TrimSpace(d.DeviceInfo) != "" && utils.Json.UnmarshalFromString(d.DeviceInfo, &deviceInfo) == nil {
-		d.client.SetDeviceInfo(&deviceInfo)
-	}
-	d.DeviceInfo, _ = utils.Json.MarshalToString(d.client.GetDeviceInfo())
-	if strings.Contains(d.SMSCode, "send") {
-		if _, err := d.client.LoginBySms(d.Phone); err != nil {
-			return err
-		}
-		return errors.New("please enter the SMS code")
-	}
+		}).SetDeviceInfo(d.DeviceInfo)
+	d.DeviceInfo = d.client.GetDeviceInfo()
 	return login()
 }
@ -139,13 +119,10 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
 	}
 	data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&amp;", "&"), "http://", "https://", 1)
-	res, err := base.NoRedirectClient.R().SetDoNotParseResponse(true).SetContext(ctx).Get(data.DownloadUrl)
+	res, err := base.NoRedirectClient.R().SetContext(ctx).Head(data.DownloadUrl)
 	if err != nil {
 		return nil, err
 	}
-	defer func() {
-		_ = res.RawBody().Close()
-	}()
 	if res.StatusCode() == 302 {
 		data.DownloadUrl = res.Header().Get("location")
 	}
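Both versions above ask a client that does not follow redirects for the download URL and read the Location header themselves when the server answers 302. The same pattern with plain net/http, for reference (the URL is a placeholder; a HEAD request would work the same way):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	// A client that surfaces the 302 instead of following it.
	client := &http.Client{
		CheckRedirect: func(req *http.Request, via []*http.Request) error {
			return http.ErrUseLastResponse
		},
	}
	resp, err := client.Get("https://example.com/download") // placeholder URL
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	if resp.StatusCode == http.StatusFound {
		fmt.Println("redirect target:", resp.Header.Get("Location"))
	}
}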
@ -295,7 +272,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
 	}
 	if !initUpdload.FileDataExists {
-		// utils.Log.Error(d.client.CloudDiskStartBusiness())
+		utils.Log.Error(d.client.CloudDiskStartBusiness())
 		threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
 			retry.Attempts(3),
@ -323,7 +300,6 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
 		if err != nil {
 			return err
 		}
-		req.ContentLength = byteSize
 		resp, err := base.HttpClient.Do(req)
 		if err != nil {
 			return err

View File

@ -8,7 +8,6 @@ import (
 type Addition struct {
 	Phone        string `json:"phone" required:"true"`
 	Password     string `json:"password" required:"true"`
-	SMSCode      string `json:"sms_code" help:"input 'send' send sms "`
 	RootFolderID string `json:"root_folder_id" default:""`

Some files were not shown because too many files have changed in this diff.