Compare commits
189 Commits
e66abb3f58, 742335f80e, f1979a8bbc, 1f835502ba, 424ab2d0c0, 858ba19670, 0c7e47a76c, 53926d5cd0,
47f4b05517, 6d85f1b0c0, e49fda3e2a, da5e35578a, 812f58ae6d, 9bd3c87bcc, c82866975e, aef952ae68,
9222510d8d, d88b54d98a, 85a28d9822, 4f7761fe2c, a8c900d09e, 8bccb69e8d, 0f29a811bf, 442c2f77ea,
ce06f394f1, e3e790f461, f0e8c0e886, 86b35ae5cf, 4930f85b90, 85fe65951d, 1381e8fb27, 292bbe94ee,
bb6747de4e, 555ef0eb1a, bff56ffd0f, 34b73b94f7, 434892f135, e6e2d03ba1, 28bb3f6310, fb729c1846,
4448e08f5b, 8020d42b10, 9d5fb7f595, 126cfe9f93, fd96a7ccf4, 03b9b9a119, 03dbdfc0dd, 2683621ed7,
be537aa49b, 6f742a68cf, 97a4b8321d, 8c432d3339, ff25e51f80, 88831b5d5a, b97c9173af, 207c7e05fe,
7db27e6da8, b5cc90cb5a, 8a427ddc49, c36644a172, 45b1ff4a24, a4a9675616, 8531b23382, 2c15349ce4,
5afd65b65c, e2434029f9, bdf7abe717, 2c8d003c2e, a006f57637, be5d94cd11, 977b3cf9ab, 182aacd309,
57bac9e0d2, 478470f609, 6b8f35e7fa, 697a0ed2d3, 299bfb4d7b, 3eca38e599, ab216ed170, e91c42c9dc,
54f7b21a73, de56f926cf, 6d4ab57a0e, 734d4b0354, 74b20dedc3, 83c2269330, 296be88b5f, 026e944cbb,
8bdfc7ac8e, e4a6b758dc, 66b7fe1e1b, f475eb4401, b99e709bdb, f4dcf4599c, 54e75d7287, d142fc3449,
f23567199b, 1420492d81, b88067ea2f, d5f381ef6f, 68af284dad, d26887d211, 3f405de6a9, 6100647310,
34746e951c, b6134dc515, d455a232ef, fe34d30d17, 0fbb986ba9, 1280070438, d7f66138eb, b2890f05ab,
7583c4d734, 11a30c5044, de9647a5fa, 8d5283604c, 867accafd1, 6fc6751463, f904596cbc, 3d51845f57,
a7421d8fc2, 55a14bc271, 91f51f17d0, 4355dae491, da1c7a4c23, 769281bd40, 3bbdd4fa89, 68f440abdb,
65c5ec0c34, a6325967d0, 4dff49470a, cc86d6f3d1, c0f9c8ebaf, 4fc0a77565, aaffaee2b5, 8ef8023c20,
cdfbe6dcf2, 94d028743a, 7f7335435c, b9e192b29c, 69a98eaef6, 1ebc96a4e5, 66e2324cac, 7600dc28df,
8ef89ad0a4, 35d672217d, 1a283bb272, a008f54f4d, 3d7f79cba8, 9ff83a7950, e719a1a456, 40a6fcbdff,
0fd51646f6, e8958019d9, e1ef690784, 4024050dd0, eb918658f0, fb13dae136, 6b67a36d63, a64dd4885e,
0f03a747d8, 30977cdc6d, 106cf720c1, 882112ed1c, 2a6ab77295, f0981a0c8d, 57eea4db17, 234852ca61,
809105b67e, 02e8c31506, 19b39a5c04, 28e2731594, b1a279cbcc, 352a6a741a, 109015567a, 9e0fa77ca2,
335b11c698, 8e433355e6, 3504f017b9, cd2f8077fa, d5b68a91d2, 623c7dcea5, ecbd6d86cd, 7200344ace,
b313ac4daa, f2f312b43a, 6f6d20e1ba, 3231c3d930, b604e21c69
.github/stale.yml (vendored, 2 changes)

@@ -6,6 +6,8 @@ daysUntilClose: 20
exemptLabels:
- accepted
- security
- working
- pr-welcome
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
.github/workflows/auto_lang.yml (vendored, 12 changes)

@@ -11,27 +11,31 @@ on:
- 'cmd/lang.go'
workflow_dispatch:

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

jobs:
auto_lang:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: auto generate lang.json
runs-on: ${{ matrix.platform }}
steps:
- name: Setup go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}

- name: Checkout alist
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
path: alist

- name: Checkout alist-web
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: 'alist-org/alist-web'
ref: main
.github/workflows/build.yml (vendored, 17 changes)

@@ -6,22 +6,29 @@ on:
pull_request:
branches: [ 'main' ]

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

jobs:
build:
strategy:
matrix:
platform: [ubuntu-latest]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Build
runs-on: ${{ matrix.platform }}
steps:
- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}

- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- uses: benjlevesque/short-sha@v2.2
id: short-sha

- name: Install dependencies
run: |

@@ -35,7 +42,7 @@ jobs:
bash build.sh dev

- name: Upload artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: alist
name: alist_${{ env.SHA }}
path: dist
.github/workflows/build_docker.yml (vendored, 64 changes)

@@ -3,48 +3,84 @@ name: build_docker
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

jobs:
build_docker:
name: Build docker
name: Build Docker
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: xhofe/alist
- name: Replace release with dev
run: |
sed -i 's/release/dev/g' Dockerfile

- name: Docker meta with ffmpeg
id: meta-ffmpeg
uses: docker/metadata-action@v5
with:
images: xhofe/alist
flavor: |
suffix=-ffmpeg,onlatest=true

- uses: actions/setup-go@v4
with:
go-version: 'stable'

- name: Build go binary
run: bash build.sh dev docker-multiplatform

- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3

- name: Login to DockerHub
uses: docker/login-action@v2
if: github.event_name == 'push'
uses: docker/login-action@v3
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
uses: docker/build-push-action@v5
with:
context: .
push: true
file: Dockerfile.ci
push: ${{ github.event_name == 'push' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

- name: Build and push with ffmpeg
id: docker_build_ffmpeg
uses: docker/build-push-action@v5
with:
file: Dockerfile.ffmpeg
push: ${{ github.event_name == 'push' }}
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

build_docker_with_aria2:
needs: build_docker
name: Build docker with aria2
runs-on: ubuntu-latest
if: github.event_name == 'push'
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: alist-org/with_aria2
ref: main

@@ -62,4 +98,4 @@ jobs:
with:
github_token: ${{ secrets.MY_TOKEN }}
branch: main
repository: alist-org/with_aria2
repository: alist-org/with_aria2
.github/workflows/changelog.yml (vendored, 2 changes)

@@ -11,7 +11,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result

@@ -14,4 +14,4 @@ jobs:
actions: 'remove-labels'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: 'working'
labels: 'working,pr-welcome'
.github/workflows/release.yml (vendored, 8 changes)

@@ -9,7 +9,7 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:

@@ -21,12 +21,12 @@ jobs:
prerelease: true

- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}

- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0

@@ -53,7 +53,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: alist-org/desktop-release
ref: main
.github/workflows/release_android.yml (vendored, new file, 34 lines)

@@ -0,0 +1,34 @@
name: release_android

on:
release:
types: [ published ]

jobs:
release_android:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:

- name: Setup Go
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}

- name: Checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: Build
run: |
bash build.sh release android

- name: Upload assets
uses: softprops/action-gh-release@v1
with:
files: build/compress/*
.github/workflows/release_docker.yml (vendored, 41 changes)

@@ -11,43 +11,70 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4

- uses: actions/setup-go@v4
with:
go-version: 'stable'

- name: Build go binary
run: bash build.sh release docker-multiplatform

- name: Docker meta
id: meta
uses: docker/metadata-action@v4
uses: docker/metadata-action@v5
with:
images: xhofe/alist

- name: Set up QEMU
uses: docker/setup-qemu-action@v2
uses: docker/setup-qemu-action@v3

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
uses: docker/setup-buildx-action@v3

- name: Login to DockerHub
uses: docker/login-action@v2
uses: docker/login-action@v3
with:
username: xhofe
password: ${{ secrets.DOCKERHUB_TOKEN }}

- name: Build and push
id: docker_build
uses: docker/build-push-action@v4
uses: docker/build-push-action@v5
with:
context: .
file: Dockerfile.ci
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

- name: Docker meta with ffmpeg
id: meta-ffmpeg
uses: docker/metadata-action@v5
with:
images: xhofe/alist
flavor: |
latest=true
suffix=-ffmpeg,onlatest=true

- name: Build and push with ffmpeg
id: docker_build_ffmpeg
uses: docker/build-push-action@v5
with:
file: Dockerfile.ffmpeg
push: true
tags: ${{ steps.meta-ffmpeg.outputs.tags }}
labels: ${{ steps.meta-ffmpeg.outputs.labels }}
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x

release_docker_with_aria2:
needs: release_docker
name: Release docker with aria2
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
repository: alist-org/with_aria2
ref: main
.github/workflows/release_linux_musl.yml (vendored, 6 changes)

@@ -9,18 +9,18 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:

- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}

- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
.github/workflows/release_linux_musl_arm.yml (vendored, 6 changes)

@@ -9,18 +9,18 @@ jobs:
strategy:
matrix:
platform: [ ubuntu-latest ]
go-version: [ '1.20' ]
go-version: [ '1.21' ]
name: Release
runs-on: ${{ matrix.platform }}
steps:

- name: Setup Go
uses: actions/setup-go@v4
uses: actions/setup-go@v5
with:
go-version: ${{ matrix.go-version }}

- name: Checkout
uses: actions/checkout@v3
uses: actions/checkout@v4
with:
fetch-depth: 0
Dockerfile (19 changes)

@@ -1,18 +1,23 @@
FROM alpine:3.18 as builder
FROM alpine:edge as builder
LABEL stage=go-builder
WORKDIR /app/
RUN apk add --no-cache bash curl gcc git go musl-dev
COPY go.mod go.sum ./
RUN go mod download
COPY ./ ./
RUN apk add --no-cache bash curl gcc git go musl-dev; \
bash build.sh release docker
RUN bash build.sh release docker

FROM alpine:3.18
FROM alpine:edge
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY --from=builder /app/bin/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh
RUN apk update && \
apk upgrade --no-cache && \
apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh && \
rm -rf /var/cache/apk/*
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
CMD [ "/entrypoint.sh" ]
Dockerfile.ci (new file, 16 lines)

@@ -0,0 +1,16 @@
FROM alpine:edge
ARG TARGETPLATFORM
LABEL MAINTAINER="i@nn.ci"
VOLUME /opt/alist/data/
WORKDIR /opt/alist/
COPY /${TARGETPLATFORM}/alist ./
COPY entrypoint.sh /entrypoint.sh
RUN apk update && \
apk upgrade --no-cache && \
apk add --no-cache bash ca-certificates su-exec tzdata; \
chmod +x /entrypoint.sh && \
rm -rf /var/cache/apk/* && \
/entrypoint.sh version
ENV PUID=0 PGID=0 UMASK=022
EXPOSE 5244 5245
CMD [ "/entrypoint.sh" ]
Dockerfile.ffmpeg (new file, 4 lines)

@@ -0,0 +1,4 @@
FROM xhofe/alist:latest
RUN apk update && \
apk add --no-cache ffmpeg \
rm -rf /var/cache/apk/*
README.md (22 changes)

@@ -43,9 +43,9 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]

## Features

- [x] Multiple storage
- [x] Multiple storages
- [x] Local storage
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] [Aliyundrive](https://www.alipan.com/)
- [x] OneDrive / Sharepoint ([global](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)

@@ -66,7 +66,8 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [ILanzou](https://www.ilanzou.com/)
- [x] [Aliyundrive share](https://www.alipan.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)

@@ -74,6 +75,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [FeijiPan](https://www.feijipan.com/)
- [x] Easy to deploy and out-of-the-box
- [x] File preview (PDF, markdown, code, plain text, ...)
- [x] Image preview in gallery mode

@@ -86,7 +88,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]
- [x] Protected routes (password protection and authentication)
- [x] WebDav (see https://alist.nn.ci/guide/webdav.html for details)
- [x] [Docker Deploy](https://hub.docker.com/r/xhofe/alist)
- [x] Cloudflare workers proxy
- [x] Cloudflare Workers proxy
- [x] File/Folder package download
- [x] Web upload(Can allow visitors to upload), delete, mkdir, rename, move and copy
- [x] Offline download

@@ -103,7 +105,7 @@ English | [中文](./README_cn.md)| [日本語](./README_ja.md) | [Contributing]

## Discussion

Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature request only.**
Please go to our [discussion forum](https://github.com/Xhofe/alist/discussions) for general questions, **issues are for bug reports and feature requests only.**

## Sponsor

@@ -112,22 +114,22 @@ https://alist.nn.ci/guide/sponsor.html

### Special sponsors

- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

## Contributors

Thanks goes to these wonderful people:

[](https://github.com/alist-org/alist/graphs/contributors)
[](https://github.com/alist-org/alist/graphs/contributors)

## License

The `AList` is open-source software licensed under the AGPL-3.0 license.

## Disclaimer
- This program is a free and open source project. It is designed to share files on the network disk, which is convenient for downloading and learning golang. Please abide by relevant laws and regulations when using it, and do not abuse it;
- This program is a free and open source project. It is designed to share files on the network disk, which is convenient for downloading and learning Golang. Please abide by relevant laws and regulations when using it, and do not abuse it;
- This program is implemented by calling the official sdk/interface, without destroying the official interface behavior;
- This program only does 302 redirect/traffic forwarding, and does not intercept, store, or tamper with any user data;
- Before using this program, you should understand and bear the corresponding risks, including but not limited to account ban, download speed limit, etc., which is none of this program's business;
README_cn.md (14 changes)

@@ -45,7 +45,7 @@

- [x] 多种存储
- [x] 本地存储
- [x] [阿里云盘](https://www.aliyundrive.com/)
- [x] [阿里云盘](https://www.alipan.com/)
- [x] OneDrive / Sharepoint([国际版](https://www.office.com/), [世纪互联](https://portal.partner.microsoftonline.cn),de,us)
- [x] [天翼云盘](https://cloud.189.cn) (个人云, 家庭云)
- [x] [GoogleDrive](https://drive.google.com/)

@@ -65,7 +65,8 @@
- [x] [夸克网盘](https://pan.quark.cn)
- [x] [迅雷网盘](https://pan.xunlei.com)
- [x] [蓝奏云](https://www.lanzou.com/)
- [x] [阿里云盘分享](https://www.aliyundrive.com/)
- [x] [蓝奏云优享版](https://www.ilanzou.com/)
- [x] [阿里云盘分享](https://www.alipan.com/)
- [x] [谷歌相册](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [一刻相册](https://photo.baidu.com/)

@@ -73,6 +74,7 @@
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [飞机盘](https://www.feijipan.com/)
- [x] 部署方便,开箱即用
- [x] 文件预览(PDF、markdown、代码、纯文本……)
- [x] 画廊模式下的图像预览

@@ -110,15 +112,15 @@ AList 是一个开源软件,如果你碰巧喜欢这个项目,并希望我

### 特别赞助

- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (国内API服务器赞助)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
- [VidHub](https://zh.okaapps.com/product/1659622164?ref=alist) - 苹果生态下优雅的网盘视频播放器,iPhone,iPad,Mac,Apple TV全平台支持。
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (国内API服务器赞助)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

## 贡献者

Thanks goes to these wonderful people:

[](https://github.com/alist-org/alist/graphs/contributors)
[](https://github.com/alist-org/alist/graphs/contributors)

## 许可
README_ja.md (14 changes)

@@ -45,7 +45,7 @@

- [x] マルチストレージ
- [x] ローカルストレージ
- [x] [Aliyundrive](https://www.aliyundrive.com/)
- [x] [Aliyundrive](https://www.alipan.com/)
- [x] OneDrive / Sharepoint ([グローバル](https://www.office.com/), [cn](https://portal.partner.microsoftonline.cn),de,us)
- [x] [189cloud](https://cloud.189.cn) (Personal, Family)
- [x] [GoogleDrive](https://drive.google.com/)

@@ -66,7 +66,8 @@
- [x] [Quark](https://pan.quark.cn)
- [x] [Thunder](https://pan.xunlei.com)
- [x] [Lanzou](https://www.lanzou.com/)
- [x] [Aliyundrive share](https://www.aliyundrive.com/)
- [x] [ILanzou](https://www.ilanzou.com/)
- [x] [Aliyundrive share](https://www.alipan.com/)
- [x] [Google photo](https://photos.google.com/)
- [x] [Mega.nz](https://mega.nz)
- [x] [Baidu photo](https://photo.baidu.com/)

@@ -74,6 +75,7 @@
- [x] [115](https://115.com/)
- [X] Cloudreve
- [x] [Dropbox](https://www.dropbox.com/)
- [x] [FeijiPan](https://www.feijipan.com/)
- [x] デプロイが簡単で、すぐに使える
- [x] ファイルプレビュー (PDF, マークダウン, コード, プレーンテキスト, ...)
- [x] ギャラリーモードでの画像プレビュー

@@ -112,15 +114,15 @@ https://alist.nn.ci/guide/sponsor.html

### スペシャルスポンサー

- [亚洲云 - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商](https://www.asiayun.com/aff/QQCOOQKZ) (sponsored Chinese API server)
- [找资源 - 阿里云盘资源搜索引擎](https://zhaoziyuan.pw/)
- [JetBrains: Essential tools for software developers and teams](https://www.jetbrains.com/)
- [VidHub](https://okaapps.com/product/1659622164?ref=alist) - An elegant cloud video player within the Apple ecosystem. Support for iPhone, iPad, Mac, and Apple TV.
- [亚洲云](https://www.asiayun.com/aff/QQCOOQKZ) - 高防服务器|服务器租用|福州高防|广东电信|香港服务器|美国服务器|海外服务器 - 国内靠谱的企业级云计算服务提供商 (sponsored Chinese API server)
- [找资源](https://zhaoziyuan.pw/) - 阿里云盘资源搜索引擎

## コントリビューター

これらの素晴らしい人々に感謝します:

[](https://github.com/alist-org/alist/graphs/contributors)
[](https://github.com/alist-org/alist/graphs/contributors)

## ライセンス
build.sh (87 changes)

@@ -49,6 +49,7 @@ BuildWinArm64() {
export GOARCH=arm64
export CC=$(pwd)/wrapper/zcc-arm64
export CXX=$(pwd)/wrapper/zcxx-arm64
export CGO_ENABLED=1
go build -o "$1" -ldflags="$ldflags" -tags=jsoniter .
}

@@ -75,7 +76,7 @@ BuildDev() {
export CGO_ENABLED=1
go build -o ./dist/$appName-$os_arch -ldflags="$muslflags" -tags=jsoniter .
done
xgo -targets=windows/amd64,darwin/amd64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
xgo -targets=windows/amd64,darwin/amd64,darwin/arm64 -out "$appName" -ldflags="$ldflags" -tags=jsoniter .
mv alist-* dist
cd dist
cp ./alist-windows-amd64.exe ./alist-windows-amd64-upx.exe

@@ -84,10 +85,61 @@ BuildDev() {
cat md5.txt
}

PrepareBuildDocker() {
echo "replace github.com/mattn/go-sqlite3 => github.com/leso-kn/go-sqlite3 v0.0.0-20230710125852-03158dc838ed" >>go.mod
go get gorm.io/driver/sqlite@v1.4.4
go mod download
}

BuildDocker() {
PrepareBuildDocker
go build -o ./bin/alist -ldflags="$ldflags" -tags=jsoniter .
}

BuildDockerMultiplatform() {
PrepareBuildDocker

BASE="https://musl.cc/"
FILES=(x86_64-linux-musl-cross aarch64-linux-musl-cross i486-linux-musl-cross s390x-linux-musl-cross armv6-linux-musleabihf-cross armv7l-linux-musleabihf-cross)
for i in "${FILES[@]}"; do
url="${BASE}${i}.tgz"
curl -L -o "${i}.tgz" "${url}"
sudo tar xf "${i}.tgz" --strip-components 1 -C /usr/local
rm -f "${i}.tgz"
done

docker_lflags="--extldflags '-static -fpic' $ldflags"
export CGO_ENABLED=1

OS_ARCHES=(linux-amd64 linux-arm64 linux-386 linux-s390x)
CGO_ARGS=(x86_64-linux-musl-gcc aarch64-linux-musl-gcc i486-linux-musl-gcc s390x-linux-musl-gcc)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
os=${os_arch%%-*}
arch=${os_arch##*-}
export GOOS=$os
export GOARCH=$arch
export CC=${cgo_cc}
echo "building for $os_arch"
go build -o ./$os/$arch/alist -ldflags="$docker_lflags" -tags=jsoniter .
done

DOCKER_ARM_ARCHES=(linux-arm/v6 linux-arm/v7)
CGO_ARGS=(armv6-linux-musleabihf-gcc armv7l-linux-musleabihf-gcc)
GO_ARM=(6 7)
export GOOS=linux
export GOARCH=arm
for i in "${!DOCKER_ARM_ARCHES[@]}"; do
docker_arch=${DOCKER_ARM_ARCHES[$i]}
cgo_cc=${CGO_ARGS[$i]}
export GOARM=${GO_ARM[$i]}
export CC=${cgo_cc}
echo "building for $docker_arch"
go build -o ./${docker_arch%%-*}/${docker_arch##*-}/alist -ldflags="$docker_lflags" -tags=jsoniter .
done
}

BuildRelease() {
rm -rf .git/
mkdir -p "build"

@@ -159,6 +211,27 @@ BuildReleaseLinuxMuslArm() {
done
}

BuildReleaseAndroid() {
rm -rf .git/
mkdir -p "build"
wget https://dl.google.com/android/repository/android-ndk-r26b-linux.zip
unzip android-ndk-r26b-linux.zip
rm android-ndk-r26b-linux.zip
OS_ARCHES=(amd64 arm64 386 arm)
CGO_ARGS=(x86_64-linux-android24-clang aarch64-linux-android24-clang i686-linux-android24-clang armv7a-linux-androideabi24-clang)
for i in "${!OS_ARCHES[@]}"; do
os_arch=${OS_ARCHES[$i]}
cgo_cc=$(realpath android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/${CGO_ARGS[$i]})
echo building for android-${os_arch}
export GOOS=android
export GOARCH=${os_arch##*-}
export CC=${cgo_cc}
export CGO_ENABLED=1
go build -o ./build/$appName-android-$os_arch -ldflags="$ldflags" -tags=jsoniter .
android-ndk-r26b/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip ./build/$appName-android-$os_arch
done
}

MakeRelease() {
cd build
mkdir compress

@@ -166,6 +239,11 @@ MakeRelease() {
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-android-*"); do
cp "$i" alist
tar -czvf compress/"$i".tar.gz alist
rm -f alist
done
for i in $(find . -type f -name "$appName-darwin-*"); do
cp "$i" alist

@@ -187,6 +265,8 @@ if [ "$1" = "dev" ]; then
FetchWebDev
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "docker-multiplatform" ]; then
BuildDockerMultiplatform
else
BuildDev
fi

@@ -194,12 +274,17 @@ elif [ "$1" = "release" ]; then
FetchWebRelease
if [ "$2" = "docker" ]; then
BuildDocker
elif [ "$2" = "docker-multiplatform" ]; then
BuildDockerMultiplatform
elif [ "$2" = "linux_musl_arm" ]; then
BuildReleaseLinuxMuslArm
MakeRelease "md5-linux-musl-arm.txt"
elif [ "$2" = "linux_musl" ]; then
BuildReleaseLinuxMusl
MakeRelease "md5-linux-musl.txt"
elif [ "$2" = "android" ]; then
BuildReleaseAndroid
MakeRelease "md5-android.txt"
else
BuildRelease
MakeRelease "md5.txt"
@@ -19,6 +19,7 @@ var AdminCmd = &cobra.Command{
Short: "Show admin user's info and some operations about admin user's password",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)

@@ -57,6 +58,7 @@ var ShowTokenCmd = &cobra.Command{
Short: "Show admin token",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
token := setting.GetStr(conf.Token)
utils.Log.Infof("Admin token: %s", token)
},

@@ -64,6 +66,7 @@ var ShowTokenCmd = &cobra.Command{

func setAdminPassword(pwd string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed get admin user: %+v", err)
@@ -15,6 +15,7 @@ var Cancel2FACmd = &cobra.Command{
Short: "Delete 2FA of admin user",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
admin, err := op.GetAdmin()
if err != nil {
utils.Log.Errorf("failed to get admin user: %+v", err)
@@ -7,6 +7,7 @@ import (

"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/bootstrap/data"
"github.com/alist-org/alist/v3/internal/db"
"github.com/alist-org/alist/v3/pkg/utils"
log "github.com/sirupsen/logrus"
)

@@ -19,6 +20,10 @@ func Init() {
bootstrap.InitIndex()
}

func Release() {
db.Close()
}

var pid = -1
var pidFile string
@@ -5,6 +5,8 @@ import (
"os"

"github.com/alist-org/alist/v3/cmd/flags"
_ "github.com/alist-org/alist/v3/drivers"
_ "github.com/alist-org/alist/v3/internal/offline_download"
"github.com/spf13/cobra"
)
@@ -2,6 +2,7 @@ package cmd

import (
"context"
"errors"
"fmt"
"net"
"net/http"

@@ -13,7 +14,6 @@ import (
"time"

"github.com/alist-org/alist/v3/cmd/flags"
_ "github.com/alist-org/alist/v3/drivers"
"github.com/alist-org/alist/v3/internal/bootstrap"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/pkg/utils"

@@ -35,9 +35,9 @@ the address is defined in config file`,
utils.Log.Infof("delayed start for %d seconds", conf.Conf.DelayedStart)
time.Sleep(time.Duration(conf.Conf.DelayedStart) * time.Second)
}
bootstrap.InitAria2()
bootstrap.InitQbittorrent()
bootstrap.InitOfflineDownloadTools()
bootstrap.LoadStorages()
bootstrap.InitTaskManager()
if !flags.Debug && !flags.Dev {
gin.SetMode(gin.ReleaseMode)
}

@@ -51,7 +51,7 @@ the address is defined in config file`,
httpSrv = &http.Server{Addr: httpBase, Handler: r}
go func() {
err := httpSrv.ListenAndServe()
if err != nil && err != http.ErrServerClosed {
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start http: %s", err.Error())
}
}()

@@ -62,7 +62,7 @@ the address is defined in config file`,
httpsSrv = &http.Server{Addr: httpsBase, Handler: r}
go func() {
err := httpsSrv.ListenAndServeTLS(conf.Conf.Scheme.CertFile, conf.Conf.Scheme.KeyFile)
if err != nil && err != http.ErrServerClosed {
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start https: %s", err.Error())
}
}()

@@ -86,7 +86,7 @@ the address is defined in config file`,
}
}
err = unixSrv.Serve(listener)
if err != nil && err != http.ErrServerClosed {
if err != nil && !errors.Is(err, http.ErrServerClosed) {
utils.Log.Fatalf("failed to start unix: %s", err.Error())
}
}()

@@ -100,7 +100,7 @@ the address is defined in config file`,
signal.Notify(quit, syscall.SIGINT, syscall.SIGTERM)
<-quit
utils.Log.Println("Shutdown server...")

Release()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Second)
defer cancel()
var wg sync.WaitGroup
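The server hunks above swap the plain err != http.ErrServerClosed comparison for errors.Is, which also matches a wrapped http.ErrServerClosed. A minimal, self-contained sketch of that check (the address here is illustrative only, not taken from the diff):

```go
package main

import (
	"context"
	"errors"
	"log"
	"net/http"
	"time"
)

func main() {
	srv := &http.Server{Addr: "127.0.0.1:5244"} // illustrative address only
	go func() {
		err := srv.ListenAndServe()
		// errors.Is also matches a wrapped http.ErrServerClosed,
		// which the old plain != comparison would miss.
		if err != nil && !errors.Is(err, http.ErrServerClosed) {
			log.Fatalf("failed to start http: %s", err)
		}
	}()
	time.Sleep(100 * time.Millisecond)     // let the listener come up
	_ = srv.Shutdown(context.Background()) // clean shutdown; the goroutine then sees ErrServerClosed
}
```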
@@ -31,6 +31,7 @@ var disableStorageCmd = &cobra.Command{
}
mountPath := args[0]
Init()
defer Release()
storage, err := db.GetStorageByMountPath(mountPath)
if err != nil {
utils.Log.Errorf("failed to query storage: %+v", err)

@@ -89,6 +90,7 @@ var listStorageCmd = &cobra.Command{
Short: "List all storages",
Run: func(cmd *cobra.Command, args []string) {
Init()
defer Release()
storages, _, err := db.GetStorages(1, -1)
if err != nil {
utils.Log.Errorf("failed to query storages: %+v", err)
@@ -2,19 +2,22 @@ package _115

import (
"context"
"strings"

driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
"strings"
"golang.org/x/time/rate"
)

type Pan115 struct {
model.Storage
Addition
client *driver115.Pan115Client
client *driver115.Pan115Client
limiter *rate.Limiter
}

func (d *Pan115) Config() driver.Config {

@@ -26,14 +29,27 @@ func (d *Pan115) GetAddition() driver.Additional {
}

func (d *Pan115) Init(ctx context.Context) error {
if d.LimitRate > 0 {
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
}
return d.login()
}

func (d *Pan115) WaitLimit(ctx context.Context) error {
if d.limiter != nil {
return d.limiter.Wait(ctx)
}
return nil
}

func (d *Pan115) Drop(ctx context.Context) error {
return nil
}

func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
files, err := d.getFiles(dir.GetID())
if err != nil && !errors.Is(err, driver115.ErrNotExist) {
return nil, err

@@ -44,11 +60,12 @@ func (d *Pan115) List(ctx context.Context, dir model.Obj, args model.ListArgs) (
}

func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
downloadInfo, err := d.client.
SetUserAgent(driver115.UA115Browser).
Download(file.(*FileObj).PickCode)
// recover for upload
d.client.SetUserAgent(driver115.UA115Desktop)
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
var userAgent = args.Header.Get("User-Agent")
downloadInfo, err := d.
DownloadWithUA(file.(*FileObj).PickCode, userAgent)
if err != nil {
return nil, err
}

@@ -60,6 +77,9 @@ func (d *Pan115) Link(ctx context.Context, file model.Obj, args model.LinkArgs)
}

func (d *Pan115) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
if _, err := d.client.Mkdir(parentDir.GetID(), dirName); err != nil {
return err
}

@@ -67,22 +87,38 @@ }
}

func (d *Pan115) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Move(dstDir.GetID(), srcObj.GetID())
}

func (d *Pan115) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Rename(srcObj.GetID(), newName)
}

func (d *Pan115) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Copy(dstDir.GetID(), srcObj.GetID())
}

func (d *Pan115) Remove(ctx context.Context, obj model.Obj) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}
return d.client.Delete(obj.GetID())
}

func (d *Pan115) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
if err := d.WaitLimit(ctx); err != nil {
return err
}

var (
fastInfo *driver115.UploadInitResp
dirID = dstDir.GetID()
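The LimitRate/WaitLimit additions above follow the usual golang.org/x/time/rate pattern: build one limiter at Init time and wait on it before every remote call. A standalone sketch of the same pattern, with a hypothetical client type that is not part of the driver:

```go
package main

import (
	"context"
	"fmt"

	"golang.org/x/time/rate"
)

// client mirrors the shape of the change: an optional token-bucket limiter
// created once, then waited on before every remote call.
type client struct {
	limiter *rate.Limiter
}

func newClient(callsPerSecond float64) *client {
	c := &client{}
	if callsPerSecond > 0 {
		// rate.Limit(n) allows n events per second; a burst of 1 keeps calls evenly spaced.
		c.limiter = rate.NewLimiter(rate.Limit(callsPerSecond), 1)
	}
	return c
}

func (c *client) waitLimit(ctx context.Context) error {
	if c.limiter != nil {
		return c.limiter.Wait(ctx) // blocks until a token is available or ctx is done
	}
	return nil // limiter disabled
}

func (c *client) list(ctx context.Context) error {
	if err := c.waitLimit(ctx); err != nil {
		return err
	}
	fmt.Println("list call issued")
	return nil
}

func main() {
	c := newClient(2) // at most ~2 calls per second
	for i := 0; i < 3; i++ {
		_ = c.list(context.Background())
	}
}
```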
@@ -6,17 +6,19 @@ import (
)

type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
PageSize int64 `json:"page_size" type:"number" default:"56" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
driver.RootID
}

var config = driver.Config{
Name: "115 Cloud",
DefaultRoot: "0",
OnlyProxy: true,
OnlyLocal: true,
Name: "115 Cloud",
DefaultRoot: "0",
//OnlyProxy: true,
//OnlyLocal: true,
NoOverwriteUpload: true,
}
@@ -5,12 +5,8 @@ import (
"crypto/tls"
"encoding/json"
"fmt"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"
"github.com/orzogc/fake115uploader/cipher"
"io"
"net/http"
"net/url"
"path/filepath"
"strconv"

@@ -18,29 +14,35 @@ import (
"sync"
"time"

"github.com/SheltonZhu/115driver/pkg/driver"
driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/conf"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/http_range"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/aliyun/aliyun-oss-go-sdk/oss"

driver115 "github.com/SheltonZhu/115driver/pkg/driver"
crypto "github.com/gaoyb7/115drive-webdav/115"
"github.com/orzogc/fake115uploader/cipher"
"github.com/pkg/errors"
)

var UserAgent = driver.UA115Desktop
var UserAgent = driver115.UA115Desktop

func (d *Pan115) login() error {
var err error
opts := []driver.Option{
driver.UA(UserAgent),
func(c *driver.Pan115Client) {
opts := []driver115.Option{
driver115.UA(UserAgent),
func(c *driver115.Pan115Client) {
c.Client.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
},
}
d.client = driver.New(opts...)
cr := &driver.Credential{}
d.client = driver115.New(opts...)
cr := &driver115.Credential{}
if d.Addition.QRCodeToken != "" {
s := &driver.QRCodeSession{
s := &driver115.QRCodeSession{
UID: d.Addition.QRCodeToken,
}
if cr, err = d.client.QRCodeLogin(s); err != nil {
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Addition.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)

@@ -59,7 +61,7 @@ func (d *Pan115) login() error {
func (d *Pan115) getFiles(fileId string) ([]FileObj, error) {
res := make([]FileObj, 0)
if d.PageSize <= 0 {
d.PageSize = driver.FileListLimit
d.PageSize = driver115.FileListLimit
}
files, err := d.client.ListWithLimit(fileId, d.PageSize)
if err != nil {

@@ -75,6 +77,61 @@ const (
appVer = "2.0.3.6"
)

func (c *Pan115) DownloadWithUA(pickCode, ua string) (*driver115.DownloadInfo, error) {
key := crypto.GenerateKey()
result := driver115.DownloadResp{}
params, err := utils.Json.Marshal(map[string]string{"pickcode": pickCode})
if err != nil {
return nil, err
}

data := crypto.Encode(params, key)

bodyReader := strings.NewReader(url.Values{"data": []string{data}}.Encode())
reqUrl := fmt.Sprintf("%s?t=%s", driver115.ApiDownloadGetUrl, driver115.Now().String())
req, _ := http.NewRequest(http.MethodPost, reqUrl, bodyReader)
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
req.Header.Set("Cookie", c.Cookie)
req.Header.Set("User-Agent", ua)

resp, err := c.client.Client.GetClient().Do(req)
if err != nil {
return nil, err
}
defer resp.Body.Close()

body, err := io.ReadAll(resp.Body)
if err != nil {
return nil, err
}
if err := utils.Json.Unmarshal(body, &result); err != nil {
return nil, err
}

if err = result.Err(string(body)); err != nil {
return nil, err
}

bytes, err := crypto.Decode(string(result.EncodedData), key)
if err != nil {
return nil, err
}

downloadInfo := driver115.DownloadData{}
if err := utils.Json.Unmarshal(bytes, &downloadInfo); err != nil {
return nil, err
}

for _, info := range downloadInfo {
if info.FileSize < 0 {
return nil, driver115.ErrDownloadEmpty
}
info.Header = resp.Request.Header
return info, nil
}
return nil, driver115.ErrUnexpected
}

func (d *Pan115) rapidUpload(fileSize int64, fileName, dirID, preID, fileID string, stream model.FileStreamer) (*driver115.UploadInitResp, error) {
var (
ecdhCipher *cipher.EcdhCipher

@@ -249,7 +306,7 @@ func (d *Pan115) UploadByMultipart(params *driver115.UploadOSSParams, fileSize i
go func(threadId int) {
defer func() {
if r := recover(); r != nil {
errCh <- fmt.Errorf("Recovered in %v", r)
errCh <- fmt.Errorf("recovered in %v", r)
}
}()
for chunk := range chunksCh {
drivers/115_share/driver.go (new file, 112 lines)

@@ -0,0 +1,112 @@
package _115_share

import (
"context"

driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"golang.org/x/time/rate"
)

type Pan115Share struct {
model.Storage
Addition
client *driver115.Pan115Client
limiter *rate.Limiter
}

func (d *Pan115Share) Config() driver.Config {
return config
}

func (d *Pan115Share) GetAddition() driver.Additional {
return &d.Addition
}

func (d *Pan115Share) Init(ctx context.Context) error {
if d.LimitRate > 0 {
d.limiter = rate.NewLimiter(rate.Limit(d.LimitRate), 1)
}

return d.login()
}

func (d *Pan115Share) WaitLimit(ctx context.Context) error {
if d.limiter != nil {
return d.limiter.Wait(ctx)
}
return nil
}

func (d *Pan115Share) Drop(ctx context.Context) error {
return nil
}

func (d *Pan115Share) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}

files := make([]driver115.ShareFile, 0)
fileResp, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, dir.GetID(), driver115.QueryLimit(int(d.PageSize)))
if err != nil {
return nil, err
}
files = append(files, fileResp.Data.List...)
total := fileResp.Data.Count
count := len(fileResp.Data.List)
for total > count {
fileResp, err := d.client.GetShareSnap(
d.ShareCode, d.ReceiveCode, dir.GetID(),
driver115.QueryLimit(int(d.PageSize)), driver115.QueryOffset(count),
)
if err != nil {
return nil, err
}
files = append(files, fileResp.Data.List...)
count += len(fileResp.Data.List)
}

return utils.SliceConvert(files, transFunc)
}

func (d *Pan115Share) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if err := d.WaitLimit(ctx); err != nil {
return nil, err
}
downloadInfo, err := d.client.DownloadByShareCode(d.ShareCode, d.ReceiveCode, file.GetID())
if err != nil {
return nil, err
}

return &model.Link{URL: downloadInfo.URL.URL}, nil
}

func (d *Pan115Share) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
return errs.NotSupport
}

func (d *Pan115Share) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}

func (d *Pan115Share) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return errs.NotSupport
}

func (d *Pan115Share) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}

func (d *Pan115Share) Remove(ctx context.Context, obj model.Obj) error {
return errs.NotSupport
}

func (d *Pan115Share) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
return errs.NotSupport
}

var _ driver.Driver = (*Pan115Share)(nil)
drivers/115_share/meta.go (new file, 34 lines)

@@ -0,0 +1,34 @@
package _115_share

import (
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
Cookie string `json:"cookie" type:"text" help:"one of QR code token and cookie required"`
QRCodeToken string `json:"qrcode_token" type:"text" help:"one of QR code token and cookie required"`
QRCodeSource string `json:"qrcode_source" type:"select" options:"web,android,ios,linux,mac,windows,tv" default:"linux" help:"select the QR code device, default linux"`
PageSize int64 `json:"page_size" type:"number" default:"20" help:"list api per page size of 115 driver"`
LimitRate float64 `json:"limit_rate" type:"number" default:"2" help:"limit all api request rate (1r/[limit_rate]s)"`
ShareCode string `json:"share_code" type:"text" required:"true" help:"share code of 115 share link"`
ReceiveCode string `json:"receive_code" type:"text" required:"true" help:"receive code of 115 share link"`
driver.RootID
}

var config = driver.Config{
Name: "115 Share",
DefaultRoot: "",
// OnlyProxy: true,
// OnlyLocal: true,
CheckStatus: false,
Alert: "",
NoOverwriteUpload: true,
NoUpload: true,
}

func init() {
op.RegisterDriver(func() driver.Driver {
return &Pan115Share{}
})
}
drivers/115_share/utils.go (new file, 111 lines)

@@ -0,0 +1,111 @@
package _115_share

import (
"fmt"
"strconv"
"time"

driver115 "github.com/SheltonZhu/115driver/pkg/driver"
"github.com/alist-org/alist/v3/internal/model"
"github.com/alist-org/alist/v3/pkg/utils"
"github.com/pkg/errors"
)

var _ model.Obj = (*FileObj)(nil)

type FileObj struct {
Size int64
Sha1 string
Utm time.Time
FileName string
isDir bool
FileID string
}

func (f *FileObj) CreateTime() time.Time {
return f.Utm
}

func (f *FileObj) GetHash() utils.HashInfo {
return utils.NewHashInfo(utils.SHA1, f.Sha1)
}

func (f *FileObj) GetSize() int64 {
return f.Size
}

func (f *FileObj) GetName() string {
return f.FileName
}

func (f *FileObj) ModTime() time.Time {
return f.Utm
}

func (f *FileObj) IsDir() bool {
return f.isDir
}

func (f *FileObj) GetID() string {
return f.FileID
}

func (f *FileObj) GetPath() string {
return ""
}

func transFunc(sf driver115.ShareFile) (model.Obj, error) {
timeInt, err := strconv.ParseInt(sf.UpdateTime, 10, 64)
if err != nil {
return nil, err
}
var (
utm = time.Unix(timeInt, 0)
isDir = (sf.IsFile == 0)
fileID = string(sf.FileID)
)
if isDir {
fileID = string(sf.CategoryID)
}
return &FileObj{
Size: int64(sf.Size),
Sha1: sf.Sha1,
Utm: utm,
FileName: string(sf.FileName),
isDir: isDir,
FileID: fileID,
}, nil
}

var UserAgent = driver115.UA115Browser

func (d *Pan115Share) login() error {
var err error
opts := []driver115.Option{
driver115.UA(UserAgent),
}
d.client = driver115.New(opts...)
if _, err := d.client.GetShareSnap(d.ShareCode, d.ReceiveCode, ""); err != nil {
return errors.Wrap(err, "failed to get share snap")
}
cr := &driver115.Credential{}
if d.QRCodeToken != "" {
s := &driver115.QRCodeSession{
UID: d.QRCodeToken,
}
if cr, err = d.client.QRCodeLoginWithApp(s, driver115.LoginApp(d.QRCodeSource)); err != nil {
return errors.Wrap(err, "failed to login by qrcode")
}
d.Cookie = fmt.Sprintf("UID=%s;CID=%s;SEID=%s", cr.UID, cr.CID, cr.SEID)
d.QRCodeToken = ""
} else if d.Cookie != "" {
if err = cr.FromCookie(d.Cookie); err != nil {
return errors.Wrap(err, "failed to login by cookies")
}
d.client.ImportCredential(cr)
} else {
return errors.New("missing cookie or qrcode account")
}

return d.client.LoginCheck()
}
@@ -6,6 +6,13 @@ import (
"encoding/base64"
"encoding/hex"
"fmt"
"golang.org/x/time/rate"
"io"
"net/http"
"net/url"
"sync"
"time"

"github.com/alist-org/alist/v3/drivers/base"
"github.com/alist-org/alist/v3/internal/driver"
"github.com/alist-org/alist/v3/internal/errs"

@@ -17,14 +24,12 @@ import (
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
"io"
"net/http"
"net/url"
)

type Pan123 struct {
model.Storage
Addition
apiRateLimit sync.Map
}

func (d *Pan123) Config() driver.Config {

@@ -232,6 +237,9 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}
uploader := s3manager.NewUploader(s)
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
}
input := &s3manager.UploadInput{
Bucket: &resp.Data.Bucket,
Key: &resp.Data.Key,

@@ -250,4 +258,11 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
return err
}

func (d *Pan123) APIRateLimit(api string) bool {
limiter, _ := d.apiRateLimit.LoadOrStore(api,
rate.NewLimiter(rate.Every(time.Millisecond*700), 1))
ins := limiter.(*rate.Limiter)
return ins.Allow()
}

var _ driver.Driver = (*Pan123)(nil)
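APIRateLimit above lazily keeps one limiter per endpoint in a sync.Map and uses the non-blocking Allow check. A small sketch of that per-key pattern, assuming golang.org/x/time/rate (the 700ms spacing mirrors the diff; the endpoint name in main is made up):

```go
package main

import (
	"fmt"
	"sync"
	"time"

	"golang.org/x/time/rate"
)

var apiLimiters sync.Map // endpoint path -> *rate.Limiter

// allow reports whether a request to api may be sent right now. LoadOrStore
// creates the limiter lazily the first time an endpoint is seen; rate.Every
// spaces calls roughly 700ms apart per endpoint.
func allow(api string) bool {
	limiter, _ := apiLimiters.LoadOrStore(api,
		rate.NewLimiter(rate.Every(700*time.Millisecond), 1))
	return limiter.(*rate.Limiter).Allow()
}

func main() {
	for i := 0; i < 3; i++ {
		fmt.Println("file/list allowed:", allow("file/list")) // made-up endpoint name
	}
}
```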
@@ -107,7 +107,7 @@ func (d *Pan123) newUpload(ctx context.Context, upReq *UploadResp, file model.Fi
if err != nil {
return err
}
up(j * 100 / chunkCount)
up(float64(j) * 100 / float64(chunkCount))
}
}
// complete s3 upload
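The one-line change above moves the upload-progress calculation from integer to floating-point division, so intermediate chunks no longer get truncated to whole-percent steps. A tiny illustration with made-up numbers:

```go
package main

import "fmt"

func main() {
	j, chunkCount := 1, 8
	fmt.Println(j * 100 / chunkCount)                   // 12: integer division truncates the progress
	fmt.Println(float64(j) * 100 / float64(chunkCount)) // 12.5: what the new code passes to up()
}
```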
@ -3,12 +3,18 @@ package _123
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/crc32"
|
||||
"math"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
resty "github.com/go-resty/resty/v2"
|
||||
jsoniter "github.com/json-iterator/go"
|
||||
)
|
||||
|
||||
@ -18,7 +24,7 @@ const (
|
||||
Api = "https://www.123pan.com/api"
|
||||
AApi = "https://www.123pan.com/a/api"
|
||||
BApi = "https://www.123pan.com/b/api"
|
||||
MainApi = Api
|
||||
MainApi = BApi
|
||||
SignIn = MainApi + "/user/sign_in"
|
||||
Logout = MainApi + "/user/logout"
|
||||
UserInfo = MainApi + "/user/info"
|
||||
@ -37,6 +43,104 @@ const (
|
||||
//AuthKeySalt = "8-8D$sL8gPjom7bk#cY"
|
||||
)
|
||||
|
||||
func signPath(path string, os string, version string) (k string, v string) {
	table := []byte{'a', 'd', 'e', 'f', 'g', 'h', 'l', 'm', 'y', 'i', 'j', 'n', 'o', 'p', 'k', 'q', 'r', 's', 't', 'u', 'b', 'c', 'v', 'w', 's', 'z'}
	random := fmt.Sprintf("%.f", math.Round(1e7*rand.Float64()))
	now := time.Now().In(time.FixedZone("CST", 8*3600))
	timestamp := fmt.Sprint(now.Unix())
	nowStr := []byte(now.Format("200601021504"))
	for i := 0; i < len(nowStr); i++ {
		nowStr[i] = table[nowStr[i]-48]
	}
	timeSign := fmt.Sprint(crc32.ChecksumIEEE(nowStr))
	data := strings.Join([]string{timestamp, random, path, os, version, timeSign}, "|")
	dataSign := fmt.Sprint(crc32.ChecksumIEEE([]byte(data)))
	return timeSign, strings.Join([]string{timestamp, random, dataSign}, "-")
}

func GetApi(rawUrl string) string {
	u, _ := url.Parse(rawUrl)
	query := u.Query()
	query.Add(signPath(u.Path, "web", "3"))
	u.RawQuery = query.Encode()
	return u.String()
}
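For orientation, a hedged sketch of what GetApi produces (all numbers invented): the query key is the CRC32 of the obfuscated timestamp string, and the value joins the Unix timestamp, the random number and the CRC32 of the "|"-joined payload.

	// illustrative only; not part of this commit
	signed := GetApi("https://www.123pan.com/b/api/file/list/new")
	// e.g. https://www.123pan.com/b/api/file/list/new?1756670774=1706169600-5489247-3241973725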
|
||||
|
||||
//func GetApi(url string) string {
|
||||
// vm := js.New()
|
||||
// vm.Set("url", url[22:])
|
||||
// r, err := vm.RunString(`
|
||||
// (function(e){
|
||||
// function A(t, e) {
|
||||
// e = 1 < arguments.length && void 0 !== e ? e : 10;
|
||||
// for (var n = function() {
|
||||
// for (var t = [], e = 0; e < 256; e++) {
|
||||
// for (var n = e, r = 0; r < 8; r++)
|
||||
// n = 1 & n ? 3988292384 ^ n >>> 1 : n >>> 1;
|
||||
// t[e] = n
|
||||
// }
|
||||
// return t
|
||||
// }(), r = function(t) {
|
||||
// t = t.replace(/\\r\\n/g, "\\n");
|
||||
// for (var e = "", n = 0; n < t.length; n++) {
|
||||
// var r = t.charCodeAt(n);
|
||||
// r < 128 ? e += String.fromCharCode(r) : e = 127 < r && r < 2048 ? (e += String.fromCharCode(r >> 6 | 192)) + String.fromCharCode(63 & r | 128) : (e = (e += String.fromCharCode(r >> 12 | 224)) + String.fromCharCode(r >> 6 & 63 | 128)) + String.fromCharCode(63 & r | 128)
|
||||
// }
|
||||
// return e
|
||||
// }(t), a = -1, i = 0; i < r.length; i++)
|
||||
// a = a >>> 8 ^ n[255 & (a ^ r.charCodeAt(i))];
|
||||
// return (a = (-1 ^ a) >>> 0).toString(e)
|
||||
// }
|
||||
//
|
||||
// function v(t) {
|
||||
// return (v = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function(t) {
|
||||
// return typeof t
|
||||
// }
|
||||
// : function(t) {
|
||||
// return t && "function" == typeof Symbol && t.constructor === Symbol && t !== Symbol.prototype ? "symbol" : typeof t
|
||||
// }
|
||||
// )(t)
|
||||
// }
|
||||
//
|
||||
// for (p in a = Math.round(1e7 * Math.random()),
|
||||
// o = Math.round(((new Date).getTime() + 60 * (new Date).getTimezoneOffset() * 1e3 + 288e5) / 1e3).toString(),
|
||||
// m = ["a", "d", "e", "f", "g", "h", "l", "m", "y", "i", "j", "n", "o", "p", "k", "q", "r", "s", "t", "u", "b", "c", "v", "w", "s", "z"],
|
||||
// u = function(t, e, n) {
|
||||
// var r;
|
||||
// n = 2 < arguments.length && void 0 !== n ? n : 8;
|
||||
// return 0 === arguments.length ? null : (r = "object" === v(t) ? t : (10 === "".concat(t).length && (t = 1e3 * Number.parseInt(t)),
|
||||
// new Date(t)),
|
||||
// t += 6e4 * new Date(t).getTimezoneOffset(),
|
||||
// {
|
||||
// y: (r = new Date(t + 36e5 * n)).getFullYear(),
|
||||
// m: r.getMonth() + 1 < 10 ? "0".concat(r.getMonth() + 1) : r.getMonth() + 1,
|
||||
// d: r.getDate() < 10 ? "0".concat(r.getDate()) : r.getDate(),
|
||||
// h: r.getHours() < 10 ? "0".concat(r.getHours()) : r.getHours(),
|
||||
// f: r.getMinutes() < 10 ? "0".concat(r.getMinutes()) : r.getMinutes()
|
||||
// })
|
||||
// }(o),
|
||||
// h = u.y,
|
||||
// g = u.m,
|
||||
// l = u.d,
|
||||
// c = u.h,
|
||||
// u = u.f,
|
||||
// d = [h, g, l, c, u].join(""),
|
||||
// f = [],
|
||||
// d)
|
||||
// f.push(m[Number(d[p])]);
|
||||
// return h = A(f.join("")),
|
||||
// g = A("".concat(o, "|").concat(a, "|").concat(e, "|").concat("web", "|").concat("3", "|").concat(h)),
|
||||
// "".concat(h, "=").concat(o, "-").concat(a, "-").concat(g);
|
||||
// })(url)
|
||||
// `)
|
||||
// if err != nil {
|
||||
// fmt.Println(err)
|
||||
// return url
|
||||
// }
|
||||
// v, _ := r.Export().(string)
|
||||
// return url + "?" + v
|
||||
//}
|
||||
|
||||
func (d *Pan123) login() error {
|
||||
var body base.Json
|
||||
if utils.IsEmailFormat(d.Username) {
|
||||
@ -56,9 +160,9 @@ func (d *Pan123) login() error {
|
||||
SetHeaders(map[string]string{
|
||||
"origin": "https://www.123pan.com",
|
||||
"referer": "https://www.123pan.com/",
|
||||
"user-agent": "Dart/2.19(dart:io)",
|
||||
"platform": "android",
|
||||
"app-version": "36",
|
||||
"user-agent": "Dart/2.19(dart:io)-alist",
|
||||
"platform": "web",
|
||||
"app-version": "3",
|
||||
//"user-agent": base.UserAgent,
|
||||
}).
|
||||
SetBody(body).Post(SignIn)
|
||||
@ -93,9 +197,9 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
||||
"origin": "https://www.123pan.com",
|
||||
"referer": "https://www.123pan.com/",
|
||||
"authorization": "Bearer " + d.AccessToken,
|
||||
"user-agent": "Dart/2.19(dart:io)",
|
||||
"platform": "android",
|
||||
"app-version": "36",
|
||||
"user-agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) alist-client",
|
||||
"platform": "web",
|
||||
"app-version": "3",
|
||||
//"user-agent": base.UserAgent,
|
||||
})
|
||||
if callback != nil {
|
||||
@ -109,7 +213,7 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
||||
// return nil, err
|
||||
//}
|
||||
//req.SetQueryParam("auth-key", *authKey)
|
||||
res, err := req.Execute(method, url)
|
||||
res, err := req.Execute(method, GetApi(url))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -131,17 +235,27 @@ func (d *Pan123) request(url string, method string, callback base.ReqCallback, r
|
||||
func (d *Pan123) getFiles(parentId string) ([]File, error) {
|
||||
page := 1
|
||||
res := make([]File, 0)
|
||||
// 2024-02-06 fix concurrency by 123pan
|
||||
for {
|
||||
if !d.APIRateLimit(FileList) {
|
||||
time.Sleep(time.Millisecond * 200)
|
||||
continue
|
||||
}
|
||||
var resp Files
|
||||
query := map[string]string{
|
||||
"driveId": "0",
|
||||
"limit": "100",
|
||||
"next": "0",
|
||||
"orderBy": d.OrderBy,
|
||||
"orderDirection": d.OrderDirection,
|
||||
"parentFileId": parentId,
|
||||
"trashed": "false",
|
||||
"Page": strconv.Itoa(page),
|
||||
"driveId": "0",
|
||||
"limit": "100",
|
||||
"next": "0",
|
||||
"orderBy": d.OrderBy,
|
||||
"orderDirection": d.OrderDirection,
|
||||
"parentFileId": parentId,
|
||||
"trashed": "false",
|
||||
"SearchData": "",
|
||||
"Page": strconv.Itoa(page),
|
||||
"OnlyLookAbnormalFile": "0",
|
||||
"event": "homeListFile",
|
||||
"operateType": "4",
|
||||
"inDirectSpace": "false",
|
||||
}
|
||||
_, err := d.request(FileList, http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
|
77 drivers/123_link/driver.go Normal file
@ -0,0 +1,77 @@
|
||||
package _123Link
|
||||
|
||||
import (
|
||||
"context"
|
||||
stdpath "path"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
)
|
||||
|
||||
type Pan123Link struct {
|
||||
model.Storage
|
||||
Addition
|
||||
root *Node
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
|
||||
func (d *Pan123Link) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Init(ctx context.Context) error {
|
||||
node, err := BuildTree(d.OriginURLs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
node.calSize()
|
||||
d.root = node
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Get(ctx context.Context, path string) (model.Obj, error) {
|
||||
node := GetNodeFromRootByPath(d.root, path)
|
||||
return nodeToObj(node, path)
|
||||
}
|
||||
|
||||
func (d *Pan123Link) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
node := GetNodeFromRootByPath(d.root, dir.GetPath())
|
||||
if node == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
if node.isFile() {
|
||||
return nil, errs.NotFolder
|
||||
}
|
||||
return utils.SliceConvert(node.Children, func(node *Node) (model.Obj, error) {
|
||||
return nodeToObj(node, stdpath.Join(dir.GetPath(), node.Name))
|
||||
})
|
||||
}
|
||||
|
||||
func (d *Pan123Link) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
node := GetNodeFromRootByPath(d.root, file.GetPath())
|
||||
if node == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
if node.isFile() {
|
||||
signUrl, err := SignURL(node.Url, d.PrivateKey, d.UID, time.Duration(d.ValidDuration)*time.Minute)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Link{
|
||||
URL: signUrl,
|
||||
}, nil
|
||||
}
|
||||
return nil, errs.NotFile
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Pan123Link)(nil)
|
23 drivers/123_link/meta.go Normal file
@ -0,0 +1,23 @@
package _123Link

import (
	"github.com/alist-org/alist/v3/internal/driver"
	"github.com/alist-org/alist/v3/internal/op"
)

type Addition struct {
	OriginURLs    string `json:"origin_urls" type:"text" required:"true" default:"https://vip.123pan.com/29/folder/file.mp3" help:"structure:FolderName:\n [FileSize:][Modified:]Url"`
	PrivateKey    string `json:"private_key"`
	UID           uint64 `json:"uid" type:"number"`
	ValidDuration int64  `json:"valid_duration" type:"number" default:"30" help:"minutes"`
}

var config = driver.Config{
	Name: "123PanLink",
}

func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Pan123Link{}
	})
}
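For reference, a hypothetical origin_urls value in the shape the help text and the parser below expect: a folder line ends with ":", and each indented file line is [FileSize:][Modified:]Url.

	// illustrative only; not part of this commit
	// music:
	//   107374182:1706169600:https://vip.123pan.com/29/folder/file.mp3
	//   https://vip.123pan.com/29/folder/file2.mp3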
|
152 drivers/123_link/parse.go Normal file
@ -0,0 +1,152 @@
|
||||
package _123Link
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
url2 "net/url"
|
||||
stdpath "path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// build tree from text, text structure definition:
|
||||
/**
|
||||
* FolderName:
|
||||
* [FileSize:][Modified:]Url
|
||||
*/
|
||||
/**
|
||||
* For example:
|
||||
* folder1:
|
||||
* name1:url1
|
||||
* url2
|
||||
* folder2:
|
||||
* url3
|
||||
* url4
|
||||
* url5
|
||||
* folder3:
|
||||
* url6
|
||||
* url7
|
||||
* url8
|
||||
*/
|
||||
// if there is no name, use the last segment of the url as the name
|
||||
func BuildTree(text string) (*Node, error) {
|
||||
lines := strings.Split(text, "\n")
|
||||
var root = &Node{Level: -1, Name: "root"}
|
||||
stack := []*Node{root}
|
||||
for _, line := range lines {
|
||||
// calculate indent
|
||||
indent := 0
|
||||
for i := 0; i < len(line); i++ {
|
||||
if line[i] != ' ' {
|
||||
break
|
||||
}
|
||||
indent++
|
||||
}
|
||||
// if indent is not a multiple of 2, it is an error
|
||||
if indent%2 != 0 {
|
||||
return nil, fmt.Errorf("the line '%s' is not a multiple of 2", line)
|
||||
}
|
||||
// calculate level
|
||||
level := indent / 2
|
||||
line = strings.TrimSpace(line[indent:])
|
||||
// if the line is empty, skip
|
||||
if line == "" {
|
||||
continue
|
||||
}
|
||||
// if level isn't greater than the level of the top of the stack
|
||||
// it is not the child of the top of the stack
|
||||
for level <= stack[len(stack)-1].Level {
|
||||
// pop the top of the stack
|
||||
stack = stack[:len(stack)-1]
|
||||
}
|
||||
// if the line is a folder
|
||||
if isFolder(line) {
|
||||
// create a new node
|
||||
node := &Node{
|
||||
Level: level,
|
||||
Name: strings.TrimSuffix(line, ":"),
|
||||
}
|
||||
// add the node to the top of the stack
|
||||
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
|
||||
// push the node to the stack
|
||||
stack = append(stack, node)
|
||||
} else {
|
||||
// if the line is a file
|
||||
// create a new node
|
||||
node, err := parseFileLine(line)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
node.Level = level
|
||||
// add the node to the top of the stack
|
||||
stack[len(stack)-1].Children = append(stack[len(stack)-1].Children, node)
|
||||
}
|
||||
}
|
||||
return root, nil
|
||||
}
|
||||
|
||||
func isFolder(line string) bool {
|
||||
return strings.HasSuffix(line, ":")
|
||||
}
|
||||
|
||||
// line definition:
|
||||
// [FileSize:][Modified:]Url
|
||||
func parseFileLine(line string) (*Node, error) {
|
||||
// if there is no url, it is an error
|
||||
if !strings.Contains(line, "http://") && !strings.Contains(line, "https://") {
|
||||
return nil, fmt.Errorf("invalid line: %s, because url is required for file", line)
|
||||
}
|
||||
index := strings.Index(line, "http://")
|
||||
if index == -1 {
|
||||
index = strings.Index(line, "https://")
|
||||
}
|
||||
url := line[index:]
|
||||
info := line[:index]
|
||||
node := &Node{
|
||||
Url: url,
|
||||
}
|
||||
name := stdpath.Base(url)
|
||||
unescape, err := url2.PathUnescape(name)
|
||||
if err == nil {
|
||||
name = unescape
|
||||
}
|
||||
node.Name = name
|
||||
if index > 0 {
|
||||
if !strings.HasSuffix(info, ":") {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file info must end with ':'", line)
|
||||
}
|
||||
info = info[:len(info)-1]
|
||||
if info == "" {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file name can't be empty", line)
|
||||
}
|
||||
infoParts := strings.Split(info, ":")
|
||||
size, err := strconv.ParseInt(infoParts[0], 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file size must be an integer", line)
|
||||
}
|
||||
node.Size = size
|
||||
if len(infoParts) > 1 {
|
||||
modified, err := strconv.ParseInt(infoParts[1], 10, 64)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("invalid line: %s, because file modified must be an unix timestamp", line)
|
||||
}
|
||||
node.Modified = modified
|
||||
} else {
|
||||
node.Modified = time.Now().Unix()
|
||||
}
|
||||
}
|
||||
return node, nil
|
||||
}
|
||||
|
||||
func splitPath(path string) []string {
	if path == "/" {
		return []string{"root"}
	}
	parts := strings.Split(path, "/")
	parts[0] = "root"
	return parts
}

func GetNodeFromRootByPath(root *Node, path string) *Node {
	return root.getByPath(splitPath(path))
}
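A quick worked example of the path mapping (paths invented): an absolute path always starts with "/", so the empty first segment is replaced with "root" to line up with the tree whose root node is named "root".

	// illustrative only; not part of this commit
	splitPath("/")                  // ["root"]
	splitPath("/folder1/name1.mp3") // ["root", "folder1", "name1.mp3"]
	node := GetNodeFromRootByPath(d.root, "/folder1/name1.mp3") // nil when not found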
|
66 drivers/123_link/types.go Normal file
@ -0,0 +1,66 @@
|
||||
package _123Link
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
// Node is a node in the folder tree
|
||||
type Node struct {
|
||||
Url string
|
||||
Name string
|
||||
Level int
|
||||
Modified int64
|
||||
Size int64
|
||||
Children []*Node
|
||||
}
|
||||
|
||||
func (node *Node) getByPath(paths []string) *Node {
|
||||
if len(paths) == 0 || node == nil {
|
||||
return nil
|
||||
}
|
||||
if node.Name != paths[0] {
|
||||
return nil
|
||||
}
|
||||
if len(paths) == 1 {
|
||||
return node
|
||||
}
|
||||
for _, child := range node.Children {
|
||||
tmp := child.getByPath(paths[1:])
|
||||
if tmp != nil {
|
||||
return tmp
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (node *Node) isFile() bool {
|
||||
return node.Url != ""
|
||||
}
|
||||
|
||||
func (node *Node) calSize() int64 {
|
||||
if node.isFile() {
|
||||
return node.Size
|
||||
}
|
||||
var size int64 = 0
|
||||
for _, child := range node.Children {
|
||||
size += child.calSize()
|
||||
}
|
||||
node.Size = size
|
||||
return size
|
||||
}
|
||||
|
||||
func nodeToObj(node *Node, path string) (model.Obj, error) {
|
||||
if node == nil {
|
||||
return nil, errs.ObjectNotFound
|
||||
}
|
||||
return &model.Object{
|
||||
Name: node.Name,
|
||||
Size: node.Size,
|
||||
Modified: time.Unix(node.Modified, 0),
|
||||
IsFolder: !node.isFile(),
|
||||
Path: path,
|
||||
}, nil
|
||||
}
|
30 drivers/123_link/util.go Normal file
@ -0,0 +1,30 @@
package _123Link

import (
	"crypto/md5"
	"fmt"
	"math/rand"
	"net/url"
	"time"
)

func SignURL(originURL, privateKey string, uid uint64, validDuration time.Duration) (newURL string, err error) {
	if privateKey == "" {
		return originURL, nil
	}
	var (
		ts     = time.Now().Add(validDuration).Unix() // expiration timestamp
		rInt   = rand.Int()                           // random positive integer
		objURL *url.URL
	)
	objURL, err = url.Parse(originURL)
	if err != nil {
		return "", err
	}
	authKey := fmt.Sprintf("%d-%d-%d-%x", ts, rInt, uid, md5.Sum([]byte(fmt.Sprintf("%s-%d-%d-%d-%s",
		objURL.Path, ts, rInt, uid, privateKey))))
	v := objURL.Query()
	v.Add("auth_key", authKey)
	objURL.RawQuery = v.Encode()
	return objURL.String(), nil
}
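A hedged example of the signed link format (values invented): with a PrivateKey configured, SignURL appends an auth_key built from the expiry timestamp, a random integer, the UID and the MD5 of "path-ts-rand-uid-privateKey".

	// illustrative only; not part of this commit
	signed, _ := SignURL("https://vip.123pan.com/29/folder/file.mp3", "secret", 29, 30*time.Minute)
	// e.g. .../file.mp3?auth_key=1706171400-1234567890-29-6f1ed002ab5595859014ebf0951522d9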
|
@ -35,25 +35,40 @@ func (d *Yun139) Init(ctx context.Context) error {
|
||||
if d.Authorization == "" {
|
||||
return fmt.Errorf("authorization is empty")
|
||||
}
|
||||
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
decodeStr := string(decode)
|
||||
splits := strings.Split(decodeStr, ":")
|
||||
if len(splits) < 2 {
|
||||
return fmt.Errorf("authorization is invalid, splits < 2")
|
||||
}
|
||||
d.Account = splits[1]
|
||||
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
||||
"qryUserExternInfoReq": base.Json{
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = "/"
|
||||
}
|
||||
return nil
|
||||
case MetaPersonal:
|
||||
if len(d.Addition.RootFolderID) == 0 {
|
||||
d.RootFolderID = "root"
|
||||
}
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
decode, err := base64.StdEncoding.DecodeString(d.Authorization)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
decodeStr := string(decode)
|
||||
splits := strings.Split(decodeStr, ":")
|
||||
if len(splits) < 2 {
|
||||
return fmt.Errorf("authorization is invalid, splits < 2")
|
||||
}
|
||||
d.Account = splits[1]
|
||||
_, err = d.post("/orchestration/personalCloud/user/v1.0/qryUserExternInfo", base.Json{
|
||||
"qryUserExternInfoReq": base.Json{
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
}, nil)
|
||||
return err
|
||||
}, nil)
|
||||
return err
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) Drop(ctx context.Context) error {
|
||||
@ -61,35 +76,65 @@ func (d *Yun139) Drop(ctx context.Context) error {
|
||||
}
|
||||
|
||||
func (d *Yun139) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
if d.isFamily() {
|
||||
return d.familyGetFiles(dir.GetID())
|
||||
} else {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
return d.personalGetFiles(dir.GetID())
|
||||
case MetaPersonal:
|
||||
return d.getFiles(dir.GetID())
|
||||
case MetaFamily:
|
||||
return d.familyGetFiles(dir.GetID())
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
u, err := d.getLink(file.GetID())
|
||||
var url string
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
url, err = d.personalGetLink(file.GetID())
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
url, err = d.getLink(file.GetID())
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Link{URL: u}, nil
|
||||
return &model.Link{URL: url}, nil
|
||||
}
|
||||
|
||||
func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
data := base.Json{
|
||||
"createCatalogExtReq": base.Json{
|
||||
"parentCatalogID": parentDir.GetID(),
|
||||
"newCatalogName": dirName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"parentFileId": parentDir.GetID(),
|
||||
"name": dirName,
|
||||
"description": "",
|
||||
"type": "folder",
|
||||
"fileRenameMode": "force_rename",
|
||||
}
|
||||
pathname := "/hcy/file/create"
|
||||
_, err = d.personalPost(pathname, data, nil)
|
||||
case MetaPersonal:
|
||||
data := base.Json{
|
||||
"createCatalogExtReq": base.Json{
|
||||
"parentCatalogID": parentDir.GetID(),
|
||||
"newCatalogName": dirName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
|
||||
if d.isFamily() {
|
||||
data = base.Json{
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/catalog/v1.0/createCatalogExt"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
case MetaFamily:
|
||||
data := base.Json{
|
||||
"cloudID": d.CloudID,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
@ -97,144 +142,198 @@ func (d *Yun139) MakeDir(ctx context.Context, parentDir model.Obj, dirName strin
|
||||
},
|
||||
"docLibName": dirName,
|
||||
}
|
||||
pathname = "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
|
||||
pathname := "/orchestration/familyCloud/cloudCatalog/v1.0/createCloudDoc"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
if d.isFamily() {
|
||||
return errs.NotImplement
|
||||
}
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 3,
|
||||
"actionType": "304",
|
||||
"taskInfo": base.Json{
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
func (d *Yun139) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileIds": []string{srcObj.GetID()},
|
||||
"toParentFileId": dstDir.GetID(),
|
||||
}
|
||||
pathname := "/hcy/file/batchMove"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
case MetaPersonal:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 3,
|
||||
"actionType": "304",
|
||||
"taskInfo": base.Json{
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
if d.isFamily() {
|
||||
return errs.NotImplement
|
||||
}
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
data = base.Json{
|
||||
"catalogID": srcObj.GetID(),
|
||||
"catalogName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileId": srcObj.GetID(),
|
||||
"name": newName,
|
||||
"description": "",
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
|
||||
} else {
|
||||
data = base.Json{
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
pathname := "/hcy/file/update"
|
||||
_, err = d.personalPost(pathname, data, nil)
|
||||
case MetaPersonal:
|
||||
var data base.Json
|
||||
var pathname string
|
||||
if srcObj.IsDir() {
|
||||
data = base.Json{
|
||||
"catalogID": srcObj.GetID(),
|
||||
"catalogName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/catalog/v1.0/updateCatalogInfo"
|
||||
} else {
|
||||
data = base.Json{
|
||||
"contentID": srcObj.GetID(),
|
||||
"contentName": newName,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
|
||||
}
|
||||
pathname = "/orchestration/personalCloud/content/v1.0/updateContentInfo"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
if d.isFamily() {
|
||||
return errs.NotImplement
|
||||
}
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 3,
|
||||
"actionType": 309,
|
||||
"taskInfo": base.Json{
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
var err error
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileIds": []string{srcObj.GetID()},
|
||||
"toParentFileId": dstDir.GetID(),
|
||||
}
|
||||
pathname := "/hcy/file/batchCopy"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
return err
|
||||
case MetaPersonal:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if srcObj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, srcObj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, srcObj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 3,
|
||||
"actionType": 309,
|
||||
"taskInfo": base.Json{
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
"newCatalogID": dstDir.GetID(),
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
_, err = d.post(pathname, data, nil)
|
||||
default:
|
||||
err = errs.NotImplement
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Yun139) Remove(ctx context.Context, obj model.Obj) error {
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if obj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, obj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, obj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 2,
|
||||
"actionType": 201,
|
||||
"taskInfo": base.Json{
|
||||
"newCatalogID": "",
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
if d.isFamily() {
|
||||
data = base.Json{
|
||||
"catalogList": catalogInfoList,
|
||||
"contentList": contentInfoList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
"sourceCatalogType": 1002,
|
||||
"taskType": 2,
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
data := base.Json{
|
||||
"fileIds": []string{obj.GetID()},
|
||||
}
|
||||
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
pathname := "/hcy/recyclebin/batchTrash"
|
||||
_, err := d.personalPost(pathname, data, nil)
|
||||
return err
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
var contentInfoList []string
|
||||
var catalogInfoList []string
|
||||
if obj.IsDir() {
|
||||
catalogInfoList = append(catalogInfoList, obj.GetID())
|
||||
} else {
|
||||
contentInfoList = append(contentInfoList, obj.GetID())
|
||||
}
|
||||
data := base.Json{
|
||||
"createBatchOprTaskReq": base.Json{
|
||||
"taskType": 2,
|
||||
"actionType": 201,
|
||||
"taskInfo": base.Json{
|
||||
"newCatalogID": "",
|
||||
"contentInfoList": contentInfoList,
|
||||
"catalogInfoList": catalogInfoList,
|
||||
},
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
if d.isFamily() {
|
||||
data = base.Json{
|
||||
"catalogList": catalogInfoList,
|
||||
"contentList": contentInfoList,
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
"sourceCatalogType": 1002,
|
||||
"taskType": 2,
|
||||
}
|
||||
pathname = "/orchestration/familyCloud/batchOprTask/v1.0/createBatchOprTask"
|
||||
}
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
_, err := d.post(pathname, data, nil)
|
||||
return err
|
||||
}
|
||||
|
||||
const (
|
||||
@ -254,94 +353,208 @@ func getPartSize(size int64) int64 {
|
||||
}
|
||||
|
||||
func (d *Yun139) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
data := base.Json{
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"fileCount": 1,
|
||||
"totalSize": 0, // 去除上传大小限制
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": 0, // 去除上传大小限制
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
"parentCatalogID": dstDir.GetID(),
|
||||
"newCatalogName": "",
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
|
||||
if d.isFamily() {
|
||||
data = d.newJson(base.Json{
|
||||
"fileCount": 1,
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"path": "",
|
||||
"seqNo": "",
|
||||
"totalSize": 0,
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": 0,
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
var err error
|
||||
fullHash := stream.GetHash().GetHash(utils.SHA256)
|
||||
if len(fullHash) <= 0 {
|
||||
tmpF, err := stream.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fullHash, err = utils.HashFile(utils.SHA256, tmpF)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
// return errs.NotImplement
|
||||
data := base.Json{
|
||||
"contentHash": fullHash,
|
||||
"contentHashAlgorithm": "SHA256",
|
||||
"contentType": "application/octet-stream",
|
||||
"parallelUpload": false,
|
||||
"partInfos": []base.Json{{
|
||||
"parallelHashCtx": base.Json{
|
||||
"partOffset": 0,
|
||||
},
|
||||
"partNumber": 1,
|
||||
"partSize": stream.GetSize(),
|
||||
}},
|
||||
})
|
||||
pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
|
||||
return errs.NotImplement
|
||||
}
|
||||
var resp UploadResp
|
||||
_, err := d.post(pathname, data, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
|
||||
var partSize = getPartSize(stream.GetSize())
|
||||
part := (stream.GetSize() + partSize - 1) / partSize
|
||||
if part == 0 {
|
||||
part = 1
|
||||
}
|
||||
for i := int64(0); i < part; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
"size": stream.GetSize(),
|
||||
"parentFileId": dstDir.GetID(),
|
||||
"name": stream.GetName(),
|
||||
"type": "file",
|
||||
"fileRenameMode": "auto_rename",
|
||||
}
|
||||
|
||||
start := i * partSize
|
||||
byteSize := stream.GetSize() - start
|
||||
if byteSize > partSize {
|
||||
byteSize = partSize
|
||||
}
|
||||
|
||||
limitReader := io.LimitReader(stream, byteSize)
|
||||
// Update Progress
|
||||
r := io.TeeReader(limitReader, p)
|
||||
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
|
||||
pathname := "/hcy/file/create"
|
||||
var resp PersonalUploadResp
|
||||
_, err = d.personalPost(pathname, data, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if resp.Data.Exist || resp.Data.RapidUpload {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
|
||||
// Update Progress
|
||||
r := io.TeeReader(stream, p)
|
||||
|
||||
req, err := http.NewRequest("PUT", resp.Data.PartInfos[0].UploadUrl, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
|
||||
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
|
||||
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
|
||||
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
|
||||
req.Header.Set("rangeType", "0")
|
||||
req.ContentLength = byteSize
|
||||
req.Header.Set("Content-Type", "application/octet-stream")
|
||||
req.Header.Set("Content-Length", fmt.Sprint(stream.GetSize()))
|
||||
req.Header.Set("Origin", "https://yun.139.com")
|
||||
req.Header.Set("Referer", "https://yun.139.com/")
|
||||
req.ContentLength = stream.GetSize()
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_ = res.Body.Close()
|
||||
log.Debugf("%+v", res)
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
data = base.Json{
|
||||
"contentHash": fullHash,
|
||||
"contentHashAlgorithm": "SHA256",
|
||||
"fileId": resp.Data.FileId,
|
||||
"uploadId": resp.Data.UploadId,
|
||||
}
|
||||
_, err = d.personalPost("/hcy/file/complete", data, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
case MetaPersonal:
|
||||
fallthrough
|
||||
case MetaFamily:
|
||||
data := base.Json{
|
||||
"manualRename": 2,
|
||||
"operation": 0,
|
||||
"fileCount": 1,
|
||||
"totalSize": 0, // 去除上传大小限制
|
||||
"uploadContentList": []base.Json{{
|
||||
"contentName": stream.GetName(),
|
||||
"contentSize": 0, // 去除上传大小限制
|
||||
// "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
}},
|
||||
"parentCatalogID": dstDir.GetID(),
|
||||
"newCatalogName": "",
|
||||
"commonAccountInfo": base.Json{
|
||||
"account": d.Account,
|
||||
"accountType": 1,
|
||||
},
|
||||
}
|
||||
pathname := "/orchestration/personalCloud/uploadAndDownload/v1.0/pcUploadFileRequest"
|
||||
if d.isFamily() {
|
||||
// data = d.newJson(base.Json{
|
||||
// "fileCount": 1,
|
||||
// "manualRename": 2,
|
||||
// "operation": 0,
|
||||
// "path": "",
|
||||
// "seqNo": "",
|
||||
// "totalSize": 0,
|
||||
// "uploadContentList": []base.Json{{
|
||||
// "contentName": stream.GetName(),
|
||||
// "contentSize": 0,
|
||||
// // "digest": "5a3231986ce7a6b46e408612d385bafa"
|
||||
// }},
|
||||
// })
|
||||
// pathname = "/orchestration/familyCloud/content/v1.0/getFileUploadURL"
|
||||
return errs.NotImplement
|
||||
}
|
||||
var resp UploadResp
|
||||
_, err := d.post(pathname, data, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Progress
|
||||
p := driver.NewProgress(stream.GetSize(), up)
|
||||
|
||||
var partSize = getPartSize(stream.GetSize())
|
||||
part := (stream.GetSize() + partSize - 1) / partSize
|
||||
if part == 0 {
|
||||
part = 1
|
||||
}
|
||||
for i := int64(0); i < part; i++ {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return ctx.Err()
|
||||
}
|
||||
|
||||
start := i * partSize
|
||||
byteSize := stream.GetSize() - start
|
||||
if byteSize > partSize {
|
||||
byteSize = partSize
|
||||
}
|
||||
|
||||
limitReader := io.LimitReader(stream, byteSize)
|
||||
// Update Progress
|
||||
r := io.TeeReader(limitReader, p)
|
||||
req, err := http.NewRequest("POST", resp.Data.UploadResult.RedirectionURL, r)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
req = req.WithContext(ctx)
|
||||
req.Header.Set("Content-Type", "text/plain;name="+unicode(stream.GetName()))
|
||||
req.Header.Set("contentSize", strconv.FormatInt(stream.GetSize(), 10))
|
||||
req.Header.Set("range", fmt.Sprintf("bytes=%d-%d", start, start+byteSize-1))
|
||||
req.Header.Set("uploadtaskID", resp.Data.UploadResult.UploadTaskID)
|
||||
req.Header.Set("rangeType", "0")
|
||||
req.ContentLength = byteSize
|
||||
|
||||
res, err := base.HttpClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_ = res.Body.Close()
|
||||
log.Debugf("%+v", res)
|
||||
if res.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("unexpected status code: %d", res.StatusCode)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
default:
|
||||
return errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
func (d *Yun139) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
switch d.Addition.Type {
|
||||
case MetaPersonalNew:
|
||||
var resp base.Json
|
||||
var uri string
|
||||
data := base.Json{
|
||||
"category": "video",
|
||||
"fileId": args.Obj.GetID(),
|
||||
}
|
||||
switch args.Method {
|
||||
case "video_preview":
|
||||
uri = "/hcy/videoPreview/getPreviewInfo"
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
}
|
||||
_, err := d.personalPost(uri, data, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return resp["data"], nil
|
||||
default:
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*Yun139)(nil)
|
||||
|
@ -9,7 +9,7 @@ type Addition struct {
|
||||
//Account string `json:"account" required:"true"`
|
||||
Authorization string `json:"authorization" type:"text" required:"true"`
|
||||
driver.RootID
|
||||
Type string `json:"type" type:"select" options:"personal,family" default:"personal"`
|
||||
Type string `json:"type" type:"select" options:"personal,family,personal_new" default:"personal"`
|
||||
CloudID string `json:"cloud_id"`
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,11 @@
|
||||
package _139
|
||||
|
||||
const (
|
||||
MetaPersonal string = "personal"
|
||||
MetaFamily string = "family"
|
||||
MetaPersonalNew string = "personal_new"
|
||||
)
|
||||
|
||||
type BaseResp struct {
|
||||
Success bool `json:"success"`
|
||||
Code string `json:"code"`
|
||||
@ -10,7 +16,7 @@ type Catalog struct {
|
||||
CatalogID string `json:"catalogID"`
|
||||
CatalogName string `json:"catalogName"`
|
||||
//CatalogType int `json:"catalogType"`
|
||||
//CreateTime string `json:"createTime"`
|
||||
CreateTime string `json:"createTime"`
|
||||
UpdateTime string `json:"updateTime"`
|
||||
//IsShared bool `json:"isShared"`
|
||||
//CatalogLevel int `json:"catalogLevel"`
|
||||
@ -63,7 +69,7 @@ type Content struct {
|
||||
//ParentCatalogID string `json:"parentCatalogId"`
|
||||
//Channel string `json:"channel"`
|
||||
//GeoLocFlag string `json:"geoLocFlag"`
|
||||
//Digest string `json:"digest"`
|
||||
Digest string `json:"digest"`
|
||||
//Version string `json:"version"`
|
||||
//FileEtag string `json:"fileEtag"`
|
||||
//FileVersion string `json:"fileVersion"`
|
||||
@ -141,7 +147,7 @@ type CloudContent struct {
|
||||
//ContentSuffix string `json:"contentSuffix"`
|
||||
ContentSize int64 `json:"contentSize"`
|
||||
//ContentDesc string `json:"contentDesc"`
|
||||
//CreateTime string `json:"createTime"`
|
||||
CreateTime string `json:"createTime"`
|
||||
//Shottime interface{} `json:"shottime"`
|
||||
LastUpdateTime string `json:"lastUpdateTime"`
|
||||
ThumbnailURL string `json:"thumbnailURL"`
|
||||
@ -165,7 +171,7 @@ type CloudCatalog struct {
|
||||
CatalogID string `json:"catalogID"`
|
||||
CatalogName string `json:"catalogName"`
|
||||
//CloudID string `json:"cloudID"`
|
||||
//CreateTime string `json:"createTime"`
|
||||
CreateTime string `json:"createTime"`
|
||||
LastUpdateTime string `json:"lastUpdateTime"`
|
||||
//Creator string `json:"creator"`
|
||||
//CreatorNickname string `json:"creatorNickname"`
|
||||
@ -185,3 +191,42 @@ type QueryContentListResp struct {
|
||||
RecallContent interface{} `json:"recallContent"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type PersonalThumbnail struct {
|
||||
Style string `json:"style"`
|
||||
Url string `json:"url"`
|
||||
}
|
||||
|
||||
type PersonalFileItem struct {
|
||||
FileId string `json:"fileId"`
|
||||
Name string `json:"name"`
|
||||
Size int64 `json:"size"`
|
||||
Type string `json:"type"`
|
||||
CreatedAt string `json:"createdAt"`
|
||||
UpdatedAt string `json:"updatedAt"`
|
||||
Thumbnails []PersonalThumbnail `json:"thumbnailUrls"`
|
||||
}
|
||||
|
||||
type PersonalListResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
Items []PersonalFileItem `json:"items"`
|
||||
NextPageCursor string `json:"nextPageCursor"`
|
||||
}
|
||||
}
|
||||
|
||||
type PersonalPartInfo struct {
|
||||
PartNumber int `json:"partNumber"`
|
||||
UploadUrl string `json:"uploadUrl"`
|
||||
}
|
||||
|
||||
type PersonalUploadResp struct {
|
||||
BaseResp
|
||||
Data struct {
|
||||
FileId string `json:"fileId"`
|
||||
PartInfos []PersonalPartInfo `json:"partInfos"`
|
||||
Exist bool `json:"exist"`
|
||||
RapidUpload bool `json:"rapidUpload"`
|
||||
UploadId string `json:"uploadId"`
|
||||
}
|
||||
}
|
||||
|
@ -48,7 +48,7 @@ func calSign(body, ts, randStr string) string {
|
||||
}
|
||||
|
||||
func getTime(t string) time.Time {
|
||||
stamp, _ := time.ParseInLocation("20060102150405", t, time.Local)
|
||||
stamp, _ := time.ParseInLocation("20060102150405", t, utils.CNLoc)
|
||||
return stamp
|
||||
}
|
||||
|
||||
@ -139,6 +139,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
|
||||
Name: catalog.CatalogName,
|
||||
Size: 0,
|
||||
Modified: getTime(catalog.UpdateTime),
|
||||
Ctime: getTime(catalog.CreateTime),
|
||||
IsFolder: true,
|
||||
}
|
||||
files = append(files, &f)
|
||||
@ -150,6 +151,7 @@ func (d *Yun139) getFiles(catalogID string) ([]model.Obj, error) {
|
||||
Name: content.ContentName,
|
||||
Size: content.ContentSize,
|
||||
Modified: getTime(content.UpdateTime),
|
||||
HashInfo: utils.NewHashInfo(utils.MD5, content.Digest),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
|
||||
//Thumbnail: content.BigthumbnailURL,
|
||||
@ -202,6 +204,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
Size: 0,
|
||||
IsFolder: true,
|
||||
Modified: getTime(catalog.LastUpdateTime),
|
||||
Ctime: getTime(catalog.CreateTime),
|
||||
}
|
||||
files = append(files, &f)
|
||||
}
|
||||
@ -212,6 +215,7 @@ func (d *Yun139) familyGetFiles(catalogID string) ([]model.Obj, error) {
|
||||
Name: content.ContentName,
|
||||
Size: content.ContentSize,
|
||||
Modified: getTime(content.LastUpdateTime),
|
||||
Ctime: getTime(content.CreateTime),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: content.ThumbnailURL},
|
||||
//Thumbnail: content.BigthumbnailURL,
|
||||
@ -248,3 +252,154 @@ func unicode(str string) string {
|
||||
textUnquoted := textQuoted[1 : len(textQuoted)-1]
|
||||
return textUnquoted
|
||||
}
|
||||
|
||||
func (d *Yun139) personalRequest(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
url := "https://personal-kd-njs.yun.139.com" + pathname
|
||||
req := base.RestyClient.R()
|
||||
randStr := random.String(16)
|
||||
ts := time.Now().Format("2006-01-02 15:04:05")
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
body, err := utils.Json.Marshal(req.Body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
sign := calSign(string(body), ts, randStr)
|
||||
svcType := "1"
|
||||
if d.isFamily() {
|
||||
svcType = "2"
|
||||
}
|
||||
req.SetHeaders(map[string]string{
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"Authorization": "Basic " + d.Authorization,
|
||||
"Caller": "web",
|
||||
"Cms-Device": "default",
|
||||
"Mcloud-Channel": "1000101",
|
||||
"Mcloud-Client": "10701",
|
||||
"Mcloud-Route": "001",
|
||||
"Mcloud-Sign": fmt.Sprintf("%s,%s,%s", ts, randStr, sign),
|
||||
"Mcloud-Version": "7.13.0",
|
||||
"Origin": "https://yun.139.com",
|
||||
"Referer": "https://yun.139.com/w/",
|
||||
"x-DeviceInfo": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||",
|
||||
"x-huawei-channelSrc": "10000034",
|
||||
"x-inner-ntwk": "2",
|
||||
"x-m4c-caller": "PC",
|
||||
"x-m4c-src": "10002",
|
||||
"x-SvcType": svcType,
|
||||
"X-Yun-Api-Version": "v1",
|
||||
"X-Yun-App-Channel": "10000034",
|
||||
"X-Yun-Channel-Source": "10000034",
|
||||
"X-Yun-Client-Info": "||9|7.13.0|chrome|120.0.0.0|||windows 10||zh-CN|||dW5kZWZpbmVk||",
|
||||
"X-Yun-Module-Type": "100",
|
||||
"X-Yun-Svc-Type": "1",
|
||||
})
|
||||
|
||||
var e BaseResp
|
||||
req.SetResult(&e)
|
||||
res, err := req.Execute(method, url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
log.Debugln(res.String())
|
||||
if !e.Success {
|
||||
return nil, errors.New(e.Message)
|
||||
}
|
||||
if resp != nil {
|
||||
err = utils.Json.Unmarshal(res.Body(), resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
func (d *Yun139) personalPost(pathname string, data interface{}, resp interface{}) ([]byte, error) {
|
||||
return d.personalRequest(pathname, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, resp)
|
||||
}
|
||||
|
||||
func getPersonalTime(t string) time.Time {
|
||||
stamp, err := time.ParseInLocation("2006-01-02T15:04:05.999-07:00", t, utils.CNLoc)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return stamp
|
||||
}
|
||||
|
||||
func (d *Yun139) personalGetFiles(fileId string) ([]model.Obj, error) {
|
||||
files := make([]model.Obj, 0)
|
||||
nextPageCursor := ""
|
||||
for {
|
||||
data := base.Json{
|
||||
"imageThumbnailStyleList": []string{"Small", "Large"},
|
||||
"orderBy": "updated_at",
|
||||
"orderDirection": "DESC",
|
||||
"pageInfo": base.Json{
|
||||
"pageCursor": nextPageCursor,
|
||||
"pageSize": 100,
|
||||
},
|
||||
"parentFileId": fileId,
|
||||
}
|
||||
var resp PersonalListResp
|
||||
_, err := d.personalPost("/hcy/file/list", data, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
nextPageCursor = resp.Data.NextPageCursor
|
||||
for _, item := range resp.Data.Items {
|
||||
var isFolder = (item.Type == "folder")
|
||||
var f model.Obj
|
||||
if isFolder {
|
||||
f = &model.Object{
|
||||
ID: item.FileId,
|
||||
Name: item.Name,
|
||||
Size: 0,
|
||||
Modified: getPersonalTime(item.UpdatedAt),
|
||||
Ctime: getPersonalTime(item.CreatedAt),
|
||||
IsFolder: isFolder,
|
||||
}
|
||||
} else {
|
||||
var Thumbnails = item.Thumbnails
|
||||
var ThumbnailUrl string
|
||||
if len(Thumbnails) > 0 {
|
||||
ThumbnailUrl = Thumbnails[len(Thumbnails)-1].Url
|
||||
}
|
||||
f = &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: item.FileId,
|
||||
Name: item.Name,
|
||||
Size: item.Size,
|
||||
Modified: getPersonalTime(item.UpdatedAt),
|
||||
Ctime: getPersonalTime(item.CreatedAt),
|
||||
IsFolder: isFolder,
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: ThumbnailUrl},
|
||||
}
|
||||
}
|
||||
files = append(files, f)
|
||||
}
|
||||
if len(nextPageCursor) == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (d *Yun139) personalGetLink(fileId string) (string, error) {
|
||||
data := base.Json{
|
||||
"fileId": fileId,
|
||||
}
|
||||
res, err := d.personalPost("/hcy/file/getDownloadUrl",
|
||||
data, nil)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
var cdnUrl = jsoniter.Get(res, "data", "cdnUrl").ToString()
|
||||
if cdnUrl != "" {
|
||||
return cdnUrl, nil
|
||||
} else {
|
||||
return jsoniter.Get(res, "data", "url").ToString(), nil
|
||||
}
|
||||
}
|
||||
|
@ -380,7 +380,7 @@ func (d *Cloud189) newUpload(ctx context.Context, dstDir model.Obj, file model.F
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(int(i * 100 / count))
|
||||
up(float64(i) * 100 / float64(count))
|
||||
}
|
||||
fileMd5 := hex.EncodeToString(md5Sum.Sum(nil))
|
||||
sliceMd5 := fileMd5
|
||||
|
@ -27,10 +27,15 @@ type Cloud189PC struct {
|
||||
tokenInfo *AppSessionResp
|
||||
|
||||
uploadThread int
|
||||
|
||||
storageConfig driver.Config
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) Config() driver.Config {
|
||||
return config
|
||||
if y.storageConfig.Name == "" {
|
||||
y.storageConfig = config
|
||||
}
|
||||
return y.storageConfig
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) GetAddition() driver.Additional {
|
||||
@ -38,6 +43,9 @@ func (y *Cloud189PC) GetAddition() driver.Additional {
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) Init(ctx context.Context) (err error) {
|
||||
// compatible with the legacy upload API
|
||||
y.storageConfig.NoOverwriteUpload = y.isFamily() && (y.Addition.RapidUpload || y.Addition.UploadMethod == "old")
|
||||
|
||||
// handle personal cloud and family cloud parameters
|
||||
if y.isFamily() && y.RootFolderID == "-11" {
|
||||
y.RootFolderID = ""
|
||||
@ -118,10 +126,11 @@ func (y *Cloud189PC) Link(ctx context.Context, file model.Obj, args model.LinkAr
|
||||
|
||||
// follow the redirect to get the real download link
|
||||
downloadUrl.URL = strings.Replace(strings.ReplaceAll(downloadUrl.URL, "&amp;", "&"), "http://", "https://", 1)
|
||||
res, err := base.NoRedirectClient.R().SetContext(ctx).Get(downloadUrl.URL)
|
||||
res, err := base.NoRedirectClient.R().SetContext(ctx).SetDoNotParseResponse(true).Get(downloadUrl.URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer res.RawBody().Close()
|
||||
if res.StatusCode() == 302 {
|
||||
downloadUrl.URL = res.Header().Get("location")
|
||||
}
|
||||
@ -302,6 +311,13 @@ func (y *Cloud189PC) Remove(ctx context.Context, obj model.Obj) error {
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
// slow response; enable only when needed
|
||||
if y.Addition.RapidUpload {
|
||||
if newObj, err := y.RapidUpload(ctx, dstDir, stream); err == nil {
|
||||
return newObj, nil
|
||||
}
|
||||
}
|
||||
|
||||
switch y.UploadMethod {
|
||||
case "old":
|
||||
return y.OldUpload(ctx, dstDir, stream, up)
|
||||
|
@ -16,6 +16,7 @@ type Addition struct {
|
||||
FamilyID string `json:"family_id"`
|
||||
UploadMethod string `json:"upload_method" type:"select" options:"stream,rapid,old" default:"stream"`
|
||||
UploadThread string `json:"upload_thread" default:"3" help:"1<=thread<=32"`
|
||||
RapidUpload bool `json:"rapid_upload"`
|
||||
NoUseOcr bool `json:"no_use_ocr"`
|
||||
}
|
||||
|
||||
|
@ -513,7 +513,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(int(threadG.Success()) * 100 / count)
|
||||
up(float64(threadG.Success()) * 100 / float64(count))
|
||||
return nil
|
||||
})
|
||||
}
|
||||
@ -546,16 +546,30 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
||||
return resp.toFile(), nil
|
||||
}
|
||||
|
||||
func (y *Cloud189PC) RapidUpload(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
|
||||
fileMd5 := stream.GetHash().GetHash(utils.MD5)
|
||||
if len(fileMd5) < utils.MD5.Width {
|
||||
return nil, errors.New("invalid hash")
|
||||
}
|
||||
|
||||
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, stream.GetName(), fmt.Sprint(stream.GetSize()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if uploadInfo.FileDataExists != 1 {
|
||||
return nil, errors.New("rapid upload fail")
|
||||
}
|
||||
|
||||
return y.OldUploadCommit(ctx, uploadInfo.FileCommitUrl, uploadInfo.UploadFileId)
|
||||
}
|
||||
|
||||
// fast upload
|
||||
func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
// the full file MD5 is required, so io.Seek must be supported
|
||||
tempFile, err := file.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
}()
|
||||
|
||||
var sliceSize = partSize(file.GetSize())
|
||||
count := int(math.Ceil(float64(file.GetSize()) / float64(sliceSize)))
|
||||
@ -662,7 +676,7 @@ func (y *Cloud189PC) FastUpload(ctx context.Context, dstDir model.Obj, file mode
|
||||
return err
|
||||
}
|
||||
|
||||
up(int(threadG.Success()) * 100 / len(uploadUrls))
|
||||
up(float64(threadG.Success()) * 100 / float64(len(uploadUrls)))
|
||||
uploadProgress.UploadParts[i] = ""
|
||||
return nil
|
||||
})
|
||||
@ -739,68 +753,24 @@ func (y *Cloud189PC) GetMultiUploadUrls(ctx context.Context, uploadFileId string
|
||||
|
||||
// legacy upload; the family cloud does not support overwriting
|
||||
func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
// the full file MD5 is required, so io.Seek must be supported
|
||||
tempFile, err := file.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
}()
|
||||
|
||||
// calculate the MD5
|
||||
fileMd5 := md5.New()
|
||||
if _, err := io.Copy(fileMd5, tempFile); err != nil {
|
||||
fileMd5, err := utils.HashFile(utils.MD5, tempFile)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if _, err = tempFile.Seek(0, io.SeekStart); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fileMd5Hex := strings.ToUpper(hex.EncodeToString(fileMd5.Sum(nil)))
|
||||
|
||||
// create an upload session
|
||||
var uploadInfo CreateUploadFileResp
|
||||
|
||||
fullUrl := API_URL + "/createUploadFile.action"
|
||||
if y.isFamily() {
|
||||
fullUrl = API_URL + "/family/file/createFamilyFile.action"
|
||||
}
|
||||
_, err = y.post(fullUrl, func(req *resty.Request) {
|
||||
req.SetContext(ctx)
|
||||
if y.isFamily() {
|
||||
req.SetQueryParams(map[string]string{
|
||||
"familyId": y.FamilyID,
|
||||
"fileMd5": fileMd5Hex,
|
||||
"fileName": file.GetName(),
|
||||
"fileSize": fmt.Sprint(file.GetSize()),
|
||||
"parentId": dstDir.GetID(),
|
||||
"resumePolicy": "1",
|
||||
})
|
||||
} else {
|
||||
req.SetFormData(map[string]string{
|
||||
"parentFolderId": dstDir.GetID(),
|
||||
"fileName": file.GetName(),
|
||||
"size": fmt.Sprint(file.GetSize()),
|
||||
"md5": fileMd5Hex,
|
||||
"opertype": "3",
|
||||
"flag": "1",
|
||||
"resumePolicy": "1",
|
||||
"isLog": "0",
|
||||
// "baseFileId": "",
|
||||
// "lastWrite":"",
|
||||
// "localPath": strings.ReplaceAll(param.LocalPath, "\\", "/"),
|
||||
// "fileExt": "",
|
||||
})
|
||||
}
|
||||
}, &uploadInfo)
|
||||
|
||||
uploadInfo, err := y.OldUploadCreate(ctx, dstDir.GetID(), fileMd5, file.GetName(), fmt.Sprint(file.GetSize()))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// 网盘中不存在该文件,开始上传
|
||||
status := GetUploadFileStatusResp{CreateUploadFileResp: uploadInfo}
|
||||
for status.Size < file.GetSize() && status.FileDataExists != 1 {
|
||||
status := GetUploadFileStatusResp{CreateUploadFileResp: *uploadInfo}
|
||||
for status.GetSize() < file.GetSize() && status.FileDataExists != 1 {
|
||||
if utils.IsCanceled(ctx) {
|
||||
return nil, ctx.Err()
|
||||
}
|
||||
@ -839,28 +809,70 @@ func (y *Cloud189PC) OldUpload(ctx context.Context, dstDir model.Obj, file model
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if _, err := tempFile.Seek(status.GetSize(), io.SeekStart); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
up(int(status.Size / file.GetSize()))
|
||||
up(float64(status.GetSize()) / float64(file.GetSize()) * 100)
|
||||
}
|
||||
|
||||
// 提交
|
||||
return y.OldUploadCommit(ctx, status.FileCommitUrl, status.UploadFileId)
|
||||
}
|
||||
|
||||
// 创建上传会话
func (y *Cloud189PC) OldUploadCreate(ctx context.Context, parentID string, fileMd5, fileName, fileSize string) (*CreateUploadFileResp, error) {
var uploadInfo CreateUploadFileResp

fullUrl := API_URL + "/createUploadFile.action"
if y.isFamily() {
fullUrl = API_URL + "/family/file/createFamilyFile.action"
}
_, err := y.post(fullUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetQueryParams(map[string]string{
"familyId": y.FamilyID,
"parentId": parentID,
"fileMd5": fileMd5,
"fileName": fileName,
"fileSize": fileSize,
"resumePolicy": "1",
})
} else {
req.SetFormData(map[string]string{
"parentFolderId": parentID,
"fileName": fileName,
"size": fileSize,
"md5": fileMd5,
"opertype": "3",
"flag": "1",
"resumePolicy": "1",
"isLog": "0",
})
}
}, &uploadInfo)

if err != nil {
return nil, err
}
return &uploadInfo, nil
}

// 提交上传文件
func (y *Cloud189PC) OldUploadCommit(ctx context.Context, fileCommitUrl string, uploadFileID int64) (model.Obj, error) {
var resp OldCommitUploadFileResp
_, err = y.post(status.FileCommitUrl, func(req *resty.Request) {
_, err := y.post(fileCommitUrl, func(req *resty.Request) {
req.SetContext(ctx)
if y.isFamily() {
req.SetHeaders(map[string]string{
"ResumePolicy": "1",
"UploadFileId": fmt.Sprint(status.UploadFileId),
"UploadFileId": fmt.Sprint(uploadFileID),
"FamilyId": fmt.Sprint(y.FamilyID),
})
} else {
req.SetFormData(map[string]string{
"opertype": "3",
"resumePolicy": "1",
"uploadFileId": fmt.Sprint(status.UploadFileId),
"uploadFileId": fmt.Sprint(uploadFileID),
"isLog": "0",
})
}

@ -8,6 +8,7 @@ import (
|
||||
"path"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
@ -174,13 +175,13 @@ func (d *AListV3) Remove(ctx context.Context, obj model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *AListV3) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
_, err := d.request("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||
_, err := d.requestWithTimeout("/fs/put", http.MethodPut, func(req *resty.Request) {
|
||||
req.SetHeader("File-Path", path.Join(dstDir.GetPath(), stream.GetName())).
|
||||
SetHeader("Password", d.MetaPassword).
|
||||
SetHeader("Content-Length", strconv.FormatInt(stream.GetSize(), 10)).
|
||||
SetContentLength(true).
|
||||
SetBody(io.ReadCloser(stream))
|
||||
})
|
||||
}, time.Hour*6)
|
||||
return err
|
||||
}
|
||||
|
||||
|
@ -3,6 +3,7 @@ package alist_v3
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
@ -56,3 +57,33 @@ func (d *AListV3) request(api, method string, callback base.ReqCallback, retry .
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *AListV3) requestWithTimeout(api, method string, callback base.ReqCallback, timeout time.Duration, retry ...bool) ([]byte, error) {
url := d.Address + "/api" + api
client := base.NewRestyClient().SetTimeout(timeout)
req := client.R()
req.SetHeader("Authorization", d.Token)
if callback != nil {
callback(req)
}
res, err := req.Execute(method, url)
if err != nil {
return nil, err
}
log.Debugf("[alist_v3] response body: %s", res.String())
if res.StatusCode() >= 400 {
return nil, fmt.Errorf("request failed, status: %s", res.Status())
}
code := utils.Json.Get(res.Body(), "code").ToInt()
if code != 200 {
if (code == 401 || code == 403) && !utils.IsBool(retry...) {
err = d.login()
if err != nil {
return nil, err
}
return d.requestWithTimeout(api, method, callback, timeout, true)
}
return nil, fmt.Errorf("request failed,code: %d, message: %s", code, utils.Json.Get(res.Body(), "message").ToString())
}
return res.Body(), nil
}

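The /fs/put change above streams the whole file in a single request, which can outlast the shared resty client's default timeout, so this helper builds a dedicated client with a caller-supplied timeout. A usage sketch under those assumptions (not part of the commits; the path and header values are placeholders):

// Sketch only: calling the new helper with a long per-request timeout,
// mirroring the 6-hour value used by Put above.
func putWithLongTimeout(d *AListV3, file io.ReadCloser, remotePath string, size int64) error {
    _, err := d.requestWithTimeout("/fs/put", http.MethodPut, func(req *resty.Request) {
        req.SetHeader("File-Path", remotePath).
            SetHeader("Content-Length", strconv.FormatInt(size, 10)).
            SetContentLength(true).
            SetBody(file)
    }, time.Hour*6)
    return err
}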
|
@ -7,7 +7,6 @@ import (
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/internal/stream"
|
||||
"io"
|
||||
"math"
|
||||
"math/big"
|
||||
@ -15,6 +14,8 @@ import (
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/stream"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/conf"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
@ -51,7 +52,7 @@ func (d *AliDrive) Init(ctx context.Context) error {
|
||||
return err
|
||||
}
|
||||
// get driver id
|
||||
res, err, _ := d.request("https://api.aliyundrive.com/v2/user/get", http.MethodPost, nil, nil)
|
||||
res, err, _ := d.request("https://api.alipan.com/v2/user/get", http.MethodPost, nil, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -105,7 +106,7 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
|
||||
"file_id": file.GetID(),
|
||||
"expire_sec": 14400,
|
||||
}
|
||||
res, err, _ := d.request("https://api.aliyundrive.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
|
||||
res, err, _ := d.request("https://api.alipan.com/v2/file/get_download_url", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, nil)
|
||||
if err != nil {
|
||||
@ -113,14 +114,14 @@ func (d *AliDrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
|
||||
}
|
||||
return &model.Link{
|
||||
Header: http.Header{
|
||||
"Referer": []string{"https://www.aliyundrive.com/"},
|
||||
"Referer": []string{"https://www.alipan.com/"},
|
||||
},
|
||||
URL: utils.Json.Get(res, "url").ToString(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *AliDrive) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, _ := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"check_name_mode": "refuse",
|
||||
"drive_id": d.DriveId,
|
||||
@ -138,7 +139,7 @@ func (d *AliDrive) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *AliDrive) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, _ := d.request("https://api.alipan.com/v3/file/update", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"check_name_mode": "refuse",
|
||||
"drive_id": d.DriveId,
|
||||
@ -155,7 +156,7 @@ func (d *AliDrive) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *AliDrive) Remove(ctx context.Context, obj model.Obj) error {
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, _ := d.request("https://api.alipan.com/v2/recyclebin/trash", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": obj.GetID(),
|
||||
@ -215,7 +216,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
||||
}
|
||||
|
||||
var resp UploadResp
|
||||
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(reqBody)
|
||||
}, &resp)
|
||||
|
||||
@ -269,7 +270,7 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
||||
n, _ := io.NewSectionReader(localFile, o.Int64(), 8).Read(buf[:8])
|
||||
reqBody["proof_code"] = base64.StdEncoding.EncodeToString(buf[:n])
|
||||
|
||||
_, err, e := d.request("https://api.aliyundrive.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, e := d.request("https://api.alipan.com/adrive/v2/file/createWithFolders", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(reqBody)
|
||||
}, &resp)
|
||||
if err != nil && e.Code != "PreHashMatched" {
|
||||
@ -304,11 +305,11 @@ func (d *AliDrive) Put(ctx context.Context, dstDir model.Obj, streamer model.Fil
|
||||
}
|
||||
res.Body.Close()
|
||||
if count > 0 {
|
||||
up(i * 100 / count)
|
||||
up(float64(i) * 100 / float64(count))
|
||||
}
|
||||
}
|
||||
var resp2 base.Json
|
||||
_, err, e = d.request("https://api.aliyundrive.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, e = d.request("https://api.alipan.com/v2/file/complete", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"drive_id": d.DriveId,
|
||||
"file_id": resp.FileId,
|
||||
@ -333,10 +334,10 @@ func (d *AliDrive) Other(ctx context.Context, args model.OtherArgs) (interface{}
|
||||
}
|
||||
switch args.Method {
|
||||
case "doc_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
|
||||
url = "https://api.alipan.com/v2/file/get_office_preview_url"
|
||||
data["access_token"] = d.AccessToken
|
||||
case "video_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
||||
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
|
||||
data["category"] = "live_transcoding"
|
||||
data["url_expire_sec"] = 14400
|
||||
default:
|
||||
|
@ -26,7 +26,7 @@ func (d *AliDrive) createSession() error {
|
||||
state.retry = 0
|
||||
return fmt.Errorf("createSession failed after three retries")
|
||||
}
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, _ := d.request("https://api.alipan.com/users/v1/users/device/create_session", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"deviceName": "samsung",
|
||||
"modelName": "SM-G9810",
|
||||
@ -42,7 +42,7 @@ func (d *AliDrive) createSession() error {
|
||||
}
|
||||
|
||||
// func (d *AliDrive) renewSession() error {
|
||||
// _, err, _ := d.request("https://api.aliyundrive.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
||||
// _, err, _ := d.request("https://api.alipan.com/users/v1/users/device/renew_session", http.MethodPost, nil, nil)
|
||||
// return err
|
||||
// }
|
||||
|
||||
@ -58,7 +58,7 @@ func (d *AliDrive) sign() {
|
||||
// do others that not defined in Driver interface
|
||||
|
||||
func (d *AliDrive) refreshToken() error {
|
||||
url := "https://auth.aliyundrive.com/v2/account/token"
|
||||
url := "https://auth.alipan.com/v2/account/token"
|
||||
var resp base.TokenResp
|
||||
var e RespErr
|
||||
_, err := base.RestyClient.R().
|
||||
@ -85,7 +85,7 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
|
||||
req := base.RestyClient.R()
|
||||
state, ok := global.Load(d.UserID)
|
||||
if !ok {
|
||||
if url == "https://api.aliyundrive.com/v2/user/get" {
|
||||
if url == "https://api.alipan.com/v2/user/get" {
|
||||
state = &State{}
|
||||
} else {
|
||||
return nil, fmt.Errorf("can't load user state, user_id: %s", d.UserID), RespErr{}
|
||||
@ -94,8 +94,8 @@ func (d *AliDrive) request(url, method string, callback base.ReqCallback, resp i
|
||||
req.SetHeaders(map[string]string{
|
||||
"Authorization": "Bearer\t" + d.AccessToken,
|
||||
"content-type": "application/json",
|
||||
"origin": "https://www.aliyundrive.com",
|
||||
"Referer": "https://aliyundrive.com/",
|
||||
"origin": "https://www.alipan.com",
|
||||
"Referer": "https://alipan.com/",
|
||||
"X-Signature": state.signature,
|
||||
"x-request-id": uuid.NewString(),
|
||||
"X-Canary": "client=Android,app=adrive,version=v4.1.0",
|
||||
@ -158,7 +158,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
|
||||
"video_thumbnail_process": "video/snapshot,t_0,f_jpg,ar_auto,w_300",
|
||||
"url_expire_sec": 14400,
|
||||
}
|
||||
_, err, _ := d.request("https://api.aliyundrive.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
|
||||
_, err, _ := d.request("https://api.alipan.com/v2/file/list", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(data)
|
||||
}, &resp)
|
||||
|
||||
@ -172,7 +172,7 @@ func (d *AliDrive) getFiles(fileId string) ([]File, error) {
|
||||
}
|
||||
|
||||
func (d *AliDrive) batch(srcId, dstId string, url string) error {
|
||||
res, err, _ := d.request("https://api.aliyundrive.com/v3/batch", http.MethodPost, func(req *resty.Request) {
|
||||
res, err, _ := d.request("https://api.alipan.com/v3/batch", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"requests": []base.Json{
|
||||
{
|
||||
|
@ -93,7 +93,7 @@ func (d *AliyundriveOpen) link(ctx context.Context, file model.Obj) (*model.Link
|
||||
}
|
||||
url = utils.Json.Get(res, "streamsUrl", d.LIVPDownloadFormat).ToString()
|
||||
}
|
||||
exp := time.Hour
|
||||
exp := time.Minute
|
||||
return &model.Link{
|
||||
URL: url,
|
||||
Expiration: &exp,
|
||||
|
@ -11,7 +11,7 @@ type Addition struct {
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
OrderBy string `json:"order_by" type:"select" options:"name,size,updated_at,created_at"`
|
||||
OrderDirection string `json:"order_direction" type:"select" options:"ASC,DESC"`
|
||||
OauthTokenURL string `json:"oauth_token_url" default:"https://api.xhofe.top/alist/ali_open/token"`
|
||||
OauthTokenURL string `json:"oauth_token_url" default:"https://api.nn.ci/alist/ali_open/token"`
|
||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
|
||||
@ -36,7 +36,7 @@ var config = driver.Config{
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &AliyundriveOpen{
|
||||
base: "https://openapi.aliyundrive.com",
|
||||
base: "https://openapi.alipan.com",
|
||||
}
|
||||
})
|
||||
}
|
||||
|
@ -5,7 +5,6 @@ import (
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"io"
|
||||
"math"
|
||||
"net/http"
|
||||
@ -16,6 +15,7 @@ import (
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/avast/retry-go"
|
||||
"github.com/go-resty/resty/v2"
|
||||
@ -258,6 +258,7 @@ func (d *AliyundriveOpen) upload(ctx context.Context, dstDir model.Obj, stream m
|
||||
return nil, err
|
||||
}
|
||||
offset += partSize
|
||||
up(float64(i*100) / float64(count))
|
||||
}
|
||||
} else {
|
||||
log.Debugf("[aliyundrive_open] rapid upload success, file id: %s", createResp.FileId)
|
||||
|
@ -86,7 +86,7 @@ func (d *AliyundriveOpen) refreshToken() error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
log.Infof("[ali_open] toekn exchange: %s -> %s", d.RefreshToken, refresh)
|
||||
log.Infof("[ali_open] token exchange: %s -> %s", d.RefreshToken, refresh)
|
||||
d.RefreshToken, d.AccessToken = refresh, access
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
|
@ -105,7 +105,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
|
||||
"share_id": d.ShareId,
|
||||
}
|
||||
var resp ShareLinkResp
|
||||
_, err := d.request("https://api.aliyundrive.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
|
||||
_, err := d.request("https://api.alipan.com/v2/file/get_share_link_download_url", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetHeader(CanaryHeaderKey, CanaryHeaderValue).SetBody(data).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
@ -113,7 +113,7 @@ func (d *AliyundriveShare) link(ctx context.Context, file model.Obj) (*model.Lin
|
||||
}
|
||||
return &model.Link{
|
||||
Header: http.Header{
|
||||
"Referer": []string{"https://www.aliyundrive.com/"},
|
||||
"Referer": []string{"https://www.alipan.com/"},
|
||||
},
|
||||
URL: resp.DownloadUrl,
|
||||
}, nil
|
||||
@ -128,9 +128,9 @@ func (d *AliyundriveShare) Other(ctx context.Context, args model.OtherArgs) (int
|
||||
}
|
||||
switch args.Method {
|
||||
case "doc_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
|
||||
url = "https://api.alipan.com/v2/file/get_office_preview_url"
|
||||
case "video_preview":
|
||||
url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
|
||||
url = "https://api.alipan.com/v2/file/get_video_preview_play_info"
|
||||
data["category"] = "live_transcoding"
|
||||
default:
|
||||
return nil, errs.NotSupport
|
||||
|
@ -16,7 +16,7 @@ const (
|
||||
)
|
||||
|
||||
func (d *AliyundriveShare) refreshToken() error {
|
||||
url := "https://auth.aliyundrive.com/v2/account/token"
|
||||
url := "https://auth.alipan.com/v2/account/token"
|
||||
var resp base.TokenResp
|
||||
var e ErrorResp
|
||||
_, err := base.RestyClient.R().
|
||||
@ -47,7 +47,7 @@ func (d *AliyundriveShare) getShareToken() error {
|
||||
var resp ShareTokenResp
|
||||
_, err := base.RestyClient.R().
|
||||
SetResult(&resp).SetError(&e).SetBody(data).
|
||||
Post("https://api.aliyundrive.com/v2/share_link/get_share_token")
|
||||
Post("https://api.alipan.com/v2/share_link/get_share_token")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -116,7 +116,7 @@ func (d *AliyundriveShare) getFiles(fileId string) ([]File, error) {
|
||||
SetHeader("x-share-token", d.ShareToken).
|
||||
SetHeader(CanaryHeaderKey, CanaryHeaderValue).
|
||||
SetResult(&resp).SetError(&e).SetBody(data).
|
||||
Post("https://api.aliyundrive.com/adrive/v3/file/list")
|
||||
Post("https://api.alipan.com/adrive/v3/file/list")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -2,7 +2,9 @@ package drivers
|
||||
|
||||
import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/115"
|
||||
_ "github.com/alist-org/alist/v3/drivers/115_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/123"
|
||||
_ "github.com/alist-org/alist/v3/drivers/123_link"
|
||||
_ "github.com/alist-org/alist/v3/drivers/123_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/139"
|
||||
_ "github.com/alist-org/alist/v3/drivers/189"
|
||||
@ -16,12 +18,14 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/baidu_netdisk"
|
||||
_ "github.com/alist-org/alist/v3/drivers/baidu_photo"
|
||||
_ "github.com/alist-org/alist/v3/drivers/baidu_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/chaoxing"
|
||||
_ "github.com/alist-org/alist/v3/drivers/cloudreve"
|
||||
_ "github.com/alist-org/alist/v3/drivers/crypt"
|
||||
_ "github.com/alist-org/alist/v3/drivers/dropbox"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ftp"
|
||||
_ "github.com/alist-org/alist/v3/drivers/google_drive"
|
||||
_ "github.com/alist-org/alist/v3/drivers/google_photo"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ilanzou"
|
||||
_ "github.com/alist-org/alist/v3/drivers/ipfs_api"
|
||||
_ "github.com/alist-org/alist/v3/drivers/lanzou"
|
||||
_ "github.com/alist-org/alist/v3/drivers/local"
|
||||
@ -33,6 +37,7 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/pikpak"
|
||||
_ "github.com/alist-org/alist/v3/drivers/pikpak_share"
|
||||
_ "github.com/alist-org/alist/v3/drivers/quark_uc"
|
||||
_ "github.com/alist-org/alist/v3/drivers/quqi"
|
||||
_ "github.com/alist-org/alist/v3/drivers/s3"
|
||||
_ "github.com/alist-org/alist/v3/drivers/seafile"
|
||||
_ "github.com/alist-org/alist/v3/drivers/sftp"
|
||||
@ -44,6 +49,7 @@ import (
|
||||
_ "github.com/alist-org/alist/v3/drivers/url_tree"
|
||||
_ "github.com/alist-org/alist/v3/drivers/uss"
|
||||
_ "github.com/alist-org/alist/v3/drivers/virtual"
|
||||
_ "github.com/alist-org/alist/v3/drivers/vtencent"
|
||||
_ "github.com/alist-org/alist/v3/drivers/webdav"
|
||||
_ "github.com/alist-org/alist/v3/drivers/weiyun"
|
||||
_ "github.com/alist-org/alist/v3/drivers/wopan"
|
||||
|
@ -5,7 +5,6 @@ import (
|
||||
"crypto/md5"
|
||||
"encoding/hex"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"net/url"
|
||||
@ -28,10 +27,9 @@ type BaiduNetdisk struct {
|
||||
Addition
|
||||
|
||||
uploadThread int
|
||||
vipType int // 会员类型,0普通用户(4G/4M)、1普通会员(10G/16M)、2超级会员(20G/32M)
|
||||
}
|
||||
|
||||
const DefaultSliceSize int64 = 4 * utils.MB
|
||||
|
||||
func (d *BaiduNetdisk) Config() driver.Config {
|
||||
return config
|
||||
}
|
||||
@ -54,7 +52,11 @@ func (d *BaiduNetdisk) Init(ctx context.Context) error {
|
||||
"method": "uinfo",
|
||||
}, nil)
|
||||
log.Debugf("[baidu] get uinfo: %s", string(res))
|
||||
return err
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.vipType = utils.Json.Get(res, "vip_type").ToInt()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *BaiduNetdisk) Drop(ctx context.Context) error {
|
||||
@ -146,24 +148,50 @@ func (d *BaiduNetdisk) Remove(ctx context.Context, obj model.Obj) error {
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *BaiduNetdisk) PutRapid(ctx context.Context, dstDir model.Obj, stream model.FileStreamer) (model.Obj, error) {
contentMd5 := stream.GetHash().GetHash(utils.MD5)
if len(contentMd5) < utils.MD5.Width {
return nil, errors.New("invalid hash")
}

streamSize := stream.GetSize()
path := stdpath.Join(dstDir.GetPath(), stream.GetName())
mtime := stream.ModTime().Unix()
ctime := stream.CreateTime().Unix()
blockList, _ := utils.Json.MarshalToString([]string{contentMd5})

var newFile File
_, err := d.create(path, streamSize, 0, "", blockList, &newFile, mtime, ctime)
if err != nil {
return nil, err
}
return fileToObj(newFile), nil
}

func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
// rapid upload
|
||||
if newObj, err := d.PutRapid(ctx, dstDir, stream); err == nil {
|
||||
return newObj, nil
|
||||
}
|
||||
|
||||
tempFile, err := stream.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
streamSize := stream.GetSize()
|
||||
count := int(math.Max(math.Ceil(float64(streamSize)/float64(DefaultSliceSize)), 1))
|
||||
lastBlockSize := streamSize % DefaultSliceSize
|
||||
sliceSize := d.getSliceSize()
|
||||
count := int(math.Max(math.Ceil(float64(streamSize)/float64(sliceSize)), 1))
|
||||
lastBlockSize := streamSize % sliceSize
|
||||
if streamSize > 0 && lastBlockSize == 0 {
|
||||
lastBlockSize = DefaultSliceSize
|
||||
lastBlockSize = sliceSize
|
||||
}
|
||||
|
||||
//cal md5 for first 256k data
|
||||
const SliceSize int64 = 256 * 1024
|
||||
// cal md5
|
||||
blockList := make([]string, 0, count)
|
||||
byteSize := DefaultSliceSize
|
||||
byteSize := sliceSize
|
||||
fileMd5H := md5.New()
|
||||
sliceMd5H := md5.New()
|
||||
sliceMd5H2 := md5.New()
|
||||
@ -186,9 +214,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
||||
contentMd5 := hex.EncodeToString(fileMd5H.Sum(nil))
|
||||
sliceMd5 := hex.EncodeToString(sliceMd5H2.Sum(nil))
|
||||
blockListStr, _ := utils.Json.MarshalToString(blockList)
|
||||
|
||||
rawPath := stdpath.Join(dstDir.GetPath(), stream.GetName())
|
||||
path := encodeURIComponent(rawPath)
|
||||
path := stdpath.Join(dstDir.GetPath(), stream.GetName())
|
||||
mtime := stream.ModTime().Unix()
|
||||
ctime := stream.CreateTime().Unix()
|
||||
|
||||
@ -196,13 +222,23 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
||||
// 尝试获取之前的进度
|
||||
precreateResp, ok := base.GetUploadProgress[*PrecreateResp](d, d.AccessToken, contentMd5)
|
||||
if !ok {
|
||||
data := fmt.Sprintf("path=%s&size=%d&isdir=0&autoinit=1&rtype=3&block_list=%s&content-md5=%s&slice-md5=%s&local_mtime=%d&local_ctime=%d",
|
||||
path, streamSize, blockListStr, contentMd5, sliceMd5, mtime, ctime)
|
||||
params := map[string]string{
|
||||
"method": "precreate",
|
||||
}
|
||||
log.Debugf("[baidu_netdisk] precreate data: %s", data)
|
||||
_, err = d.post("/xpan/file", params, data, &precreateResp)
|
||||
form := map[string]string{
|
||||
"path": path,
|
||||
"size": strconv.FormatInt(streamSize, 10),
|
||||
"isdir": "0",
|
||||
"autoinit": "1",
|
||||
"rtype": "3",
|
||||
"block_list": blockListStr,
|
||||
"content-md5": contentMd5,
|
||||
"slice-md5": sliceMd5,
|
||||
}
|
||||
joinTime(form, ctime, mtime)
|
||||
|
||||
log.Debugf("[baidu_netdisk] precreate data: %s", form)
|
||||
_, err = d.postForm("/xpan/file", params, form, &precreateResp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -225,7 +261,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
||||
break
|
||||
}
|
||||
|
||||
i, partseq, offset, byteSize := i, partseq, int64(partseq)*DefaultSliceSize, DefaultSliceSize
|
||||
i, partseq, offset, byteSize := i, partseq, int64(partseq)*sliceSize, sliceSize
|
||||
if partseq+1 == count {
|
||||
byteSize = lastBlockSize
|
||||
}
|
||||
@ -242,7 +278,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
|
||||
up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
|
||||
precreateResp.BlockList[i] = -1
|
||||
return nil
|
||||
})
|
||||
@ -258,12 +294,13 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
||||
|
||||
// step.3 创建文件
|
||||
var newFile File
|
||||
_, err = d.create(rawPath, streamSize, 0, precreateResp.Uploadid, blockListStr, &newFile, mtime, ctime)
|
||||
_, err = d.create(path, streamSize, 0, precreateResp.Uploadid, blockListStr, &newFile, mtime, ctime)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return fileToObj(newFile), nil
|
||||
}
|
||||
|
||||
func (d *BaiduNetdisk) uploadSlice(ctx context.Context, params map[string]string, fileName string, file io.Reader) error {
|
||||
res, err := base.RestyClient.R().
|
||||
SetContext(ctx).
|
||||
|
@ -1,7 +1,6 @@
|
||||
package baidu_netdisk
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"path"
|
||||
"strconv"
|
||||
"time"
|
||||
@ -71,7 +70,9 @@ func fileToObj(f File) *model.ObjThumb {
|
||||
Modified: time.Unix(f.LocalMtime, 0),
|
||||
Ctime: time.Unix(f.LocalCtime, 0),
|
||||
IsFolder: f.Isdir == 1,
|
||||
HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
|
||||
|
||||
// 直接获取的MD5是错误的
|
||||
// HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{Thumbnail: f.Thumbs.Url3},
|
||||
}
|
||||
|
@ -1,11 +1,10 @@
|
||||
package baidu_netdisk
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
@ -22,7 +21,7 @@ import (
|
||||
|
||||
func (d *BaiduNetdisk) refreshToken() error {
|
||||
err := d._refreshToken()
|
||||
if err != nil && err == errs.EmptyToken {
|
||||
if err != nil && errors.Is(err, errs.EmptyToken) {
|
||||
err = d._refreshToken()
|
||||
}
|
||||
return err
|
||||
@ -74,21 +73,16 @@ func (d *BaiduNetdisk) request(furl string, method string, callback base.ReqCall
|
||||
log.Info("refreshing baidu_netdisk token.")
|
||||
err2 := d.refreshToken()
|
||||
if err2 != nil {
|
||||
return err2
|
||||
return retry.Unrecoverable(err2)
|
||||
}
|
||||
}
|
||||
|
||||
err2 := fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
|
||||
if !utils.SliceContains([]int{2}, errno) {
|
||||
err2 = retry.Unrecoverable(err2)
|
||||
}
|
||||
return err2
|
||||
return fmt.Errorf("req: [%s] ,errno: %d, refer to https://pan.baidu.com/union/doc/", furl, errno)
|
||||
}
|
||||
result = res.Body()
|
||||
return nil
|
||||
},
|
||||
retry.LastErrorOnly(true),
|
||||
retry.Attempts(5),
|
||||
retry.Attempts(3),
|
||||
retry.Delay(time.Second),
|
||||
retry.DelayType(retry.BackOffDelay))
|
||||
return result, err
|
||||
@ -100,10 +94,10 @@ func (d *BaiduNetdisk) get(pathname string, params map[string]string, resp inter
|
||||
}, resp)
|
||||
}
|
||||
|
||||
func (d *BaiduNetdisk) post(pathname string, params map[string]string, data interface{}, resp interface{}) ([]byte, error) {
|
||||
func (d *BaiduNetdisk) postForm(pathname string, params map[string]string, form map[string]string, resp interface{}) ([]byte, error) {
|
||||
return d.request("https://pan.baidu.com/rest/2.0"+pathname, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetQueryParams(params)
|
||||
req.SetBody(data)
|
||||
req.SetFormData(form)
|
||||
}, resp)
|
||||
}
|
||||
|
||||
@ -158,6 +152,9 @@ func (d *BaiduNetdisk) linkOfficial(file model.Obj, args model.LinkArgs) (*model
|
||||
//if res.StatusCode() == 302 {
|
||||
u = res.Header().Get("location")
|
||||
//}
|
||||
|
||||
updateObjMd5(file, "pan.baidu.com", u)
|
||||
|
||||
return &model.Link{
|
||||
URL: u,
|
||||
Header: http.Header{
|
||||
@ -180,6 +177,9 @@ func (d *BaiduNetdisk) linkCrack(file model.Obj, args model.LinkArgs) (*model.Li
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
updateObjMd5(file, d.CustomCrackUA, resp.Info[0].Dlink)
|
||||
|
||||
return &model.Link{
|
||||
URL: resp.Info[0].Dlink,
|
||||
Header: http.Header{
|
||||
@ -194,29 +194,73 @@ func (d *BaiduNetdisk) manage(opera string, filelist any) ([]byte, error) {
|
||||
"opera": opera,
|
||||
}
|
||||
marshal, _ := utils.Json.MarshalToString(filelist)
|
||||
data := fmt.Sprintf("async=0&filelist=%s&ondup=fail", marshal)
|
||||
return d.post("/xpan/file", params, data, nil)
|
||||
return d.postForm("/xpan/file", params, map[string]string{
|
||||
"async": "0",
|
||||
"filelist": marshal,
|
||||
"ondup": "fail",
|
||||
}, nil)
|
||||
}
|
||||
|
||||
func (d *BaiduNetdisk) create(path string, size int64, isdir int, uploadid, block_list string, resp any, mtime, ctime int64) ([]byte, error) {
|
||||
params := map[string]string{
|
||||
"method": "create",
|
||||
}
|
||||
data := ""
|
||||
if mtime == 0 || ctime == 0 {
|
||||
data = fmt.Sprintf("path=%s&size=%d&isdir=%d&rtype=3", encodeURIComponent(path), size, isdir)
|
||||
} else {
|
||||
data = fmt.Sprintf("path=%s&size=%d&isdir=%d&rtype=3&local_mtime=%d&local_ctime=%d", encodeURIComponent(path), size, isdir, mtime, ctime)
|
||||
form := map[string]string{
|
||||
"path": path,
|
||||
"size": strconv.FormatInt(size, 10),
|
||||
"isdir": strconv.Itoa(isdir),
|
||||
"rtype": "3",
|
||||
}
|
||||
if mtime != 0 && ctime != 0 {
|
||||
joinTime(form, ctime, mtime)
|
||||
}
|
||||
|
||||
if uploadid != "" {
|
||||
data += fmt.Sprintf("&uploadid=%s&block_list=%s", uploadid, block_list)
|
||||
form["uploadid"] = uploadid
|
||||
}
|
||||
return d.post("/xpan/file", params, data, resp)
|
||||
if block_list != "" {
|
||||
form["block_list"] = block_list
|
||||
}
|
||||
return d.postForm("/xpan/file", params, form, resp)
|
||||
}
|
||||
|
||||
func encodeURIComponent(str string) string {
r := url.QueryEscape(str)
r = strings.ReplaceAll(r, "+", "%20")
return r
func joinTime(form map[string]string, ctime, mtime int64) {
form["local_mtime"] = strconv.FormatInt(mtime, 10)
form["local_ctime"] = strconv.FormatInt(ctime, 10)
}

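These Baidu hunks replace the hand-built urlencoded request strings with SetFormData, so resty takes care of URL-encoding the path and other fields, and the old encodeURIComponent helper survives only as the commented-out block further down. A small sketch of how a create/precreate form is now assembled (not part of the commits; the values are placeholders):

// Sketch only: assembling a form the way postForm/create now expect.
func buildCreateForm(path string, size int64) map[string]string {
    form := map[string]string{
        "path":  path, // URL-encoded by resty when sent via SetFormData
        "size":  strconv.FormatInt(size, 10),
        "isdir": "0",
        "rtype": "3",
    }
    joinTime(form, time.Now().Unix(), time.Now().Unix())
    return form
}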
func updateObjMd5(obj model.Obj, userAgent, u string) {
object := model.GetRawObject(obj)
if object != nil {
req, _ := http.NewRequest(http.MethodHead, u, nil)
req.Header.Add("User-Agent", userAgent)
resp, _ := base.HttpClient.Do(req)
if resp != nil {
contentMd5 := resp.Header.Get("Content-Md5")
object.HashInfo = utils.NewHashInfo(utils.MD5, contentMd5)
}
}
}

const (
DefaultSliceSize int64 = 4 * utils.MB
VipSliceSize = 16 * utils.MB
SVipSliceSize = 32 * utils.MB
)

func (d *BaiduNetdisk) getSliceSize() int64 {
switch d.vipType {
case 1:
return VipSliceSize
case 2:
return SVipSliceSize
default:
return DefaultSliceSize
}
}

// func encodeURIComponent(str string) string {
// r := url.QueryEscape(str)
// r = strings.ReplaceAll(r, "+", "%20")
// return r
// }

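Because the slice size now depends on vipType (4 MB for free accounts, 16 MB for VIP, 32 MB for SVIP, matching the comment on the vipType field), the Put hunk derives the chunk count and the size of the final chunk from getSliceSize(). A worked sketch of that arithmetic (not part of the commits; the sizes are made up):

// Sketch only: the slicing math used in Put, with illustrative numbers.
func sliceExample() {
    var streamSize int64 = 10 * 1024 * 1024 // a 10 MiB upload
    var sliceSize int64 = 4 * 1024 * 1024   // DefaultSliceSize for a free account

    count := int(math.Max(math.Ceil(float64(streamSize)/float64(sliceSize)), 1)) // 3 chunks
    lastBlockSize := streamSize % sliceSize                                      // 2 MiB tail
    if streamSize > 0 && lastBlockSize == 0 {
        lastBlockSize = sliceSize
    }
    fmt.Println(count, lastBlockSize) // 3 2097152
}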
@ -227,14 +227,14 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
return nil, fmt.Errorf("file size cannot be zero")
|
||||
}
|
||||
|
||||
// TODO:
|
||||
// 暂时没有找到妙传方式
|
||||
|
||||
// 需要获取完整文件md5,必须支持 io.Seek
|
||||
tempFile, err := stream.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
}()
|
||||
|
||||
const DEFAULT int64 = 1 << 22
|
||||
const SliceSize int64 = 1 << 18
|
||||
@ -329,7 +329,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(int(threadG.Success()) * 100 / len(precreateResp.BlockList))
|
||||
up(float64(threadG.Success()) * 100 / float64(len(precreateResp.BlockList)))
|
||||
precreateResp.BlockList[i] = -1
|
||||
return nil
|
||||
})
|
||||
|
@ -2,9 +2,10 @@ package baiduphoto
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
@ -52,34 +53,26 @@ type (
|
||||
Ctime int64 `json:"ctime"` // 创建时间 s
|
||||
Mtime int64 `json:"mtime"` // 修改时间 s
|
||||
Thumburl []string `json:"thumburl"`
|
||||
|
||||
parseTime *time.Time
|
||||
Md5 string `json:"md5"`
|
||||
}
|
||||
)
|
||||
|
||||
func (c *File) GetSize() int64 { return c.Size }
|
||||
func (c *File) GetName() string { return getFileName(c.Path) }
|
||||
func (c *File) ModTime() time.Time {
|
||||
if c.parseTime == nil {
|
||||
c.parseTime = toTime(c.Mtime)
|
||||
}
|
||||
return *c.parseTime
|
||||
}
|
||||
func (c *File) IsDir() bool { return false }
|
||||
func (c *File) GetID() string { return "" }
|
||||
func (c *File) GetPath() string { return "" }
|
||||
func (c *File) GetSize() int64 { return c.Size }
|
||||
func (c *File) GetName() string { return getFileName(c.Path) }
|
||||
func (c *File) CreateTime() time.Time { return time.Unix(c.Ctime, 0) }
|
||||
func (c *File) ModTime() time.Time { return time.Unix(c.Mtime, 0) }
|
||||
func (c *File) IsDir() bool { return false }
|
||||
func (c *File) GetID() string { return "" }
|
||||
func (c *File) GetPath() string { return "" }
|
||||
func (c *File) Thumb() string {
|
||||
if len(c.Thumburl) > 0 {
|
||||
return c.Thumburl[0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
func (c *File) CreateTime() time.Time {
|
||||
return time.Unix(c.Ctime, 0)
|
||||
}
|
||||
|
||||
func (c *File) GetHash() utils.HashInfo {
|
||||
return utils.HashInfo{}
|
||||
return utils.NewHashInfo(utils.MD5, c.Md5)
|
||||
}
|
||||
|
||||
/*相册部分*/
|
||||
@ -117,25 +110,17 @@ type (
|
||||
}
|
||||
)
|
||||
|
||||
func (a *Album) CreateTime() time.Time {
|
||||
return time.Unix(a.CreationTime, 0)
|
||||
}
|
||||
|
||||
func (a *Album) GetHash() utils.HashInfo {
|
||||
return utils.HashInfo{}
|
||||
}
|
||||
|
||||
func (a *Album) GetSize() int64 { return 0 }
|
||||
func (a *Album) GetName() string { return a.Title }
|
||||
func (a *Album) ModTime() time.Time {
|
||||
if a.parseTime == nil {
|
||||
a.parseTime = toTime(a.Mtime)
|
||||
}
|
||||
return *a.parseTime
|
||||
}
|
||||
func (a *Album) IsDir() bool { return true }
|
||||
func (a *Album) GetID() string { return "" }
|
||||
func (a *Album) GetPath() string { return "" }
|
||||
func (a *Album) GetSize() int64 { return 0 }
|
||||
func (a *Album) GetName() string { return a.Title }
|
||||
func (a *Album) CreateTime() time.Time { return time.Unix(a.CreationTime, 0) }
|
||||
func (a *Album) ModTime() time.Time { return time.Unix(a.Mtime, 0) }
|
||||
func (a *Album) IsDir() bool { return true }
|
||||
func (a *Album) GetID() string { return "" }
|
||||
func (a *Album) GetPath() string { return "" }
|
||||
|
||||
type (
|
||||
CopyFileResp struct {
|
||||
|
@ -33,6 +33,7 @@ func NewRestyClient() *resty.Client {
|
||||
client := resty.New().
|
||||
SetHeader("user-agent", UserAgent).
|
||||
SetRetryCount(3).
|
||||
SetRetryResetReaders(true).
|
||||
SetTimeout(DefaultTimeout).
|
||||
SetTLSClientConfig(&tls.Config{InsecureSkipVerify: conf.Conf.TlsInsecureSkipVerify})
|
||||
return client
|
||||
|
297
drivers/chaoxing/driver.go
Normal file
@ -0,0 +1,297 @@
|
||||
package chaoxing
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/cron"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/go-resty/resty/v2"
|
||||
"google.golang.org/appengine/log"
|
||||
)
|
||||
|
||||
type ChaoXing struct {
|
||||
model.Storage
|
||||
Addition
|
||||
cron *cron.Cron
|
||||
config driver.Config
|
||||
conf Conf
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Config() driver.Config {
|
||||
return d.config
|
||||
}
|
||||
|
||||
func (d *ChaoXing) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *ChaoXing) refreshCookie() error {
|
||||
cookie, err := d.Login()
|
||||
if err != nil {
|
||||
d.Status = err.Error()
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
d.Addition.Cookie = cookie
|
||||
op.MustSaveDriverStorage(d)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Init(ctx context.Context) error {
|
||||
err := d.refreshCookie()
|
||||
if err != nil {
|
||||
log.Errorf(ctx, err.Error())
|
||||
}
|
||||
d.cron = cron.NewCron(time.Hour * 12)
|
||||
d.cron.Do(func() {
|
||||
err = d.refreshCookie()
|
||||
if err != nil {
|
||||
log.Errorf(ctx, err.Error())
|
||||
}
|
||||
})
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Drop(ctx context.Context) error {
|
||||
d.cron.Stop()
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.GetFiles(dir.GetID())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
|
||||
return fileToObj(src), nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
var resp DownResp
|
||||
ua := d.conf.ua
|
||||
fileId := strings.Split(file.GetID(), "$")[1]
|
||||
_, err := d.requestDownload("/screen/note_note/files/status/"+fileId, http.MethodPost, func(req *resty.Request) {
|
||||
req.SetHeader("User-Agent", ua)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
u := resp.Download
|
||||
return &model.Link{
|
||||
URL: u,
|
||||
Header: http.Header{
|
||||
"Cookie": []string{d.Cookie},
|
||||
"Referer": []string{d.conf.referer},
|
||||
"User-Agent": []string{ua},
|
||||
},
|
||||
Concurrency: 2,
|
||||
PartSize: 10 * utils.MB,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
|
||||
query := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"name": dirName,
|
||||
"pid": parentDir.GetID(),
|
||||
}
|
||||
var resp ListFileResp
|
||||
_, err := d.request("/pc/resource/addResourceFolder", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Result != 1 {
|
||||
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||
return errors.New(msg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
query := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"folderIds": srcObj.GetID(),
|
||||
"targetId": dstDir.GetID(),
|
||||
}
|
||||
if !srcObj.IsDir() {
|
||||
query = map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"recIds": strings.Split(srcObj.GetID(), "$")[0],
|
||||
"targetId": dstDir.GetID(),
|
||||
}
|
||||
}
|
||||
var resp ListFileResp
|
||||
_, err := d.request("/pc/resource/moveResource", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !resp.Status {
|
||||
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||
return errors.New(msg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
|
||||
query := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"folderId": srcObj.GetID(),
|
||||
"name": newName,
|
||||
}
|
||||
path := "/pc/resource/updateResourceFolderName"
|
||||
if !srcObj.IsDir() {
|
||||
// path = "/pc/resource/updateResourceFileName"
|
||||
// query = map[string]string{
|
||||
// "bbsid": d.Addition.Bbsid,
|
||||
// "recIds": strings.Split(srcObj.GetID(), "$")[0],
|
||||
// "name": newName,
|
||||
// }
|
||||
return errors.New("此网盘不支持修改文件名")
|
||||
}
|
||||
var resp ListFileResp
|
||||
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Result != 1 {
|
||||
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||
return errors.New(msg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
// TODO copy obj, optional
|
||||
return errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Remove(ctx context.Context, obj model.Obj) error {
|
||||
query := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"folderIds": obj.GetID(),
|
||||
}
|
||||
path := "/pc/resource/deleteResourceFolder"
|
||||
var resp ListFileResp
|
||||
if !obj.IsDir() {
|
||||
path = "/pc/resource/deleteResourceFile"
|
||||
query = map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"recIds": strings.Split(obj.GetID(), "$")[0],
|
||||
}
|
||||
}
|
||||
_, err := d.request(path, http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Result != 1 {
|
||||
msg := fmt.Sprintf("error:%s", resp.Msg)
|
||||
return errors.New(msg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
var resp UploadDataRsp
|
||||
_, err := d.request("https://noteyd.chaoxing.com/pc/files/getUploadConfig", http.MethodGet, func(req *resty.Request) {
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if resp.Result != 1 {
|
||||
return errors.New("get upload data error")
|
||||
}
|
||||
body := &bytes.Buffer{}
|
||||
writer := multipart.NewWriter(body)
|
||||
filePart, err := writer.CreateFormFile("file", stream.GetName())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = io.Copy(filePart, stream)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = writer.WriteField("_token", resp.Msg.Token)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
err = writer.WriteField("puid", fmt.Sprintf("%d", resp.Msg.Puid))
|
||||
if err != nil {
|
||||
fmt.Println("Error writing param2 to request body:", err)
|
||||
return err
|
||||
}
|
||||
err = writer.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req, err := http.NewRequest("POST", "https://pan-yz.chaoxing.com/upload", body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
req.Header.Set("Content-Type", writer.FormDataContentType())
|
||||
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
|
||||
resps, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resps.Body.Close()
|
||||
bodys, err := io.ReadAll(resps.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var fileRsp UploadFileDataRsp
|
||||
err = json.Unmarshal(bodys, &fileRsp)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if fileRsp.Msg != "success" {
|
||||
return errors.New(fileRsp.Msg)
|
||||
}
|
||||
uploadDoneParam := UploadDoneParam{Key: fileRsp.ObjectID, Cataid: "100000019", Param: fileRsp.Data}
|
||||
params, err := json.Marshal(uploadDoneParam)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
query := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"pid": dstDir.GetID(),
|
||||
"type": "yunpan",
|
||||
"params": url.QueryEscape("[" + string(params) + "]"),
|
||||
}
|
||||
var respd ListFileResp
|
||||
_, err = d.request("/pc/resource/addResource", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &respd)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if respd.Result != 1 {
|
||||
msg := fmt.Sprintf("error:%v", resp.Msg)
|
||||
return errors.New(msg)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var _ driver.Driver = (*ChaoXing)(nil)
|
47
drivers/chaoxing/meta.go
Normal file
@ -0,0 +1,47 @@
|
||||
package chaoxing
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
// 此程序挂载的是超星小组网盘,需要代理才能使用;
|
||||
// 登录超星后进入个人空间,进入小组,新建小组,点击进去。
|
||||
// url中就有bbsid的参数,系统限制单文件大小2G,没有总容量限制
|
||||
type Addition struct {
|
||||
// 超星用户名及密码
|
||||
UserName string `json:"user_name" required:"true"`
|
||||
Password string `json:"password" required:"true"`
|
||||
// 从自己新建的小组url里获取
|
||||
Bbsid string `json:"bbsid" required:"true"`
|
||||
driver.RootID
|
||||
// 可不填,程序会自动登录获取
|
||||
Cookie string `json:"cookie"`
|
||||
}
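The comments above (in Chinese) explain that this driver mounts the ChaoXing group drive: it only works through the built-in proxy, the bbsid is taken from the URL of a group created under your personal space, single files are limited to 2 GB with no overall capacity limit, and the cookie can be left empty because the driver logs in on its own. A hedged example of a filled-in Addition (not part of the commits; every value is a placeholder):

// Illustrative only: a filled-in Addition for this driver; all values are placeholders.
func exampleAddition() Addition {
    return Addition{
        UserName: "13800000000",      // ChaoXing account
        Password: "example-password",
        Bbsid:    "123456789",        // copied from the group page URL
        // Cookie left empty: refreshCookie() obtains it during Init
    }
}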
|
||||
|
||||
type Conf struct {
|
||||
ua string
|
||||
referer string
|
||||
api string
|
||||
DowloadApi string
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &ChaoXing{
|
||||
config: driver.Config{
|
||||
Name: "ChaoXingGroupDrive",
|
||||
OnlyProxy: true,
|
||||
OnlyLocal: false,
|
||||
DefaultRoot: "-1",
|
||||
NoOverwriteUpload: true,
|
||||
},
|
||||
conf: Conf{
|
||||
ua: "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) quark-cloud-drive/2.5.20 Chrome/100.0.4896.160 Electron/18.3.5.4-b478491100 Safari/537.36 Channel/pckk_other_ch",
|
||||
referer: "https://chaoxing.com/",
|
||||
api: "https://groupweb.chaoxing.com",
|
||||
DowloadApi: "https://noteyd.chaoxing.com",
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
279
drivers/chaoxing/types.go
Normal file
@ -0,0 +1,279 @@
|
||||
package chaoxing
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
)
|
||||
|
||||
type Resp struct {
|
||||
Result int `json:"result"`
|
||||
}
|
||||
|
||||
type UserAuth struct {
|
||||
GroupAuth struct {
|
||||
AddData int `json:"addData"`
|
||||
AddDataFolder int `json:"addDataFolder"`
|
||||
AddLebel int `json:"addLebel"`
|
||||
AddManager int `json:"addManager"`
|
||||
AddMem int `json:"addMem"`
|
||||
AddTopicFolder int `json:"addTopicFolder"`
|
||||
AnonymousAddReply int `json:"anonymousAddReply"`
|
||||
AnonymousAddTopic int `json:"anonymousAddTopic"`
|
||||
BatchOperation int `json:"batchOperation"`
|
||||
DelData int `json:"delData"`
|
||||
DelDataFolder int `json:"delDataFolder"`
|
||||
DelMem int `json:"delMem"`
|
||||
DelTopicFolder int `json:"delTopicFolder"`
|
||||
Dismiss int `json:"dismiss"`
|
||||
ExamEnc string `json:"examEnc"`
|
||||
GroupChat int `json:"groupChat"`
|
||||
IsShowCircleChatButton int `json:"isShowCircleChatButton"`
|
||||
IsShowCircleCloudButton int `json:"isShowCircleCloudButton"`
|
||||
IsShowCompanyButton int `json:"isShowCompanyButton"`
|
||||
Join int `json:"join"`
|
||||
MemberShowRankSet int `json:"memberShowRankSet"`
|
||||
ModifyDataFolder int `json:"modifyDataFolder"`
|
||||
ModifyExpose int `json:"modifyExpose"`
|
||||
ModifyName int `json:"modifyName"`
|
||||
ModifyShowPic int `json:"modifyShowPic"`
|
||||
ModifyTopicFolder int `json:"modifyTopicFolder"`
|
||||
ModifyVisibleState int `json:"modifyVisibleState"`
|
||||
OnlyMgrScoreSet int `json:"onlyMgrScoreSet"`
|
||||
Quit int `json:"quit"`
|
||||
SendNotice int `json:"sendNotice"`
|
||||
ShowActivityManage int `json:"showActivityManage"`
|
||||
ShowActivitySet int `json:"showActivitySet"`
|
||||
ShowAttentionSet int `json:"showAttentionSet"`
|
||||
ShowAutoClearStatus int `json:"showAutoClearStatus"`
|
||||
ShowBarcode int `json:"showBarcode"`
|
||||
ShowChatRoomSet int `json:"showChatRoomSet"`
|
||||
ShowCircleActivitySet int `json:"showCircleActivitySet"`
|
||||
ShowCircleSet int `json:"showCircleSet"`
|
||||
ShowCmem int `json:"showCmem"`
|
||||
ShowDataFolder int `json:"showDataFolder"`
|
||||
ShowDelReason int `json:"showDelReason"`
|
||||
ShowForward int `json:"showForward"`
|
||||
ShowGroupChat int `json:"showGroupChat"`
|
||||
ShowGroupChatSet int `json:"showGroupChatSet"`
|
||||
ShowGroupSquareSet int `json:"showGroupSquareSet"`
|
||||
ShowLockAddSet int `json:"showLockAddSet"`
|
||||
ShowManager int `json:"showManager"`
|
||||
ShowManagerIdentitySet int `json:"showManagerIdentitySet"`
|
||||
ShowNeedDelReasonSet int `json:"showNeedDelReasonSet"`
|
||||
ShowNotice int `json:"showNotice"`
|
||||
ShowOnlyManagerReplySet int `json:"showOnlyManagerReplySet"`
|
||||
ShowRank int `json:"showRank"`
|
||||
ShowRank2 int `json:"showRank2"`
|
||||
ShowRecycleBin int `json:"showRecycleBin"`
|
||||
ShowReplyByClass int `json:"showReplyByClass"`
|
||||
ShowReplyNeedCheck int `json:"showReplyNeedCheck"`
|
||||
ShowSignbanSet int `json:"showSignbanSet"`
|
||||
ShowSpeechSet int `json:"showSpeechSet"`
|
||||
ShowTopicCheck int `json:"showTopicCheck"`
|
||||
ShowTopicNeedCheck int `json:"showTopicNeedCheck"`
|
||||
ShowTransferSet int `json:"showTransferSet"`
|
||||
} `json:"groupAuth"`
|
||||
OperationAuth struct {
|
||||
Add int `json:"add"`
|
||||
AddTopicToFolder int `json:"addTopicToFolder"`
|
||||
ChoiceSet int `json:"choiceSet"`
|
||||
DelTopicFromFolder int `json:"delTopicFromFolder"`
|
||||
Delete int `json:"delete"`
|
||||
Reply int `json:"reply"`
|
||||
ScoreSet int `json:"scoreSet"`
|
||||
TopSet int `json:"topSet"`
|
||||
Update int `json:"update"`
|
||||
} `json:"operationAuth"`
|
||||
}
|
||||
|
||||
// 手机端学习通上传的文件的json内容(content字段)与网页端上传的有所不同
// 网页端json `"puid": 54321, "size": 12345`
// 手机端json `"puid": "54321", "size": "12345"`
type int_str int

// json 字符串数字和纯数字解析
func (ios *int_str) UnmarshalJSON(data []byte) error {
intValue, err := strconv.Atoi(string(bytes.Trim(data, "\"")))
if err != nil {
return err
}
*ios = int_str(intValue)
return nil
}

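The comments above (in Chinese) note that the mobile client serializes puid and size as quoted strings while the web client sends bare numbers, so int_str strips the optional quotes before parsing. A short sketch showing both payload shapes decoding into the same value (not part of the commits; the payloads are invented):

// Sketch only: both JSON shapes decode into the same int_str value.
func intStrExample() error {
    type content struct {
        Puid int_str `json:"puid"`
        Size int_str `json:"size"`
    }
    var web, mobile content
    if err := json.Unmarshal([]byte(`{"puid": 54321, "size": 12345}`), &web); err != nil {
        return err
    }
    if err := json.Unmarshal([]byte(`{"puid": "54321", "size": "12345"}`), &mobile); err != nil {
        return err
    }
    fmt.Println(web.Puid == mobile.Puid) // true
    return nil
}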
type File struct {
|
||||
Cataid int `json:"cataid"`
|
||||
Cfid int `json:"cfid"`
|
||||
Content struct {
|
||||
Cfid int `json:"cfid"`
|
||||
Pid int `json:"pid"`
|
||||
FolderName string `json:"folderName"`
|
||||
ShareType int `json:"shareType"`
|
||||
Preview string `json:"preview"`
|
||||
Filetype string `json:"filetype"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
IsImg bool `json:"isImg"`
|
||||
ParentPath string `json:"parentPath"`
|
||||
Icon string `json:"icon"`
|
||||
Suffix string `json:"suffix"`
|
||||
Duration int `json:"duration"`
|
||||
Pantype string `json:"pantype"`
|
||||
Puid int_str `json:"puid"`
|
||||
Filepath string `json:"filepath"`
|
||||
Crc string `json:"crc"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Residstr string `json:"residstr"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
DownPath string `json:"downPath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
Restype string `json:"restype"`
|
||||
Size int_str `json:"size"`
|
||||
UploadDate string `json:"uploadDate"`
|
||||
FileSize string `json:"fileSize"`
|
||||
Name string `json:"name"`
|
||||
FileID string `json:"fileId"`
|
||||
} `json:"content"`
|
||||
CreatorID int `json:"creatorId"`
|
||||
DesID string `json:"des_id"`
|
||||
ID int `json:"id"`
|
||||
Inserttime int64 `json:"inserttime"`
|
||||
Key string `json:"key"`
|
||||
Norder int `json:"norder"`
|
||||
OwnerID int `json:"ownerId"`
|
||||
OwnerType int `json:"ownerType"`
|
||||
Path string `json:"path"`
|
||||
Rid int `json:"rid"`
|
||||
Status int `json:"status"`
|
||||
Topsign int `json:"topsign"`
|
||||
}
|
||||
|
||||
type ListFileResp struct {
|
||||
Msg string `json:"msg"`
|
||||
Result int `json:"result"`
|
||||
Status bool `json:"status"`
|
||||
UserAuth UserAuth `json:"userAuth"`
|
||||
List []File `json:"list"`
|
||||
}
|
||||
|
||||
type DownResp struct {
|
||||
Msg string `json:"msg"`
|
||||
Duration int `json:"duration"`
|
||||
Download string `json:"download"`
|
||||
FileStatus string `json:"fileStatus"`
|
||||
URL string `json:"url"`
|
||||
Status bool `json:"status"`
|
||||
}
|
||||
|
||||
type UploadDataRsp struct {
|
||||
Result int `json:"result"`
|
||||
Msg struct {
|
||||
Puid int `json:"puid"`
|
||||
Token string `json:"token"`
|
||||
} `json:"msg"`
|
||||
}
|
||||
|
||||
type UploadFileDataRsp struct {
|
||||
Result bool `json:"result"`
|
||||
Msg string `json:"msg"`
|
||||
Crc string `json:"crc"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Resid int64 `json:"resid"`
|
||||
Puid int `json:"puid"`
|
||||
Data struct {
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
Resid int64 `json:"resid"`
|
||||
Crc string `json:"crc"`
|
||||
Puid int `json:"puid"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Pantype string `json:"pantype"`
|
||||
Size int `json:"size"`
|
||||
Name string `json:"name"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Restype string `json:"restype"`
|
||||
UploadDate time.Time `json:"uploadDate"`
|
||||
ModifyDate time.Time `json:"modifyDate"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
Residstr string `json:"residstr"`
|
||||
Suffix string `json:"suffix"`
|
||||
Preview string `json:"preview"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
Duration int `json:"duration"`
|
||||
IsImg bool `json:"isImg"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
Filetype string `json:"filetype"`
|
||||
Filepath string `json:"filepath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
} `json:"data"`
|
||||
}
|
||||
|
||||
type UploadDoneParam struct {
|
||||
Cataid string `json:"cataid"`
|
||||
Key string `json:"key"`
|
||||
Param struct {
|
||||
DisableOpt bool `json:"disableOpt"`
|
||||
Resid int64 `json:"resid"`
|
||||
Crc string `json:"crc"`
|
||||
Puid int `json:"puid"`
|
||||
Isfile bool `json:"isfile"`
|
||||
Pantype string `json:"pantype"`
|
||||
Size int `json:"size"`
|
||||
Name string `json:"name"`
|
||||
ObjectID string `json:"objectId"`
|
||||
Restype string `json:"restype"`
|
||||
UploadDate time.Time `json:"uploadDate"`
|
||||
ModifyDate time.Time `json:"modifyDate"`
|
||||
UploadDateFormat string `json:"uploadDateFormat"`
|
||||
Residstr string `json:"residstr"`
|
||||
Suffix string `json:"suffix"`
|
||||
Preview string `json:"preview"`
|
||||
Thumbnail string `json:"thumbnail"`
|
||||
Creator int `json:"creator"`
|
||||
Duration int `json:"duration"`
|
||||
IsImg bool `json:"isImg"`
|
||||
PreviewURL string `json:"previewUrl"`
|
||||
Filetype string `json:"filetype"`
|
||||
Filepath string `json:"filepath"`
|
||||
Sort int `json:"sort"`
|
||||
Topsort int `json:"topsort"`
|
||||
ResTypeValue int `json:"resTypeValue"`
|
||||
Extinfo string `json:"extinfo"`
|
||||
} `json:"param"`
|
||||
}
|
||||
|
||||
func fileToObj(f File) *model.Object {
|
||||
if len(f.Content.FolderName) > 0 {
|
||||
return &model.Object{
|
||||
ID: fmt.Sprintf("%d", f.ID),
|
||||
Name: f.Content.FolderName,
|
||||
Size: 0,
|
||||
Modified: time.UnixMilli(f.Inserttime),
|
||||
IsFolder: true,
|
||||
}
|
||||
}
|
||||
parsedTime, err := time.Parse("2006-01-02 15:04", f.Content.UploadDate)
|
||||
if err != nil {
|
||||
parsedTime = time.Now()
|
||||
}
|
||||
return &model.Object{
|
||||
ID: fmt.Sprintf("%d$%s", f.ID, f.Content.FileID),
|
||||
Name: f.Content.Name,
|
||||
Size: int64(f.Content.Size),
|
||||
Modified: parsedTime,
|
||||
IsFolder: false,
|
||||
}
|
||||
}
|
183
drivers/chaoxing/util.go
Normal file
@ -0,0 +1,183 @@
|
||||
package chaoxing
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/aes"
|
||||
"crypto/cipher"
|
||||
"encoding/base64"
|
||||
"errors"
|
||||
"fmt"
|
||||
"mime/multipart"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/go-resty/resty/v2"
|
||||
)
|
||||
|
||||
func (d *ChaoXing) requestDownload(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
u := d.conf.DowloadApi + pathname
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeaders(map[string]string{
|
||||
"Cookie": d.Cookie,
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"Referer": d.conf.referer,
|
||||
})
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if resp != nil {
|
||||
req.SetResult(resp)
|
||||
}
|
||||
var e Resp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) request(pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
|
||||
u := d.conf.api + pathname
|
||||
if strings.Contains(pathname, "getUploadConfig") {
|
||||
u = pathname
|
||||
}
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeaders(map[string]string{
|
||||
"Cookie": d.Cookie,
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"Referer": d.conf.referer,
|
||||
})
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if resp != nil {
|
||||
req.SetResult(resp)
|
||||
}
|
||||
var e Resp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return res.Body(), nil
|
||||
}
|
||||
|
||||
func (d *ChaoXing) GetFiles(parent string) ([]File, error) {
|
||||
files := make([]File, 0)
|
||||
query := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"folderId": parent,
|
||||
"recType": "1",
|
||||
}
|
||||
var resp ListFileResp
|
||||
_, err := d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
}, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if resp.Result != 1 {
|
||||
msg := fmt.Sprintf("error code is:%d", resp.Result)
|
||||
return nil, errors.New(msg)
|
||||
}
|
||||
if len(resp.List) > 0 {
|
||||
files = append(files, resp.List...)
|
||||
}
|
||||
querys := map[string]string{
|
||||
"bbsid": d.Addition.Bbsid,
|
||||
"folderId": parent,
|
||||
"recType": "2",
|
||||
}
|
||||
var resps ListFileResp
|
||||
_, err = d.request("/pc/resource/getResourceList", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(querys)
|
||||
}, &resps)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, file := range resps.List {
|
||||
// Files uploaded from the ChaoXing mobile app have no fileID field, but their ObjectID is identical and can be used instead
|
||||
if file.Content.FileID == "" {
|
||||
file.Content.FileID = file.Content.ObjectID
|
||||
}
|
||||
files = append(files, file)
|
||||
}
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func EncryptByAES(message, key string) (string, error) {
|
||||
aesKey := []byte(key)
|
||||
plainText := []byte(message)
|
||||
block, err := aes.NewCipher(aesKey)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
iv := aesKey[:aes.BlockSize]
|
||||
mode := cipher.NewCBCEncrypter(block, iv)
|
||||
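// PKCS#7-style padding: the pad-length byte is appended "padding" times so the plaintext becomes a whole number of AES blocks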
padding := aes.BlockSize - len(plainText)%aes.BlockSize
|
||||
paddedText := append(plainText, byte(padding))
|
||||
for i := 0; i < padding-1; i++ {
|
||||
paddedText = append(paddedText, byte(padding))
|
||||
}
|
||||
ciphertext := make([]byte, len(paddedText))
|
||||
mode.CryptBlocks(ciphertext, paddedText)
|
||||
encrypted := base64.StdEncoding.EncodeToString(ciphertext)
|
||||
return encrypted, nil
|
||||
}
|
||||
|
||||
func CookiesToString(cookies []*http.Cookie) string {
|
||||
var cookieStr string
|
||||
for _, cookie := range cookies {
|
||||
cookieStr += cookie.Name + "=" + cookie.Value + "; "
|
||||
}
|
||||
if len(cookieStr) > 2 {
|
||||
cookieStr = cookieStr[:len(cookieStr)-2]
|
||||
}
|
||||
return cookieStr
|
||||
}
|
||||
|
||||
func (d *ChaoXing) Login() (string, error) {
|
||||
transferKey := "u2oh6Vu^HWe4_AES"
|
||||
body := &bytes.Buffer{}
|
||||
writer := multipart.NewWriter(body)
|
||||
uname, err := EncryptByAES(d.Addition.UserName, transferKey)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
password, err := EncryptByAES(d.Addition.Password, transferKey)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
err = writer.WriteField("uname", uname)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
err = writer.WriteField("password", password)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
err = writer.WriteField("t", "true")
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
err = writer.Close()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
// Create the request
|
||||
req, err := http.NewRequest("POST", "https://passport2.chaoxing.com/fanyalogin", body)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
req.Header.Set("Content-Type", writer.FormDataContentType())
|
||||
req.Header.Set("Content-Length", fmt.Sprintf("%d", body.Len()))
|
||||
resp, err := http.DefaultClient.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
return CookiesToString(resp.Cookies()), nil
|
||||
|
||||
}
|
@ -49,7 +49,19 @@ func (d *Cloudreve) List(ctx context.Context, dir model.Obj, args model.ListArgs
|
||||
}
|
||||
|
||||
return utils.SliceConvert(r.Objects, func(src Object) (model.Obj, error) {
|
||||
return objectToObj(src), nil
|
||||
thumb, err := d.GetThumb(src)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if src.Type == "dir" && d.EnableThumbAndFolderSize {
|
||||
var dprop DirectoryProp
|
||||
err = d.request(http.MethodGet, "/object/property/"+src.Id+"?is_folder=true", nil, &dprop)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
src.Size = dprop.Size
|
||||
}
|
||||
return objectToObj(src, thumb), nil
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -9,11 +9,12 @@ type Addition struct {
|
||||
// Usually one of two
|
||||
driver.RootPath
|
||||
// define other
|
||||
Address string `json:"address" required:"true"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Cookie string `json:"cookie"`
|
||||
CustomUA string `json:"custom_ua"`
|
||||
Address string `json:"address" required:"true"`
|
||||
Username string `json:"username"`
|
||||
Password string `json:"password"`
|
||||
Cookie string `json:"cookie"`
|
||||
CustomUA string `json:"custom_ua"`
|
||||
EnableThumbAndFolderSize bool `json:"enable_thumb_and_folder_size"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -44,13 +44,20 @@ type Object struct {
|
||||
SourceEnabled bool `json:"source_enabled"`
|
||||
}
|
||||
|
||||
func objectToObj(f Object) *model.Object {
|
||||
return &model.Object{
|
||||
ID: f.Id,
|
||||
Name: f.Name,
|
||||
Size: int64(f.Size),
|
||||
Modified: f.Date,
|
||||
IsFolder: f.Type == "dir",
|
||||
type DirectoryProp struct {
|
||||
Size int `json:"size"`
|
||||
}
|
||||
|
||||
func objectToObj(f Object, t model.Thumbnail) *model.ObjThumb {
|
||||
return &model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: f.Id,
|
||||
Name: f.Name,
|
||||
Size: int64(f.Size),
|
||||
Modified: f.Date,
|
||||
IsFolder: f.Type == "dir",
|
||||
},
|
||||
Thumbnail: t,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -149,3 +149,26 @@ func convertSrc(obj model.Obj) map[string]interface{} {
|
||||
m["items"] = items
|
||||
return m
|
||||
}
|
||||
|
||||
func (d *Cloudreve) GetThumb(file Object) (model.Thumbnail, error) {
|
||||
if !d.Addition.EnableThumbAndFolderSize {
|
||||
return model.Thumbnail{}, nil
|
||||
}
|
||||
ua := d.CustomUA
|
||||
if ua == "" {
|
||||
ua = base.UserAgent
|
||||
}
|
||||
req := base.NoRedirectClient.R()
|
||||
req.SetHeaders(map[string]string{
|
||||
"Cookie": "cloudreve-session=" + d.Cookie,
|
||||
"Accept": "image/webp,image/apng,image/svg+xml,image/*,*/*;q=0.8",
|
||||
"User-Agent": ua,
|
||||
})
|
||||
resp, err := req.Execute(http.MethodGet, d.Address+"/api/v3/file/thumb/"+file.Id)
|
||||
if err != nil {
|
||||
return model.Thumbnail{}, err
|
||||
}
|
||||
return model.Thumbnail{
|
||||
Thumbnail: resp.Header().Get("Location"),
|
||||
}, nil
|
||||
}
|
||||
|
@ -16,6 +16,7 @@ import (
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/alist-org/alist/v3/server/common"
|
||||
rcCrypt "github.com/rclone/rclone/backend/crypt"
|
||||
"github.com/rclone/rclone/fs/config/configmap"
|
||||
"github.com/rclone/rclone/fs/config/obscure"
|
||||
@ -123,6 +124,9 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
||||
//filter illegal files
|
||||
continue
|
||||
}
|
||||
if !d.ShowHidden && strings.HasPrefix(name, ".") {
|
||||
continue
|
||||
}
|
||||
objRes := model.Object{
|
||||
Name: name,
|
||||
Size: 0,
|
||||
@ -144,6 +148,9 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
||||
//filter illegal files
|
||||
continue
|
||||
}
|
||||
if !d.ShowHidden && strings.HasPrefix(name, ".") {
|
||||
continue
|
||||
}
|
||||
objRes := model.Object{
|
||||
Name: name,
|
||||
Size: size,
|
||||
@ -152,7 +159,10 @@ func (d *Crypt) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([
|
||||
Ctime: obj.CreateTime(),
|
||||
// discarding hash as it's encrypted
|
||||
}
|
||||
if !ok {
|
||||
if d.Thumbnail && thumb == "" {
|
||||
thumb = utils.EncodePath(common.GetApiUrl(nil) + stdpath.Join("/d", args.ReqPath, ".thumbnails", name+".webp"), true)
|
||||
}
|
||||
if !ok && !d.Thumbnail {
|
||||
result = append(result, &objRes)
|
||||
} else {
|
||||
objWithThumb := model.ObjThumb{
|
||||
|
@ -19,6 +19,10 @@ type Addition struct {
|
||||
Salt string `json:"salt" confidential:"true" help:"If you don't know what is salt, treat it as a second password. Optional but recommended"`
|
||||
EncryptedSuffix string `json:"encrypted_suffix" required:"true" default:".bin" help:"for advanced user only! encrypted files will have this suffix"`
|
||||
FileNameEncoding string `json:"filename_encoding" type:"select" required:"true" options:"base64,base32,base32768" default:"base64" help:"for advanced user only!"`
|
||||
|
||||
Thumbnail bool `json:"thumbnail" required:"true" default:"false" help:"enable thumbnail which pre-generated under .thumbnails folder"`
|
||||
|
||||
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -45,7 +45,25 @@ func (d *Dropbox) Init(ctx context.Context) error {
|
||||
if result != query {
|
||||
return fmt.Errorf("failed to check user: %s", string(res))
|
||||
}
|
||||
return nil
|
||||
d.RootNamespaceId, err = d.GetRootNamespaceId(ctx)
|
||||
|
||||
return err
|
||||
}
|
||||
|
||||
func (d *Dropbox) GetRootNamespaceId(ctx context.Context) (string, error) {
|
||||
res, err := d.request("/2/users/get_current_account", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(nil)
|
||||
})
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
var currentAccountResp CurrentAccountResp
|
||||
err = utils.Json.Unmarshal(res, ¤tAccountResp)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
rootNamespaceId := currentAccountResp.RootInfo.RootNamespaceId
|
||||
return rootNamespaceId, nil
|
||||
}
|
||||
|
||||
func (d *Dropbox) Drop(ctx context.Context) error {
|
||||
@ -203,7 +221,7 @@ func (d *Dropbox) Put(ctx context.Context, dstDir model.Obj, stream model.FileSt
|
||||
_ = res.Body.Close()
|
||||
|
||||
if count > 0 {
|
||||
up((i + 1) * 100 / count)
|
||||
up(float64(i+1) * 100 / float64(count))
|
||||
}
|
||||
|
||||
offset += byteSize
|
||||
|
@ -17,7 +17,8 @@ type Addition struct {
|
||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||
|
||||
AccessToken string
|
||||
AccessToken string
|
||||
RootNamespaceId string
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -23,6 +23,13 @@ type RefreshTokenErrorResp struct {
|
||||
ErrorDescription string `json:"error_description"`
|
||||
}
|
||||
|
||||
type CurrentAccountResp struct {
|
||||
RootInfo struct {
|
||||
RootNamespaceId string `json:"root_namespace_id"`
|
||||
HomeNamespaceId string `json:"home_namespace_id"`
|
||||
} `json:"root_info"`
|
||||
}
|
||||
|
||||
type File struct {
|
||||
Tag string `json:".tag"`
|
||||
Name string `json:"name"`
|
||||
|
@ -46,12 +46,22 @@ func (d *Dropbox) refreshToken() error {
|
||||
func (d *Dropbox) request(uri, method string, callback base.ReqCallback, retry ...bool) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
req.SetHeader("Authorization", "Bearer "+d.AccessToken)
|
||||
if method == http.MethodPost {
|
||||
req.SetHeader("Content-Type", "application/json")
|
||||
if d.RootNamespaceId != "" {
|
||||
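// Scope all subsequent calls to the account's root namespace via the Dropbox-API-Path-Root header (relevant for Dropbox team spaces)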
apiPathRootJson, err := utils.Json.MarshalToString(map[string]interface{}{
|
||||
".tag": "root",
|
||||
"root": d.RootNamespaceId,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetHeader("Dropbox-API-Path-Root", apiPathRootJson)
|
||||
}
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
if method == http.MethodPost && req.Body != nil {
|
||||
req.SetHeader("Content-Type", "application/json")
|
||||
}
|
||||
var e ErrorResp
|
||||
req.SetError(&e)
|
||||
res, err := req.Execute(method, d.base+uri)
|
||||
|
@ -5,6 +5,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
@ -23,12 +24,17 @@ type File struct {
|
||||
Name string `json:"name"`
|
||||
MimeType string `json:"mimeType"`
|
||||
ModifiedTime time.Time `json:"modifiedTime"`
|
||||
CreatedTime time.Time `json:"createdTime"`
|
||||
Size string `json:"size"`
|
||||
ThumbnailLink string `json:"thumbnailLink"`
|
||||
ShortcutDetails struct {
|
||||
TargetId string `json:"targetId"`
|
||||
TargetMimeType string `json:"targetMimeType"`
|
||||
} `json:"shortcutDetails"`
|
||||
|
||||
MD5Checksum string `json:"md5Checksum"`
|
||||
SHA1Checksum string `json:"sha1Checksum"`
|
||||
SHA256Checksum string `json:"sha256Checksum"`
|
||||
}
|
||||
|
||||
func fileToObj(f File) *model.ObjThumb {
|
||||
@ -39,10 +45,18 @@ func fileToObj(f File) *model.ObjThumb {
|
||||
ID: f.Id,
|
||||
Name: f.Name,
|
||||
Size: size,
|
||||
Ctime: f.CreatedTime,
|
||||
Modified: f.ModifiedTime,
|
||||
IsFolder: f.MimeType == "application/vnd.google-apps.folder",
|
||||
HashInfo: utils.NewHashInfoByMap(map[*utils.HashType]string{
|
||||
utils.MD5: f.MD5Checksum,
|
||||
utils.SHA1: f.SHA1Checksum,
|
||||
utils.SHA256: f.SHA256Checksum,
|
||||
}),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{
|
||||
Thumbnail: f.ThumbnailLink,
|
||||
},
|
||||
Thumbnail: model.Thumbnail{},
|
||||
}
|
||||
if f.MimeType == "application/vnd.google-apps.shortcut" {
|
||||
obj.ID = f.ShortcutDetails.TargetId
|
||||
|
@ -5,14 +5,14 @@ import (
|
||||
"crypto/x509"
|
||||
"encoding/pem"
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
@ -43,7 +43,7 @@ func (d *GoogleDrive) refreshToken() error {
|
||||
gdsaFileThis := d.RefreshToken
|
||||
if gdsaFile.IsDir() {
|
||||
if len(d.ServiceAccountFileList) <= 0 {
|
||||
gdsaReadDir, gdsaDirErr := ioutil.ReadDir(d.RefreshToken)
|
||||
gdsaReadDir, gdsaDirErr := os.ReadDir(d.RefreshToken)
|
||||
if gdsaDirErr != nil {
|
||||
log.Error("read dir fail")
|
||||
return gdsaDirErr
|
||||
@ -75,7 +75,7 @@ func (d *GoogleDrive) refreshToken() error {
|
||||
}
|
||||
}
|
||||
|
||||
gdsaFileThisContent, err := ioutil.ReadFile(gdsaFileThis)
|
||||
gdsaFileThisContent, err := os.ReadFile(gdsaFileThis)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -195,7 +195,7 @@ func (d *GoogleDrive) getFiles(id string) ([]File, error) {
|
||||
}
|
||||
query := map[string]string{
|
||||
"orderBy": orderBy,
|
||||
"fields": "files(id,name,mimeType,size,modifiedTime,thumbnailLink,shortcutDetails),nextPageToken",
|
||||
"fields": "files(id,name,mimeType,size,modifiedTime,createdTime,thumbnailLink,shortcutDetails,md5Checksum,sha1Checksum,sha256Checksum),nextPageToken",
|
||||
"pageSize": "1000",
|
||||
"q": fmt.Sprintf("'%s' in parents and trashed = false", id),
|
||||
//"includeItemsFromAllDrives": "true",
|
||||
|
@ -58,9 +58,33 @@ func (d *GooglePhoto) Link(ctx context.Context, file model.Obj, args model.LinkA
|
||||
URL: f.BaseURL + "=d",
|
||||
}, nil
|
||||
} else if strings.Contains(f.MimeType, "video/") {
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=dv",
|
||||
}, nil
|
||||
var width, height int
|
||||
|
||||
fmt.Sscanf(f.MediaMetadata.Width, "%d", &width)
|
||||
fmt.Sscanf(f.MediaMetadata.Height, "%d", &height)
|
||||
|
||||
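// The baseUrl suffix picks a transcode; m37/m22/m18 appear to map to 1080p/720p/360p MP4 streams, while "=dv" requests the original video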
switch {
|
||||
// 1080P
|
||||
case width == 1920 && height == 1080:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=m37",
|
||||
}, nil
|
||||
// 720P
|
||||
case width == 1280 && height == 720:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=m22",
|
||||
}, nil
|
||||
// 360P
|
||||
case width == 640 && height == 360:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=m18",
|
||||
}, nil
|
||||
default:
|
||||
return &model.Link{
|
||||
URL: f.BaseURL + "=dv",
|
||||
}, nil
|
||||
}
|
||||
|
||||
}
|
||||
return &model.Link{}, nil
|
||||
}
|
||||
|
@ -151,7 +151,7 @@ func (d *GooglePhoto) getMedia(id string) (MediaItem, error) {
|
||||
var resp MediaItem
|
||||
|
||||
query := map[string]string{
|
||||
"fields": "baseUrl,mimeType",
|
||||
"fields": "mediaMetadata,baseUrl,mimeType",
|
||||
}
|
||||
_, err := d.request(fmt.Sprintf("https://photoslibrary.googleapis.com/v1/mediaItems/%s", id), http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(query)
|
||||
|
367
drivers/ilanzou/driver.go
Normal file
@ -0,0 +1,367 @@
|
||||
package template
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/md5"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/foxxorcat/mopan-sdk-go"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
type ILanZou struct {
|
||||
model.Storage
|
||||
Addition
|
||||
|
||||
userID string
|
||||
account string
|
||||
upClient *resty.Client
|
||||
conf Conf
|
||||
config driver.Config
|
||||
}
|
||||
|
||||
func (d *ILanZou) Config() driver.Config {
|
||||
return d.config
|
||||
}
|
||||
|
||||
func (d *ILanZou) GetAddition() driver.Additional {
|
||||
return &d.Addition
|
||||
}
|
||||
|
||||
func (d *ILanZou) Init(ctx context.Context) error {
|
||||
d.upClient = base.NewRestyClient().SetTimeout(time.Minute * 10)
|
||||
if d.UUID == "" {
|
||||
res, err := d.unproved("/getUuid", http.MethodGet, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.UUID = utils.Json.Get(res, "uuid").ToString()
|
||||
}
|
||||
res, err := d.proved("/user/account/map", http.MethodGet, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.userID = utils.Json.Get(res, "map", "userId").ToString()
|
||||
d.account = utils.Json.Get(res, "map", "account").ToString()
|
||||
log.Debugf("[ilanzou] init response: %s", res)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
offset := 1
|
||||
limit := 60
|
||||
var res []ListItem
|
||||
for {
|
||||
var resp ListResp
|
||||
_, err := d.proved("/record/file/list", http.MethodGet, func(req *resty.Request) {
|
||||
req.SetQueryParams(map[string]string{
|
||||
"type": "0",
|
||||
"folderId": dir.GetID(),
|
||||
"offset": strconv.Itoa(offset),
|
||||
"limit": strconv.Itoa(limit),
|
||||
}).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
res = append(res, resp.List...)
|
||||
if resp.TotalPage <= resp.Offset {
|
||||
break
|
||||
}
|
||||
offset++
|
||||
}
|
||||
return utils.SliceConvert(res, func(f ListItem) (model.Obj, error) {
|
||||
updTime, err := time.ParseInLocation("2006-01-02 15:04:05", f.UpdTime, time.Local)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
obj := model.Object{
|
||||
ID: strconv.FormatInt(f.FileId, 10),
|
||||
//Path: "",
|
||||
Name: f.FileName,
|
||||
Size: f.FileSize * 1024,
|
||||
Modified: updTime,
|
||||
Ctime: updTime,
|
||||
IsFolder: false,
|
||||
//HashInfo: utils.HashInfo{},
|
||||
}
|
||||
if f.FileType == 2 {
|
||||
obj.IsFolder = true
|
||||
obj.Size = 0
|
||||
obj.ID = strconv.FormatInt(f.FolderId, 10)
|
||||
obj.Name = f.FolderName
|
||||
}
|
||||
return &obj, nil
|
||||
})
|
||||
}
|
||||
|
||||
func (d *ILanZou) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
||||
u, err := url.Parse(d.conf.base + "/" + d.conf.unproved + "/file/redirect")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query := u.Query()
|
||||
query.Set("uuid", d.UUID)
|
||||
query.Set("devType", "6")
|
||||
query.Set("devCode", d.UUID)
|
||||
query.Set("devModel", "chrome")
|
||||
query.Set("devVersion", "120")
|
||||
query.Set("appVersion", "")
|
||||
ts, err := getTimestamp(d.conf.secret)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query.Set("timestamp", ts)
|
||||
//query.Set("appToken", d.Token)
|
||||
query.Set("enable", "1")
|
||||
downloadId, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%s", file.GetID(), d.userID)), d.conf.secret)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query.Set("downloadId", hex.EncodeToString(downloadId))
|
||||
auth, err := mopan.AesEncrypt([]byte(fmt.Sprintf("%s|%d", file.GetID(), time.Now().UnixMilli())), d.conf.secret)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query.Set("auth", hex.EncodeToString(auth))
|
||||
u.RawQuery = query.Encode()
|
||||
link := model.Link{URL: u.String()}
|
||||
return &link, nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
||||
res, err := d.proved("/file/folder/save", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"folderDesc": "",
|
||||
"folderId": parentDir.GetID(),
|
||||
"folderName": dirName,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Object{
|
||||
ID: utils.Json.Get(res, "list", "0", "id").ToString(),
|
||||
//Path: "",
|
||||
Name: dirName,
|
||||
Size: 0,
|
||||
Modified: time.Now(),
|
||||
Ctime: time.Now(),
|
||||
IsFolder: true,
|
||||
//HashInfo: utils.HashInfo{},
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
var fileIds, folderIds []string
|
||||
if srcObj.IsDir() {
|
||||
folderIds = []string{srcObj.GetID()}
|
||||
} else {
|
||||
fileIds = []string{srcObj.GetID()}
|
||||
}
|
||||
_, err := d.proved("/file/folder/move", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"folderIds": strings.Join(folderIds, ","),
|
||||
"fileIds": strings.Join(fileIds, ","),
|
||||
"targetId": dstDir.GetID(),
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return srcObj, nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
||||
var err error
|
||||
if srcObj.IsDir() {
|
||||
_, err = d.proved("/file/folder/edit", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"folderDesc": "",
|
||||
"folderId": srcObj.GetID(),
|
||||
"folderName": newName,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
_, err = d.proved("/file/edit", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"fileDesc": "",
|
||||
"fileId": srcObj.GetID(),
|
||||
"fileName": newName,
|
||||
})
|
||||
})
|
||||
}
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &model.Object{
|
||||
ID: srcObj.GetID(),
|
||||
//Path: "",
|
||||
Name: newName,
|
||||
Size: srcObj.GetSize(),
|
||||
Modified: time.Now(),
|
||||
Ctime: srcObj.CreateTime(),
|
||||
IsFolder: srcObj.IsDir(),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
||||
// TODO copy obj, optional
|
||||
return nil, errs.NotImplement
|
||||
}
|
||||
|
||||
func (d *ILanZou) Remove(ctx context.Context, obj model.Obj) error {
|
||||
var fileIds, folderIds []string
|
||||
if obj.IsDir() {
|
||||
folderIds = []string{obj.GetID()}
|
||||
} else {
|
||||
fileIds = []string{obj.GetID()}
|
||||
}
|
||||
_, err := d.proved("/file/delete", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"folderIds": strings.Join(folderIds, ","),
|
||||
"fileIds": strings.Join(fileIds, ","),
|
||||
"status": 0,
|
||||
})
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
const DefaultPartSize = 1024 * 1024 * 8
|
||||
|
||||
func (d *ILanZou) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
||||
h := md5.New()
|
||||
// need to calculate md5 of the full content
|
||||
tempFile, err := stream.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
}()
|
||||
if _, err = io.Copy(h, tempFile); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_, err = tempFile.Seek(0, io.SeekStart)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
etag := hex.EncodeToString(h.Sum(nil))
|
||||
// get upToken
|
||||
res, err := d.proved("/7n/getUpToken", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"fileId": "",
|
||||
"fileName": stream.GetName(),
|
||||
"fileSize": stream.GetSize() / 1024,
|
||||
"folderId": dstDir.GetID(),
|
||||
"md5": etag,
|
||||
"type": 1,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
upToken := utils.Json.Get(res, "upToken").ToString()
|
||||
now := time.Now()
|
||||
key := fmt.Sprintf("disk/%d/%d/%d/%s/%016d", now.Year(), now.Month(), now.Day(), d.account, now.UnixMilli())
|
||||
var token string
|
||||
if stream.GetSize() <= DefaultPartSize {
|
||||
res, err := d.upClient.R().SetMultipartFormData(map[string]string{
|
||||
"token": upToken,
|
||||
"key": key,
|
||||
"fname": stream.GetName(),
|
||||
}).SetMultipartField("file", stream.GetName(), stream.GetMimetype(), tempFile).
|
||||
Post("https://upload.qiniup.com/")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
token = utils.Json.Get(res.Body(), "token").ToString()
|
||||
} else {
|
||||
keyBase64 := base64.URLEncoding.EncodeToString([]byte(key))
|
||||
res, err := d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads", d.conf.bucket, keyBase64))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
uploadId := utils.Json.Get(res.Body(), "uploadId").ToString()
|
||||
parts := make([]Part, 0)
|
||||
partNum := (stream.GetSize() + DefaultPartSize - 1) / DefaultPartSize
|
||||
for i := 1; i <= int(partNum); i++ {
|
||||
u := fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s/%d", d.conf.bucket, keyBase64, uploadId, i)
|
||||
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(io.LimitReader(tempFile, DefaultPartSize)).Put(u)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
etag := utils.Json.Get(res.Body(), "etag").ToString()
|
||||
parts = append(parts, Part{
|
||||
PartNumber: i,
|
||||
ETag: etag,
|
||||
})
|
||||
}
|
||||
res, err = d.upClient.R().SetHeader("Authorization", "UpToken "+upToken).SetBody(base.Json{
|
||||
"fnmae": stream.GetName(),
|
||||
"parts": parts,
|
||||
}).Post(fmt.Sprintf("https://upload.qiniup.com/buckets/%s/objects/%s/uploads/%s", d.conf.bucket, keyBase64, uploadId))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
token = utils.Json.Get(res.Body(), "token").ToString()
|
||||
}
|
||||
// commit upload
|
||||
var resp UploadResultResp
|
||||
for i := 0; i < 10; i++ {
|
||||
_, err = d.unproved("/7n/results", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetQueryParams(map[string]string{
|
||||
"tokenList": token,
|
||||
"tokenTime": time.Now().Format("Mon Jan 02 2006 15:04:05 GMT-0700 (MST)"),
|
||||
}).SetResult(&resp)
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(resp.List) == 0 {
|
||||
return nil, fmt.Errorf("upload failed, empty response")
|
||||
}
|
||||
if resp.List[0].Status == 1 {
|
||||
break
|
||||
}
|
||||
time.Sleep(time.Second * 1)
|
||||
}
|
||||
file := resp.List[0]
|
||||
if file.Status != 1 {
|
||||
return nil, fmt.Errorf("upload failed, status: %d", resp.List[0].Status)
|
||||
}
|
||||
return &model.Object{
|
||||
ID: strconv.FormatInt(file.FileId, 10),
|
||||
//Path: ,
|
||||
Name: file.FileName,
|
||||
Size: stream.GetSize(),
|
||||
Modified: stream.ModTime(),
|
||||
Ctime: stream.CreateTime(),
|
||||
IsFolder: false,
|
||||
HashInfo: utils.NewHashInfo(utils.MD5, etag),
|
||||
}, nil
|
||||
}
|
||||
|
||||
//func (d *ILanZou) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
|
||||
// return nil, errs.NotSupport
|
||||
//}
|
||||
|
||||
var _ driver.Driver = (*ILanZou)(nil)
|
74
drivers/ilanzou/meta.go
Normal file
@ -0,0 +1,74 @@
|
||||
package template
|
||||
|
||||
import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/op"
|
||||
)
|
||||
|
||||
type Addition struct {
|
||||
driver.RootID
|
||||
Username string `json:"username" type:"string" required:"true"`
|
||||
Password string `json:"password" type:"string" required:"true"`
|
||||
|
||||
Token string
|
||||
UUID string
|
||||
}
|
||||
|
||||
type Conf struct {
|
||||
base string
|
||||
secret []byte
|
||||
bucket string
|
||||
unproved string
|
||||
proved string
|
||||
}
|
||||
|
||||
func init() {
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &ILanZou{
|
||||
config: driver.Config{
|
||||
Name: "ILanZou",
|
||||
LocalSort: false,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: false,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "0",
|
||||
CheckStatus: false,
|
||||
Alert: "",
|
||||
NoOverwriteUpload: false,
|
||||
},
|
||||
conf: Conf{
|
||||
base: "https://api.ilanzou.com",
|
||||
secret: []byte("lanZouY-disk-app"),
|
||||
bucket: "wpanstore-lanzou",
|
||||
unproved: "unproved",
|
||||
proved: "proved",
|
||||
},
|
||||
}
|
||||
})
|
||||
op.RegisterDriver(func() driver.Driver {
|
||||
return &ILanZou{
|
||||
config: driver.Config{
|
||||
Name: "FeijiPan",
|
||||
LocalSort: false,
|
||||
OnlyLocal: false,
|
||||
OnlyProxy: false,
|
||||
NoCache: false,
|
||||
NoUpload: false,
|
||||
NeedMs: false,
|
||||
DefaultRoot: "0",
|
||||
CheckStatus: false,
|
||||
Alert: "",
|
||||
NoOverwriteUpload: false,
|
||||
},
|
||||
conf: Conf{
|
||||
base: "https://api.feijipan.com",
|
||||
secret: []byte("dingHao-disk-app"),
|
||||
bucket: "wpanstore",
|
||||
unproved: "ws",
|
||||
proved: "app",
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
57
drivers/ilanzou/types.go
Normal file
@ -0,0 +1,57 @@
|
||||
package template
|
||||
|
||||
type ListResp struct {
|
||||
Msg string `json:"msg"`
|
||||
Total int `json:"total"`
|
||||
Code int `json:"code"`
|
||||
Offset int `json:"offset"`
|
||||
TotalPage int `json:"totalPage"`
|
||||
Limit int `json:"limit"`
|
||||
List []ListItem `json:"list"`
|
||||
}
|
||||
|
||||
type ListItem struct {
|
||||
IconId int `json:"iconId"`
|
||||
IsAmt int `json:"isAmt"`
|
||||
FolderDesc string `json:"folderDesc,omitempty"`
|
||||
AddTime string `json:"addTime"`
|
||||
FolderId int64 `json:"folderId"`
|
||||
ParentId int64 `json:"parentId"`
|
||||
ParentName string `json:"parentName"`
|
||||
NoteType int `json:"noteType,omitempty"`
|
||||
UpdTime string `json:"updTime"`
|
||||
IsShare int `json:"isShare"`
|
||||
FolderIcon string `json:"folderIcon,omitempty"`
|
||||
FolderName string `json:"folderName,omitempty"`
|
||||
FileType int `json:"fileType"`
|
||||
Status int `json:"status"`
|
||||
IsFileShare int `json:"isFileShare,omitempty"`
|
||||
FileName string `json:"fileName,omitempty"`
|
||||
FileStars float64 `json:"fileStars,omitempty"`
|
||||
IsFileDownload int `json:"isFileDownload,omitempty"`
|
||||
FileComments int `json:"fileComments,omitempty"`
|
||||
FileSize int64 `json:"fileSize,omitempty"`
|
||||
FileIcon string `json:"fileIcon,omitempty"`
|
||||
FileDownloads int `json:"fileDownloads,omitempty"`
|
||||
FileUrl interface{} `json:"fileUrl"`
|
||||
FileLikes int `json:"fileLikes,omitempty"`
|
||||
FileId int64 `json:"fileId,omitempty"`
|
||||
}
|
||||
|
||||
type Part struct {
|
||||
PartNumber int `json:"partNumber"`
|
||||
ETag string `json:"etag"`
|
||||
}
|
||||
|
||||
type UploadResultResp struct {
|
||||
Msg string `json:"msg"`
|
||||
Code int `json:"code"`
|
||||
List []struct {
|
||||
FileIconId int `json:"fileIconId"`
|
||||
FileName string `json:"fileName"`
|
||||
FileIcon string `json:"fileIcon"`
|
||||
FileId int64 `json:"fileId"`
|
||||
Status int `json:"status"`
|
||||
Token string `json:"token"`
|
||||
} `json:"list"`
|
||||
}
|
97
drivers/ilanzou/util.go
Normal file
@ -0,0 +1,97 @@
|
||||
package template
|
||||
|
||||
import (
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/foxxorcat/mopan-sdk-go"
|
||||
"github.com/go-resty/resty/v2"
|
||||
log "github.com/sirupsen/logrus"
|
||||
)
|
||||
|
||||
func (d *ILanZou) login() error {
|
||||
res, err := d.unproved("/login", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
"loginName": d.Username,
|
||||
"loginPwd": d.Password,
|
||||
})
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.Token = utils.Json.Get(res, "data", "appToken").ToString()
|
||||
if d.Token == "" {
|
||||
return fmt.Errorf("failed to login: token is empty, resp: %s", res)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func getTimestamp(secret []byte) (string, error) {
|
||||
ts := time.Now().UnixMilli()
|
||||
tsStr := strconv.FormatInt(ts, 10)
|
||||
res, err := mopan.AesEncrypt([]byte(tsStr), secret)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return hex.EncodeToString(res), nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) request(pathname, method string, callback base.ReqCallback, proved bool, retry ...bool) ([]byte, error) {
|
||||
req := base.RestyClient.R()
|
||||
ts, err := getTimestamp(d.conf.secret)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
req.SetQueryParams(map[string]string{
|
||||
"uuid": d.UUID,
|
||||
"devType": "6",
|
||||
"devCode": d.UUID,
|
||||
"devModel": "chrome",
|
||||
"devVersion": "120",
|
||||
"appVersion": "",
|
||||
"timestamp": ts,
|
||||
//"appToken": d.Token,
|
||||
"extra": "2",
|
||||
})
|
||||
if proved {
|
||||
req.SetQueryParam("appToken", d.Token)
|
||||
}
|
||||
if callback != nil {
|
||||
callback(req)
|
||||
}
|
||||
res, err := req.Execute(method, d.conf.base+pathname)
|
||||
if err != nil {
|
||||
if res != nil {
|
||||
log.Errorf("[iLanZou] request error: %s", res.String())
|
||||
}
|
||||
return nil, err
|
||||
}
|
||||
isRetry := len(retry) > 0 && retry[0]
|
||||
body := res.Body()
|
||||
code := utils.Json.Get(body, "code").ToInt()
|
||||
msg := utils.Json.Get(body, "msg").ToString()
|
||||
if code != 200 {
|
||||
if !isRetry && proved && (utils.SliceContains([]int{-1, -2}, code) || d.Token == "") {
|
||||
err = d.login()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return d.request(pathname, method, callback, proved, true)
|
||||
}
|
||||
return nil, fmt.Errorf("%d: %s", code, msg)
|
||||
}
|
||||
return body, nil
|
||||
}
|
||||
|
||||
func (d *ILanZou) unproved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
|
||||
return d.request("/"+d.conf.unproved+pathname, method, callback, false)
|
||||
}
|
||||
|
||||
func (d *ILanZou) proved(pathname, method string, callback base.ReqCallback) ([]byte, error) {
|
||||
return d.request("/"+d.conf.proved+pathname, method, callback, true)
|
||||
}
|
@ -258,7 +258,7 @@ var sizeFindReg = regexp.MustCompile(`(?i)大小\W*([0-9.]+\s*[bkm]+)`)
|
||||
var timeFindReg = regexp.MustCompile(`\d+\s*[秒天分小][钟时]?前|[昨前]天|\d{4}-\d{2}-\d{2}`)
|
||||
|
||||
// Find the IDs and names of subfolders within a shared folder
|
||||
var findSubFolaerReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
|
||||
var findSubFolderReg = regexp.MustCompile(`(?i)(?:folderlink|mbxfolder).+href="/(.+?)"(?:.+filename")?>(.+?)<`)
|
||||
|
||||
// Get the download page link
|
||||
var findDownPageParamReg = regexp.MustCompile(`<iframe.*?src="(.+?)"`)
|
||||
@ -455,7 +455,7 @@ func (d *LanZou) getFolderByShareUrl(pwd string, sharePageData string) ([]FileOr
|
||||
|
||||
files := make([]FileOrFolderByShareUrl, 0)
|
||||
// Fetch folders (VIP shares)
|
||||
floders := findSubFolaerReg.FindAllStringSubmatch(sharePageData, -1)
|
||||
floders := findSubFolderReg.FindAllStringSubmatch(sharePageData, -1)
|
||||
for _, floder := range floders {
|
||||
if len(floder) == 3 {
|
||||
files = append(files, FileOrFolderByShareUrl{
|
||||
@ -476,10 +476,10 @@ func (d *LanZou) getFolderByShareUrl(pwd string, sharePageData string) ([]FileOr
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
/*// Files inside the folder do not require the password either
|
||||
// Files inside the folder require the password
|
||||
for i := 0; i < len(resp.Text); i++ {
|
||||
resp.Text[i].Pwd = pwd
|
||||
}*/
|
||||
}
|
||||
if len(resp.Text) == 0 {
|
||||
break
|
||||
}
|
||||
|
@ -257,10 +257,18 @@ func (d *Local) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
|
||||
|
||||
func (d *Local) Remove(ctx context.Context, obj model.Obj) error {
|
||||
var err error
|
||||
if obj.IsDir() {
|
||||
err = os.RemoveAll(obj.GetPath())
|
||||
if utils.SliceContains([]string{"", "delete permanently"}, d.RecycleBinPath) {
|
||||
if obj.IsDir() {
|
||||
err = os.RemoveAll(obj.GetPath())
|
||||
} else {
|
||||
err = os.Remove(obj.GetPath())
|
||||
}
|
||||
} else {
|
||||
err = os.Remove(obj.GetPath())
|
||||
dstPath := filepath.Join(d.RecycleBinPath, obj.GetName())
|
||||
if utils.Exists(dstPath) {
|
||||
dstPath = filepath.Join(d.RecycleBinPath, obj.GetName()+"_"+time.Now().Format("20060102150405"))
|
||||
}
|
||||
err = os.Rename(obj.GetPath(), dstPath)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
|
@ -11,6 +11,7 @@ type Addition struct {
|
||||
ThumbCacheFolder string `json:"thumb_cache_folder"`
|
||||
ShowHidden bool `json:"show_hidden" default:"true" required:"false" help:"show hidden directories and files"`
|
||||
MkdirPerm string `json:"mkdir_perm" default:"777"`
|
||||
RecycleBinPath string `json:"recycle_bin_path" default:"delete permanently" help:"path to recycle bin, delete permanently if empty or keep 'delete permanently'"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -188,6 +188,9 @@ func (d *MediaTrack) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
_ = tempFile.Close()
|
||||
}()
|
||||
uploader := s3manager.NewUploader(s)
|
||||
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||
}
|
||||
input := &s3manager.UploadInput{
|
||||
Bucket: &resp.Data.Bucket,
|
||||
Key: &resp.Data.Object,
|
||||
|
@ -4,11 +4,12 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/rclone/rclone/lib/readers"
|
||||
"io"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/pkg/http_range"
|
||||
"github.com/rclone/rclone/lib/readers"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/errs"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
@ -169,7 +170,7 @@ func (d *Mega) Put(ctx context.Context, dstDir model.Obj, stream model.FileStrea
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
up(id * 100 / u.Chunks())
|
||||
up(float64(id) * 100 / float64(u.Chunks()))
|
||||
}
|
||||
|
||||
_, err = u.Finish()
|
||||
|
@ -7,6 +7,7 @@ import (
|
||||
"io"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
@ -42,23 +43,31 @@ func (d *MoPan) Init(ctx context.Context) error {
|
||||
if d.uploadThread < 1 || d.uploadThread > 32 {
|
||||
d.uploadThread, d.UploadThread = 3, "3"
|
||||
}
|
||||
login := func() error {
|
||||
data, err := d.client.Login(d.Phone, d.Password)
|
||||
|
||||
defer func() { d.SMSCode = "" }()
|
||||
|
||||
login := func() (err error) {
|
||||
var loginData *mopan.LoginResp
|
||||
if d.SMSCode != "" {
|
||||
loginData, err = d.client.LoginBySmsStep2(d.Phone, d.SMSCode)
|
||||
} else {
|
||||
loginData, err = d.client.Login(d.Phone, d.Password)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.client.SetAuthorization(data.Token)
|
||||
d.client.SetAuthorization(loginData.Token)
|
||||
|
||||
info, err := d.client.GetUserInfo()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
d.userID = info.UserID
|
||||
log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, data.UserCloudStorageRelations)
|
||||
log.Debugf("[mopan] Phone: %s UserCloudStorageRelations: %+v", d.Phone, loginData.UserCloudStorageRelations)
|
||||
cloudCircleApp, _ := d.client.QueryAllCloudCircleApp()
|
||||
log.Debugf("[mopan] Phone: %s CloudCircleApp: %+v", d.Phone, cloudCircleApp)
|
||||
if d.RootFolderID == "" {
|
||||
for _, userCloudStorage := range data.UserCloudStorageRelations {
|
||||
for _, userCloudStorage := range loginData.UserCloudStorageRelations {
|
||||
if userCloudStorage.Path == "/文件" {
|
||||
d.RootFolderID = userCloudStorage.FolderID
|
||||
}
|
||||
@ -75,8 +84,20 @@ func (d *MoPan) Init(ctx context.Context) error {
|
||||
op.MustSaveDriverStorage(d)
|
||||
}
|
||||
return err
|
||||
}).SetDeviceInfo(d.DeviceInfo)
|
||||
d.DeviceInfo = d.client.GetDeviceInfo()
|
||||
})
|
||||
|
||||
var deviceInfo mopan.DeviceInfo
|
||||
if strings.TrimSpace(d.DeviceInfo) != "" && utils.Json.UnmarshalFromString(d.DeviceInfo, &deviceInfo) == nil {
|
||||
d.client.SetDeviceInfo(&deviceInfo)
|
||||
}
|
||||
d.DeviceInfo, _ = utils.Json.MarshalToString(d.client.GetDeviceInfo())
|
||||
|
||||
if strings.Contains(d.SMSCode, "send") {
|
||||
if _, err := d.client.LoginBySms(d.Phone); err != nil {
|
||||
return err
|
||||
}
|
||||
return errors.New("please enter the SMS code")
|
||||
}
|
||||
return login()
|
||||
}
|
||||
|
||||
@ -117,6 +138,18 @@ func (d *MoPan) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (
|
||||
return nil, err
|
||||
}
|
||||
|
||||
data.DownloadUrl = strings.Replace(strings.ReplaceAll(data.DownloadUrl, "&amp;", "&"), "http://", "https://", 1)
|
||||
res, err := base.NoRedirectClient.R().SetDoNotParseResponse(true).SetContext(ctx).Get(data.DownloadUrl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer func() {
|
||||
_ = res.RawBody().Close()
|
||||
}()
|
||||
if res.StatusCode() == 302 {
|
||||
data.DownloadUrl = res.Header().Get("location")
|
||||
}
|
||||
|
||||
return &model.Link{
|
||||
URL: data.DownloadUrl,
|
||||
}, nil
|
||||
@ -262,7 +295,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
|
||||
}
|
||||
|
||||
if !initUpdload.FileDataExists {
|
||||
fmt.Println(d.client.CloudDiskStartBusiness())
|
||||
utils.Log.Error(d.client.CloudDiskStartBusiness())
|
||||
|
||||
threadG, upCtx := errgroup.NewGroupWithContext(ctx, d.uploadThread,
|
||||
retry.Attempts(3),
|
||||
@ -298,7 +331,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return fmt.Errorf("upload err,code=%d", resp.StatusCode)
|
||||
}
|
||||
up(100 * int(threadG.Success()) / len(parts))
|
||||
up(100 * float64(threadG.Success()) / float64(len(parts)))
|
||||
initUpdload.PartInfos[i] = ""
|
||||
return nil
|
||||
})
|
||||
|
@ -8,6 +8,7 @@ import (
|
||||
type Addition struct {
|
||||
Phone string `json:"phone" required:"true"`
|
||||
Password string `json:"password" required:"true"`
|
||||
SMSCode string `json:"sms_code" help:"input 'send' send sms "`
|
||||
|
||||
RootFolderID string `json:"root_folder_id" default:""`
|
||||
|
||||
|
@ -4,6 +4,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
"github.com/foxxorcat/mopan-sdk-go"
|
||||
)
|
||||
|
||||
@ -14,6 +15,8 @@ func fileToObj(f mopan.File) model.Obj {
|
||||
Name: f.Name,
|
||||
Size: int64(f.Size),
|
||||
Modified: time.Time(f.LastOpTime),
|
||||
Ctime: time.Time(f.CreateDate),
|
||||
HashInfo: utils.NewHashInfo(utils.MD5, f.Md5),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{
|
||||
Thumbnail: f.Icon.SmallURL,
|
||||
@ -26,6 +29,7 @@ func folderToObj(f mopan.Folder) model.Obj {
|
||||
ID: string(f.ID),
|
||||
Name: f.Name,
|
||||
Modified: time.Time(f.LastOpTime),
|
||||
Ctime: time.Time(f.CreateDate),
|
||||
IsFolder: true,
|
||||
}
|
||||
}
|
||||
@ -37,6 +41,7 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
|
||||
Name: newName,
|
||||
IsFolder: true,
|
||||
Modified: o.ModTime(),
|
||||
Ctime: o.CreateTime(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -50,6 +55,8 @@ func CloneObj(o model.Obj, newID, newName string) model.Obj {
|
||||
Name: newName,
|
||||
Size: o.GetSize(),
|
||||
Modified: o.ModTime(),
|
||||
Ctime: o.CreateTime(),
|
||||
HashInfo: o.GetHash(),
|
||||
},
|
||||
Thumbnail: model.Thumbnail{
|
||||
Thumbnail: thumb,
|
||||
|
@ -4,7 +4,9 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"sync"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
@ -18,6 +20,8 @@ type Onedrive struct {
|
||||
model.Storage
|
||||
Addition
|
||||
AccessToken string
|
||||
root *Object
|
||||
mutex sync.Mutex
|
||||
}
|
||||
|
||||
func (d *Onedrive) Config() driver.Config {
|
||||
@ -39,6 +43,42 @@ func (d *Onedrive) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *Onedrive) GetRoot(ctx context.Context) (model.Obj, error) {
|
||||
if d.root != nil {
|
||||
return d.root, nil
|
||||
}
|
||||
d.mutex.Lock()
|
||||
defer d.mutex.Unlock()
|
||||
root := &Object{
|
||||
ObjThumb: model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: "root",
|
||||
Path: d.RootFolderPath,
|
||||
Name: "root",
|
||||
Size: 0,
|
||||
Modified: d.Modified,
|
||||
Ctime: d.Modified,
|
||||
IsFolder: true,
|
||||
},
|
||||
},
|
||||
ParentID: "",
|
||||
}
|
||||
if !utils.PathEqual(d.RootFolderPath, "/") {
|
||||
// get root folder id
|
||||
url := d.GetMetaUrl(false, d.RootFolderPath)
|
||||
var resp struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
_, err := d.Request(url, http.MethodGet, nil, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
root.ID = resp.Id
|
||||
}
|
||||
d.root = root
|
||||
return d.root, nil
|
||||
}
|
||||
|
||||
func (d *Onedrive) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.getFiles(dir.GetPath())
|
||||
if err != nil {
|
||||
@ -57,8 +97,17 @@ func (d *Onedrive) Link(ctx context.Context, file model.Obj, args model.LinkArgs
|
||||
if f.File == nil {
|
||||
return nil, errs.NotFile
|
||||
}
|
||||
u := f.Url
|
||||
if d.CustomHost != "" {
|
||||
_u, err := url.Parse(f.Url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_u.Host = d.CustomHost
|
||||
u = _u.String()
|
||||
}
|
||||
return &model.Link{
|
||||
URL: f.Url,
|
||||
URL: u,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
@ -15,6 +15,7 @@ type Addition struct {
|
||||
RefreshToken string `json:"refresh_token" required:"true"`
|
||||
SiteId string `json:"site_id"`
|
||||
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
|
||||
CustomHost string `json:"custom_host" help:"Custom host for onedrive download link"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -196,13 +196,14 @@ func (d *Onedrive) upBig(ctx context.Context, dstDir model.Obj, stream model.Fil
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode != 201 && res.StatusCode != 202 {
|
||||
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
|
||||
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
|
||||
data, _ := io.ReadAll(res.Body)
|
||||
res.Body.Close()
|
||||
return errors.New(string(data))
|
||||
}
|
||||
res.Body.Close()
|
||||
up(int(finish * 100 / stream.GetSize()))
|
||||
up(float64(finish) * 100 / float64(stream.GetSize()))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@ -4,7 +4,9 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"sync"
|
||||
|
||||
"github.com/alist-org/alist/v3/drivers/base"
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
@ -18,6 +20,8 @@ type OnedriveAPP struct {
|
||||
model.Storage
|
||||
Addition
|
||||
AccessToken string
|
||||
root *Object
|
||||
mutex sync.Mutex
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) Config() driver.Config {
|
||||
@ -39,6 +43,42 @@ func (d *OnedriveAPP) Drop(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) GetRoot(ctx context.Context) (model.Obj, error) {
|
||||
if d.root != nil {
|
||||
return d.root, nil
|
||||
}
|
||||
d.mutex.Lock()
|
||||
defer d.mutex.Unlock()
|
||||
root := &Object{
|
||||
ObjThumb: model.ObjThumb{
|
||||
Object: model.Object{
|
||||
ID: "root",
|
||||
Path: d.RootFolderPath,
|
||||
Name: "root",
|
||||
Size: 0,
|
||||
Modified: d.Modified,
|
||||
Ctime: d.Modified,
|
||||
IsFolder: true,
|
||||
},
|
||||
},
|
||||
ParentID: "",
|
||||
}
|
||||
if !utils.PathEqual(d.RootFolderPath, "/") {
|
||||
// get root folder id
|
||||
url := d.GetMetaUrl(false, d.RootFolderPath)
|
||||
var resp struct {
|
||||
Id string `json:"id"`
|
||||
}
|
||||
_, err := d.Request(url, http.MethodGet, nil, &resp)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
root.ID = resp.Id
|
||||
}
|
||||
d.root = root
|
||||
return d.root, nil
|
||||
}
|
||||
|
||||
func (d *OnedriveAPP) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
||||
files, err := d.getFiles(dir.GetPath())
|
||||
if err != nil {
|
||||
@ -57,8 +97,17 @@ func (d *OnedriveAPP) Link(ctx context.Context, file model.Obj, args model.LinkA
|
||||
if f.File == nil {
|
||||
return nil, errs.NotFile
|
||||
}
|
||||
u := f.Url
|
||||
if d.CustomHost != "" {
|
||||
_u, err := url.Parse(f.Url)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
_u.Host = d.CustomHost
|
||||
u = _u.String()
|
||||
}
|
||||
return &model.Link{
|
||||
URL: f.Url,
|
||||
URL: u,
|
||||
}, nil
|
||||
}
|
||||
|
||||
|
@ -13,6 +13,7 @@ type Addition struct {
|
||||
TenantID string `json:"tenant_id"`
|
||||
Email string `json:"email"`
|
||||
ChunkSize int64 `json:"chunk_size" type:"number" default:"5"`
|
||||
CustomHost string `json:"custom_host" help:"Custom host for onedrive download link"`
|
||||
}
|
||||
|
||||
var config = driver.Config{
|
||||
|
@ -71,8 +71,8 @@ func (d *OnedriveAPP) _accessToken() error {
|
||||
"grant_type": "client_credentials",
|
||||
"client_id": d.ClientID,
|
||||
"client_secret": d.ClientSecret,
|
||||
"resource": "https://graph.microsoft.com/",
|
||||
"scope": "https://graph.microsoft.com/.default",
|
||||
"resource": onedriveHostMap[d.Region].Api + "/",
|
||||
"scope": onedriveHostMap[d.Region].Api + "/.default",
|
||||
}).Post(url)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -187,13 +187,14 @@ func (d *OnedriveAPP) upBig(ctx context.Context, dstDir model.Obj, stream model.
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if res.StatusCode != 201 && res.StatusCode != 202 {
|
||||
// https://learn.microsoft.com/zh-cn/onedrive/developer/rest-api/api/driveitem_createuploadsession
|
||||
if res.StatusCode != 201 && res.StatusCode != 202 && res.StatusCode != 200 {
|
||||
data, _ := io.ReadAll(res.Body)
|
||||
res.Body.Close()
|
||||
return errors.New(string(data))
|
||||
}
|
||||
res.Body.Close()
|
||||
up(int(finish * 100 / stream.GetSize()))
|
||||
up(float64(finish) * 100 / float64(stream.GetSize()))
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@ -3,7 +3,6 @@ package pikpak
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
@ -11,6 +10,7 @@ import (
|
||||
"github.com/alist-org/alist/v3/internal/driver"
|
||||
"github.com/alist-org/alist/v3/internal/model"
|
||||
"github.com/alist-org/alist/v3/pkg/utils"
|
||||
hash_extend "github.com/alist-org/alist/v3/pkg/utils/hash"
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
@ -123,22 +123,20 @@ func (d *PikPak) Remove(ctx context.Context, obj model.Obj) error {
|
||||
}
|
||||
|
||||
func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
|
||||
tempFile, err := stream.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer func() {
|
||||
_ = tempFile.Close()
|
||||
}()
|
||||
// cal gcid
|
||||
sha1Str, err := getGcid(tempFile, stream.GetSize())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = tempFile.Seek(0, io.SeekStart)
|
||||
if err != nil {
|
||||
return err
|
||||
hi := stream.GetHash()
|
||||
sha1Str := hi.GetHash(hash_extend.GCID)
|
||||
if len(sha1Str) < hash_extend.GCID.Width {
|
||||
tFile, err := stream.CacheFullInTempFile()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
sha1Str, err = utils.HashFile(hash_extend.GCID, tFile, stream.GetSize())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
var resp UploadTaskData
|
||||
res, err := d.request("https://api-drive.mypikpak.com/drive/v1/files", http.MethodPost, func(req *resty.Request) {
|
||||
req.SetBody(base.Json{
|
||||
@ -174,10 +172,13 @@ func (d *PikPak) Put(ctx context.Context, dstDir model.Obj, stream model.FileStr
|
||||
return err
|
||||
}
|
||||
uploader := s3manager.NewUploader(ss)
|
||||
if stream.GetSize() > s3manager.MaxUploadParts*s3manager.DefaultUploadPartSize {
|
||||
uploader.PartSize = stream.GetSize() / (s3manager.MaxUploadParts - 1)
|
||||
}
|
||||
input := &s3manager.UploadInput{
|
||||
Bucket: ¶ms.Bucket,
|
||||
Key: ¶ms.Key,
|
||||
Body: tempFile,
|
||||
Body: stream,
|
||||
}
|
||||
_, err = uploader.UploadWithContext(ctx, input)
|
||||
return err
|
||||
|
Some files were not shown because too many files have changed in this diff.