mirror of
https://github.com/OpenListTeam/OpenList.git
synced 2025-09-20 04:36:09 +08:00
Compare commits
14 Commits
Author | SHA1 | Date | |
---|---|---|---|
49213c1321 | |||
64dd3cb047 | |||
12fd52b6b7 | |||
27533d0e20 | |||
34a2eeb4a9 | |||
652e4ba1cb | |||
639b5cf7c2 | |||
b5c1386645 | |||
041868dfb8 | |||
cfbc157477 | |||
5d44806064 | |||
fc8b99c862 | |||
24560b43c0 | |||
39ca385778 |
2
.github/workflows/changelog.yml
vendored
2
.github/workflows/changelog.yml
vendored
@ -21,4 +21,4 @@ jobs:
|
|||||||
|
|
||||||
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
|
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
|
||||||
env:
|
env:
|
||||||
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}
|
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
|
||||||
|
22
.github/workflows/issue_close_question.yml
vendored
22
.github/workflows/issue_close_question.yml
vendored
@ -1,22 +0,0 @@
|
|||||||
name: Close need info
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 */1 * *"
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
close-need-info:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: close-issues
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'close-issues'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
labels: 'question'
|
|
||||||
inactive-day: 3
|
|
||||||
close-reason: 'not_planned'
|
|
||||||
body: |
|
|
||||||
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 3 days.
|
|
||||||
你好 @${{ github.event.issue.user.login }},此issue因超过3天未回复被关闭。
|
|
21
.github/workflows/issue_close_stale.yml
vendored
21
.github/workflows/issue_close_stale.yml
vendored
@ -1,21 +0,0 @@
|
|||||||
name: Close inactive
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: "0 0 */7 * *"
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
close-inactive:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: close-issues
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'close-issues'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
labels: 'stale'
|
|
||||||
inactive-day: 8
|
|
||||||
close-reason: 'not_planned'
|
|
||||||
body: |
|
|
||||||
Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 52 days. You can reopen or recreate it if you think it should continue. Thank you for your contributions again.
|
|
25
.github/workflows/issue_duplicate.yml
vendored
25
.github/workflows/issue_duplicate.yml
vendored
@ -1,25 +0,0 @@
|
|||||||
name: Issue Duplicate
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [labeled]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
create-comment:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event.label.name == 'duplicate'
|
|
||||||
steps:
|
|
||||||
- name: Create comment
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'create-comment'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-number: ${{ github.event.issue.number }}
|
|
||||||
body: |
|
|
||||||
Hello @${{ github.event.issue.user.login }}, your issue is a duplicate and will be closed.
|
|
||||||
你好 @${{ github.event.issue.user.login }},你的issue是重复的,将被关闭。
|
|
||||||
- name: Close issue
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'close-issue'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
25
.github/workflows/issue_invalid.yml
vendored
25
.github/workflows/issue_invalid.yml
vendored
@ -1,25 +0,0 @@
|
|||||||
name: Issue Invalid
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [labeled]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
create-comment:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event.label.name == 'invalid'
|
|
||||||
steps:
|
|
||||||
- name: Create comment
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'create-comment'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-number: ${{ github.event.issue.number }}
|
|
||||||
body: |
|
|
||||||
Hello @${{ github.event.issue.user.login }}, your issue is invalid and will be closed.
|
|
||||||
你好 @${{ github.event.issue.user.login }},你的issue无效,将被关闭。
|
|
||||||
- name: Close issue
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'close-issue'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
17
.github/workflows/issue_on_close.yml
vendored
17
.github/workflows/issue_on_close.yml
vendored
@ -1,17 +0,0 @@
|
|||||||
name: Remove working label when issue closed
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [closed]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
rm-working:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Remove working label
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'remove-labels'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-number: ${{ github.event.issue.number }}
|
|
||||||
labels: 'working,pr-welcome'
|
|
20
.github/workflows/issue_question.yml
vendored
20
.github/workflows/issue_question.yml
vendored
@ -1,20 +0,0 @@
|
|||||||
name: Issue Question
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [labeled]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
create-comment:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event.label.name == 'question'
|
|
||||||
steps:
|
|
||||||
- name: Create comment
|
|
||||||
uses: actions-cool/issues-helper@v3.6.0
|
|
||||||
with:
|
|
||||||
actions: 'create-comment'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-number: ${{ github.event.issue.number }}
|
|
||||||
body: |
|
|
||||||
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 3 days.
|
|
||||||
你好 @${{ github.event.issue.user.login }},请按照issue模板填写, 并详细说明问题/日志记录/复现步骤/复现链接/实现思路或提供更多信息等, 3天内未回复issue自动关闭。
|
|
19
.github/workflows/issue_similarity.yml
vendored
19
.github/workflows/issue_similarity.yml
vendored
@ -1,19 +0,0 @@
|
|||||||
name: Issues Similarity Analysis
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [opened, edited]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
similarity-analysis:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: analysis
|
|
||||||
uses: actions-cool/issues-similarity-analysis@v1
|
|
||||||
with:
|
|
||||||
filter-threshold: 0.5
|
|
||||||
comment-title: '### See'
|
|
||||||
comment-body: '${index}. ${similarity} #${number}'
|
|
||||||
show-footer: false
|
|
||||||
show-mentioned: true
|
|
||||||
since-days: 730
|
|
13
.github/workflows/issue_translate.yml
vendored
13
.github/workflows/issue_translate.yml
vendored
@ -1,13 +0,0 @@
|
|||||||
name: Translation Helper
|
|
||||||
|
|
||||||
on:
|
|
||||||
pull_request_target:
|
|
||||||
types: [opened]
|
|
||||||
issues:
|
|
||||||
types: [opened]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
translate:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: actions-cool/translation-helper@v1.2.0
|
|
25
.github/workflows/issue_wontfix.yml
vendored
25
.github/workflows/issue_wontfix.yml
vendored
@ -1,25 +0,0 @@
|
|||||||
name: Issue Wontfix
|
|
||||||
|
|
||||||
on:
|
|
||||||
issues:
|
|
||||||
types: [labeled]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
lock-issue:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event.label.name == 'wontfix'
|
|
||||||
steps:
|
|
||||||
- name: Create comment
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'create-comment'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
issue-number: ${{ github.event.issue.number }}
|
|
||||||
body: |
|
|
||||||
Hello @${{ github.event.issue.user.login }}, this issue will not be worked on and will be closed.
|
|
||||||
你好 @${{ github.event.issue.user.login }},这不会被处理,将被关闭。
|
|
||||||
- name: Close issue
|
|
||||||
uses: actions-cool/issues-helper@v3
|
|
||||||
with:
|
|
||||||
actions: 'close-issue'
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
28
.github/workflows/release_docker.yml
vendored
28
.github/workflows/release_docker.yml
vendored
@ -2,6 +2,19 @@ name: release_docker
|
|||||||
|
|
||||||
on:
|
on:
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
manual_tag:
|
||||||
|
description: 'Tag name (like v0.1.0). Required if as_latest is true.'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
as_latest:
|
||||||
|
description: 'Tag as latest?'
|
||||||
|
required: true
|
||||||
|
default: 'false'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- 'true'
|
||||||
|
- 'false'
|
||||||
push:
|
push:
|
||||||
tags:
|
tags:
|
||||||
- 'v*'
|
- 'v*'
|
||||||
@ -17,11 +30,11 @@ env:
|
|||||||
REGISTRY: ghcr.io
|
REGISTRY: ghcr.io
|
||||||
ARTIFACT_NAME: 'binaries_docker_release'
|
ARTIFACT_NAME: 'binaries_docker_release'
|
||||||
RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
|
RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
|
||||||
IMAGE_PUSH: ${{ github.event_name == 'push' }}
|
IMAGE_PUSH: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
|
||||||
IMAGE_IS_PROD: ${{ github.ref_type == 'tag' }}
|
IMAGE_IS_PROD: ${{ github.ref_type == 'tag' || github.event.inputs.as_latest == 'true' }}
|
||||||
IMAGE_TAGS_BETA: |
|
IMAGE_TAGS_BETA: |
|
||||||
type=raw,value=beta,enable={{is_default_branch}}
|
type=raw,value=beta,enable={{is_default_branch}}
|
||||||
|
|
||||||
permissions: write-all
|
permissions: write-all
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
@ -129,9 +142,14 @@ jobs:
|
|||||||
images: |
|
images: |
|
||||||
${{ env.REGISTRY }}/${{ env.ORG_NAME }}/${{ env.IMAGE_NAME }}
|
${{ env.REGISTRY }}/${{ env.ORG_NAME }}/${{ env.IMAGE_NAME }}
|
||||||
${{ env.ORG_NAME }}/${{ env.IMAGE_NAME_DOCKERHUB }}
|
${{ env.ORG_NAME }}/${{ env.IMAGE_NAME_DOCKERHUB }}
|
||||||
tags: ${{ env.IMAGE_IS_PROD == 'true' && '' || env.IMAGE_TAGS_BETA }}
|
tags: >
|
||||||
|
${{ env.IMAGE_IS_PROD == 'true' && (
|
||||||
|
github.event_name == 'workflow_dispatch'
|
||||||
|
&& format('type=raw,value={0}', github.event.inputs.manual_tag)
|
||||||
|
|| format('type=raw,value={0}', github.ref_name)
|
||||||
|
) || env.IMAGE_TAGS_BETA }}
|
||||||
flavor: |
|
flavor: |
|
||||||
${{ env.IMAGE_IS_PROD == 'true' && 'latest=true' || '' }}
|
latest=${{ env.IMAGE_IS_PROD }}
|
||||||
${{ matrix.tag_favor }}
|
${{ matrix.tag_favor }}
|
||||||
|
|
||||||
- name: Build and push
|
- name: Build and push
|
||||||
|
@ -95,7 +95,8 @@ English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing
|
|||||||
|
|
||||||
## Document
|
## Document
|
||||||
|
|
||||||
<https://docs.openlist.team>
|
- https://docs.oplist.org
|
||||||
|
- https://docs.openlist.team
|
||||||
|
|
||||||
## Demo
|
## Demo
|
||||||
|
|
||||||
@ -125,4 +126,4 @@ The `OpenList` is open-source software licensed under the AGPL-3.0 license.
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam)
|
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam) · [Telegram Channel](https://t.me/OpenListOfficial)
|
||||||
|
@ -93,7 +93,8 @@
|
|||||||
|
|
||||||
## 文档
|
## 文档
|
||||||
|
|
||||||
<https://docs.openlist.team>
|
- https://docs.oplist.org
|
||||||
|
- https://docs.openlist.team
|
||||||
|
|
||||||
## Demo
|
## Demo
|
||||||
|
|
||||||
@ -123,4 +124,4 @@ N/A(重建中)
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
> [@GitHub](https://github.com/OpenListTeam) · [Telegram 交流群](https://t.me/OpenListTeam)
|
> [@GitHub](https://github.com/OpenListTeam) · [Telegram 交流群](https://t.me/OpenListTeam) · [Telegram 频道](https://t.me/OpenListOfficial)
|
||||||
|
@ -94,7 +94,8 @@
|
|||||||
|
|
||||||
## ドキュメント
|
## ドキュメント
|
||||||
|
|
||||||
<https://docs.openlist.team>
|
- https://docs.oplist.org
|
||||||
|
- https://docs.openlist.team
|
||||||
|
|
||||||
## デモ
|
## デモ
|
||||||
|
|
||||||
@ -124,4 +125,4 @@ N/A (再構築中)
|
|||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam)
|
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam) · [Telegram Channel](https://t.me/OpenListOfficial)
|
||||||
|
@ -1,4 +1,3 @@
|
|||||||
version: '3.3'
|
|
||||||
services:
|
services:
|
||||||
openlist:
|
openlist:
|
||||||
restart: always
|
restart: always
|
||||||
@ -13,4 +12,4 @@ services:
|
|||||||
- UMASK=022
|
- UMASK=022
|
||||||
- TZ=UTC
|
- TZ=UTC
|
||||||
container_name: openlist
|
container_name: openlist
|
||||||
image: 'ghcr.io/openlistteam/openlist:latest'
|
image: 'openlistteam/openlist:latest'
|
||||||
|
@ -82,7 +82,6 @@ func (d *Open123) Upload(ctx context.Context, file model.FileStreamer, createRes
|
|||||||
retry.Attempts(3),
|
retry.Attempts(3),
|
||||||
retry.Delay(time.Second),
|
retry.Delay(time.Second),
|
||||||
retry.DelayType(retry.BackOffDelay))
|
retry.DelayType(retry.BackOffDelay))
|
||||||
threadG.SetLimit(3)
|
|
||||||
|
|
||||||
for partIndex := int64(0); partIndex < uploadNums; partIndex++ {
|
for partIndex := int64(0); partIndex < uploadNums; partIndex++ {
|
||||||
if utils.IsCanceled(uploadCtx) {
|
if utils.IsCanceled(uploadCtx) {
|
||||||
|
@ -504,7 +504,6 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
|
|||||||
retry.Attempts(3),
|
retry.Attempts(3),
|
||||||
retry.Delay(time.Second),
|
retry.Delay(time.Second),
|
||||||
retry.DelayType(retry.BackOffDelay))
|
retry.DelayType(retry.BackOffDelay))
|
||||||
threadG.SetLimit(3)
|
|
||||||
|
|
||||||
count := int(size / sliceSize)
|
count := int(size / sliceSize)
|
||||||
lastPartSize := size % sliceSize
|
lastPartSize := size % sliceSize
|
||||||
|
@ -52,7 +52,6 @@ import (
|
|||||||
_ "github.com/OpenListTeam/OpenList/drivers/pikpak_share"
|
_ "github.com/OpenListTeam/OpenList/drivers/pikpak_share"
|
||||||
_ "github.com/OpenListTeam/OpenList/drivers/quark_uc"
|
_ "github.com/OpenListTeam/OpenList/drivers/quark_uc"
|
||||||
_ "github.com/OpenListTeam/OpenList/drivers/quark_uc_tv"
|
_ "github.com/OpenListTeam/OpenList/drivers/quark_uc_tv"
|
||||||
_ "github.com/OpenListTeam/OpenList/drivers/quqi"
|
|
||||||
_ "github.com/OpenListTeam/OpenList/drivers/s3"
|
_ "github.com/OpenListTeam/OpenList/drivers/s3"
|
||||||
_ "github.com/OpenListTeam/OpenList/drivers/seafile"
|
_ "github.com/OpenListTeam/OpenList/drivers/seafile"
|
||||||
_ "github.com/OpenListTeam/OpenList/drivers/sftp"
|
_ "github.com/OpenListTeam/OpenList/drivers/sftp"
|
||||||
|
@ -295,7 +295,6 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
|
|||||||
retry.Attempts(1),
|
retry.Attempts(1),
|
||||||
retry.Delay(time.Second),
|
retry.Delay(time.Second),
|
||||||
retry.DelayType(retry.BackOffDelay))
|
retry.DelayType(retry.BackOffDelay))
|
||||||
threadG.SetLimit(3)
|
|
||||||
|
|
||||||
for i, partseq := range precreateResp.BlockList {
|
for i, partseq := range precreateResp.BlockList {
|
||||||
if utils.IsCanceled(upCtx) {
|
if utils.IsCanceled(upCtx) {
|
||||||
|
@ -342,7 +342,6 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
|
|||||||
retry.Attempts(3),
|
retry.Attempts(3),
|
||||||
retry.Delay(time.Second),
|
retry.Delay(time.Second),
|
||||||
retry.DelayType(retry.BackOffDelay))
|
retry.DelayType(retry.BackOffDelay))
|
||||||
threadG.SetLimit(3)
|
|
||||||
|
|
||||||
for i, partseq := range precreateResp.BlockList {
|
for i, partseq := range precreateResp.BlockList {
|
||||||
if utils.IsCanceled(upCtx) {
|
if utils.IsCanceled(upCtx) {
|
||||||
|
@ -5,19 +5,14 @@ import (
|
|||||||
"github.com/OpenListTeam/OpenList/internal/op"
|
"github.com/OpenListTeam/OpenList/internal/op"
|
||||||
)
|
)
|
||||||
|
|
||||||
const (
|
|
||||||
DefaultClientID = "76lrwrklhdn1icb"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Addition struct {
|
type Addition struct {
|
||||||
RefreshToken string `json:"refresh_token" required:"true"`
|
|
||||||
driver.RootPath
|
driver.RootPath
|
||||||
|
UseOnlineAPI bool `json:"use_online_api" default:"true"`
|
||||||
OauthTokenURL string `json:"oauth_token_url" default:"https://api.oplist.org/dropboxs/renewapi"` // TODO: replace
|
APIAddress string `json:"api_url_address" default:"https://api.oplist.org/dropboxs/renewapi"`
|
||||||
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
|
||||||
|
|
||||||
AccessToken string
|
AccessToken string
|
||||||
|
RefreshToken string `json:"refresh_token" required:"true"`
|
||||||
RootNamespaceId string
|
RootNamespaceId string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -15,10 +15,37 @@ import (
|
|||||||
)
|
)
|
||||||
|
|
||||||
func (d *Dropbox) refreshToken() error {
|
func (d *Dropbox) refreshToken() error {
|
||||||
url := d.base + "/oauth2/token"
|
// 使用在线API刷新Token,无需ClientID和ClientSecret
|
||||||
if utils.SliceContains([]string{"", DefaultClientID}, d.ClientID) {
|
if d.UseOnlineAPI && len(d.APIAddress) > 0 {
|
||||||
url = d.OauthTokenURL
|
u := d.APIAddress
|
||||||
|
var resp struct {
|
||||||
|
RefreshToken string `json:"refresh_token"`
|
||||||
|
AccessToken string `json:"access_token"`
|
||||||
|
ErrorMessage string `json:"text"`
|
||||||
|
}
|
||||||
|
_, err := base.RestyClient.R().
|
||||||
|
SetResult(&resp).
|
||||||
|
SetQueryParams(map[string]string{
|
||||||
|
"refresh_ui": d.RefreshToken,
|
||||||
|
"server_use": "true",
|
||||||
|
"driver_txt": "dropboxs_go",
|
||||||
|
}).
|
||||||
|
Get(u)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.RefreshToken == "" || resp.AccessToken == "" {
|
||||||
|
if resp.ErrorMessage != "" {
|
||||||
|
return fmt.Errorf("failed to refresh token: %s", resp.ErrorMessage)
|
||||||
|
}
|
||||||
|
return fmt.Errorf("empty token returned from official API")
|
||||||
|
}
|
||||||
|
d.AccessToken = resp.AccessToken
|
||||||
|
d.RefreshToken = resp.RefreshToken
|
||||||
|
op.MustSaveDriverStorage(d)
|
||||||
|
return nil
|
||||||
}
|
}
|
||||||
|
url := d.base + "/oauth2/token"
|
||||||
var tokenResp TokenResp
|
var tokenResp TokenResp
|
||||||
resp, err := base.RestyClient.R().
|
resp, err := base.RestyClient.R().
|
||||||
//ForceContentType("application/x-www-form-urlencoded").
|
//ForceContentType("application/x-www-form-urlencoded").
|
||||||
|
@ -298,7 +298,6 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
|
|||||||
retry.Attempts(3),
|
retry.Attempts(3),
|
||||||
retry.Delay(time.Second),
|
retry.Delay(time.Second),
|
||||||
retry.DelayType(retry.BackOffDelay))
|
retry.DelayType(retry.BackOffDelay))
|
||||||
threadG.SetLimit(3)
|
|
||||||
|
|
||||||
// step.3
|
// step.3
|
||||||
parts, err := d.client.GetAllMultiUploadUrls(initUpdload.UploadFileID, initUpdload.PartInfos)
|
parts, err := d.client.GetAllMultiUploadUrls(initUpdload.UploadFileID, initUpdload.PartInfos)
|
||||||
|
@ -1,452 +0,0 @@
|
|||||||
package quqi
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"errors"
|
|
||||||
"io"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/driver"
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/errs"
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/model"
|
|
||||||
"github.com/OpenListTeam/OpenList/pkg/utils"
|
|
||||||
"github.com/OpenListTeam/OpenList/pkg/utils/random"
|
|
||||||
"github.com/aws/aws-sdk-go/aws"
|
|
||||||
"github.com/aws/aws-sdk-go/aws/credentials"
|
|
||||||
"github.com/aws/aws-sdk-go/aws/session"
|
|
||||||
"github.com/aws/aws-sdk-go/service/s3"
|
|
||||||
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
|
||||||
"github.com/go-resty/resty/v2"
|
|
||||||
log "github.com/sirupsen/logrus"
|
|
||||||
)
|
|
||||||
|
|
||||||
type Quqi struct {
|
|
||||||
model.Storage
|
|
||||||
Addition
|
|
||||||
Cookie string // Cookie
|
|
||||||
GroupID string // 私人云群组ID
|
|
||||||
ClientID string // 随机生成客户端ID 经过测试,部分接口调用若不携带client id会出现错误
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Config() driver.Config {
|
|
||||||
return config
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) GetAddition() driver.Additional {
|
|
||||||
return &d.Addition
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Init(ctx context.Context) error {
|
|
||||||
// 登录
|
|
||||||
if err := d.login(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// 生成随机client id (与网页端生成逻辑一致)
|
|
||||||
d.ClientID = "quqipc_" + random.String(10)
|
|
||||||
|
|
||||||
// 获取私人云ID (暂时仅获取私人云)
|
|
||||||
groupResp := &GroupRes{}
|
|
||||||
if _, err := d.request("group.quqi.com", "/v1/group/list", resty.MethodGet, nil, groupResp); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
for _, groupInfo := range groupResp.Data {
|
|
||||||
if groupInfo == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if groupInfo.Type == 2 {
|
|
||||||
d.GroupID = strconv.Itoa(groupInfo.ID)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if d.GroupID == "" {
|
|
||||||
return errs.StorageNotFound
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Drop(ctx context.Context) error {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
|
|
||||||
var (
|
|
||||||
listResp = &ListRes{}
|
|
||||||
files []model.Obj
|
|
||||||
)
|
|
||||||
|
|
||||||
if _, err := d.request("", "/api/dir/ls", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": dir.GetID(),
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, listResp); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if listResp.Data == nil {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// dirs
|
|
||||||
for _, dirInfo := range listResp.Data.Dir {
|
|
||||||
if dirInfo == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
files = append(files, &model.Object{
|
|
||||||
ID: strconv.FormatInt(dirInfo.NodeID, 10),
|
|
||||||
Name: dirInfo.Name,
|
|
||||||
Modified: time.Unix(dirInfo.UpdateTime, 0),
|
|
||||||
Ctime: time.Unix(dirInfo.AddTime, 0),
|
|
||||||
IsFolder: true,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
// files
|
|
||||||
for _, fileInfo := range listResp.Data.File {
|
|
||||||
if fileInfo == nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if fileInfo.EXT != "" {
|
|
||||||
fileInfo.Name = strings.Join([]string{fileInfo.Name, fileInfo.EXT}, ".")
|
|
||||||
}
|
|
||||||
|
|
||||||
files = append(files, &model.Object{
|
|
||||||
ID: strconv.FormatInt(fileInfo.NodeID, 10),
|
|
||||||
Name: fileInfo.Name,
|
|
||||||
Size: fileInfo.Size,
|
|
||||||
Modified: time.Unix(fileInfo.UpdateTime, 0),
|
|
||||||
Ctime: time.Unix(fileInfo.AddTime, 0),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return files, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
|
|
||||||
if d.CDN {
|
|
||||||
link, err := d.linkFromCDN(file.GetID())
|
|
||||||
if err != nil {
|
|
||||||
log.Warn(err)
|
|
||||||
} else {
|
|
||||||
return link, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
link, err := d.linkFromPreview(file.GetID())
|
|
||||||
if err != nil {
|
|
||||||
log.Warn(err)
|
|
||||||
} else {
|
|
||||||
return link, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
link, err = d.linkFromDownload(file.GetID())
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return link, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
|
|
||||||
var (
|
|
||||||
makeDirRes = &MakeDirRes{}
|
|
||||||
timeNow = time.Now()
|
|
||||||
)
|
|
||||||
|
|
||||||
if _, err := d.request("", "/api/dir/mkDir", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"parent_id": parentDir.GetID(),
|
|
||||||
"name": dirName,
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, makeDirRes); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return &model.Object{
|
|
||||||
ID: strconv.FormatInt(makeDirRes.Data.NodeID, 10),
|
|
||||||
Name: dirName,
|
|
||||||
Modified: timeNow,
|
|
||||||
Ctime: timeNow,
|
|
||||||
IsFolder: true,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
var moveRes = &MoveRes{}
|
|
||||||
|
|
||||||
if _, err := d.request("", "/api/dir/mvDir", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": dstDir.GetID(),
|
|
||||||
"source_quqi_id": d.GroupID,
|
|
||||||
"source_tree_id": "1",
|
|
||||||
"source_node_id": srcObj.GetID(),
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, moveRes); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return &model.Object{
|
|
||||||
ID: strconv.FormatInt(moveRes.Data.NodeID, 10),
|
|
||||||
Name: moveRes.Data.NodeName,
|
|
||||||
Size: srcObj.GetSize(),
|
|
||||||
Modified: time.Now(),
|
|
||||||
Ctime: srcObj.CreateTime(),
|
|
||||||
IsFolder: srcObj.IsDir(),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
|
|
||||||
var realName = newName
|
|
||||||
|
|
||||||
if !srcObj.IsDir() {
|
|
||||||
srcExt, newExt := utils.Ext(srcObj.GetName()), utils.Ext(newName)
|
|
||||||
|
|
||||||
// 曲奇网盘的文件名称由文件名和扩展名组成,若存在扩展名,则重命名时仅支持更改文件名,扩展名在曲奇服务端保留
|
|
||||||
if srcExt != "" && srcExt == newExt {
|
|
||||||
parts := strings.Split(newName, ".")
|
|
||||||
if len(parts) > 1 {
|
|
||||||
realName = strings.Join(parts[:len(parts)-1], ".")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, err := d.request("", "/api/dir/renameDir", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": srcObj.GetID(),
|
|
||||||
"rename": realName,
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, nil); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return &model.Object{
|
|
||||||
ID: srcObj.GetID(),
|
|
||||||
Name: newName,
|
|
||||||
Size: srcObj.GetSize(),
|
|
||||||
Modified: time.Now(),
|
|
||||||
Ctime: srcObj.CreateTime(),
|
|
||||||
IsFolder: srcObj.IsDir(),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
|
|
||||||
// 无法从曲奇接口响应中直接获取复制后的文件信息
|
|
||||||
if _, err := d.request("", "/api/node/copy", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": dstDir.GetID(),
|
|
||||||
"source_quqi_id": d.GroupID,
|
|
||||||
"source_tree_id": "1",
|
|
||||||
"source_node_id": srcObj.GetID(),
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, nil); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Remove(ctx context.Context, obj model.Obj) error {
|
|
||||||
// 暂时不做直接删除,默认都放到回收站。直接删除方法:先调用删除接口放入回收站,在通过回收站接口删除文件
|
|
||||||
if _, err := d.request("", "/api/node/del", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": obj.GetID(),
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, nil); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
|
|
||||||
// base info
|
|
||||||
sizeStr := strconv.FormatInt(stream.GetSize(), 10)
|
|
||||||
f, err := stream.CacheFullInTempFile()
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
md5, err := utils.HashFile(utils.MD5, f)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
sha, err := utils.HashFile(utils.SHA256, f)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// init upload
|
|
||||||
var uploadInitResp UploadInitResp
|
|
||||||
_, err = d.request("", "/api/upload/v1/file/init", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"parent_id": dstDir.GetID(),
|
|
||||||
"size": sizeStr,
|
|
||||||
"file_name": stream.GetName(),
|
|
||||||
"md5": md5,
|
|
||||||
"sha": sha,
|
|
||||||
"is_slice": "true",
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, &uploadInitResp)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// check exist
|
|
||||||
// if the file already exists in Quqi server, there is no need to actually upload it
|
|
||||||
if uploadInitResp.Data.Exist {
|
|
||||||
// the file name returned by Quqi does not include the extension name
|
|
||||||
nodeName, nodeExt := uploadInitResp.Data.NodeName, utils.Ext(stream.GetName())
|
|
||||||
if nodeExt != "" {
|
|
||||||
nodeName = nodeName + "." + nodeExt
|
|
||||||
}
|
|
||||||
return &model.Object{
|
|
||||||
ID: strconv.FormatInt(uploadInitResp.Data.NodeID, 10),
|
|
||||||
Name: nodeName,
|
|
||||||
Size: stream.GetSize(),
|
|
||||||
Modified: stream.ModTime(),
|
|
||||||
Ctime: stream.CreateTime(),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
// listParts
|
|
||||||
_, err = d.request("upload.quqi.com:20807", "/upload/v1/listParts", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"token": uploadInitResp.Data.Token,
|
|
||||||
"task_id": uploadInitResp.Data.TaskID,
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, nil)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// get temp key
|
|
||||||
var tempKeyResp TempKeyResp
|
|
||||||
_, err = d.request("upload.quqi.com:20807", "/upload/v1/tempKey", resty.MethodGet, func(req *resty.Request) {
|
|
||||||
req.SetQueryParams(map[string]string{
|
|
||||||
"token": uploadInitResp.Data.Token,
|
|
||||||
"task_id": uploadInitResp.Data.TaskID,
|
|
||||||
})
|
|
||||||
}, &tempKeyResp)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// upload
|
|
||||||
// u, err := url.Parse(fmt.Sprintf("https://%s.cos.ap-shanghai.myqcloud.com", uploadInitResp.Data.Bucket))
|
|
||||||
// b := &cos.BaseURL{BucketURL: u}
|
|
||||||
// client := cos.NewClient(b, &http.Client{
|
|
||||||
// Transport: &cos.CredentialTransport{
|
|
||||||
// Credential: cos.NewTokenCredential(tempKeyResp.Data.Credentials.TmpSecretID, tempKeyResp.Data.Credentials.TmpSecretKey, tempKeyResp.Data.Credentials.SessionToken),
|
|
||||||
// },
|
|
||||||
// })
|
|
||||||
// partSize := int64(1024 * 1024 * 2)
|
|
||||||
// partCount := (stream.GetSize() + partSize - 1) / partSize
|
|
||||||
// for i := 1; i <= int(partCount); i++ {
|
|
||||||
// length := partSize
|
|
||||||
// if i == int(partCount) {
|
|
||||||
// length = stream.GetSize() - (int64(i)-1)*partSize
|
|
||||||
// }
|
|
||||||
// _, err := client.Object.UploadPart(
|
|
||||||
// ctx, uploadInitResp.Data.Key, uploadInitResp.Data.UploadID, i, io.LimitReader(f, partSize), &cos.ObjectUploadPartOptions{
|
|
||||||
// ContentLength: length,
|
|
||||||
// },
|
|
||||||
// )
|
|
||||||
// if err != nil {
|
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
cfg := &aws.Config{
|
|
||||||
Credentials: credentials.NewStaticCredentials(tempKeyResp.Data.Credentials.TmpSecretID, tempKeyResp.Data.Credentials.TmpSecretKey, tempKeyResp.Data.Credentials.SessionToken),
|
|
||||||
Region: aws.String("ap-shanghai"),
|
|
||||||
Endpoint: aws.String("cos.ap-shanghai.myqcloud.com"),
|
|
||||||
}
|
|
||||||
s, err := session.NewSession(cfg)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
uploader := s3manager.NewUploader(s)
|
|
||||||
buf := make([]byte, 1024*1024*2)
|
|
||||||
fup := &driver.ReaderUpdatingProgress{
|
|
||||||
Reader: &driver.SimpleReaderWithSize{
|
|
||||||
Reader: f,
|
|
||||||
Size: int64(len(buf)),
|
|
||||||
},
|
|
||||||
UpdateProgress: up,
|
|
||||||
}
|
|
||||||
for partNumber := int64(1); ; partNumber++ {
|
|
||||||
n, err := io.ReadFull(fup, buf)
|
|
||||||
if err != nil && !errors.Is(err, io.ErrUnexpectedEOF) {
|
|
||||||
if err == io.EOF {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
reader := bytes.NewReader(buf[:n])
|
|
||||||
_, err = uploader.S3.UploadPartWithContext(ctx, &s3.UploadPartInput{
|
|
||||||
UploadId: &uploadInitResp.Data.UploadID,
|
|
||||||
Key: &uploadInitResp.Data.Key,
|
|
||||||
Bucket: &uploadInitResp.Data.Bucket,
|
|
||||||
PartNumber: aws.Int64(partNumber),
|
|
||||||
Body: struct {
|
|
||||||
*driver.RateLimitReader
|
|
||||||
io.Seeker
|
|
||||||
}{
|
|
||||||
RateLimitReader: driver.NewLimitedUploadStream(ctx, reader),
|
|
||||||
Seeker: reader,
|
|
||||||
},
|
|
||||||
})
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// finish upload
|
|
||||||
var uploadFinishResp UploadFinishResp
|
|
||||||
_, err = d.request("", "/api/upload/v1/file/finish", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"token": uploadInitResp.Data.Token,
|
|
||||||
"task_id": uploadInitResp.Data.TaskID,
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, &uploadFinishResp)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
// the file name returned by Quqi does not include the extension name
|
|
||||||
nodeName, nodeExt := uploadFinishResp.Data.NodeName, utils.Ext(stream.GetName())
|
|
||||||
if nodeExt != "" {
|
|
||||||
nodeName = nodeName + "." + nodeExt
|
|
||||||
}
|
|
||||||
return &model.Object{
|
|
||||||
ID: strconv.FormatInt(uploadFinishResp.Data.NodeID, 10),
|
|
||||||
Name: nodeName,
|
|
||||||
Size: stream.GetSize(),
|
|
||||||
Modified: stream.ModTime(),
|
|
||||||
Ctime: stream.CreateTime(),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
//	return nil, errs.NotSupport
//}

// Compile-time assertion that *Quqi implements the driver.Driver interface.
var _ driver.Driver = (*Quqi)(nil)
|
|
@ -1,28 +0,0 @@
|
|||||||
package quqi
|
|
||||||
|
|
||||||
import (
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/driver"
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/op"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Addition holds the user-configurable options of the Quqi driver.
type Addition struct {
	driver.RootID
	// Phone and Password are used for password login when no (valid) cookie
	// is configured.
	Phone    string `json:"phone"`
	Password string `json:"password"`
	Cookie   string `json:"cookie" help:"Cookie can be used on multiple clients at the same time"`
	CDN      bool   `json:"cdn" help:"If you enable this option, the download speed can be increased, but there will be some performance loss"`
}

// config describes the Quqi driver to the driver registry.
var config = driver.Config{
	Name:      "Quqi",
	OnlyLocal: true,
	LocalSort: true,
	//NoUpload:  true,
	DefaultRoot: "0",
}

// init registers the Quqi driver factory with the global registry.
func init() {
	op.RegisterDriver(func() driver.Driver {
		return &Quqi{}
	})
}
|
|
@ -1,197 +0,0 @@
|
|||||||
package quqi
|
|
||||||
|
|
||||||
// BaseReqQuery carries the group id as the "quqiid" query parameter.
type BaseReqQuery struct {
	ID string `json:"quqiid"`
}

// BaseReq carries the group id in a request body.
type BaseReq struct {
	GroupID string `json:"quqi_id"`
}

// BaseRes is the envelope shared by all Quqi API responses; a non-zero Code
// signals an API-level error described by Message.
type BaseRes struct {
	//Data interface{} `json:"data"`
	Code    int    `json:"err"`
	Message string `json:"msg"`
}
|
|
||||||
|
|
||||||
// GroupRes is the response of the group-list endpoint.
type GroupRes struct {
	BaseRes
	Data []*Group `json:"data"`
}

// ListRes is the response of the directory-listing endpoint.
type ListRes struct {
	BaseRes
	Data *List `json:"data"`
}

// GetDocRes is the response of the getDoc (preview) endpoint; OriginPath is
// the direct URL of the original file.
type GetDocRes struct {
	BaseRes
	Data struct {
		OriginPath string `json:"origin_path"`
	} `json:"data"`
}

// GetDownloadResp is the response of the getDownload endpoint.
type GetDownloadResp struct {
	BaseRes
	Data struct {
		Url string `json:"url"`
	} `json:"data"`
}
|
|
||||||
|
|
||||||
// MakeDirRes is the response of the directory-creation endpoint.
type MakeDirRes struct {
	BaseRes
	Data struct {
		IsRoot   bool  `json:"is_root"`
		NodeID   int64 `json:"node_id"`
		ParentID int64 `json:"parent_id"`
	} `json:"data"`
}

// MoveRes is the response of the move endpoint.
type MoveRes struct {
	BaseRes
	Data struct {
		NodeChildNum int64  `json:"node_child_num"`
		NodeID       int64  `json:"node_id"`
		NodeName     string `json:"node_name"`
		ParentID     int64  `json:"parent_id"`
		GroupID      int64  `json:"quqi_id"`
		TreeID       int64  `json:"tree_id"`
	} `json:"data"`
}

// RenameRes is the response of the rename endpoint.
type RenameRes struct {
	BaseRes
	Data struct {
		NodeID     int64  `json:"node_id"`
		GroupID    int64  `json:"quqi_id"`
		Rename     string `json:"rename"`
		TreeID     int64  `json:"tree_id"`
		UpdateTime int64  `json:"updatetime"`
	} `json:"data"`
}

// CopyRes is the response of the copy endpoint (status envelope only).
type CopyRes struct {
	BaseRes
}

// RemoveRes is the response of the delete endpoint (status envelope only).
type RemoveRes struct {
	BaseRes
}
|
|
||||||
|
|
||||||
// Group describes a Quqi group (a storage space the account can access).
type Group struct {
	ID              int    `json:"quqi_id"`
	Type            int    `json:"type"`
	Name            string `json:"name"`
	IsAdministrator int    `json:"is_administrator"`
	Role            int    `json:"role"`
	Avatar          string `json:"avatar_url"`
	IsStick         int    `json:"is_stick"`
	Nickname        string `json:"nickname"`
	Status          int    `json:"status"`
}

// List is a directory listing: the directory itself (embedded) plus its
// child directories and files.
type List struct {
	ListDir
	Dir  []*ListDir  `json:"dir"`
	File []*ListFile `json:"file"`
}

// ListItem holds the fields shared by directory and file entries.
type ListItem struct {
	AddTime        int64  `json:"add_time"`
	IsDir          int    `json:"is_dir"`
	IsExpand       int    `json:"is_expand"`
	IsFinalize     int    `json:"is_finalize"`
	LastEditorName string `json:"last_editor_name"`
	Name           string `json:"name"`
	NodeID         int64  `json:"nid"`
	ParentID       int64  `json:"parent_id"`
	Permission     int    `json:"permission"`
	TreeID         int64  `json:"tid"`
	UpdateCNT      int64  `json:"update_cnt"`
	UpdateTime     int64  `json:"update_time"`
}

// ListDir is a directory entry in a listing.
type ListDir struct {
	ListItem
	ChildDocNum int64  `json:"child_doc_num"`
	DirDetail   string `json:"dir_detail"`
	DirType     int    `json:"dir_type"`
}

// ListFile is a file entry in a listing.
type ListFile struct {
	ListItem
	BroadDocType       string `json:"broad_doc_type"`
	CanDisplay         bool   `json:"can_display"`
	Detail             string `json:"detail"`
	EXT                string `json:"ext"`
	Filetype           string `json:"filetype"`
	HasMobileThumbnail bool   `json:"has_mobile_thumbnail"`
	HasThumbnail       bool   `json:"has_thumbnail"`
	Size               int64  `json:"size"`
	Version            int    `json:"version"`
}
|
|
||||||
|
|
||||||
// UploadInitResp is the response of the upload-init endpoint. When Exist is
// true the file is already present on the server and no data transfer is
// needed; otherwise the COS bucket/key/upload-id describe the multipart
// upload target.
type UploadInitResp struct {
	Data struct {
		Bucket   string `json:"bucket"`
		Exist    bool   `json:"exist"`
		Key      string `json:"key"`
		TaskID   string `json:"task_id"`
		Token    string `json:"token"`
		UploadID string `json:"upload_id"`
		URL      string `json:"url"`
		NodeID   int64  `json:"node_id"`
		NodeName string `json:"node_name"`
		ParentID int64  `json:"parent_id"`
	} `json:"data"`
	Err int    `json:"err"`
	Msg string `json:"msg"`
}

// TempKeyResp is the response of the tempKey endpoint; it carries temporary
// object-storage credentials used for the multipart upload.
type TempKeyResp struct {
	Err  int    `json:"err"`
	Msg  string `json:"msg"`
	Data struct {
		ExpiredTime int    `json:"expiredTime"`
		Expiration  string `json:"expiration"`
		Credentials struct {
			SessionToken string `json:"sessionToken"`
			TmpSecretID  string `json:"tmpSecretId"`
			TmpSecretKey string `json:"tmpSecretKey"`
		} `json:"credentials"`
		RequestID string `json:"requestId"`
		StartTime int    `json:"startTime"`
	} `json:"data"`
}

// UploadFinishResp is the response of the upload-finish endpoint.
type UploadFinishResp struct {
	Data struct {
		NodeID   int64  `json:"node_id"`
		NodeName string `json:"node_name"`
		ParentID int64  `json:"parent_id"`
		QuqiID   int64  `json:"quqi_id"`
		TreeID   int64  `json:"tree_id"`
	} `json:"data"`
	Err int    `json:"err"`
	Msg string `json:"msg"`
}
|
|
||||||
|
|
||||||
// UrlExchangeResp is the response of the CDN url-exchange endpoint. For
// encrypted files it additionally describes the encryption parameters
// (key, algorithm, encrypted size) needed to decrypt the downloaded stream.
type UrlExchangeResp struct {
	BaseRes
	Data struct {
		Name               string `json:"name"`
		Mime               string `json:"mime"`
		Size               int64  `json:"size"`
		DownloadType       int    `json:"download_type"`
		ChannelType        int    `json:"channel_type"`
		ChannelID          int    `json:"channel_id"`
		Url                string `json:"url"`
		ExpiredTime        int64  `json:"expired_time"`
		IsEncrypted        bool   `json:"is_encrypted"`
		EncryptedSize      int64  `json:"encrypted_size"`
		EncryptedAlg       string `json:"encrypted_alg"`
		EncryptedKey       string `json:"encrypted_key"`
		PassportID         int64  `json:"passport_id"`
		RequestExpiredTime int64  `json:"request_expired_time"`
	} `json:"data"`
}
|
|
@ -1,299 +0,0 @@
|
|||||||
package quqi
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"context"
|
|
||||||
"encoding/base64"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
"net/http"
|
|
||||||
"net/url"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/OpenListTeam/OpenList/drivers/base"
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/errs"
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/model"
|
|
||||||
"github.com/OpenListTeam/OpenList/internal/stream"
|
|
||||||
"github.com/OpenListTeam/OpenList/pkg/http_range"
|
|
||||||
"github.com/OpenListTeam/OpenList/pkg/utils"
|
|
||||||
"github.com/go-resty/resty/v2"
|
|
||||||
"github.com/minio/sio"
|
|
||||||
)
|
|
||||||
|
|
||||||
// do others that not defined in Driver interface
|
|
||||||
// request performs an HTTP request against a Quqi endpoint and decodes the
// JSON response. host overrides the default "quqi.com" host when non-empty;
// path is the URL path; callback, when non-nil, may further customize the
// request (form data, query params, ...); resp, when non-nil, receives the
// fully decoded response body.
//
// The body is always decoded into BaseRes first so that a non-zero API
// error code can be turned into a Go error before the caller sees the data.
func (d *Quqi) request(host string, path string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
	var (
		reqUrl = url.URL{
			Scheme: "https",
			Host:   "quqi.com",
			Path:   path,
		}
		req    = base.RestyClient.R()
		result BaseRes
	)

	if host != "" {
		reqUrl.Host = host
	}
	// Quqi requires the Origin header and the session cookie on every call.
	req.SetHeaders(map[string]string{
		"Origin": "https://quqi.com",
		"Cookie": d.Cookie,
	})

	// Most endpoints expect the group id as the "quqiid" query parameter.
	if d.GroupID != "" {
		req.SetQueryParam("quqiid", d.GroupID)
	}

	if callback != nil {
		callback(req)
	}

	res, err := req.Execute(method, reqUrl.String())
	if err != nil {
		return nil, err
	}
	// resty.Request.SetResult cannot parse result correctly sometimes
	err = utils.Json.Unmarshal(res.Body(), &result)
	if err != nil {
		return nil, err
	}
	if result.Code != 0 {
		return nil, errors.New(result.Message)
	}
	// Decode a second time into the caller-supplied structure, which may
	// describe more of the payload than BaseRes does.
	if resp != nil {
		err = utils.Json.Unmarshal(res.Body(), resp)
		if err != nil {
			return nil, err
		}
	}
	return res, nil
}
|
|
||||||
|
|
||||||
// login authenticates against Quqi. It first tries the cookie configured in
// the driver addition (falling back to any cookie already held); only when
// no valid cookie is available does it perform a phone/password login and
// store the session cookies returned by the server.
func (d *Quqi) login() error {
	// An explicitly configured cookie takes precedence over a cached one.
	if d.Addition.Cookie != "" {
		d.Cookie = d.Addition.Cookie
	}
	if d.checkLogin() {
		return nil
	}
	// A cookie was supplied but the server rejected it; do not silently fall
	// back to password login in that case.
	if d.Cookie != "" {
		return errors.New("cookie is invalid")
	}
	if d.Phone == "" {
		return errors.New("phone number is empty")
	}
	if d.Password == "" {
		return errs.EmptyPassword
	}

	// Password login; the server expects the password base64-encoded.
	resp, err := d.request("", "/auth/person/v2/login/password", resty.MethodPost, func(req *resty.Request) {
		req.SetFormData(map[string]string{
			"phone":    d.Phone,
			"password": base64.StdEncoding.EncodeToString([]byte(d.Password)),
		})
	}, nil)
	if err != nil {
		return err
	}

	// Collapse all session cookies into a single Cookie header value.
	var cookies []string
	for _, cookie := range resp.RawResponse.Cookies() {
		cookies = append(cookies, fmt.Sprintf("%s=%s", cookie.Name, cookie.Value))
	}
	d.Cookie = strings.Join(cookies, ";")

	return nil
}
|
|
||||||
|
|
||||||
func (d *Quqi) checkLogin() bool {
|
|
||||||
if _, err := d.request("", "/auth/account/baseInfo", resty.MethodGet, nil, nil); err != nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// decryptKey decodes the base64-like encrypted-key string returned by Quqi
// into the fixed 32-byte key used for sio decryption.
//
// The input uses the standard base64 alphabet (A-Z, a-z, 0-9, '+', '/')
// without padding; every other character is ignored. If the decoded data is
// shorter than 32 bytes the remainder is zero-filled; longer input is
// truncated to 32 bytes.
func decryptKey(encodeKey string) []byte {
	// Strip every character outside the base64 alphabet. The previous code
	// passed a regex pattern to strings.ReplaceAll, which performs literal
	// (non-regex) substring replacement and therefore never removed
	// anything; stray characters then shifted the 6-bit groups and
	// corrupted the derived key.
	var filtered strings.Builder
	filtered.Grow(len(encodeKey))
	for _, ch := range encodeKey {
		switch {
		case ch >= 'A' && ch <= 'Z',
			ch >= 'a' && ch <= 'z',
			ch >= '0' && ch <= '9',
			ch == '+', ch == '/':
			filtered.WriteRune(ch)
		}
	}
	u := filtered.String()

	const keyLen = 32
	out := make([]byte, keyLen)

	var (
		acc uint32 // accumulates up to four 6-bit groups (24 bits, big-endian)
		idx int    // next write position in out
	)
	for l := 0; l < len(u); l++ {
		r := l & 3 // position of this character within its 4-char group

		// Map the base64 character to its 6-bit value.
		var v uint32
		switch c := u[l]; {
		case c >= 'A' && c <= 'Z':
			v = uint32(c - 'A') // 0..25
		case c >= 'a' && c <= 'z':
			v = uint32(c-'a') + 26 // 26..51
		case c >= '0' && c <= '9':
			v = uint32(c-'0') + 52 // 52..61
		case c == '+':
			v = 62
		case c == '/':
			v = 63
		}
		acc |= v << uint32(6*(3-r))

		// Flush three decoded bytes (most significant first) once a 4-char
		// group is complete, or at the end of the input.
		if r == 3 || l == len(u)-1 {
			for e := 0; e < 3 && idx < keyLen; e, idx = e+1, idx+1 {
				out[idx] = byte(acc >> (16 - 8*e))
			}
			acc = 0
		}
	}

	return out
}
|
|
||||||
|
|
||||||
func (d *Quqi) linkFromPreview(id string) (*model.Link, error) {
|
|
||||||
var getDocResp GetDocRes
|
|
||||||
if _, err := d.request("", "/api/doc/getDoc", resty.MethodPost, func(req *resty.Request) {
|
|
||||||
req.SetFormData(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": id,
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
})
|
|
||||||
}, &getDocResp); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if getDocResp.Data.OriginPath == "" {
|
|
||||||
return nil, errors.New("cannot get link from preview")
|
|
||||||
}
|
|
||||||
return &model.Link{
|
|
||||||
URL: getDocResp.Data.OriginPath,
|
|
||||||
Header: http.Header{
|
|
||||||
"Origin": []string{"https://quqi.com"},
|
|
||||||
"Cookie": []string{d.Cookie},
|
|
||||||
},
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (d *Quqi) linkFromDownload(id string) (*model.Link, error) {
|
|
||||||
var getDownloadResp GetDownloadResp
|
|
||||||
if _, err := d.request("", "/api/doc/getDownload", resty.MethodGet, func(req *resty.Request) {
|
|
||||||
req.SetQueryParams(map[string]string{
|
|
||||||
"quqi_id": d.GroupID,
|
|
||||||
"tree_id": "1",
|
|
||||||
"node_id": id,
|
|
||||||
"url_type": "undefined",
|
|
||||||
"entry_type": "undefined",
|
|
||||||
"client_id": d.ClientID,
|
|
||||||
"no_redirect": "1",
|
|
||||||
})
|
|
||||||
}, &getDownloadResp); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if getDownloadResp.Data.Url == "" {
|
|
||||||
return nil, errors.New("cannot get link from download")
|
|
||||||
}
|
|
||||||
|
|
||||||
return &model.Link{
|
|
||||||
URL: getDownloadResp.Data.Url,
|
|
||||||
Header: http.Header{
|
|
||||||
"Origin": []string{"https://quqi.com"},
|
|
||||||
"Cookie": []string{d.Cookie},
|
|
||||||
},
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// linkFromCDN resolves a download link for node id through Quqi's CDN. For
// unencrypted files it returns the exchanged URL directly; for encrypted
// files it returns a RangeReadCloser that downloads the corresponding
// encrypted byte range and decrypts it on the fly with sio (DARE format).
func (d *Quqi) linkFromCDN(id string) (*model.Link, error) {
	downloadLink, err := d.linkFromDownload(id)
	if err != nil {
		return nil, err
	}

	// Exchange the plain download URL for a CDN URL plus encryption metadata.
	var urlExchangeResp UrlExchangeResp
	if _, err = d.request("api.quqi.com", "/preview/downloadInfo/url/exchange", resty.MethodGet, func(req *resty.Request) {
		req.SetQueryParam("url", downloadLink.URL)
	}, &urlExchangeResp); err != nil {
		return nil, err
	}
	if urlExchangeResp.Data.Url == "" {
		return nil, errors.New("cannot get link from cdn")
	}

	// Assume an unencrypted file is possible and return its URL unchanged.
	if !urlExchangeResp.Data.IsEncrypted {
		return &model.Link{
			URL: urlExchangeResp.Data.Url,
			Header: http.Header{
				"Origin": []string{"https://quqi.com"},
				"Cookie": []string{d.Cookie},
			},
		}, nil
	}

	// From the sio DARE format (https://github.com/minio/sio/blob/master/DARE.md)
	// and practical testing:
	// 1. encrypted_size - size = (16-byte package header + 16-byte auth tag) * N,
	//    where N is the number of encrypted packages
	// 2. N = (size + 64*1024 - 1) / (64*1024), i.e. each package carries a
	//    64 KiB plaintext payload
	remoteClosers := utils.EmptyClosers()
	payloadSize := int64(1 << 16)
	expiration := time.Until(time.Unix(urlExchangeResp.Data.ExpiredTime, 0))
	resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
		// Map the plaintext range onto the encrypted stream: round the start
		// down to a package boundary and widen the length to whole packages.
		encryptedOffset := httpRange.Start / payloadSize * (payloadSize + 32)
		decryptedOffset := httpRange.Start % payloadSize
		encryptedLength := (httpRange.Length+httpRange.Start+payloadSize-1)/payloadSize*(payloadSize+32) - encryptedOffset
		if httpRange.Length < 0 {
			// Negative length means "until EOF"; forward it unchanged.
			encryptedLength = httpRange.Length
		} else {
			// Clamp to EOF when the request reaches the end of either the
			// plaintext or the encrypted stream.
			if httpRange.Length+httpRange.Start >= urlExchangeResp.Data.Size || encryptedLength+encryptedOffset >= urlExchangeResp.Data.EncryptedSize {
				encryptedLength = -1
			}
		}
		//log.Debugf("size: %d\tencrypted_size: %d", urlExchangeResp.Data.Size, urlExchangeResp.Data.EncryptedSize)
		//log.Debugf("http range offset: %d, length: %d", httpRange.Start, httpRange.Length)
		//log.Debugf("encrypted offset: %d, length: %d, decrypted offset: %d", encryptedOffset, encryptedLength, decryptedOffset)

		rrc, err := stream.GetRangeReadCloserFromLink(urlExchangeResp.Data.EncryptedSize, &model.Link{
			URL: urlExchangeResp.Data.Url,
			Header: http.Header{
				"Origin": []string{"https://quqi.com"},
				"Cookie": []string{d.Cookie},
			},
		})
		if err != nil {
			return nil, err
		}

		rc, err := rrc.RangeRead(ctx, http_range.Range{Start: encryptedOffset, Length: encryptedLength})
		remoteClosers.AddClosers(rrc.GetClosers())
		if err != nil {
			return nil, err
		}

		// SequenceNumber must equal the index of the first requested package
		// for DARE authentication to succeed.
		decryptReader, err := sio.DecryptReader(rc, sio.Config{
			MinVersion:     sio.Version10,
			MaxVersion:     sio.Version20,
			CipherSuites:   []byte{sio.CHACHA20_POLY1305, sio.AES_256_GCM},
			Key:            decryptKey(urlExchangeResp.Data.EncryptedKey),
			SequenceNumber: uint32(httpRange.Start / payloadSize),
		})
		if err != nil {
			return nil, err
		}
		// Skip the decrypted bytes that precede the requested offset within
		// the first package.
		bufferReader := bufio.NewReader(decryptReader)
		bufferReader.Discard(int(decryptedOffset))

		return io.NopCloser(bufferReader), nil
	}

	return &model.Link{
		RangeReadCloser: &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: remoteClosers},
		Expiration:      &expiration,
	}, nil
}
|
|
@ -125,10 +125,10 @@ func InitialSettings() []model.SettingItem {
|
|||||||
"Google":"https://docs.google.com/gview?url=$e_url&embedded=true"
|
"Google":"https://docs.google.com/gview?url=$e_url&embedded=true"
|
||||||
},
|
},
|
||||||
"pdf": {
|
"pdf": {
|
||||||
"PDF.js":"//res.oplist.org/pdf.js/web/viewer.html?url=$e_url"
|
"PDF.js":"https://res.oplist.org/pdf.js/web/viewer.html?file=$e_url"
|
||||||
},
|
},
|
||||||
"epub": {
|
"epub": {
|
||||||
"EPUB.js":"//res.oplist.org/epub.js/viewer.html?url=$e_url"
|
"EPUB.js":"https://res.oplist.org/epub.js/viewer.html?url=$e_url"
|
||||||
}
|
}
|
||||||
}`, Type: conf.TypeText, Group: model.PREVIEW},
|
}`, Type: conf.TypeText, Group: model.PREVIEW},
|
||||||
// {Key: conf.OfficeViewers, Value: `{
|
// {Key: conf.OfficeViewers, Value: `{
|
||||||
|
@ -165,6 +165,10 @@ func (d *downloader) download() (io.ReadCloser, error) {
|
|||||||
if maxPart < d.cfg.Concurrency {
|
if maxPart < d.cfg.Concurrency {
|
||||||
d.cfg.Concurrency = maxPart
|
d.cfg.Concurrency = maxPart
|
||||||
}
|
}
|
||||||
|
if d.params.Range.Length == 0 {
|
||||||
|
d.cfg.Concurrency = 1
|
||||||
|
}
|
||||||
|
|
||||||
log.Debugf("cfgConcurrency:%d", d.cfg.Concurrency)
|
log.Debugf("cfgConcurrency:%d", d.cfg.Concurrency)
|
||||||
|
|
||||||
if d.cfg.Concurrency == 1 {
|
if d.cfg.Concurrency == 1 {
|
||||||
|
Reference in New Issue
Block a user