Compare commits

..

13 Commits

Author SHA1 Message Date
c0a8321461 Update release_docker.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:36:35 +08:00
680501a8a8 Update release_android.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:36:22 +08:00
83913a8031 Update release.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:36:11 +08:00
929d4e65b9 Update release_freebsd.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:35:51 +08:00
6717d02f94 Update release_linux_musl.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:35:34 +08:00
9d2a71e3eb Update release_linux_musl_arm.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:35:21 +08:00
62de731d37 Update release_freebsd.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:34:35 +08:00
7277163b0a Update release_linux_musl.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:34:04 +08:00
98f65d5478 Update release_linux_musl_arm.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:33:52 +08:00
312e04ea69 chore(ci):Fixed CI bugs 2025-06-20 20:13:40 +08:00
6ddb4359d3 Update release.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:07:56 +08:00
cf444c2f63 Update release.yml
Signed-off-by: Pikachu Ren <40362270+PIKACHUIM@users.noreply.github.com>
2025-06-20 20:05:04 +08:00
0176cfb0c9 chore:fixed ci build 2025-06-20 19:32:10 +08:00
58 changed files with 1304 additions and 2284 deletions

@@ -1,6 +1,5 @@
name: "Bug report"
description: Bug / 错误报告 / 问题
title: "[BUG] "
description: Bug report
labels: [bug]
body:
- type: markdown
@@ -17,14 +16,14 @@ body:
您必须勾选以下所有内容否则您的issue可能会被直接关闭。或者您可以去[讨论区](https://github.com/OpenListTeam/OpenList/discussions)
options:
- label: |
I have read the [documentation](https://docs.oplist.org).
我已经阅读了[文档](https://docs.oplist.org)。
I have read the [documentation](https://openlistteam.github.io/docs).
我已经阅读了[文档](https://openlistteam.github.io/docs)。
- label: |
I'm sure there are no duplicate issues or discussions.
我确定没有重复的issue或讨论。
- label: |
I'm sure it's due to `OpenList` and not something else(such as [Network](https://docs.oplist.org/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host) ,`Dependencies` or `Operational`).
我确定是`OpenList`的问题,而不是其他原因(例如[网络](https://docs.oplist.org/zh/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host)`依赖`或`操作`)。
I'm sure it's due to `OpenList` and not something else(such as [Network](https://openlistteam.github.io/docs/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host) ,`Dependencies` or `Operational`).
我确定是`OpenList`的问题,而不是其他原因(例如[网络](https://openlistteam.github.io/docs/zh/faq/howto.html#tls-handshake-timeout-read-connection-reset-by-peer-dns-lookup-failed-connect-connection-refused-client-timeout-exceeded-while-awaiting-headers-no-such-host)`依赖`或`操作`)。
- label: |
I'm sure this issue is not fixed in the latest version.
我确定这个问题在最新版本中没有被修复。
@@ -36,7 +35,7 @@ body:
description: |
What version of our software are you running? Do not use `latest` or `master` as an answer.
您使用的是哪个版本的软件?请不要使用`latest`或`master`作为答案。
placeholder: v4.xx.xx
placeholder: v3.xx.xx
validations:
required: true
- type: input
@@ -61,7 +60,7 @@
label: Reproduction / 复现链接
description: |
Please provide a link to a repo that can reproduce the problem you ran into. Please be aware that your issue may be closed directly if you don't provide it.
请提供能复现此问题的链接请知悉如果不提供它你的issue可能会被直接关闭
请提供能复现此问题的链接请知悉如果不提供它你的issue可能会被直接关闭
validations:
required: true
- type: textarea
@@ -69,8 +68,8 @@
attributes:
label: Config / 配置
description: |
Please provide the configuration file of your `OpenList` application and take a screenshot of the relevant storage configuration. (you can mask sensitive fields)
请提供您的`OpenList`应用的配置文件,并截图相关存储配置。(隐藏隐私字段)
Please provide the configuration file of your `OpenList` application and take a screenshot of the relevant storage configuration. (hide privacy field)
请提供您的`OpenList`应用的配置文件,并截图相关存储配置。(隐藏隐私字段)
validations:
required: true
- type: textarea

@@ -2,7 +2,4 @@ blank_issues_enabled: true
contact_links:
- name: Questions & Discussions
url: https://github.com/OpenListTeam/OpenList/discussions
about: Discuss / 讨论、问题、想法等
- name: Chat
url: https://t.me/OpenListTeam
about: Chat with us / 与我们聊天
about: Use GitHub discussions for message-board style questions and discussions.

@@ -1,6 +1,5 @@
name: "Feature request"
description: Feature request / 功能请求 / 增强
title: "[Feature] "
description: Feature request
labels: [enhancement]
body:
- type: checkboxes
@@ -8,7 +7,7 @@ body:
label: Please make sure of the following things
description: You may select more than one, even select all.
options:
- label: I have read the [documentation](https://docs.openlist.org).
- label: I have read the [documentation](https://openlistteam.github.io/docs).
- label: I'm sure there are no duplicate issues or discussions.
- label: I'm sure this feature is not implemented.
- label: I'm sure it's a reasonable and popular requirement.

@@ -21,4 +21,4 @@ jobs:
- run: npx changelogithub # or changelogithub@0.12 if ensure the stable result
env:
GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
GITHUB_TOKEN: ${{secrets.MY_TOKEN}}

@@ -0,0 +1,22 @@
name: Close need info
on:
schedule:
- cron: "0 0 */1 * *"
workflow_dispatch:
jobs:
close-need-info:
runs-on: ubuntu-latest
steps:
- name: close-issues
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'question'
inactive-day: 3
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to no activities in 3 days.
你好 @${{ github.event.issue.user.login }}此issue因超过3天未回复被关闭。

.github/workflows/issue_close_stale.yml (new file, 21 lines)

@@ -0,0 +1,21 @@
name: Close inactive
on:
schedule:
- cron: "0 0 */7 * *"
workflow_dispatch:
jobs:
close-inactive:
runs-on: ubuntu-latest
steps:
- name: close-issues
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issues'
token: ${{ secrets.GITHUB_TOKEN }}
labels: 'stale'
inactive-day: 8
close-reason: 'not_planned'
body: |
Hello @${{ github.event.issue.user.login }}, this issue was closed due to inactive more than 52 days. You can reopen or recreate it if you think it should continue. Thank you for your contributions again.

.github/workflows/issue_duplicate.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
name: Issue Duplicate
on:
issues:
types: [labeled]
jobs:
create-comment:
runs-on: ubuntu-latest
if: github.event.label.name == 'duplicate'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}, your issue is a duplicate and will be closed.
你好 @${{ github.event.issue.user.login }}你的issue是重复的将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/issue_invalid.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
name: Issue Invalid
on:
issues:
types: [labeled]
jobs:
create-comment:
runs-on: ubuntu-latest
if: github.event.label.name == 'invalid'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}, your issue is invalid and will be closed.
你好 @${{ github.event.issue.user.login }}你的issue无效将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

.github/workflows/issue_on_close.yml (new file, 17 lines)

@@ -0,0 +1,17 @@
name: Remove working label when issue closed
on:
issues:
types: [closed]
jobs:
rm-working:
runs-on: ubuntu-latest
steps:
- name: Remove working label
uses: actions-cool/issues-helper@v3
with:
actions: 'remove-labels'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
labels: 'working,pr-welcome'

.github/workflows/issue_question.yml (new file, 20 lines)

@@ -0,0 +1,20 @@
name: Issue Question
on:
issues:
types: [labeled]
jobs:
create-comment:
runs-on: ubuntu-latest
if: github.event.label.name == 'question'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3.6.0
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}, please input issue by template and add detail. Issues labeled by `question` will be closed if no activities in 3 days.
你好 @${{ github.event.issue.user.login }}请按照issue模板填写, 并详细说明问题/日志记录/复现步骤/复现链接/实现思路或提供更多信息等, 3天内未回复issue自动关闭。

.github/workflows/issue_similarity.yml (new file, 19 lines)

@@ -0,0 +1,19 @@
name: Issues Similarity Analysis
on:
issues:
types: [opened, edited]
jobs:
similarity-analysis:
runs-on: ubuntu-latest
steps:
- name: analysis
uses: actions-cool/issues-similarity-analysis@v1
with:
filter-threshold: 0.5
comment-title: '### See'
comment-body: '${index}. ${similarity} #${number}'
show-footer: false
show-mentioned: true
since-days: 730

.github/workflows/issue_translate.yml (new file, 13 lines)

@@ -0,0 +1,13 @@
name: Translation Helper
on:
pull_request_target:
types: [opened]
issues:
types: [opened]
jobs:
translate:
runs-on: ubuntu-latest
steps:
- uses: actions-cool/translation-helper@v1.2.0

.github/workflows/issue_wontfix.yml (new file, 25 lines)

@@ -0,0 +1,25 @@
name: Issue Wontfix
on:
issues:
types: [labeled]
jobs:
lock-issue:
runs-on: ubuntu-latest
if: github.event.label.name == 'wontfix'
steps:
- name: Create comment
uses: actions-cool/issues-helper@v3
with:
actions: 'create-comment'
token: ${{ secrets.GITHUB_TOKEN }}
issue-number: ${{ github.event.issue.number }}
body: |
Hello @${{ github.event.issue.user.login }}, this issue will not be worked on and will be closed.
你好 @${{ github.event.issue.user.login }},这不会被处理,将被关闭。
- name: Close issue
uses: actions-cool/issues-helper@v3
with:
actions: 'close-issue'
token: ${{ secrets.GITHUB_TOKEN }}

@@ -2,19 +2,6 @@ name: release_docker
on:
workflow_dispatch:
inputs:
manual_tag:
description: 'Tag name (like v0.1.0). Required if as_latest is true.'
required: false
type: string
as_latest:
description: 'Tag as latest?'
required: true
default: 'false'
type: choice
options:
- 'true'
- 'false'
push:
tags:
- 'v*'
@@ -30,8 +17,8 @@ env:
REGISTRY: ghcr.io
ARTIFACT_NAME: 'binaries_docker_release'
RELEASE_PLATFORMS: 'linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x,linux/ppc64le,linux/riscv64'
IMAGE_PUSH: ${{ github.event_name == 'push' || github.event_name == 'workflow_dispatch' }}
IMAGE_IS_PROD: ${{ github.ref_type == 'tag' || github.event.inputs.as_latest == 'true' }}
IMAGE_PUSH: ${{ github.event_name == 'push' }}
IMAGE_IS_PROD: ${{ github.ref_type == 'tag' }}
IMAGE_TAGS_BETA: |
type=raw,value=beta,enable={{is_default_branch}}
@@ -142,14 +129,9 @@ jobs:
images: |
${{ env.REGISTRY }}/${{ env.ORG_NAME }}/${{ env.IMAGE_NAME }}
${{ env.ORG_NAME }}/${{ env.IMAGE_NAME_DOCKERHUB }}
tags: >
${{ env.IMAGE_IS_PROD == 'true' && (
github.event_name == 'workflow_dispatch'
&& format('type=raw,value={0}', github.event.inputs.manual_tag)
|| format('type=raw,value={0}', github.ref_name)
) || env.IMAGE_TAGS_BETA }}
tags: ${{ env.IMAGE_IS_PROD == 'true' && '' || env.IMAGE_TAGS_BETA }}
flavor: |
latest=${{ env.IMAGE_IS_PROD }}
${{ env.IMAGE_IS_PROD == 'true' && 'latest=true' || '' }}
${{ matrix.tag_favor }}
- name: Build and push

@@ -95,8 +95,7 @@ English | [中文](./README_cn.md) | [日本語](./README_ja.md) | [Contributing
## Document
- https://docs.oplist.org
- https://docs.openlist.team
<https://docs.openlist.team>
## Demo
@@ -126,4 +125,4 @@ The `OpenList` is open-source software licensed under the AGPL-3.0 license.
---
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam) · [Telegram Channel](https://t.me/OpenListOfficial)
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam)

@@ -93,8 +93,7 @@
## 文档
- https://docs.oplist.org
- https://docs.openlist.team
<https://docs.openlist.team>
## Demo
@@ -124,4 +123,4 @@ N/A重建中
---
> [@GitHub](https://github.com/OpenListTeam) · [Telegram 交流群](https://t.me/OpenListTeam) · [Telegram 频道](https://t.me/OpenListOfficial)
> [@GitHub](https://github.com/OpenListTeam) · [Telegram 交流群](https://t.me/OpenListTeam)

@@ -94,8 +94,7 @@
## ドキュメント
- https://docs.oplist.org
- https://docs.openlist.team
<https://docs.openlist.team>
## デモ
@@ -125,4 +124,4 @@ N/A (再構築中)
---
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam) · [Telegram Channel](https://t.me/OpenListOfficial)
> [@GitHub](https://github.com/OpenListTeam) · [Telegram Group](https://t.me/OpenListTeam)

@@ -1,241 +0,0 @@
package cmd
import (
log "github.com/sirupsen/logrus"
"io"
"os"
"path"
"path/filepath"
"strings"
"github.com/spf13/cobra"
rcCrypt "github.com/rclone/rclone/backend/crypt"
"github.com/rclone/rclone/fs/config/configmap"
"github.com/rclone/rclone/fs/config/obscure"
)
// encryption and decryption command format for Crypt driver
type options struct {
Op string //decrypt or encrypt
src string //source dir or file
dst string //out destination
pwd string //de/encrypt password
salt string
filenameEncryption string //reference drivers\crypt\meta.go Addtion
dirnameEncryption string
filenameEncode string
suffix string
}
var opt options
// CryptCmd represents the crypt command
var CryptCmd = &cobra.Command{
Use: "crypt",
Short: "Encrypt or decrypt local file or dir",
Example: `openlist crypt -s ./src/encrypt/ --op=de --pwd=123456 --salt=345678`,
Run: func(cmd *cobra.Command, args []string) {
opt.validate()
opt.cryptFileDir()
},
}
func init() {
RootCmd.AddCommand(CryptCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// versionCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
CryptCmd.Flags().StringVarP(&opt.src, "src", "s", "", "src file or dir to encrypt/decrypt")
CryptCmd.Flags().StringVarP(&opt.dst, "dst", "d", "", "dst dir to output,if not set,output to src dir")
CryptCmd.Flags().StringVar(&opt.Op, "op", "", "de or en which stands for decrypt or encrypt")
CryptCmd.Flags().StringVar(&opt.pwd, "pwd", "", "password used to encrypt/decrypt,if not contain ___Obfuscated___ prefix,will be obfuscated before used")
CryptCmd.Flags().StringVar(&opt.salt, "salt", "", "salt used to encrypt/decrypt,if not contain ___Obfuscated___ prefix,will be obfuscated before used")
CryptCmd.Flags().StringVar(&opt.filenameEncryption, "filename-encrypt", "off", "filename encryption mode: off,standard,obfuscate")
CryptCmd.Flags().StringVar(&opt.dirnameEncryption, "dirname-encrypt", "false", "is dirname encryption enabled:true,false")
CryptCmd.Flags().StringVar(&opt.filenameEncode, "filename-encode", "base64", "filename encoding mode: base64,base32,base32768")
CryptCmd.Flags().StringVar(&opt.suffix, "suffix", ".bin", "suffix for encrypted file,default is .bin")
}
func (o *options) validate() {
if o.src == "" {
log.Fatal("src can not be empty")
}
if o.Op != "encrypt" && o.Op != "decrypt" && o.Op != "en" && o.Op != "de" {
log.Fatal("op must be encrypt or decrypt")
}
if o.filenameEncryption != "off" && o.filenameEncryption != "standard" && o.filenameEncryption != "obfuscate" {
log.Fatal("filename_encryption must be off,standard,obfuscate")
}
if o.filenameEncode != "base64" && o.filenameEncode != "base32" && o.filenameEncode != "base32768" {
log.Fatal("filename_encode must be base64,base32,base32768")
}
}
func (o *options) cryptFileDir() {
src, _ := filepath.Abs(o.src)
log.Infof("src abs is %v", src)
fileInfo, err := os.Stat(src)
if err != nil {
log.Fatalf("reading file/dir %v failed,err:%v", src, err)
}
pwd := updateObfusParm(o.pwd)
salt := updateObfusParm(o.salt)
//create cipher
config := configmap.Simple{
"password": pwd,
"password2": salt,
"filename_encryption": o.filenameEncryption,
"directory_name_encryption": o.dirnameEncryption,
"filename_encoding": o.filenameEncode,
"suffix": o.suffix,
"pass_bad_blocks": "",
}
log.Infof("config:%v", config)
cipher, err := rcCrypt.NewCipher(config)
if err != nil {
log.Fatalf("create cipher failed,err:%v", err)
}
dst := ""
//check and create dst dir
if o.dst != "" {
dst, _ = filepath.Abs(o.dst)
checkCreateDir(dst)
}
// src is file
if !fileInfo.IsDir() { //file
if dst == "" {
dst = filepath.Dir(src)
}
o.cryptFile(cipher, src, dst)
return
}
// src is dir
if dst == "" {
//if src is dir and not set dst dir ,create ${src}_crypt dir as dst dir
dst = path.Join("./", fileInfo.Name()+"_crypt")
}
log.Infof("dst : %v", dst)
filepath.Walk(src, func(p string, info os.FileInfo, err error) error {
if err != nil {
log.Errorf("get file %v info failed, err:%v", p, err)
return err
}
if info.IsDir() {
//create output dir
d := strings.Replace(p, src, dst, 1)
log.Infof("create output dir %v", d)
checkCreateDir(d)
return nil
}
d := strings.Replace(filepath.Dir(p), src, dst, 1)
o.cryptFile(cipher, p, d)
return nil
})
}
func (o *options) cryptFile(cipher *rcCrypt.Cipher, src string, dst string) {
fileInfo, err := os.Stat(src)
if err != nil {
log.Fatalf("get file %v info failed,err:%v", src, err)
}
fd, err := os.OpenFile(src, os.O_RDWR, 0666)
if err != nil {
log.Fatalf("open file %v failed,err:%v", src, err)
}
defer fd.Close()
var cryptSrcReader io.Reader
var outFile string
if o.Op == "encrypt" || o.Op == "en" {
filename := fileInfo.Name()
if o.filenameEncryption != "off" {
filename = cipher.EncryptFileName(fileInfo.Name())
log.Infof("encrypt file name %v to %v", fileInfo.Name(), filename)
}
cryptSrcReader, err = cipher.EncryptData(fd)
if err != nil {
log.Fatalf("encrypt file %v failed,err:%v", src, err)
}
outFile = path.Join(dst, filename)
} else {
filename := fileInfo.Name()
if o.filenameEncryption != "off" {
filename, err = cipher.DecryptFileName(filename)
if err != nil {
log.Fatalf("decrypt file name %v failed,err:%v", src, err)
}
log.Infof("decrypt file name %v to %v, ", fileInfo.Name(), filename)
}
cryptSrcReader, err = cipher.DecryptData(fd)
if err != nil {
log.Fatalf("decrypt file %v failed,err:%v", src, err)
}
outFile = path.Join(dst, filename)
}
//write new file
wr, err := os.OpenFile(outFile, os.O_CREATE|os.O_WRONLY, 0755)
if err != nil {
log.Fatalf("create file %v failed,err:%v", outFile, err)
}
defer wr.Close()
_, err = io.Copy(wr, cryptSrcReader)
if err != nil {
log.Fatalf("write file %v failed,err:%v", outFile, err)
}
}
// check dir exist ,if not ,create
func checkCreateDir(dir string) {
_, err := os.Stat(dir)
if os.IsNotExist(err) {
err := os.MkdirAll(dir, 0755)
if err != nil {
log.Fatalf("create dir %v failed,err:%v", dir, err)
}
return
}
log.Fatalf("read dir %v err: %v", dir, err)
}
func updateObfusParm(str string) string {
obfuscatedPrefix := "___Obfuscated___"
if !strings.HasPrefix(str, obfuscatedPrefix) {
str, err := obscure.Obscure(str)
if err != nil {
log.Fatalf("update obfuscated parameter failed,err:%v", str)
}
} else {
str, _ = strings.CutPrefix(str, obfuscatedPrefix)
}
return str
}

@@ -1,3 +1,4 @@
version: '3.3'
services:
openlist:
restart: always
@@ -12,4 +13,4 @@ services:
- UMASK=022
- TZ=UTC
container_name: openlist
image: 'openlistteam/openlist:latest'
image: 'ghcr.io/openlistteam/openlist:latest'

@@ -6,7 +6,6 @@ import (
"fmt"
"net/http"
"net/url"
"strings"
"sync"
"time"
@@ -196,7 +195,7 @@ func (d *Pan123) Put(ctx context.Context, dstDir model.Obj, file model.FileStrea
data := base.Json{
"driveId": 0,
"duplicate": 2, // 2->覆盖 1->重命名 0->默认
"etag": strings.ToLower(etag),
"etag": etag,
"fileName": file.GetName(),
"parentFileId": dstDir.GetID(),
"size": file.GetSize(),

@@ -3,7 +3,6 @@ package _123_open
import (
"context"
"net/http"
"strings"
"time"
"github.com/OpenListTeam/OpenList/drivers/base"
@@ -22,7 +21,7 @@ func (d *Open123) create(parentFileID int64, filename string, etag string, size
req.SetBody(base.Json{
"parentFileId": parentFileID,
"filename": filename,
"etag": strings.ToLower(etag),
"etag": etag,
"size": size,
"duplicate": duplicate,
"containDir": containDir,
@@ -83,6 +82,7 @@ func (d *Open123) Upload(ctx context.Context, file model.FileStreamer, createRes
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
threadG.SetLimit(3)
for partIndex := int64(0); partIndex < uploadNums; partIndex++ {
if utils.IsCanceled(uploadCtx) {

@@ -504,6 +504,7 @@ func (y *Cloud189PC) StreamUpload(ctx context.Context, dstDir model.Obj, file mo
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
threadG.SetLimit(3)
count := int(size / sliceSize)
lastPartSize := size % sliceSize

@@ -50,9 +50,9 @@ import (
_ "github.com/OpenListTeam/OpenList/drivers/openlist"
_ "github.com/OpenListTeam/OpenList/drivers/pikpak"
_ "github.com/OpenListTeam/OpenList/drivers/pikpak_share"
_ "github.com/OpenListTeam/OpenList/drivers/quark_open"
_ "github.com/OpenListTeam/OpenList/drivers/quark_uc"
_ "github.com/OpenListTeam/OpenList/drivers/quark_uc_tv"
_ "github.com/OpenListTeam/OpenList/drivers/quqi"
_ "github.com/OpenListTeam/OpenList/drivers/s3"
_ "github.com/OpenListTeam/OpenList/drivers/seafile"
_ "github.com/OpenListTeam/OpenList/drivers/sftp"

@@ -295,6 +295,7 @@ func (d *BaiduNetdisk) Put(ctx context.Context, dstDir model.Obj, stream model.F
retry.Attempts(1),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
threadG.SetLimit(3)
for i, partseq := range precreateResp.BlockList {
if utils.IsCanceled(upCtx) {

@@ -342,6 +342,7 @@ func (d *BaiduPhoto) Put(ctx context.Context, dstDir model.Obj, stream model.Fil
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
threadG.SetLimit(3)
for i, partseq := range precreateResp.BlockList {
if utils.IsCanceled(upCtx) {

@@ -173,12 +173,13 @@ func (d *CloudreveV4) Move(ctx context.Context, srcObj, dstDir model.Obj) error
}
func (d *CloudreveV4) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
return d.request(http.MethodPost, "/file/rename", func(req *resty.Request) {
return d.request(http.MethodPost, "/file/create", func(req *resty.Request) {
req.SetBody(base.Json{
"new_name": newName,
"uri": srcObj.GetPath(),
})
}, nil)
}
func (d *CloudreveV4) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {

@@ -175,7 +175,8 @@ func (d *CloudreveV4) doLogin(needCaptcha bool) error {
}
func (d *CloudreveV4) refreshToken() error {
if d.RefreshToken == "" {
var token Token
if token.RefreshToken == "" {
if d.Username != "" {
err := d.login()
if err != nil {
@@ -184,7 +185,6 @@ func (d *CloudreveV4) refreshToken() error {
}
return nil
}
var token Token
err := d.request(http.MethodPost, "/session/token/refresh", func(req *resty.Request) {
req.SetBody(base.Json{
"refresh_token": d.RefreshToken,
@@ -469,7 +469,7 @@ func (d *CloudreveV4) upS3(ctx context.Context, file model.FileStreamer, u FileU
}
// 上传成功发送回调请求
return d.request(http.MethodGet, "/callback/s3/"+u.SessionID+"/"+u.CallbackSecret, func(req *resty.Request) {
return d.request(http.MethodPost, "/callback/s3/"+u.SessionID+"/"+u.CallbackSecret, func(req *resty.Request) {
req.SetBody("{}")
}, nil)
}

@@ -5,14 +5,19 @@ import (
"github.com/OpenListTeam/OpenList/internal/op"
)
const (
DefaultClientID = "76lrwrklhdn1icb"
)
type Addition struct {
RefreshToken string `json:"refresh_token" required:"true"`
driver.RootPath
UseOnlineAPI bool `json:"use_online_api" default:"false"`
APIAddress string `json:"api_url_address" default:"https://api.oplist.org/dropboxs/renewapi"`
OauthTokenURL string `json:"oauth_token_url" default:"https://api.oplist.org/dropboxs/renewapi"` // TODO: replace
ClientID string `json:"client_id" required:"false" help:"Keep it empty if you don't have one"`
ClientSecret string `json:"client_secret" required:"false" help:"Keep it empty if you don't have one"`
AccessToken string
RefreshToken string `json:"refresh_token" required:"true"`
RootNamespaceId string
}

@@ -15,37 +15,10 @@ import (
)
func (d *Dropbox) refreshToken() error {
// 使用在线API刷新Token无需ClientID和ClientSecret
if d.UseOnlineAPI && len(d.APIAddress) > 0 {
u := d.APIAddress
var resp struct {
RefreshToken string `json:"refresh_token"`
AccessToken string `json:"access_token"`
ErrorMessage string `json:"text"`
}
_, err := base.RestyClient.R().
SetResult(&resp).
SetQueryParams(map[string]string{
"refresh_ui": d.RefreshToken,
"server_use": "true",
"driver_txt": "dropboxs_go",
}).
Get(u)
if err != nil {
return err
}
if resp.RefreshToken == "" || resp.AccessToken == "" {
if resp.ErrorMessage != "" {
return fmt.Errorf("failed to refresh token: %s", resp.ErrorMessage)
}
return fmt.Errorf("empty token returned from official API")
}
d.AccessToken = resp.AccessToken
d.RefreshToken = resp.RefreshToken
op.MustSaveDriverStorage(d)
return nil
}
url := d.base + "/oauth2/token"
if utils.SliceContains([]string{"", DefaultClientID}, d.ClientID) {
url = d.OauthTokenURL
}
var tokenResp TokenResp
resp, err := base.RestyClient.R().
//ForceContentType("application/x-www-form-urlencoded").
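Note on this hunk: one side is the `UseOnlineAPI` refresh path, which issues a GET to `APIAddress` with the query parameters `refresh_ui`, `server_use` and `driver_txt`, and expects `refresh_token`/`access_token` (or an error message in `text`) in the reply. Below is a minimal standalone sketch of that request shape only; it uses resty as the driver does, the endpoint, parameter and field names are taken from the hunk itself, and everything else (function name, error handling) is illustrative rather than the driver's actual code.

package main

import (
	"fmt"

	"github.com/go-resty/resty/v2"
)

// tokenResp mirrors the response fields used in the hunk above.
type tokenResp struct {
	RefreshToken string `json:"refresh_token"`
	AccessToken  string `json:"access_token"`
	ErrorMessage string `json:"text"`
}

// refreshViaOnlineAPI sketches the refresh request; apiAddress and
// refreshToken are caller-supplied, the parameter names come from the diff.
func refreshViaOnlineAPI(apiAddress, refreshToken string) (tokenResp, error) {
	var resp tokenResp
	_, err := resty.New().R().
		SetResult(&resp).
		SetQueryParams(map[string]string{
			"refresh_ui": refreshToken,
			"server_use": "true",
			"driver_txt": "dropboxs_go",
		}).
		Get(apiAddress)
	if err != nil {
		return resp, err
	}
	if resp.RefreshToken == "" || resp.AccessToken == "" {
		return resp, fmt.Errorf("token refresh failed: %s", resp.ErrorMessage)
	}
	return resp, nil
}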

@@ -90,15 +90,15 @@ func (d *GooglePhoto) getFakeRoot() ([]MediaItem, error) {
return []MediaItem{
{
Id: FETCH_ALL,
Title: FETCH_ALL,
Title: "全部媒体",
},
{
Id: FETCH_ALBUMS,
Title: FETCH_ALBUMS,
Title: "全部影集",
},
{
Id: FETCH_SHARE_ALBUMS,
Title: FETCH_SHARE_ALBUMS,
Title: "共享影集",
},
}, nil
}

@@ -298,6 +298,7 @@ func (d *MoPan) Put(ctx context.Context, dstDir model.Obj, stream model.FileStre
retry.Attempts(3),
retry.Delay(time.Second),
retry.DelayType(retry.BackOffDelay))
threadG.SetLimit(3)
// step.3
parts, err := d.client.GetAllMultiUploadUrls(initUpdload.UploadFileID, initUpdload.PartInfos)
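Note: several upload hunks in this compare (Open123, Cloud189PC, BaiduNetdisk, BaiduPhoto and MoPan above) add `threadG.SetLimit(3)` right after the retrying task group is created, capping concurrent part uploads at three. The project's own group type is not shown here, so the following is only a rough sketch of the pattern, assuming errgroup-like semantics (golang.org/x/sync/errgroup) and a hypothetical uploadPart helper:

package main

import (
	"context"
	"fmt"

	"golang.org/x/sync/errgroup"
)

// uploadPart stands in for a real per-part upload call (hypothetical helper).
func uploadPart(ctx context.Context, part int) error {
	fmt.Println("uploading part", part)
	return nil
}

func uploadAll(ctx context.Context, parts int) error {
	g, ctx := errgroup.WithContext(ctx)
	g.SetLimit(3) // at most three parts in flight, mirroring the added SetLimit(3) lines
	for i := 0; i < parts; i++ {
		part := i
		g.Go(func() error { return uploadPart(ctx, part) })
	}
	return g.Wait() // first error cancels ctx and is returned
}

With the limit set, g.Go blocks once three uploads are in flight, which is the throttling effect those one-line additions aim for.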

@@ -1,216 +0,0 @@
package quark_open
import (
"bytes"
"context"
"encoding/hex"
"errors"
"fmt"
"github.com/OpenListTeam/OpenList/drivers/base"
"github.com/OpenListTeam/OpenList/internal/driver"
"github.com/OpenListTeam/OpenList/internal/errs"
"github.com/OpenListTeam/OpenList/internal/model"
streamPkg "github.com/OpenListTeam/OpenList/internal/stream"
"github.com/OpenListTeam/OpenList/pkg/utils"
"github.com/go-resty/resty/v2"
"hash"
"io"
"net/http"
)
type QuarkOpen struct {
model.Storage
Addition
config driver.Config
conf Conf
}
func (d *QuarkOpen) Config() driver.Config {
return d.config
}
func (d *QuarkOpen) GetAddition() driver.Additional {
return &d.Addition
}
func (d *QuarkOpen) Init(ctx context.Context) error {
_, err := d.request(ctx, "/open/v1/user/info", http.MethodGet, nil, nil)
return err
}
func (d *QuarkOpen) Drop(ctx context.Context) error {
return nil
}
func (d *QuarkOpen) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
files, err := d.GetFiles(ctx, dir.GetID())
if err != nil {
return nil, err
}
return utils.SliceConvert(files, func(src File) (model.Obj, error) {
return fileToObj(src), nil
})
}
func (d *QuarkOpen) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
data := base.Json{
"fid": file.GetID(),
}
var resp FileLikeResp
_, err := d.request(ctx, "/open/v1/file/get_download_url", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return nil, err
}
return &model.Link{
URL: resp.Data.DownloadURL,
Header: http.Header{
"Cookie": []string{d.generateAuthCookie()},
},
Concurrency: 3,
PartSize: 10 * utils.MB,
}, nil
}
func (d *QuarkOpen) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) error {
data := base.Json{
"dir_path": dirName,
"pdir_fid": parentDir.GetID(),
}
_, err := d.request(ctx, "/open/v1/dir", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *QuarkOpen) Move(ctx context.Context, srcObj, dstDir model.Obj) error {
data := base.Json{
"action_type": 1,
"fid_list": []string{srcObj.GetID()},
"to_pdir_fid": dstDir.GetID(),
}
_, err := d.request(ctx, "/open/v1/file/move", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *QuarkOpen) Rename(ctx context.Context, srcObj model.Obj, newName string) error {
data := base.Json{
"fid": srcObj.GetID(),
"file_name": newName,
"conflict_mode": "REUSE",
}
_, err := d.request(ctx, "/open/v1/file/rename", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *QuarkOpen) Copy(ctx context.Context, srcObj, dstDir model.Obj) error {
return errs.NotSupport
}
func (d *QuarkOpen) Remove(ctx context.Context, obj model.Obj) error {
data := base.Json{
"action_type": 1,
"fid_list": []string{obj.GetID()},
}
_, err := d.request(ctx, "/open/v1/file/delete", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, nil)
return err
}
func (d *QuarkOpen) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) error {
md5Str, sha1Str := stream.GetHash().GetHash(utils.MD5), stream.GetHash().GetHash(utils.SHA1)
var (
md5 hash.Hash
sha1 hash.Hash
)
writers := []io.Writer{}
if len(md5Str) != utils.MD5.Width {
md5 = utils.MD5.NewFunc()
writers = append(writers, md5)
}
if len(sha1Str) != utils.SHA1.Width {
sha1 = utils.SHA1.NewFunc()
writers = append(writers, sha1)
}
if len(writers) > 0 {
_, err := streamPkg.CacheFullInTempFileAndWriter(stream, io.MultiWriter(writers...))
if err != nil {
return err
}
if md5 != nil {
md5Str = hex.EncodeToString(md5.Sum(nil))
}
if sha1 != nil {
sha1Str = hex.EncodeToString(sha1.Sum(nil))
}
}
// pre
pre, err := d.upPre(ctx, stream, dstDir.GetID(), md5Str, sha1Str)
if err != nil {
return err
}
// get part info
partInfo := d._getPartInfo(stream, pre.Data.PartSize)
// get upload url info
upUrlInfo, err := d.upUrl(ctx, pre, partInfo)
if err != nil {
return err
}
// part up
total := stream.GetSize()
left := total
part := make([]byte, pre.Data.PartSize)
// 用于存储每个分片的ETag后续commit时需要
etags := make([]string, len(partInfo))
// 遍历上传每个分片
for i, urlInfo := range upUrlInfo.UploadUrls {
if utils.IsCanceled(ctx) {
return ctx.Err()
}
currentSize := int64(urlInfo.PartSize)
if left < currentSize {
part = part[:left]
} else {
part = part[:currentSize]
}
// 读取分片数据
n, err := io.ReadFull(stream, part)
if err != nil && !errors.Is(err, io.ErrUnexpectedEOF) {
return err
}
// 准备上传分片
reader := driver.NewLimitedUploadStream(ctx, bytes.NewReader(part))
etag, err := d.upPart(ctx, upUrlInfo, i, reader)
if err != nil {
return fmt.Errorf("failed to upload part %d: %w", i, err)
}
// 保存ETag用于后续commit
etags[i] = etag
// 更新剩余大小和进度
left -= int64(n)
up(float64(total-left) / float64(total) * 100)
}
return d.upFinish(ctx, pre, partInfo, etags)
}
var _ driver.Driver = (*QuarkOpen)(nil)

@@ -1,40 +0,0 @@
package quark_open
import (
"github.com/OpenListTeam/OpenList/internal/driver"
"github.com/OpenListTeam/OpenList/internal/op"
)
type Addition struct {
driver.RootID
OrderBy string `json:"order_by" type:"select" options:"none,file_type,file_name,updated_at,created_at" default:"none"`
OrderDirection string `json:"order_direction" type:"select" options:"asc,desc" default:"asc"`
UseOnlineAPI bool `json:"use_online_api" default:"true"`
APIAddress string `json:"api_url_address" default:"https://api.oplist.org/quarkyun/renewapi"`
AccessToken string `json:"access_token" required:"false" default:""`
RefreshToken string `json:"refresh_token" required:"true"`
AppID string `json:"app_id" required:"true" help:"Keep it empty if you don't have one"`
SignKey string `json:"sign_key" required:"true" help:"Keep it empty if you don't have one"`
}
type Conf struct {
ua string
api string
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &QuarkOpen{
config: driver.Config{
Name: "QuarkOpen",
OnlyLocal: true,
DefaultRoot: "0",
NoOverwriteUpload: true,
},
conf: Conf{
ua: "go-resty/3.0.0-beta.1 (https://resty.dev)",
api: "https://open-api-drive.quark.cn",
},
}
})
}

@@ -1,131 +0,0 @@
package quark_open
import (
"time"
"github.com/OpenListTeam/OpenList/internal/model"
)
type Resp struct {
CommonRsp
Errno int `json:"errno"`
ErrorInfo string `json:"error_info"`
}
type CommonRsp struct {
Status int `json:"status"`
ReqID string `json:"req_id"`
}
type RefreshTokenOnlineAPIResp struct {
RefreshToken string `json:"refresh_token"`
AccessToken string `json:"access_token"`
AppID string `json:"app_id"`
SignKey string `json:"sign_key"`
ErrorMessage string `json:"text"`
}
type File struct {
Fid string `json:"fid"`
ParentFid string `json:"parent_fid"`
Category int64 `json:"category"`
FileName string `json:"filename"`
Size int64 `json:"size"`
FileType string `json:"file_type"`
ThumbnailURL string `json:"thumbnail_url"`
ContentHash string `json:"content_hash"`
CreatedAt int64 `json:"created_at"`
UpdatedAt int64 `json:"updated_at"`
}
func fileToObj(f File) *model.Object {
return &model.Object{
ID: f.Fid,
Name: f.FileName,
Size: f.Size,
Modified: time.UnixMilli(f.UpdatedAt),
IsFolder: f.FileType == "0",
}
}
type QueryCursor struct {
Version string `json:"version"`
Token string `json:"token"`
}
type FileListResp struct {
CommonRsp
Data struct {
FileList []File `json:"file_list"`
LastPage bool `json:"last_page"`
NextQueryCursor QueryCursor `json:"next_query_cursor"`
} `json:"data"`
}
type FileLikeResp struct {
CommonRsp
Data struct {
Fid string `json:"fid"`
Size int `json:"size"`
FileName string `json:"file_name"`
DownloadURL string `json:"download_url"`
} `json:"data"`
}
type UpPreResp struct {
CommonRsp
Data struct {
Finish bool `json:"finish"`
TaskID string `json:"task_id"`
Fid string `json:"fid"`
CommonHeaders struct {
XOssContentSha256 string `json:"X-Oss-Content-Sha256"`
XOssDate string `json:"X-Oss-Date"`
} `json:"common_headers"`
UploadUrls []struct {
PartNumber int `json:"part_number"`
SignatureInfo struct {
AuthType string `json:"auth_type"`
Signature string `json:"signature"`
} `json:"signature_info"`
UploadURL string `json:"upload_url"`
Expired int64 `json:"expired"`
} `json:"upload_urls"`
PartSize int64 `json:"part_size"`
} `json:"data"`
}
type UpUrlInfo struct {
UploadUrls []struct {
PartNumber int `json:"part_number"`
PartSize int `json:"part_size"`
SignatureInfo struct {
AuthType string `json:"auth_type"`
Signature string `json:"signature"`
} `json:"signature_info"`
UploadURL string `json:"upload_url"`
} `json:"upload_urls"`
CommonHeaders struct {
XOssContentSha256 string `json:"X-Oss-Content-Sha256"`
XOssDate string `json:"X-Oss-Date"`
} `json:"common_headers"`
UploadID string `json:"upload_id"`
}
type UpUrlResp struct {
CommonRsp
Data UpUrlInfo `json:"data"`
}
type UpFinishResp struct {
CommonRsp
Data struct {
TaskID string `json:"task_id"`
Fid string `json:"fid"`
Finish bool `json:"finish"`
PdirFid string `json:"pdir_fid"`
Thumbnail string `json:"thumbnail"`
FormatType string `json:"format_type"`
Size int `json:"size"`
} `json:"data"`
}

@@ -1,309 +0,0 @@
package quark_open
import (
"context"
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"
"github.com/google/uuid"
"io"
"net/http"
"strconv"
"time"
"github.com/OpenListTeam/OpenList/drivers/base"
"github.com/OpenListTeam/OpenList/internal/model"
"github.com/OpenListTeam/OpenList/internal/op"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
func (d *QuarkOpen) request(ctx context.Context, pathname string, method string, callback base.ReqCallback, resp interface{}) ([]byte, error) {
u := d.conf.api + pathname
tm, token, reqID := d.generateReqSign(method, pathname, d.Addition.SignKey)
req := base.RestyClient.R()
req.SetContext(ctx)
req.SetHeaders(map[string]string{
"Accept": "application/json, text/plain, */*",
"User-Agent": d.conf.ua,
"x-pan-tm": tm,
"x-pan-token": token,
"x-pan-client-id": d.Addition.AppID,
})
req.SetQueryParams(map[string]string{
"req_id": reqID,
"access_token": d.Addition.AccessToken,
})
if callback != nil {
callback(req)
}
if resp != nil {
req.SetResult(resp)
}
var e Resp
req.SetError(&e)
res, err := req.Execute(method, u)
if err != nil {
return nil, err
}
// 判断 是否需要 刷新 access_token
if e.Status == -1 && (e.Errno == 11001 || e.Errno == 14001) {
// token 过期
err = d.refreshToken()
if err != nil {
return nil, err
}
ctx1, cancelFunc := context.WithTimeout(ctx, 10*time.Second)
defer cancelFunc()
return d.request(ctx1, pathname, method, callback, resp)
}
if e.Status >= 400 || e.Errno != 0 {
return nil, errors.New(e.ErrorInfo)
}
return res.Body(), nil
}
func (d *QuarkOpen) GetFiles(ctx context.Context, parent string) ([]File, error) {
files := make([]File, 0)
var queryCursor QueryCursor
for {
reqBody := map[string]interface{}{
"parent_fid": parent,
"size": 100, // 默认每页100个文件
"sort": "file_name:asc", // 基本排序方式
}
// 如果有排序设置
if d.OrderBy != "none" {
reqBody["sort"] = d.OrderBy + ":" + d.OrderDirection
}
// 设置查询游标(用于分页)
if queryCursor.Token != "" {
reqBody["query_cursor"] = queryCursor
}
var resp FileListResp
_, err := d.request(ctx, "/open/v1/file/list", http.MethodPost, func(req *resty.Request) {
req.SetBody(reqBody)
}, &resp)
if err != nil {
return nil, err
}
files = append(files, resp.Data.FileList...)
if resp.Data.LastPage {
break
}
queryCursor = resp.Data.NextQueryCursor
}
return files, nil
}
func (d *QuarkOpen) upPre(ctx context.Context, file model.FileStreamer, parentId, md5, sha1 string) (UpPreResp, error) {
now := time.Now()
data := base.Json{
"file_name": file.GetName(),
"size": file.GetSize(),
"format_type": file.GetMimetype(),
"md5": md5,
"sha1": sha1,
"l_created_at": now.UnixMilli(),
"l_updated_at": now.UnixMilli(),
"pdir_fid": parentId,
"same_path_reuse": true,
}
var resp UpPreResp
_, err := d.request(ctx, "/open/v1/file/upload_pre", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
return resp, err
}
func (d *QuarkOpen) _getPartInfo(stream model.FileStreamer, partSize int64) []base.Json {
// 计算分片信息
partInfo := make([]base.Json, 0)
total := stream.GetSize()
left := total
partNumber := 1
// 计算每个分片的大小和编号
for left > 0 {
size := partSize
if left < partSize {
size = left
}
partInfo = append(partInfo, base.Json{
"part_number": partNumber,
"part_size": size,
})
left -= size
partNumber++
}
return partInfo
}
func (d *QuarkOpen) upUrl(ctx context.Context, pre UpPreResp, partInfo []base.Json) (upUrlInfo UpUrlInfo, err error) {
// 构建请求体
data := base.Json{
"task_id": pre.Data.TaskID,
"part_info_list": partInfo,
}
var resp UpUrlResp
_, err = d.request(ctx, "/open/v1/file/get_upload_urls", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return upUrlInfo, err
}
return resp.Data, nil
}
func (d *QuarkOpen) upPart(ctx context.Context, upUrlInfo UpUrlInfo, partNumber int, bytes io.Reader) (string, error) {
// 创建请求
req, err := http.NewRequestWithContext(ctx, http.MethodPut, upUrlInfo.UploadUrls[partNumber].UploadURL, bytes)
if err != nil {
return "", err
}
req.Header.Set("Authorization", upUrlInfo.UploadUrls[partNumber].SignatureInfo.Signature)
req.Header.Set("X-Oss-Date", upUrlInfo.CommonHeaders.XOssDate)
req.Header.Set("X-Oss-Content-Sha256", upUrlInfo.CommonHeaders.XOssContentSha256)
req.Header.Set("Accept-Encoding", "gzip")
req.Header.Set("User-Agent", "Go-http-client/1.1")
// 发送请求
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
return "", err
}
defer resp.Body.Close()
if resp.StatusCode != 200 {
body, _ := io.ReadAll(resp.Body)
return "", fmt.Errorf("up status: %d, error: %s", resp.StatusCode, string(body))
}
// 返回 Etag 作为分片上传的标识
return resp.Header.Get("Etag"), nil
}
func (d *QuarkOpen) upFinish(ctx context.Context, pre UpPreResp, partInfo []base.Json, etags []string) error {
// 创建 part_info_list
partInfoList := make([]base.Json, len(partInfo))
// 确保 partInfo 和 etags 长度一致
if len(partInfo) != len(etags) {
return fmt.Errorf("part info count (%d) does not match etags count (%d)", len(partInfo), len(etags))
}
// 组合 part_info_list
for i, part := range partInfo {
partInfoList[i] = base.Json{
"part_number": part["part_number"],
"part_size": part["part_size"],
"etag": etags[i],
}
}
// 构建请求体
data := base.Json{
"task_id": pre.Data.TaskID,
"part_info_list": partInfoList,
}
// 发送请求
var resp UpFinishResp
_, err := d.request(ctx, "/open/v1/file/upload_finish", http.MethodPost, func(req *resty.Request) {
req.SetBody(data)
}, &resp)
if err != nil {
return err
}
if resp.Data.Finish != true {
return fmt.Errorf("upload finish failed, task_id: %s", resp.Data.TaskID)
}
return nil
}
func (d *QuarkOpen) generateReqSign(method string, pathname string, signKey string) (string, string, string) {
// 生成时间戳 (13位毫秒级)
timestamp := strconv.FormatInt(time.Now().UnixNano()/int64(time.Millisecond), 10)
// 生成 x-pan-token token的组成是: method + "&" + pathname + "&" + timestamp + "&" + signKey
tokenData := method + "&" + pathname + "&" + timestamp + "&" + signKey
tokenHash := sha256.Sum256([]byte(tokenData))
xPanToken := hex.EncodeToString(tokenHash[:])
// 生成 req_id
reqUuid, _ := uuid.NewRandom()
reqID := reqUuid.String()
return timestamp, xPanToken, reqID
}
func (d *QuarkOpen) refreshToken() error {
refresh, access, err := d._refreshToken()
for i := 0; i < 3; i++ {
if err == nil {
break
} else {
log.Errorf("[quark_open] failed to refresh token: %s", err)
}
refresh, access, err = d._refreshToken()
}
if err != nil {
return err
}
log.Infof("[quark_open] token exchange: %s -> %s", d.RefreshToken, refresh)
d.RefreshToken, d.AccessToken = refresh, access
op.MustSaveDriverStorage(d)
return nil
}
func (d *QuarkOpen) _refreshToken() (string, string, error) {
if d.UseOnlineAPI && d.APIAddress != "" {
u := d.APIAddress
var resp RefreshTokenOnlineAPIResp
_, err := base.RestyClient.R().
SetResult(&resp).
SetQueryParams(map[string]string{
"refresh_ui": d.RefreshToken,
"server_use": "true",
"driver_txt": "quarkyun_oa",
}).
Get(u)
if err != nil {
return "", "", err
}
if resp.RefreshToken == "" || resp.AccessToken == "" {
if resp.ErrorMessage != "" {
return "", "", fmt.Errorf("failed to refresh token: %s", resp.ErrorMessage)
}
return "", "", fmt.Errorf("empty token returned from official API")
}
return resp.RefreshToken, resp.AccessToken, nil
}
// TODO 本地刷新逻辑
return "", "", fmt.Errorf("local refresh token logic is not implemented yet, please use online API or contact the developer")
}
// 生成认证 Cookie
func (d *QuarkOpen) generateAuthCookie() string {
return fmt.Sprintf("x_pan_client_id=%s; x_pan_access_token=%s",
d.Addition.AppID, d.Addition.AccessToken)
}
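For reference, `generateReqSign` in the listing above derives the `x-pan-token` header as the hex SHA-256 of `method + "&" + pathname + "&" + timestamp + "&" + signKey`, with a millisecond timestamp. The snippet below is only a compact restatement of that scheme for clarity; the sign key is a placeholder, and `time.Now().UnixMilli()` is equivalent to the UnixNano division used in the listing.

package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"strconv"
	"time"
)

// signRequest reproduces the x-pan-token derivation shown above.
func signRequest(method, pathname, signKey string) (timestamp, token string) {
	timestamp = strconv.FormatInt(time.Now().UnixMilli(), 10)
	sum := sha256.Sum256([]byte(method + "&" + pathname + "&" + timestamp + "&" + signKey))
	return timestamp, hex.EncodeToString(sum[:])
}

func main() {
	tm, token := signRequest("POST", "/open/v1/file/list", "example-sign-key")
	fmt.Println("x-pan-tm:", tm)       // millisecond timestamp
	fmt.Println("x-pan-token:", token) // hex SHA-256 signature
}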

drivers/quqi/driver.go (new file, 452 lines)

@@ -0,0 +1,452 @@
package quqi
import (
"bytes"
"context"
"errors"
"io"
"strconv"
"strings"
"time"
"github.com/OpenListTeam/OpenList/internal/driver"
"github.com/OpenListTeam/OpenList/internal/errs"
"github.com/OpenListTeam/OpenList/internal/model"
"github.com/OpenListTeam/OpenList/pkg/utils"
"github.com/OpenListTeam/OpenList/pkg/utils/random"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
"github.com/go-resty/resty/v2"
log "github.com/sirupsen/logrus"
)
type Quqi struct {
model.Storage
Addition
Cookie string // Cookie
GroupID string // 私人云群组ID
ClientID string // 随机生成客户端ID 经过测试部分接口调用若不携带client id会出现错误
}
func (d *Quqi) Config() driver.Config {
return config
}
func (d *Quqi) GetAddition() driver.Additional {
return &d.Addition
}
func (d *Quqi) Init(ctx context.Context) error {
// 登录
if err := d.login(); err != nil {
return err
}
// 生成随机client id (与网页端生成逻辑一致)
d.ClientID = "quqipc_" + random.String(10)
// 获取私人云ID (暂时仅获取私人云)
groupResp := &GroupRes{}
if _, err := d.request("group.quqi.com", "/v1/group/list", resty.MethodGet, nil, groupResp); err != nil {
return err
}
for _, groupInfo := range groupResp.Data {
if groupInfo == nil {
continue
}
if groupInfo.Type == 2 {
d.GroupID = strconv.Itoa(groupInfo.ID)
break
}
}
if d.GroupID == "" {
return errs.StorageNotFound
}
return nil
}
func (d *Quqi) Drop(ctx context.Context) error {
return nil
}
func (d *Quqi) List(ctx context.Context, dir model.Obj, args model.ListArgs) ([]model.Obj, error) {
var (
listResp = &ListRes{}
files []model.Obj
)
if _, err := d.request("", "/api/dir/ls", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": dir.GetID(),
"client_id": d.ClientID,
})
}, listResp); err != nil {
return nil, err
}
if listResp.Data == nil {
return nil, nil
}
// dirs
for _, dirInfo := range listResp.Data.Dir {
if dirInfo == nil {
continue
}
files = append(files, &model.Object{
ID: strconv.FormatInt(dirInfo.NodeID, 10),
Name: dirInfo.Name,
Modified: time.Unix(dirInfo.UpdateTime, 0),
Ctime: time.Unix(dirInfo.AddTime, 0),
IsFolder: true,
})
}
// files
for _, fileInfo := range listResp.Data.File {
if fileInfo == nil {
continue
}
if fileInfo.EXT != "" {
fileInfo.Name = strings.Join([]string{fileInfo.Name, fileInfo.EXT}, ".")
}
files = append(files, &model.Object{
ID: strconv.FormatInt(fileInfo.NodeID, 10),
Name: fileInfo.Name,
Size: fileInfo.Size,
Modified: time.Unix(fileInfo.UpdateTime, 0),
Ctime: time.Unix(fileInfo.AddTime, 0),
})
}
return files, nil
}
func (d *Quqi) Link(ctx context.Context, file model.Obj, args model.LinkArgs) (*model.Link, error) {
if d.CDN {
link, err := d.linkFromCDN(file.GetID())
if err != nil {
log.Warn(err)
} else {
return link, nil
}
}
link, err := d.linkFromPreview(file.GetID())
if err != nil {
log.Warn(err)
} else {
return link, nil
}
link, err = d.linkFromDownload(file.GetID())
if err != nil {
return nil, err
}
return link, nil
}
func (d *Quqi) MakeDir(ctx context.Context, parentDir model.Obj, dirName string) (model.Obj, error) {
var (
makeDirRes = &MakeDirRes{}
timeNow = time.Now()
)
if _, err := d.request("", "/api/dir/mkDir", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"parent_id": parentDir.GetID(),
"name": dirName,
"client_id": d.ClientID,
})
}, makeDirRes); err != nil {
return nil, err
}
return &model.Object{
ID: strconv.FormatInt(makeDirRes.Data.NodeID, 10),
Name: dirName,
Modified: timeNow,
Ctime: timeNow,
IsFolder: true,
}, nil
}
func (d *Quqi) Move(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
var moveRes = &MoveRes{}
if _, err := d.request("", "/api/dir/mvDir", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": dstDir.GetID(),
"source_quqi_id": d.GroupID,
"source_tree_id": "1",
"source_node_id": srcObj.GetID(),
"client_id": d.ClientID,
})
}, moveRes); err != nil {
return nil, err
}
return &model.Object{
ID: strconv.FormatInt(moveRes.Data.NodeID, 10),
Name: moveRes.Data.NodeName,
Size: srcObj.GetSize(),
Modified: time.Now(),
Ctime: srcObj.CreateTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *Quqi) Rename(ctx context.Context, srcObj model.Obj, newName string) (model.Obj, error) {
var realName = newName
if !srcObj.IsDir() {
srcExt, newExt := utils.Ext(srcObj.GetName()), utils.Ext(newName)
// 曲奇网盘的文件名称由文件名和扩展名组成,若存在扩展名,则重命名时仅支持更改文件名,扩展名在曲奇服务端保留
if srcExt != "" && srcExt == newExt {
parts := strings.Split(newName, ".")
if len(parts) > 1 {
realName = strings.Join(parts[:len(parts)-1], ".")
}
}
}
if _, err := d.request("", "/api/dir/renameDir", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": srcObj.GetID(),
"rename": realName,
"client_id": d.ClientID,
})
}, nil); err != nil {
return nil, err
}
return &model.Object{
ID: srcObj.GetID(),
Name: newName,
Size: srcObj.GetSize(),
Modified: time.Now(),
Ctime: srcObj.CreateTime(),
IsFolder: srcObj.IsDir(),
}, nil
}
func (d *Quqi) Copy(ctx context.Context, srcObj, dstDir model.Obj) (model.Obj, error) {
// 无法从曲奇接口响应中直接获取复制后的文件信息
if _, err := d.request("", "/api/node/copy", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": dstDir.GetID(),
"source_quqi_id": d.GroupID,
"source_tree_id": "1",
"source_node_id": srcObj.GetID(),
"client_id": d.ClientID,
})
}, nil); err != nil {
return nil, err
}
return nil, nil
}
func (d *Quqi) Remove(ctx context.Context, obj model.Obj) error {
// 暂时不做直接删除,默认都放到回收站。直接删除方法:先调用删除接口放入回收站,在通过回收站接口删除文件
if _, err := d.request("", "/api/node/del", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": obj.GetID(),
"client_id": d.ClientID,
})
}, nil); err != nil {
return err
}
return nil
}
func (d *Quqi) Put(ctx context.Context, dstDir model.Obj, stream model.FileStreamer, up driver.UpdateProgress) (model.Obj, error) {
// base info
sizeStr := strconv.FormatInt(stream.GetSize(), 10)
f, err := stream.CacheFullInTempFile()
if err != nil {
return nil, err
}
md5, err := utils.HashFile(utils.MD5, f)
if err != nil {
return nil, err
}
sha, err := utils.HashFile(utils.SHA256, f)
if err != nil {
return nil, err
}
// init upload
var uploadInitResp UploadInitResp
_, err = d.request("", "/api/upload/v1/file/init", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"parent_id": dstDir.GetID(),
"size": sizeStr,
"file_name": stream.GetName(),
"md5": md5,
"sha": sha,
"is_slice": "true",
"client_id": d.ClientID,
})
}, &uploadInitResp)
if err != nil {
return nil, err
}
// check exist
// if the file already exists in Quqi server, there is no need to actually upload it
if uploadInitResp.Data.Exist {
// the file name returned by Quqi does not include the extension name
nodeName, nodeExt := uploadInitResp.Data.NodeName, utils.Ext(stream.GetName())
if nodeExt != "" {
nodeName = nodeName + "." + nodeExt
}
return &model.Object{
ID: strconv.FormatInt(uploadInitResp.Data.NodeID, 10),
Name: nodeName,
Size: stream.GetSize(),
Modified: stream.ModTime(),
Ctime: stream.CreateTime(),
}, nil
}
// listParts
_, err = d.request("upload.quqi.com:20807", "/upload/v1/listParts", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"token": uploadInitResp.Data.Token,
"task_id": uploadInitResp.Data.TaskID,
"client_id": d.ClientID,
})
}, nil)
if err != nil {
return nil, err
}
// get temp key
var tempKeyResp TempKeyResp
_, err = d.request("upload.quqi.com:20807", "/upload/v1/tempKey", resty.MethodGet, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"token": uploadInitResp.Data.Token,
"task_id": uploadInitResp.Data.TaskID,
})
}, &tempKeyResp)
if err != nil {
return nil, err
}
// upload
// u, err := url.Parse(fmt.Sprintf("https://%s.cos.ap-shanghai.myqcloud.com", uploadInitResp.Data.Bucket))
// b := &cos.BaseURL{BucketURL: u}
// client := cos.NewClient(b, &http.Client{
// Transport: &cos.CredentialTransport{
// Credential: cos.NewTokenCredential(tempKeyResp.Data.Credentials.TmpSecretID, tempKeyResp.Data.Credentials.TmpSecretKey, tempKeyResp.Data.Credentials.SessionToken),
// },
// })
// partSize := int64(1024 * 1024 * 2)
// partCount := (stream.GetSize() + partSize - 1) / partSize
// for i := 1; i <= int(partCount); i++ {
// length := partSize
// if i == int(partCount) {
// length = stream.GetSize() - (int64(i)-1)*partSize
// }
// _, err := client.Object.UploadPart(
// ctx, uploadInitResp.Data.Key, uploadInitResp.Data.UploadID, i, io.LimitReader(f, partSize), &cos.ObjectUploadPartOptions{
// ContentLength: length,
// },
// )
// if err != nil {
// return nil, err
// }
// }
cfg := &aws.Config{
Credentials: credentials.NewStaticCredentials(tempKeyResp.Data.Credentials.TmpSecretID, tempKeyResp.Data.Credentials.TmpSecretKey, tempKeyResp.Data.Credentials.SessionToken),
Region: aws.String("ap-shanghai"),
Endpoint: aws.String("cos.ap-shanghai.myqcloud.com"),
}
s, err := session.NewSession(cfg)
if err != nil {
return nil, err
}
uploader := s3manager.NewUploader(s)
buf := make([]byte, 1024*1024*2)
fup := &driver.ReaderUpdatingProgress{
Reader: &driver.SimpleReaderWithSize{
Reader: f,
Size: int64(len(buf)),
},
UpdateProgress: up,
}
for partNumber := int64(1); ; partNumber++ {
n, err := io.ReadFull(fup, buf)
if err != nil && !errors.Is(err, io.ErrUnexpectedEOF) {
if err == io.EOF {
break
}
return nil, err
}
reader := bytes.NewReader(buf[:n])
_, err = uploader.S3.UploadPartWithContext(ctx, &s3.UploadPartInput{
UploadId: &uploadInitResp.Data.UploadID,
Key: &uploadInitResp.Data.Key,
Bucket: &uploadInitResp.Data.Bucket,
PartNumber: aws.Int64(partNumber),
Body: struct {
*driver.RateLimitReader
io.Seeker
}{
RateLimitReader: driver.NewLimitedUploadStream(ctx, reader),
Seeker: reader,
},
})
if err != nil {
return nil, err
}
}
// finish upload
var uploadFinishResp UploadFinishResp
_, err = d.request("", "/api/upload/v1/file/finish", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"token": uploadInitResp.Data.Token,
"task_id": uploadInitResp.Data.TaskID,
"client_id": d.ClientID,
})
}, &uploadFinishResp)
if err != nil {
return nil, err
}
// the file name returned by Quqi does not include the extension name
nodeName, nodeExt := uploadFinishResp.Data.NodeName, utils.Ext(stream.GetName())
if nodeExt != "" {
nodeName = nodeName + "." + nodeExt
}
return &model.Object{
ID: strconv.FormatInt(uploadFinishResp.Data.NodeID, 10),
Name: nodeName,
Size: stream.GetSize(),
Modified: stream.ModTime(),
Ctime: stream.CreateTime(),
}, nil
}
//func (d *Template) Other(ctx context.Context, args model.OtherArgs) (interface{}, error) {
// return nil, errs.NotSupport
//}
var _ driver.Driver = (*Quqi)(nil)

drivers/quqi/meta.go (new file, 28 lines)

@@ -0,0 +1,28 @@
package quqi
import (
"github.com/OpenListTeam/OpenList/internal/driver"
"github.com/OpenListTeam/OpenList/internal/op"
)
type Addition struct {
driver.RootID
Phone string `json:"phone"`
Password string `json:"password"`
Cookie string `json:"cookie" help:"Cookie can be used on multiple clients at the same time"`
CDN bool `json:"cdn" help:"If you enable this option, the download speed can be increased, but there will be some performance loss"`
}
var config = driver.Config{
Name: "Quqi",
OnlyLocal: true,
LocalSort: true,
//NoUpload: true,
DefaultRoot: "0",
}
func init() {
op.RegisterDriver(func() driver.Driver {
return &Quqi{}
})
}

drivers/quqi/types.go (new file, 197 lines)

@@ -0,0 +1,197 @@
package quqi
type BaseReqQuery struct {
ID string `json:"quqiid"`
}
type BaseReq struct {
GroupID string `json:"quqi_id"`
}
type BaseRes struct {
//Data interface{} `json:"data"`
Code int `json:"err"`
Message string `json:"msg"`
}
type GroupRes struct {
BaseRes
Data []*Group `json:"data"`
}
type ListRes struct {
BaseRes
Data *List `json:"data"`
}
type GetDocRes struct {
BaseRes
Data struct {
OriginPath string `json:"origin_path"`
} `json:"data"`
}
type GetDownloadResp struct {
BaseRes
Data struct {
Url string `json:"url"`
} `json:"data"`
}
type MakeDirRes struct {
BaseRes
Data struct {
IsRoot bool `json:"is_root"`
NodeID int64 `json:"node_id"`
ParentID int64 `json:"parent_id"`
} `json:"data"`
}
type MoveRes struct {
BaseRes
Data struct {
NodeChildNum int64 `json:"node_child_num"`
NodeID int64 `json:"node_id"`
NodeName string `json:"node_name"`
ParentID int64 `json:"parent_id"`
GroupID int64 `json:"quqi_id"`
TreeID int64 `json:"tree_id"`
} `json:"data"`
}
type RenameRes struct {
BaseRes
Data struct {
NodeID int64 `json:"node_id"`
GroupID int64 `json:"quqi_id"`
Rename string `json:"rename"`
TreeID int64 `json:"tree_id"`
UpdateTime int64 `json:"updatetime"`
} `json:"data"`
}
type CopyRes struct {
BaseRes
}
type RemoveRes struct {
BaseRes
}
type Group struct {
ID int `json:"quqi_id"`
Type int `json:"type"`
Name string `json:"name"`
IsAdministrator int `json:"is_administrator"`
Role int `json:"role"`
Avatar string `json:"avatar_url"`
IsStick int `json:"is_stick"`
Nickname string `json:"nickname"`
Status int `json:"status"`
}
type List struct {
ListDir
Dir []*ListDir `json:"dir"`
File []*ListFile `json:"file"`
}
type ListItem struct {
AddTime int64 `json:"add_time"`
IsDir int `json:"is_dir"`
IsExpand int `json:"is_expand"`
IsFinalize int `json:"is_finalize"`
LastEditorName string `json:"last_editor_name"`
Name string `json:"name"`
NodeID int64 `json:"nid"`
ParentID int64 `json:"parent_id"`
Permission int `json:"permission"`
TreeID int64 `json:"tid"`
UpdateCNT int64 `json:"update_cnt"`
UpdateTime int64 `json:"update_time"`
}
type ListDir struct {
ListItem
ChildDocNum int64 `json:"child_doc_num"`
DirDetail string `json:"dir_detail"`
DirType int `json:"dir_type"`
}
type ListFile struct {
ListItem
BroadDocType string `json:"broad_doc_type"`
CanDisplay bool `json:"can_display"`
Detail string `json:"detail"`
EXT string `json:"ext"`
Filetype string `json:"filetype"`
HasMobileThumbnail bool `json:"has_mobile_thumbnail"`
HasThumbnail bool `json:"has_thumbnail"`
Size int64 `json:"size"`
Version int `json:"version"`
}
type UploadInitResp struct {
Data struct {
Bucket string `json:"bucket"`
Exist bool `json:"exist"`
Key string `json:"key"`
TaskID string `json:"task_id"`
Token string `json:"token"`
UploadID string `json:"upload_id"`
URL string `json:"url"`
NodeID int64 `json:"node_id"`
NodeName string `json:"node_name"`
ParentID int64 `json:"parent_id"`
} `json:"data"`
Err int `json:"err"`
Msg string `json:"msg"`
}
type TempKeyResp struct {
Err int `json:"err"`
Msg string `json:"msg"`
Data struct {
ExpiredTime int `json:"expiredTime"`
Expiration string `json:"expiration"`
Credentials struct {
SessionToken string `json:"sessionToken"`
TmpSecretID string `json:"tmpSecretId"`
TmpSecretKey string `json:"tmpSecretKey"`
} `json:"credentials"`
RequestID string `json:"requestId"`
StartTime int `json:"startTime"`
} `json:"data"`
}
type UploadFinishResp struct {
Data struct {
NodeID int64 `json:"node_id"`
NodeName string `json:"node_name"`
ParentID int64 `json:"parent_id"`
QuqiID int64 `json:"quqi_id"`
TreeID int64 `json:"tree_id"`
} `json:"data"`
Err int `json:"err"`
Msg string `json:"msg"`
}
type UrlExchangeResp struct {
BaseRes
Data struct {
Name string `json:"name"`
Mime string `json:"mime"`
Size int64 `json:"size"`
DownloadType int `json:"download_type"`
ChannelType int `json:"channel_type"`
ChannelID int `json:"channel_id"`
Url string `json:"url"`
ExpiredTime int64 `json:"expired_time"`
IsEncrypted bool `json:"is_encrypted"`
EncryptedSize int64 `json:"encrypted_size"`
EncryptedAlg string `json:"encrypted_alg"`
EncryptedKey string `json:"encrypted_key"`
PassportID int64 `json:"passport_id"`
RequestExpiredTime int64 `json:"request_expired_time"`
} `json:"data"`
}

299
drivers/quqi/util.go Normal file
View File

@ -0,0 +1,299 @@
package quqi
import (
"bufio"
"context"
"encoding/base64"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strings"
"time"
"github.com/OpenListTeam/OpenList/drivers/base"
"github.com/OpenListTeam/OpenList/internal/errs"
"github.com/OpenListTeam/OpenList/internal/model"
"github.com/OpenListTeam/OpenList/internal/stream"
"github.com/OpenListTeam/OpenList/pkg/http_range"
"github.com/OpenListTeam/OpenList/pkg/utils"
"github.com/go-resty/resty/v2"
"github.com/minio/sio"
)
// helpers that are not defined in the Driver interface
func (d *Quqi) request(host string, path string, method string, callback base.ReqCallback, resp interface{}) (*resty.Response, error) {
var (
reqUrl = url.URL{
Scheme: "https",
Host: "quqi.com",
Path: path,
}
req = base.RestyClient.R()
result BaseRes
)
if host != "" {
reqUrl.Host = host
}
req.SetHeaders(map[string]string{
"Origin": "https://quqi.com",
"Cookie": d.Cookie,
})
if d.GroupID != "" {
req.SetQueryParam("quqiid", d.GroupID)
}
if callback != nil {
callback(req)
}
res, err := req.Execute(method, reqUrl.String())
if err != nil {
return nil, err
}
// resty.Request.SetResult cannot parse result correctly sometimes
err = utils.Json.Unmarshal(res.Body(), &result)
if err != nil {
return nil, err
}
if result.Code != 0 {
return nil, errors.New(result.Message)
}
if resp != nil {
err = utils.Json.Unmarshal(res.Body(), resp)
if err != nil {
return nil, err
}
}
return res, nil
}
func (d *Quqi) login() error {
if d.Addition.Cookie != "" {
d.Cookie = d.Addition.Cookie
}
if d.checkLogin() {
return nil
}
if d.Cookie != "" {
return errors.New("cookie is invalid")
}
if d.Phone == "" {
return errors.New("phone number is empty")
}
if d.Password == "" {
return errs.EmptyPassword
}
resp, err := d.request("", "/auth/person/v2/login/password", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"phone": d.Phone,
"password": base64.StdEncoding.EncodeToString([]byte(d.Password)),
})
}, nil)
if err != nil {
return err
}
var cookies []string
for _, cookie := range resp.RawResponse.Cookies() {
cookies = append(cookies, fmt.Sprintf("%s=%s", cookie.Name, cookie.Value))
}
d.Cookie = strings.Join(cookies, ";")
return nil
}
func (d *Quqi) checkLogin() bool {
if _, err := d.request("", "/auth/account/baseInfo", resty.MethodGet, nil, nil); err != nil {
return false
}
return true
}
// decryptKey derives the 32-byte decryption key from the encoded key string
func decryptKey(encodeKey string) []byte {
// strip characters outside the base64 alphabet
u := strings.ReplaceAll(encodeKey, "[^A-Za-z0-9+\\/]", "")
// length of the input string
o := len(u)
a := 32
// output byte array
c := make([]byte, a)
// decoding loop
s := uint32(0) // accumulator
f := 0 // index into the output array
for l := 0; l < o; l++ {
r := l & 3 // position of the current character within its 4-character block (mod 4)
i := u[l] // ASCII code of the current character
// decode the current character
switch {
case i >= 65 && i < 91: // uppercase letter
s |= uint32(i-65) << uint32(6*(3-r))
case i >= 97 && i < 123: // lowercase letter
s |= uint32(i-71) << uint32(6*(3-r))
case i >= 48 && i < 58: // digit
s |= uint32(i+4) << uint32(6*(3-r))
case i == 43: // plus sign
s |= uint32(62) << uint32(6*(3-r))
case i == 47: // slash
s |= uint32(63) << uint32(6*(3-r))
}
// once the accumulator holds four characters, or this is the last character, flush it to the output array
if r == 3 || l == o-1 {
for e := 0; e < 3 && f < a; e, f = e+1, f+1 {
c[f] = byte(s >> (16 >> e & 24) & 255)
}
s = 0
}
}
return c
}
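// Editor's sketch (not part of the original driver): the loop above is equivalent to an unpadded
// standard base64 decode truncated or zero-padded to 32 bytes. Assuming the encoded key really is
// plain standard base64, the hypothetical helper below should produce the same key using the
// standard library; it is illustrative only and unused by the driver.
func decryptKeyStd(encodeKey string) []byte {
key := make([]byte, 32)
decoded, err := base64.RawStdEncoding.DecodeString(strings.TrimRight(encodeKey, "="))
if err == nil {
copy(key, decoded)
}
return key
}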
func (d *Quqi) linkFromPreview(id string) (*model.Link, error) {
var getDocResp GetDocRes
if _, err := d.request("", "/api/doc/getDoc", resty.MethodPost, func(req *resty.Request) {
req.SetFormData(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": id,
"client_id": d.ClientID,
})
}, &getDocResp); err != nil {
return nil, err
}
if getDocResp.Data.OriginPath == "" {
return nil, errors.New("cannot get link from preview")
}
return &model.Link{
URL: getDocResp.Data.OriginPath,
Header: http.Header{
"Origin": []string{"https://quqi.com"},
"Cookie": []string{d.Cookie},
},
}, nil
}
func (d *Quqi) linkFromDownload(id string) (*model.Link, error) {
var getDownloadResp GetDownloadResp
if _, err := d.request("", "/api/doc/getDownload", resty.MethodGet, func(req *resty.Request) {
req.SetQueryParams(map[string]string{
"quqi_id": d.GroupID,
"tree_id": "1",
"node_id": id,
"url_type": "undefined",
"entry_type": "undefined",
"client_id": d.ClientID,
"no_redirect": "1",
})
}, &getDownloadResp); err != nil {
return nil, err
}
if getDownloadResp.Data.Url == "" {
return nil, errors.New("cannot get link from download")
}
return &model.Link{
URL: getDownloadResp.Data.Url,
Header: http.Header{
"Origin": []string{"https://quqi.com"},
"Cookie": []string{d.Cookie},
},
}, nil
}
func (d *Quqi) linkFromCDN(id string) (*model.Link, error) {
downloadLink, err := d.linkFromDownload(id)
if err != nil {
return nil, err
}
var urlExchangeResp UrlExchangeResp
if _, err = d.request("api.quqi.com", "/preview/downloadInfo/url/exchange", resty.MethodGet, func(req *resty.Request) {
req.SetQueryParam("url", downloadLink.URL)
}, &urlExchangeResp); err != nil {
return nil, err
}
if urlExchangeResp.Data.Url == "" {
return nil, errors.New("cannot get link from cdn")
}
// assume that an unencrypted case may exist
if !urlExchangeResp.Data.IsEncrypted {
return &model.Link{
URL: urlExchangeResp.Data.Url,
Header: http.Header{
"Origin": []string{"https://quqi.com"},
"Cookie": []string{d.Cookie},
},
}, nil
}
// Based on the sio DARE format (https://github.com/minio/sio/blob/master/DARE.md) and actual testing:
// 1. encrypted_size - size = (package header + authentication tag) * N = (16+16) * N, where N is the number of encrypted packages
// 2. N = (size + 64*1024 - 1) / (64*1024), i.e. each package carries at most a 64 KiB payload
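// Worked example (added for illustration, not from the original source): for a 200000-byte file,
// N = (200000 + 65535) / 65536 = 4 packages, so encrypted_size = 200000 + 4*32 = 200128 bytes.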
remoteClosers := utils.EmptyClosers()
payloadSize := int64(1 << 16)
expiration := time.Until(time.Unix(urlExchangeResp.Data.ExpiredTime, 0))
resultRangeReader := func(ctx context.Context, httpRange http_range.Range) (io.ReadCloser, error) {
encryptedOffset := httpRange.Start / payloadSize * (payloadSize + 32)
decryptedOffset := httpRange.Start % payloadSize
encryptedLength := (httpRange.Length+httpRange.Start+payloadSize-1)/payloadSize*(payloadSize+32) - encryptedOffset
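// Worked example of the mapping above (added for illustration, not from the original source):
// with payloadSize = 65536 and httpRange = {Start: 100000, Length: 50000}:
//   encryptedOffset = 100000/65536*(65536+32) = 65568
//   decryptedOffset = 100000%65536 = 34464
//   encryptedLength = (50000+100000+65535)/65536*(65536+32) - 65568 = 3*65568 - 65568 = 131136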
if httpRange.Length < 0 {
encryptedLength = httpRange.Length
} else {
if httpRange.Length+httpRange.Start >= urlExchangeResp.Data.Size || encryptedLength+encryptedOffset >= urlExchangeResp.Data.EncryptedSize {
encryptedLength = -1
}
}
//log.Debugf("size: %d\tencrypted_size: %d", urlExchangeResp.Data.Size, urlExchangeResp.Data.EncryptedSize)
//log.Debugf("http range offset: %d, length: %d", httpRange.Start, httpRange.Length)
//log.Debugf("encrypted offset: %d, length: %d, decrypted offset: %d", encryptedOffset, encryptedLength, decryptedOffset)
rrc, err := stream.GetRangeReadCloserFromLink(urlExchangeResp.Data.EncryptedSize, &model.Link{
URL: urlExchangeResp.Data.Url,
Header: http.Header{
"Origin": []string{"https://quqi.com"},
"Cookie": []string{d.Cookie},
},
})
if err != nil {
return nil, err
}
rc, err := rrc.RangeRead(ctx, http_range.Range{Start: encryptedOffset, Length: encryptedLength})
remoteClosers.AddClosers(rrc.GetClosers())
if err != nil {
return nil, err
}
decryptReader, err := sio.DecryptReader(rc, sio.Config{
MinVersion: sio.Version10,
MaxVersion: sio.Version20,
CipherSuites: []byte{sio.CHACHA20_POLY1305, sio.AES_256_GCM},
Key: decryptKey(urlExchangeResp.Data.EncryptedKey),
SequenceNumber: uint32(httpRange.Start / payloadSize),
})
if err != nil {
return nil, err
}
bufferReader := bufio.NewReader(decryptReader)
bufferReader.Discard(int(decryptedOffset))
return io.NopCloser(bufferReader), nil
}
return &model.Link{
RangeReadCloser: &model.RangeReadCloser{RangeReader: resultRangeReader, Closers: remoteClosers},
Expiration: &expiration,
}, nil
}

View File

@ -58,7 +58,7 @@ func (x *Thunder) Init(ctx context.Context) (err error) {
},
DeviceID: func() string {
if len(x.DeviceID) != 32 {
return utils.GetMD5EncodeStr(x.Username + x.Password)
return utils.GetMD5EncodeStr(x.DeviceID)
}
return x.DeviceID
}(),

View File

@ -7,7 +7,6 @@ import (
"io"
"net/http"
"strings"
"time"
"github.com/OpenListTeam/OpenList/drivers/base"
"github.com/OpenListTeam/OpenList/internal/driver"
@ -66,7 +65,6 @@ func (x *ThunderBrowser) Init(ctx context.Context) (err error) {
UserAgent: BuildCustomUserAgent(utils.GetMD5EncodeStr(x.Username+x.Password), PackageName, SdkVersion, ClientVersion, PackageName),
DownloadUserAgent: DownloadUserAgent,
UseVideoUrl: x.UseVideoUrl,
UseFluentPlay: x.UseFluentPlay,
RemoveWay: x.Addition.RemoveWay,
refreshCTokenCk: func(token string) {
x.CaptchaToken = token
@ -83,8 +81,6 @@ func (x *ThunderBrowser) Init(ctx context.Context) (err error) {
x.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
op.MustSaveDriverStorage(x)
}
// 清空 信任密钥
x.Addition.CreditKey = ""
}
x.SetTokenResp(token)
return err
@ -97,20 +93,10 @@ func (x *ThunderBrowser) Init(ctx context.Context) (err error) {
if ctoekn != "" {
x.SetCaptchaToken(ctoekn)
}
if x.Addition.CreditKey != "" {
x.SetCreditKey(x.Addition.CreditKey)
if x.DeviceID == "" {
x.SetDeviceID(utils.GetMD5EncodeStr(x.Username + x.Password))
}
if x.Addition.DeviceID != "" {
x.Common.DeviceID = x.Addition.DeviceID
} else {
x.Addition.DeviceID = x.Common.DeviceID
op.MustSaveDriverStorage(x)
}
x.XunLeiBrowserCommon.UseVideoUrl = x.UseVideoUrl
x.XunLeiBrowserCommon.UseFluentPlay = x.UseFluentPlay
x.Addition.RootFolderID = x.RootFolderID
// prevent duplicate logins
identity := x.GetIdentity()
@ -121,8 +107,6 @@ func (x *ThunderBrowser) Init(ctx context.Context) (err error) {
if err != nil {
return err
}
// 清空 信任密钥
x.Addition.CreditKey = ""
x.SetTokenResp(token)
}
@ -204,7 +188,6 @@ func (x *ThunderBrowserExpert) Init(ctx context.Context) (err error) {
return DownloadUserAgent
}(),
UseVideoUrl: x.UseVideoUrl,
UseFluentPlay: x.UseFluentPlay,
RemoveWay: x.ExpertAddition.RemoveWay,
refreshCTokenCk: func(token string) {
x.CaptchaToken = token
@ -217,13 +200,7 @@ func (x *ThunderBrowserExpert) Init(ctx context.Context) (err error) {
x.SetCaptchaToken(x.ExpertAddition.CaptchaToken)
op.MustSaveDriverStorage(x)
}
if x.ExpertAddition.CreditKey != "" {
x.SetCreditKey(x.ExpertAddition.CreditKey)
}
if x.ExpertAddition.DeviceID != "" {
x.Common.DeviceID = x.ExpertAddition.DeviceID
} else {
if x.Common.DeviceID != "" {
x.ExpertAddition.DeviceID = x.Common.DeviceID
op.MustSaveDriverStorage(x)
}
@ -236,7 +213,6 @@ func (x *ThunderBrowserExpert) Init(ctx context.Context) (err error) {
op.MustSaveDriverStorage(x)
}
x.XunLeiBrowserCommon.UseVideoUrl = x.UseVideoUrl
x.XunLeiBrowserCommon.UseFluentPlay = x.UseFluentPlay
x.ExpertAddition.RootFolderID = x.RootFolderID
// signing method
if x.SignType == "captcha_sign" {
@ -277,8 +253,6 @@ func (x *ThunderBrowserExpert) Init(ctx context.Context) (err error) {
if err != nil {
return err
}
// 清空 信任密钥
x.ExpertAddition.CreditKey = ""
x.SetTokenResp(token)
x.SetRefreshTokenFunc(func() error {
token, err := x.XunLeiBrowserCommon.RefreshToken(x.TokenResp.RefreshToken)
@ -287,8 +261,6 @@ func (x *ThunderBrowserExpert) Init(ctx context.Context) (err error) {
if err != nil {
x.GetStorage().SetStatus(fmt.Sprintf("%+v", err.Error()))
}
// 清空 信任密钥
x.ExpertAddition.CreditKey = ""
}
x.SetTokenResp(token)
op.MustSaveDriverStorage(x)
@ -314,7 +286,6 @@ func (x *ThunderBrowserExpert) Init(ctx context.Context) (err error) {
x.XunLeiBrowserCommon.UserAgent = x.UserAgent
x.XunLeiBrowserCommon.DownloadUserAgent = x.DownloadUserAgent
x.XunLeiBrowserCommon.UseVideoUrl = x.UseVideoUrl
x.XunLeiBrowserCommon.UseFluentPlay = x.UseFluentPlay
x.ExpertAddition.RootFolderID = x.RootFolderID
}
@ -335,7 +306,6 @@ func (x *ThunderBrowserExpert) SetTokenResp(token *TokenResp) {
type XunLeiBrowserCommon struct {
*Common
*TokenResp // login info
*CoreLoginResp // core login info
refreshTokenFunc func() error
}
@ -553,8 +523,7 @@ func (xc *XunLeiBrowserCommon) getFiles(ctx context.Context, dir model.Obj, path
folderSpace = dirF.GetSpace()
default:
// handle the root directory case
//folderSpace = ThunderBrowserDriveSpace
folderSpace = ThunderDriveSpace // Thunder Browser has been merged into Thunder Drive, so the root directory space is changed
folderSpace = ThunderBrowserDriveSpace
}
params := map[string]string{
"parent_id": dir.GetID(),
@ -600,11 +569,6 @@ func (xc *XunLeiBrowserCommon) SetTokenResp(tr *TokenResp) {
xc.TokenResp = tr
}
// SetCoreTokenResp sets the core login token
func (xc *XunLeiBrowserCommon) SetCoreTokenResp(tr *CoreLoginResp) {
xc.CoreLoginResp = tr
}
// SetSpaceTokenResp sets the space token
func (xc *XunLeiBrowserCommon) SetSpaceTokenResp(spaceToken string) {
xc.TokenResp.Token = spaceToken
@ -650,24 +614,14 @@ func (xc *XunLeiBrowserCommon) Request(url string, method string, callback base.
}
if errResp.ErrorMsg == "captcha_invalid" {
// captcha token expired
if err = xc.RefreshCaptchaTokenAtLogin(GetAction(method, url), xc.TokenResp.UserID); err != nil {
if err = xc.RefreshCaptchaTokenAtLogin(GetAction(method, url), xc.UserID); err != nil {
return nil, err
}
}
return nil, errors.New(errResp.ErrorMsg)
return nil, err
default:
// handle captcha errors not caught above
if errResp.ErrorMsg == "captcha_invalid" {
// captcha token expired
if err = xc.RefreshCaptchaTokenAtLogin(GetAction(method, url), xc.TokenResp.UserID); err != nil {
return nil, err
}
}
return nil, err
}
return xc.Request(url, method, callback, resp)
}
@ -713,25 +667,20 @@ func (xc *XunLeiBrowserCommon) GetSafeAccessToken(safePassword string) (string,
// Login signs in with username and password
func (xc *XunLeiBrowserCommon) Login(username, password string) (*TokenResp, error) {
// v3 login to obtain the sessionID
sessionID, err := xc.CoreLogin(username, password)
url := XLUSER_API_URL + "/auth/signin"
err := xc.RefreshCaptchaTokenInLogin(GetAction(http.MethodPost, url), username)
if err != nil {
return nil, err
}
// v1 login to obtain the token
url := XLUSER_API_URL + "/auth/signin/token"
if err = xc.RefreshCaptchaTokenInLogin(GetAction(http.MethodPost, url), username); err != nil {
return nil, err
}
var resp TokenResp
_, err = xc.Common.Request(url, http.MethodPost, func(req *resty.Request) {
req.SetPathParam("client_id", xc.ClientID)
req.SetBody(&SignInRequest{
CaptchaToken: xc.GetCaptchaToken(),
ClientID: xc.ClientID,
ClientSecret: xc.ClientSecret,
Provider: SignProvider,
SigninToken: sessionID,
Username: username,
Password: password,
})
}, &resp)
if err != nil {
@ -747,157 +696,3 @@ func (xc *XunLeiBrowserCommon) IsLogin() bool {
_, err := xc.Request(XLUSER_API_URL+"/user/me", http.MethodGet, nil, nil)
return err == nil
}
// OfflineDownload creates an offline download task for a file
func (xc *XunLeiBrowserCommon) OfflineDownload(ctx context.Context, fileUrl string, parentDir model.Obj, fileName string) (*OfflineTask, error) {
var resp OfflineDownloadResp
body := base.Json{}
from := "cloudadd/"
if xc.UseFluentPlay {
body = base.Json{
"kind": FILE,
"name": fileName,
// the Fluent Play API forces the file into the "SPACE_FAVORITE" folder
//"parent_id": parentDir.GetID(),
"upload_type": UPLOAD_TYPE_URL,
"url": base.Json{
"url": fileUrl,
//"files": []string{"0"}, // 0 表示只下载第一个文件
},
"params": base.Json{
"cookie": "null",
"web_title": "",
"lastSession": "",
"flags": "9",
"scene": "smart_spot_panel",
"referer": "https://x.xunlei.com",
"dedup_index": "0",
},
"need_dedup": true,
"folder_type": "FAVORITE",
"space": ThunderBrowserDriveFluentPlayFolderType,
}
from = "FLUENT_PLAY/sniff_ball/fluent_play/SPACE_FAVORITE"
} else {
body = base.Json{
"kind": FILE,
"name": fileName,
"parent_id": parentDir.GetID(),
"upload_type": UPLOAD_TYPE_URL,
"url": base.Json{
"url": fileUrl,
},
}
if files, ok := parentDir.(*Files); ok {
body["space"] = files.GetSpace()
} else {
// if parentDir is not a *Files, default to ThunderDriveSpace
body["space"] = ThunderDriveSpace
}
}
_, err := xc.Request(FILE_API_URL, http.MethodPost, func(r *resty.Request) {
r.SetContext(ctx)
r.SetQueryParam("_from", from)
r.SetBody(&body)
}, &resp)
if err != nil {
return nil, err
}
return &resp.Task, err
}
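// Hypothetical usage sketch (editor's note, not from the source): with an initialized
// *XunLeiBrowserCommon xc and a parent directory object parentDir, a magnet link could be queued as
//
//	task, err := xc.OfflineDownload(ctx, "magnet:?xt=urn:btih:...", parentDir, "example.mkv")
//
// When UseFluentPlay is enabled the parent directory is ignored and the task is placed in the
// SPACE_FAVORITE folder instead.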
// OfflineList fetches the offline download task list
func (xc *XunLeiBrowserCommon) OfflineList(ctx context.Context, nextPageToken string) ([]OfflineTask, error) {
res := make([]OfflineTask, 0)
var resp OfflineListResp
_, err := xc.Request(TASK_API_URL, http.MethodGet, func(req *resty.Request) {
req.SetContext(ctx).
SetQueryParams(map[string]string{
"type": "offline",
"limit": "10000",
"page_token": nextPageToken,
"space": "default/*",
})
}, &resp)
if err != nil {
return nil, fmt.Errorf("failed to get offline list: %w", err)
}
res = append(res, resp.Tasks...)
return res, nil
}
func (xc *XunLeiBrowserCommon) DeleteOfflineTasks(ctx context.Context, taskIDs []string) error {
queryParams := map[string]string{
"task_ids": strings.Join(taskIDs, ","),
"_t": fmt.Sprintf("%d", time.Now().UnixMilli()),
}
if xc.UseFluentPlay {
queryParams["space"] = ThunderBrowserDriveFluentPlayFolderType
}
_, err := xc.Request(TASK_API_URL, http.MethodDelete, func(req *resty.Request) {
req.SetContext(ctx).
SetQueryParams(queryParams)
}, nil)
if err != nil {
return fmt.Errorf("failed to delete tasks %v: %w", taskIDs, err)
}
return nil
}
func (xc *XunLeiBrowserCommon) CoreLogin(username string, password string) (sessionID string, err error) {
url := XLUSER_API_BASE_URL + "/xluser.core.login/v3/login"
var resp CoreLoginResp
res, err := xc.Common.Request(url, http.MethodPost, func(req *resty.Request) {
req.SetHeader("User-Agent", "android-ok-http-client/xl-acc-sdk/version-5.0.9.509300")
req.SetBody(&CoreLoginRequest{
ProtocolVersion: "301",
SequenceNo: "1000010",
PlatformVersion: "10",
IsCompressed: "0",
Appid: APPID,
ClientVersion: xc.Common.ClientVersion,
PeerID: "00000000000000000000000000000000",
AppName: "ANDROID-com.xunlei.browser",
SdkVersion: "509300",
Devicesign: generateDeviceSign(xc.DeviceID, xc.PackageName),
NetWorkType: "WIFI",
ProviderName: "NONE",
DeviceModel: "M2004J7AC",
DeviceName: "Xiaomi_M2004j7ac",
OSVersion: "12",
Creditkey: xc.GetCreditKey(),
Hl: "zh-CN",
UserName: username,
PassWord: password,
VerifyKey: "",
VerifyCode: "",
IsMd5Pwd: "0",
})
}, nil)
if err != nil {
return "", err
}
if err = utils.Json.Unmarshal(res, &resp); err != nil {
return "", err
}
xc.SetCoreTokenResp(&resp)
sessionID = resp.SessionID
return sessionID, nil
}

View File

@ -25,21 +25,19 @@ type ExpertAddition struct {
SafePassword string `json:"safe_password" required:"true" help:"super safe password"` // super safe-box password
// signing method 1
Algorithms string `json:"algorithms" required:"true" help:"sign type is algorithms,this is required" default:"Cw4kArmKJ/aOiFTxnQ0ES+D4mbbrIUsFn,HIGg0Qfbpm5ThZ/RJfjoao4YwgT9/M,u/PUD,OlAm8tPkOF1qO5bXxRN2iFttuDldrg,FFIiM6sFhWhU7tIMVUKOF7CUv/KzgwwV8FE,yN,4m5mglrIHksI6wYdq,LXEfS7,T+p+C+F2yjgsUtiXWU/cMNYEtJI4pq7GofW,14BrGIEMXkbvFvZ49nDUfVCRcHYFOJ1BP1Y,kWIH3Row,RAmRTKNCjucPWC"`
Algorithms string `json:"algorithms" required:"true" help:"sign type is algorithms,this is required" default:"uWRwO7gPfdPB/0NfPtfQO+71,F93x+qPluYy6jdgNpq+lwdH1ap6WOM+nfz8/V,0HbpxvpXFsBK5CoTKam,dQhzbhzFRcawnsZqRETT9AuPAJ+wTQso82mRv,SAH98AmLZLRa6DB2u68sGhyiDh15guJpXhBzI,unqfo7Z64Rie9RNHMOB,7yxUdFADp3DOBvXdz0DPuKNVT35wqa5z0DEyEvf,RBG,ThTWPG5eC0UBqlbQ+04nZAptqGCdpv9o55A"`
// signing method 2
CaptchaSign string `json:"captcha_sign" required:"true" help:"sign type is captcha_sign,this is required"`
Timestamp string `json:"timestamp" required:"true" help:"sign type is captcha_sign,this is required"`
// captcha
CaptchaToken string `json:"captcha_token"`
// trusted credit key
CreditKey string `json:"credit_key" help:"credit key,used for login"`
// required and affects login; determined by the signing method
DeviceID string `json:"device_id" required:"false" default:""`
ClientID string `json:"client_id" required:"true" default:"ZUBzD9J_XPXfn7f7"`
ClientSecret string `json:"client_secret" required:"true" default:"yESVmHecEe6F0aou69vl-g"`
ClientVersion string `json:"client_version" required:"true" default:"1.40.0.7208"`
ClientVersion string `json:"client_version" required:"true" default:"1.10.0.2633"`
PackageName string `json:"package_name" required:"true" default:"com.xunlei.browser"`
// does not affect login, but affects download speed
@ -48,8 +46,6 @@ type ExpertAddition struct {
// prefer the video URL over the download URL
UseVideoUrl bool `json:"use_video_url"`
// whether offline downloads use the Fluent Play API
UseFluentPlay bool `json:"use_fluent_play" default:"false" help:"use fluent play for offline download,only magnet links supported"`
// removal method
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
}
@ -83,11 +79,7 @@ type Addition struct {
Password string `json:"password" required:"true"`
SafePassword string `json:"safe_password" required:"true"` // super safe-box password
CaptchaToken string `json:"captcha_token"`
CreditKey string `json:"credit_key" help:"credit key,used for login"` // trusted credit key
DeviceID string `json:"device_id" default:""` // login device ID
UseVideoUrl bool `json:"use_video_url" default:"false"`
// whether offline downloads use the Fluent Play API
UseFluentPlay bool `json:"use_fluent_play" default:"false" help:"use fluent play for offline download,only magnet links supported"`
RemoveWay string `json:"remove_way" required:"true" type:"select" options:"trash,delete"`
}

View File

@ -18,10 +18,6 @@ type ErrResp struct {
}
func (e *ErrResp) IsError() bool {
if e.ErrorMsg == "success" {
return false
}
return e.ErrorCode != 0 || e.ErrorMsg != "" || e.ErrorDescription != ""
}
@ -72,78 +68,13 @@ func (t *TokenResp) GetSpaceToken() string {
}
type SignInRequest struct {
CaptchaToken string `json:"captcha_token"`
ClientID string `json:"client_id"`
ClientSecret string `json:"client_secret"`
Provider string `json:"provider"`
SigninToken string `json:"signin_token"`
}
type CoreLoginRequest struct {
ProtocolVersion string `json:"protocolVersion"`
SequenceNo string `json:"sequenceNo"`
PlatformVersion string `json:"platformVersion"`
IsCompressed string `json:"isCompressed"`
Appid string `json:"appid"`
ClientVersion string `json:"clientVersion"`
PeerID string `json:"peerID"`
AppName string `json:"appName"`
SdkVersion string `json:"sdkVersion"`
Devicesign string `json:"devicesign"`
NetWorkType string `json:"netWorkType"`
ProviderName string `json:"providerName"`
DeviceModel string `json:"deviceModel"`
DeviceName string `json:"deviceName"`
OSVersion string `json:"OSVersion"`
Creditkey string `json:"creditkey"`
Hl string `json:"hl"`
UserName string `json:"userName"`
PassWord string `json:"passWord"`
VerifyKey string `json:"verifyKey"`
VerifyCode string `json:"verifyCode"`
IsMd5Pwd string `json:"isMd5Pwd"`
}
type CoreLoginResp struct {
Account string `json:"account"`
Creditkey string `json:"creditkey"`
/* Error string `json:"error"`
ErrorCode string `json:"errorCode"`
ErrorDescription string `json:"error_description"`*/
ExpiresIn int `json:"expires_in"`
IsCompressed string `json:"isCompressed"`
IsSetPassWord string `json:"isSetPassWord"`
KeepAliveMinPeriod string `json:"keepAliveMinPeriod"`
KeepAlivePeriod string `json:"keepAlivePeriod"`
LoginKey string `json:"loginKey"`
NickName string `json:"nickName"`
PlatformVersion string `json:"platformVersion"`
ProtocolVersion string `json:"protocolVersion"`
SecureKey string `json:"secureKey"`
SequenceNo string `json:"sequenceNo"`
SessionID string `json:"sessionID"`
Timestamp string `json:"timestamp"`
UserID string `json:"userID"`
UserName string `json:"userName"`
UserNewNo string `json:"userNewNo"`
Version string `json:"version"`
/* VipList []struct {
ExpireDate string `json:"expireDate"`
IsAutoDeduct string `json:"isAutoDeduct"`
IsVip string `json:"isVip"`
IsYear string `json:"isYear"`
PayID string `json:"payId"`
PayName string `json:"payName"`
Register string `json:"register"`
Vasid string `json:"vasid"`
VasType string `json:"vasType"`
VipDayGrow string `json:"vipDayGrow"`
VipGrow string `json:"vipGrow"`
VipLevel string `json:"vipLevel"`
Icon struct {
General string `json:"general"`
Small string `json:"small"`
} `json:"icon"`
} `json:"vipList"`*/
Username string `json:"username"`
Password string `json:"password"`
}
/*
@ -303,76 +234,3 @@ type UploadTaskResponse struct {
File Files `json:"file"`
}
// OfflineDownloadResp is the response of an offline download request
type OfflineDownloadResp struct {
File *string `json:"file"`
Task OfflineTask `json:"task"`
UploadType string `json:"upload_type"`
URL struct {
Kind string `json:"kind"`
} `json:"url"`
}
// OfflineListResp is the response of the offline download task list
type OfflineListResp struct {
ExpiresIn int64 `json:"expires_in"`
NextPageToken string `json:"next_page_token"`
Tasks []OfflineTask `json:"tasks"`
}
// OfflineTask describes an offline download task
type OfflineTask struct {
Callback string `json:"callback"`
CreatedTime string `json:"created_time"`
FileID string `json:"file_id"`
FileName string `json:"file_name"`
FileSize string `json:"file_size"`
IconLink string `json:"icon_link"`
ID string `json:"id"`
Kind string `json:"kind"`
Message string `json:"message"`
Name string `json:"name"`
Params Params `json:"params"`
Phase string `json:"phase"` // PHASE_TYPE_RUNNING, PHASE_TYPE_ERROR, PHASE_TYPE_COMPLETE, PHASE_TYPE_PENDING
Progress int64 `json:"progress"`
Space string `json:"space"`
StatusSize int64 `json:"status_size"`
Statuses []string `json:"statuses"`
ThirdTaskID string `json:"third_task_id"`
Type string `json:"type"`
UpdatedTime string `json:"updated_time"`
UserID string `json:"user_id"`
}
type Params struct {
FolderType string `json:"folder_type"`
PredictSpeed string `json:"predict_speed"`
PredictType string `json:"predict_type"`
}
// LoginReviewResp is the login verification response
type LoginReviewResp struct {
Creditkey string `json:"creditkey"`
Error string `json:"error"`
ErrorCode string `json:"errorCode"`
ErrorDesc string `json:"errorDesc"`
ErrorDescURL string `json:"errorDescUrl"`
ErrorIsRetry int `json:"errorIsRetry"`
ErrorDescription string `json:"error_description"`
IsCompressed string `json:"isCompressed"`
PlatformVersion string `json:"platformVersion"`
ProtocolVersion string `json:"protocolVersion"`
Reviewurl string `json:"reviewurl"`
SequenceNo string `json:"sequenceNo"`
UserID string `json:"userID"`
VerifyType string `json:"verifyType"`
}
// ReviewData is the verification payload
type ReviewData struct {
Creditkey string `json:"creditkey"`
Reviewurl string `json:"reviewurl"`
Deviceid string `json:"deviceid"`
Devicesign string `json:"devicesign"`
}

View File

@ -4,7 +4,6 @@ import (
"crypto/md5"
"crypto/sha1"
"encoding/hex"
"encoding/json"
"fmt"
"io"
"net/http"
@ -20,33 +19,28 @@ import (
const (
API_URL = "https://x-api-pan.xunlei.com/drive/v1"
FILE_API_URL = API_URL + "/files"
TASK_API_URL = API_URL + "/tasks"
XLUSER_API_BASE_URL = "https://xluser-ssl.xunlei.com"
XLUSER_API_URL = XLUSER_API_BASE_URL + "/v1"
XLUSER_API_URL = "https://xluser-ssl.xunlei.com/v1"
)
var Algorithms = []string{
"Cw4kArmKJ/aOiFTxnQ0ES+D4mbbrIUsFn",
"HIGg0Qfbpm5ThZ/RJfjoao4YwgT9/M",
"u/PUD",
"OlAm8tPkOF1qO5bXxRN2iFttuDldrg",
"FFIiM6sFhWhU7tIMVUKOF7CUv/KzgwwV8FE",
"yN",
"4m5mglrIHksI6wYdq",
"LXEfS7",
"T+p+C+F2yjgsUtiXWU/cMNYEtJI4pq7GofW",
"14BrGIEMXkbvFvZ49nDUfVCRcHYFOJ1BP1Y",
"kWIH3Row",
"RAmRTKNCjucPWC",
"uWRwO7gPfdPB/0NfPtfQO+71",
"F93x+qPluYy6jdgNpq+lwdH1ap6WOM+nfz8/V",
"0HbpxvpXFsBK5CoTKam",
"dQhzbhzFRcawnsZqRETT9AuPAJ+wTQso82mRv",
"SAH98AmLZLRa6DB2u68sGhyiDh15guJpXhBzI",
"unqfo7Z64Rie9RNHMOB",
"7yxUdFADp3DOBvXdz0DPuKNVT35wqa5z0DEyEvf",
"RBG",
"ThTWPG5eC0UBqlbQ+04nZAptqGCdpv9o55A",
}
const (
ClientID = "ZUBzD9J_XPXfn7f7"
ClientSecret = "yESVmHecEe6F0aou69vl-g"
ClientVersion = "1.40.0.7208"
ClientVersion = "1.10.0.2633"
PackageName = "com.xunlei.browser"
DownloadUserAgent = "AndroidDownloadManager/13 (Linux; U; Android 13; M2004J7AC Build/SP1A.210812.016)"
SdkVersion = "509300"
SdkVersion = "233100"
)
const (
@ -69,13 +63,6 @@ const (
ThunderBrowserDriveSafeSpace = "SPACE_BROWSER_SAFE"
ThunderDriveFolderType = "DEFAULT_ROOT"
ThunderBrowserDriveSafeFolderType = "BROWSER_SAFE"
ThunderBrowserDriveFluentPlayFolderType = "SPACE_FAVORITE" // Fluent Play folder identifier
)
const (
SignProvider = "access_end_point_token"
APPID = "22062"
APPKey = "a5d7416858147a4ab99573872ffccef8"
)
func GetAction(method string, url string) string {
@ -88,8 +75,6 @@ type Common struct {
captchaToken string
creditKey string
// signing related, use one of the two
Algorithms []string
Timestamp, CaptchaSign string
@ -103,7 +88,6 @@ type Common struct {
UserAgent string
DownloadUserAgent string
UseVideoUrl bool
UseFluentPlay bool
RemoveWay string
// callback invoked when the captcha token is refreshed successfully
@ -121,13 +105,6 @@ func (c *Common) GetCaptchaToken() string {
return c.captchaToken
}
func (c *Common) SetCreditKey(creditKey string) {
c.creditKey = creditKey
}
func (c *Common) GetCreditKey() string {
return c.creditKey
}
// RefreshCaptchaTokenAtLogin refreshes the captcha token (after login)
func (c *Common) RefreshCaptchaTokenAtLogin(action, userID string) error {
metas := map[string]string{
@ -229,53 +206,12 @@ func (c *Common) Request(url, method string, callback base.ReqCallback, resp int
var erron ErrResp
utils.Json.Unmarshal(res.Body(), &erron)
if erron.IsError() {
// review_panel means SMS verification is required
if erron.ErrorMsg == "review_panel" {
return nil, c.getReviewData(res)
}
return nil, &erron
}
return res.Body(), nil
}
// collect the data required for verification
func (c *Common) getReviewData(res *resty.Response) error {
var reviewResp LoginReviewResp
var reviewData ReviewData
if err := utils.Json.Unmarshal(res.Body(), &reviewResp); err != nil {
return err
}
deviceSign := generateDeviceSign(c.DeviceID, c.PackageName)
reviewData = ReviewData{
Creditkey: reviewResp.Creditkey,
Reviewurl: reviewResp.Reviewurl + "&deviceid=" + deviceSign,
Deviceid: deviceSign,
Devicesign: deviceSign,
}
// marshal reviewData into a JSON string
reviewDataJSON, _ := json.MarshalIndent(reviewData, "", " ")
//reviewDataJSON, _ := json.Marshal(reviewData)
return fmt.Errorf(`
<div style="font-family: Arial, sans-serif; padding: 15px; border-radius: 5px; border: 1px solid #e0e0e0;>
<h3 style="color: #d9534f; margin-top: 0;">
<span style="font-size: 16px;">🔒 本次登录需要验证</span><br>
<span style="font-size: 14px; font-weight: normal; color: #666;">This login requires verification</span>
</h3>
<p style="font-size: 14px; margin-bottom: 15px;">下面是验证所需要的数据,具体使用方法请参照对应的驱动文档<br>
<span style="color: #666; font-size: 13px;">Below are the relevant verification data. For specific usage methods, please refer to the corresponding driver documentation.</span></p>
<div style="border: 1px solid #ddd; border-radius: 4px; padding: 10px; overflow-x: auto; font-family: 'Courier New', monospace; font-size: 13px;">
<pre style="margin: 0; white-space: pre-wrap;"><code>%s</code></pre>
</div>
</div>`, string(reviewDataJSON))
}
// compute the file's Gcid
func getGcid(r io.Reader, size int64) (string, error) {
calcBlockSize := func(j int64) int64 {
@ -338,7 +274,7 @@ func EncryptPassword(password string) string {
func generateDeviceSign(deviceID, packageName string) string {
signatureBase := fmt.Sprintf("%s%s%s%s", deviceID, packageName, APPID, APPKey)
signatureBase := fmt.Sprintf("%s%s%s%s", deviceID, packageName, "22062", "a5d7416858147a4ab99573872ffccef8")
sha1Hash := sha1.New()
sha1Hash.Write([]byte(signatureBase))
@ -363,7 +299,7 @@ func BuildCustomUserAgent(deviceID, appName, sdkVersion, clientVersion, packageN
sb.WriteString(fmt.Sprintf("ANDROID-%s/%s ", appName, clientVersion))
sb.WriteString("networkType/WIFI ")
sb.WriteString(fmt.Sprintf("appid/%s ", APPID))
sb.WriteString(fmt.Sprintf("appid/%s ", "22062"))
sb.WriteString(fmt.Sprintf("deviceName/Xiaomi_M2004j7ac "))
sb.WriteString(fmt.Sprintf("deviceModel/M2004J7AC "))
sb.WriteString(fmt.Sprintf("OSVersion/13 "))

View File

@ -125,10 +125,10 @@ func InitialSettings() []model.SettingItem {
"Google":"https://docs.google.com/gview?url=$e_url&embedded=true"
},
"pdf": {
"PDF.js":"https://res.oplist.org/pdf.js/web/viewer.html?file=$e_url"
"PDF.js":"//res.oplist.org/pdf.js/web/viewer.html?url=$e_url"
},
"epub": {
"EPUB.js":"https://res.oplist.org/epub.js/viewer.html?url=$e_url"
"EPUB.js":"//res.oplist.org/epub.js/viewer.html?url=$e_url"
}
}`, Type: conf.TypeText, Group: model.PREVIEW},
// {Key: conf.OfficeViewers, Value: `{

View File

@ -69,9 +69,6 @@ const (
// thunder
ThunderTempDir = "thunder_temp_dir"
// thunder_browser
ThunderBrowserTempDir = "thunder_browser_temp_dir"
// single
Token = "token"
IndexProgress = "index_progress"

View File

@ -82,14 +82,6 @@ func MoveWithTask(ctx context.Context, srcPath, dstDirPath string, lazyCache ...
return res, err
}
func MoveWithTaskAndValidation(ctx context.Context, srcPath, dstDirPath string, validateExistence bool, lazyCache ...bool) (task.TaskExtensionInfo, error) {
res, err := _moveWithValidation(ctx, srcPath, dstDirPath, validateExistence, lazyCache...)
if err != nil {
log.Errorf("failed move %s to %s: %+v", srcPath, dstDirPath, err)
}
return res, err
}
func Copy(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (task.TaskExtensionInfo, error) {
res, err := _copy(ctx, srcObjPath, dstDirPath, lazyCache...)
if err != nil {

View File

@ -3,16 +3,13 @@ package fs
import (
"context"
"fmt"
"net/http"
stdpath "path"
"sync"
"time"
"github.com/OpenListTeam/OpenList/internal/driver"
"github.com/OpenListTeam/OpenList/internal/errs"
"github.com/OpenListTeam/OpenList/internal/model"
"github.com/OpenListTeam/OpenList/internal/op"
"github.com/OpenListTeam/OpenList/internal/stream"
"github.com/OpenListTeam/OpenList/internal/task"
"github.com/OpenListTeam/OpenList/pkg/utils"
"github.com/pkg/errors"
@ -28,94 +25,21 @@ type MoveTask struct {
dstStorage driver.Driver `json:"-"`
SrcStorageMp string `json:"src_storage_mp"`
DstStorageMp string `json:"dst_storage_mp"`
IsRootTask bool `json:"is_root_task"`
RootTaskID string `json:"root_task_id"`
TotalFiles int `json:"total_files"`
CompletedFiles int `json:"completed_files"`
Phase string `json:"phase"` // "copying", "verifying", "deleting", "completed"
ValidateExistence bool `json:"validate_existence"`
mu sync.RWMutex `json:"-"`
}
type MoveProgress struct {
TaskID string `json:"task_id"`
Phase string `json:"phase"`
TotalFiles int `json:"total_files"`
CompletedFiles int `json:"completed_files"`
CurrentFile string `json:"current_file"`
Status string `json:"status"`
Progress int `json:"progress"`
}
var moveProgressMap = sync.Map{}
func (t *MoveTask) GetName() string {
return fmt.Sprintf("move [%s](%s) to [%s](%s)", t.SrcStorageMp, t.SrcObjPath, t.DstStorageMp, t.DstDirPath)
}
func (t *MoveTask) GetStatus() string {
t.mu.RLock()
defer t.mu.RUnlock()
return t.Status
}
func (t *MoveTask) GetProgress() float64 {
t.mu.RLock()
defer t.mu.RUnlock()
if t.TotalFiles == 0 {
return 0
}
switch t.Phase {
case "copying":
return float64(t.CompletedFiles*60) / float64(t.TotalFiles)
case "verifying":
return 60 + float64(t.CompletedFiles*20)/float64(t.TotalFiles)
case "deleting":
return 80 + float64(t.CompletedFiles*20)/float64(t.TotalFiles)
case "completed":
return 100
default:
return 0
}
}
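// Worked example of the phase weighting above (added for illustration): with TotalFiles = 10,
// Phase = "verifying" and CompletedFiles = 5, progress = 60 + 5*20/10 = 70. Copying contributes
// up to 60%, verifying and deleting up to 20% each.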
func (t *MoveTask) GetMoveProgress() *MoveProgress {
t.mu.RLock()
defer t.mu.RUnlock()
progress := int(t.GetProgress())
return &MoveProgress{
TaskID: t.GetID(),
Phase: t.Phase,
TotalFiles: t.TotalFiles,
CompletedFiles: t.CompletedFiles,
CurrentFile: t.SrcObjPath,
Status: t.Status,
Progress: progress,
}
}
func (t *MoveTask) updateProgress() {
if t.IsRootTask {
progress := t.GetMoveProgress()
moveProgressMap.Store(t.GetID(), progress)
}
}
func (t *MoveTask) Run() error {
t.ReinitCtx()
t.ClearEndTime()
t.SetStartTime(time.Now())
defer func() {
t.SetEndTime(time.Now())
if t.IsRootTask {
moveProgressMap.Delete(t.GetID())
}
}()
defer func() { t.SetEndTime(time.Now()) }()
var err error
if t.srcStorage == nil {
t.srcStorage, err = op.GetStorageByMountPath(t.SrcStorageMp)
@ -127,337 +51,11 @@ func (t *MoveTask) Run() error {
return errors.WithMessage(err, "failed get storage")
}
// Phase 1: Async validation (all validation happens in background)
t.mu.Lock()
t.Status = "validating source and destination"
t.mu.Unlock()
// Check if source exists
srcObj, err := op.Get(t.Ctx(), t.srcStorage, t.SrcObjPath)
if err != nil {
return errors.WithMessagef(err, "source file [%s] not found", stdpath.Base(t.SrcObjPath))
}
// Check if destination already exists (if validation is required)
if t.ValidateExistence {
dstFilePath := stdpath.Join(t.DstDirPath, srcObj.GetName())
if res, _ := op.Get(t.Ctx(), t.dstStorage, dstFilePath); res != nil {
return errors.Errorf("destination file [%s] already exists", srcObj.GetName())
}
}
// Phase 2: Execute move operation with proper sequencing
// Determine if we should use batch optimization for directories
if srcObj.IsDir() {
t.mu.Lock()
t.IsRootTask = true
t.RootTaskID = t.GetID()
t.mu.Unlock()
return t.runRootMoveTask()
}
// Use safe move logic for files
return t.safeMoveOperation(srcObj)
}
func (t *MoveTask) runRootMoveTask() error {
// First check if source is actually a directory
// If not, fall back to regular move logic
srcObj, err := op.Get(t.Ctx(), t.srcStorage, t.SrcObjPath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] object", t.SrcObjPath)
}
if !srcObj.IsDir() {
// Source is not a directory, use regular move logic
t.mu.Lock()
t.IsRootTask = false
t.mu.Unlock()
return t.safeMoveOperation(srcObj)
}
// Phase 1: Count total files and create directory structure
t.mu.Lock()
t.Phase = "preparing"
t.Status = "counting files and preparing directory structure"
t.mu.Unlock()
t.updateProgress()
totalFiles, err := t.countFilesAndCreateDirs(t.srcStorage, t.dstStorage, t.SrcObjPath, t.DstDirPath)
if err != nil {
return errors.WithMessage(err, "failed to prepare directory structure")
}
t.mu.Lock()
t.TotalFiles = totalFiles
t.Phase = "copying"
t.Status = "copying files"
t.mu.Unlock()
t.updateProgress()
// Phase 2: Copy all files
err = t.copyAllFiles(t.srcStorage, t.dstStorage, t.SrcObjPath, t.DstDirPath)
if err != nil {
return errors.WithMessage(err, "failed to copy files")
}
// Phase 3: Verify directory structure
t.mu.Lock()
t.Phase = "verifying"
t.Status = "verifying copied files"
t.CompletedFiles = 0
t.mu.Unlock()
t.updateProgress()
err = t.verifyDirectoryStructure(t.srcStorage, t.dstStorage, t.SrcObjPath, t.DstDirPath)
if err != nil {
return errors.WithMessage(err, "verification failed")
}
// Phase 4: Delete source files and directories
t.mu.Lock()
t.Phase = "deleting"
t.Status = "deleting source files"
t.CompletedFiles = 0
t.mu.Unlock()
t.updateProgress()
err = t.deleteSourceRecursively(t.srcStorage, t.SrcObjPath)
if err != nil {
return errors.WithMessage(err, "failed to delete source files")
}
t.mu.Lock()
t.Phase = "completed"
t.Status = "completed"
t.mu.Unlock()
t.updateProgress()
return nil
return moveBetween2Storages(t, t.srcStorage, t.dstStorage, t.SrcObjPath, t.DstDirPath)
}
var MoveTaskManager *tache.Manager[*MoveTask]
// GetMoveProgress returns the progress of a move task by task ID
func GetMoveProgress(taskID string) (*MoveProgress, bool) {
if progress, ok := moveProgressMap.Load(taskID); ok {
return progress.(*MoveProgress), true
}
return nil, false
}
// GetMoveTaskProgress returns the progress of a specific move task
func GetMoveTaskProgress(task *MoveTask) *MoveProgress {
return task.GetMoveProgress()
}
// countFilesAndCreateDirs recursively counts files and creates directory structure
func (t *MoveTask) countFilesAndCreateDirs(srcStorage, dstStorage driver.Driver, srcPath, dstPath string) (int, error) {
srcObj, err := op.Get(t.Ctx(), srcStorage, srcPath)
if err != nil {
return 0, errors.WithMessagef(err, "failed get src [%s] object", srcPath)
}
if !srcObj.IsDir() {
return 1, nil
}
// Create destination directory
dstObjPath := stdpath.Join(dstPath, srcObj.GetName())
err = op.MakeDir(t.Ctx(), dstStorage, dstObjPath)
if err != nil {
if errors.Is(err, errs.UploadNotSupported) {
return 0, errors.WithMessagef(err, "destination storage [%s] does not support creating directories", dstStorage.GetStorage().MountPath)
}
return 0, errors.WithMessagef(err, "failed to create destination directory [%s] in storage [%s]", dstObjPath, dstStorage.GetStorage().MountPath)
}
// List and count files recursively
objs, err := op.List(t.Ctx(), srcStorage, srcPath, model.ListArgs{})
if err != nil {
return 0, errors.WithMessagef(err, "failed list src [%s] objs", srcPath)
}
totalFiles := 0
for _, obj := range objs {
if utils.IsCanceled(t.Ctx()) {
return 0, nil
}
srcSubPath := stdpath.Join(srcPath, obj.GetName())
subCount, err := t.countFilesAndCreateDirs(srcStorage, dstStorage, srcSubPath, dstObjPath)
if err != nil {
return 0, err
}
totalFiles += subCount
}
return totalFiles, nil
}
// copyAllFiles recursively copies all files
func (t *MoveTask) copyAllFiles(srcStorage, dstStorage driver.Driver, srcPath, dstPath string) error {
srcObj, err := op.Get(t.Ctx(), srcStorage, srcPath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] object", srcPath)
}
if !srcObj.IsDir() {
// Copy single file
err := t.copyFile(srcStorage, dstStorage, srcPath, dstPath)
if err != nil {
return err
}
t.mu.Lock()
t.CompletedFiles++
t.mu.Unlock()
t.updateProgress()
return nil
}
// Copy directory contents
objs, err := op.List(t.Ctx(), srcStorage, srcPath, model.ListArgs{})
if err != nil {
return errors.WithMessagef(err, "failed list src [%s] objs", srcPath)
}
dstObjPath := stdpath.Join(dstPath, srcObj.GetName())
for _, obj := range objs {
if utils.IsCanceled(t.Ctx()) {
return nil
}
srcSubPath := stdpath.Join(srcPath, obj.GetName())
err := t.copyAllFiles(srcStorage, dstStorage, srcSubPath, dstObjPath)
if err != nil {
return err
}
}
return nil
}
// copyFile copies a single file between storages
func (t *MoveTask) copyFile(srcStorage, dstStorage driver.Driver, srcFilePath, dstDirPath string) error {
srcFile, err := op.Get(t.Ctx(), srcStorage, srcFilePath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] file", srcFilePath)
}
link, _, err := op.Link(t.Ctx(), srcStorage, srcFilePath, model.LinkArgs{
Header: http.Header{},
})
if err != nil {
return errors.WithMessagef(err, "failed get [%s] link", srcFilePath)
}
fs := stream.FileStream{
Obj: srcFile,
Ctx: t.Ctx(),
}
ss, err := stream.NewSeekableStream(fs, link)
if err != nil {
return errors.WithMessagef(err, "failed get [%s] stream", srcFilePath)
}
return op.Put(t.Ctx(), dstStorage, dstDirPath, ss, nil, true)
}
// verifyDirectoryStructure compares source and destination directory structures
func (t *MoveTask) verifyDirectoryStructure(srcStorage, dstStorage driver.Driver, srcPath, dstPath string) error {
srcObj, err := op.Get(t.Ctx(), srcStorage, srcPath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] object", srcPath)
}
if !srcObj.IsDir() {
// Verify single file
dstFilePath := stdpath.Join(dstPath, srcObj.GetName())
_, err := op.Get(t.Ctx(), dstStorage, dstFilePath)
if err != nil {
return errors.WithMessagef(err, "verification failed: destination file [%s] not found", dstFilePath)
}
t.mu.Lock()
t.CompletedFiles++
t.mu.Unlock()
t.updateProgress()
return nil
}
// Verify directory
dstObjPath := stdpath.Join(dstPath, srcObj.GetName())
_, err = op.Get(t.Ctx(), dstStorage, dstObjPath)
if err != nil {
return errors.WithMessagef(err, "verification failed: destination directory [%s] not found", dstObjPath)
}
// Verify directory contents
srcObjs, err := op.List(t.Ctx(), srcStorage, srcPath, model.ListArgs{})
if err != nil {
return errors.WithMessagef(err, "failed list src [%s] objs for verification", srcPath)
}
for _, obj := range srcObjs {
if utils.IsCanceled(t.Ctx()) {
return nil
}
srcSubPath := stdpath.Join(srcPath, obj.GetName())
err := t.verifyDirectoryStructure(srcStorage, dstStorage, srcSubPath, dstObjPath)
if err != nil {
return err
}
}
return nil
}
// deleteSourceRecursively deletes source files and directories recursively
func (t *MoveTask) deleteSourceRecursively(srcStorage driver.Driver, srcPath string) error {
srcObj, err := op.Get(t.Ctx(), srcStorage, srcPath)
if err != nil {
return errors.WithMessagef(err, "failed get src [%s] object for deletion", srcPath)
}
if !srcObj.IsDir() {
// Delete single file
err := op.Remove(t.Ctx(), srcStorage, srcPath)
if err != nil {
return errors.WithMessagef(err, "failed to delete src [%s] file", srcPath)
}
t.mu.Lock()
t.CompletedFiles++
t.mu.Unlock()
t.updateProgress()
return nil
}
// Delete directory contents first
objs, err := op.List(t.Ctx(), srcStorage, srcPath, model.ListArgs{})
if err != nil {
return errors.WithMessagef(err, "failed list src [%s] objs for deletion", srcPath)
}
for _, obj := range objs {
if utils.IsCanceled(t.Ctx()) {
return nil
}
srcSubPath := stdpath.Join(srcPath, obj.GetName())
err := t.deleteSourceRecursively(srcStorage, srcSubPath)
if err != nil {
return err
}
}
// Delete the directory itself
err = op.Remove(t.Ctx(), srcStorage, srcPath)
if err != nil {
return errors.WithMessagef(err, "failed to delete src [%s] directory", srcPath)
}
return nil
}
func moveBetween2Storages(t *MoveTask, srcStorage, dstStorage driver.Driver, srcObjPath, dstDirPath string) error {
t.Status = "getting src object"
@ -558,22 +156,7 @@ func moveFileBetween2Storages(tsk *MoveTask, srcStorage, dstStorage driver.Drive
}
// safeMoveOperation ensures copy-then-delete sequence for safe move operations
func (t *MoveTask) safeMoveOperation(srcObj model.Obj) error {
if srcObj.IsDir() {
// For directories, use the original logic but ensure proper sequencing
return moveBetween2Storages(t, t.srcStorage, t.dstStorage, t.SrcObjPath, t.DstDirPath)
} else {
// For files, use the safe file move logic
return moveFileBetween2Storages(t, t.srcStorage, t.dstStorage, t.SrcObjPath, t.DstDirPath)
}
}
func _move(ctx context.Context, srcObjPath, dstDirPath string, lazyCache ...bool) (task.TaskExtensionInfo, error) {
return _moveWithValidation(ctx, srcObjPath, dstDirPath, false, lazyCache...)
}
func _moveWithValidation(ctx context.Context, srcObjPath, dstDirPath string, validateExistence bool, lazyCache ...bool) (task.TaskExtensionInfo, error) {
srcStorage, srcObjActualPath, err := op.GetStorageAndActualPath(srcObjPath)
if err != nil {
return nil, errors.WithMessage(err, "failed get src storage")
@ -583,7 +166,6 @@ func _moveWithValidation(ctx context.Context, srcObjPath, dstDirPath string, val
return nil, errors.WithMessage(err, "failed get dst storage")
}
// Try native move first if in the same storage
if srcStorage.GetStorage() == dstStorage.GetStorage() {
err = op.Move(ctx, srcStorage, srcObjActualPath, dstDirActualPath, lazyCache...)
if !errors.Is(err, errs.NotImplement) && !errors.Is(err, errs.NotSupport) {
@ -592,9 +174,6 @@ func _moveWithValidation(ctx context.Context, srcObjPath, dstDirPath string, val
}
taskCreator, _ := ctx.Value("user").(*model.User)
// Create task immediately without any synchronous checks to avoid blocking frontend
// All validation and type checking will be done asynchronously in the Run method
t := &MoveTask{
TaskExtension: task.TaskExtension{
Creator: taskCreator,
@ -605,10 +184,7 @@ func _moveWithValidation(ctx context.Context, srcObjPath, dstDirPath string, val
DstDirPath: dstDirActualPath,
SrcStorageMp: srcStorage.GetStorage().MountPath,
DstStorageMp: dstStorage.GetStorage().MountPath,
ValidateExistence: validateExistence,
Phase: "initializing",
}
MoveTaskManager.Add(t)
return t, nil
}

View File

@ -165,10 +165,6 @@ func (d *downloader) download() (io.ReadCloser, error) {
if maxPart < d.cfg.Concurrency {
d.cfg.Concurrency = maxPart
}
if d.params.Range.Length == 0 {
d.cfg.Concurrency = 1
}
log.Debugf("cfgConcurrency:%d", d.cfg.Concurrency)
if d.cfg.Concurrency == 1 {

View File

@ -7,6 +7,5 @@ import (
_ "github.com/OpenListTeam/OpenList/internal/offline_download/pikpak"
_ "github.com/OpenListTeam/OpenList/internal/offline_download/qbit"
_ "github.com/OpenListTeam/OpenList/internal/offline_download/thunder"
_ "github.com/OpenListTeam/OpenList/internal/offline_download/thunder_browser"
_ "github.com/OpenListTeam/OpenList/internal/offline_download/transmission"
)

View File

@ -1,171 +0,0 @@
package thunder_browser
import (
"context"
"errors"
"fmt"
"github.com/OpenListTeam/OpenList/drivers/thunder_browser"
"github.com/OpenListTeam/OpenList/internal/conf"
"github.com/OpenListTeam/OpenList/internal/setting"
"strconv"
"github.com/OpenListTeam/OpenList/internal/errs"
"github.com/OpenListTeam/OpenList/internal/model"
"github.com/OpenListTeam/OpenList/internal/offline_download/tool"
"github.com/OpenListTeam/OpenList/internal/op"
)
type ThunderBrowser struct {
refreshTaskCache bool
}
func (t *ThunderBrowser) Name() string {
return "ThunderBrowser"
}
func (t *ThunderBrowser) Items() []model.SettingItem {
return nil
}
func (t *ThunderBrowser) Run(task *tool.DownloadTask) error {
return errs.NotSupport
}
func (t *ThunderBrowser) Init() (string, error) {
t.refreshTaskCache = false
return "ok", nil
}
func (t *ThunderBrowser) IsReady() bool {
tempDir := setting.GetStr(conf.ThunderBrowserTempDir)
if tempDir == "" {
return false
}
storage, _, err := op.GetStorageAndActualPath(tempDir)
if err != nil {
return false
}
switch storage.(type) {
case *thunder_browser.ThunderBrowser, *thunder_browser.ThunderBrowserExpert:
return true
default:
return false
}
}
func (t *ThunderBrowser) AddURL(args *tool.AddUrlArgs) (string, error) {
// refresh the cache when a new task is added
t.refreshTaskCache = true
storage, actualPath, err := op.GetStorageAndActualPath(args.TempDir)
if err != nil {
return "", err
}
ctx := context.Background()
if err := op.MakeDir(ctx, storage, actualPath); err != nil {
return "", err
}
parentDir, err := op.GetUnwrap(ctx, storage, actualPath)
if err != nil {
return "", err
}
var task *thunder_browser.OfflineTask
switch v := storage.(type) {
case *thunder_browser.ThunderBrowser:
task, err = v.OfflineDownload(ctx, args.Url, parentDir, "")
case *thunder_browser.ThunderBrowserExpert:
task, err = v.OfflineDownload(ctx, args.Url, parentDir, "")
default:
return "", fmt.Errorf("unsupported storage driver for offline download, only ThunderBrowser is supported")
}
if err != nil {
return "", fmt.Errorf("failed to add offline download task: %w", err)
}
if task == nil {
return "", fmt.Errorf("failed to add offline download task: task is nil")
}
return task.ID, nil
}
func (t *ThunderBrowser) Remove(task *tool.DownloadTask) error {
storage, _, err := op.GetStorageAndActualPath(task.TempDir)
if err != nil {
return err
}
ctx := context.Background()
switch v := storage.(type) {
case *thunder_browser.ThunderBrowser:
err = v.DeleteOfflineTasks(ctx, []string{task.GID})
case *thunder_browser.ThunderBrowserExpert:
err = v.DeleteOfflineTasks(ctx, []string{task.GID})
default:
return fmt.Errorf("unsupported storage driver for offline download, only ThunderBrowser is supported")
}
if err != nil {
return err
}
return nil
}
func (t *ThunderBrowser) Status(task *tool.DownloadTask) (*tool.Status, error) {
storage, _, err := op.GetStorageAndActualPath(task.TempDir)
if err != nil {
return nil, err
}
var tasks []thunder_browser.OfflineTask
switch v := storage.(type) {
case *thunder_browser.ThunderBrowser:
tasks, err = t.GetTasks(v)
case *thunder_browser.ThunderBrowserExpert:
tasks, err = t.GetTasksExpert(v)
default:
return nil, fmt.Errorf("unsupported storage driver for offline download, only ThunderBrowser is supported")
}
if err != nil {
return nil, err
}
s := &tool.Status{
Progress: 0,
NewGID: "",
Completed: false,
Status: "the task has been deleted",
Err: nil,
}
for _, t := range tasks {
if t.ID == task.GID {
s.Progress = float64(t.Progress)
s.Status = t.Message
s.Completed = t.Phase == "PHASE_TYPE_COMPLETE"
s.TotalBytes, err = strconv.ParseInt(t.FileSize, 10, 64)
if err != nil {
s.TotalBytes = 0
}
if t.Phase == "PHASE_TYPE_ERROR" {
s.Err = errors.New(t.Message)
}
return s, nil
}
}
s.Err = fmt.Errorf("the task has been deleted")
return s, nil
}
func init() {
tool.Tools.Add(&ThunderBrowser{})
}

View File

@ -1,70 +0,0 @@
package thunder_browser
import (
"context"
"time"
"github.com/OpenListTeam/OpenList/drivers/thunder_browser"
"github.com/OpenListTeam/OpenList/internal/op"
"github.com/OpenListTeam/OpenList/pkg/singleflight"
"github.com/Xhofe/go-cache"
)
var taskCache = cache.NewMemCache(cache.WithShards[[]thunder_browser.OfflineTask](16))
var taskG singleflight.Group[[]thunder_browser.OfflineTask]
func (t *ThunderBrowser) GetTasks(thunderDriver *thunder_browser.ThunderBrowser) ([]thunder_browser.OfflineTask, error) {
key := op.Key(thunderDriver, "/drive/v1/task")
if !t.refreshTaskCache {
if tasks, ok := taskCache.Get(key); ok {
return tasks, nil
}
}
t.refreshTaskCache = false
tasks, err, _ := taskG.Do(key, func() ([]thunder_browser.OfflineTask, error) {
ctx := context.Background()
tasks, err := thunderDriver.OfflineList(ctx, "")
if err != nil {
return nil, err
}
// cache the result for 10s
if len(tasks) > 0 {
taskCache.Set(key, tasks, cache.WithEx[[]thunder_browser.OfflineTask](time.Second*10))
} else {
taskCache.Del(key)
}
return tasks, nil
})
if err != nil {
return nil, err
}
return tasks, nil
}
func (t *ThunderBrowser) GetTasksExpert(thunderDriver *thunder_browser.ThunderBrowserExpert) ([]thunder_browser.OfflineTask, error) {
key := op.Key(thunderDriver, "/drive/v1/task")
if !t.refreshTaskCache {
if tasks, ok := taskCache.Get(key); ok {
return tasks, nil
}
}
t.refreshTaskCache = false
tasks, err, _ := taskG.Do(key, func() ([]thunder_browser.OfflineTask, error) {
ctx := context.Background()
tasks, err := thunderDriver.OfflineList(ctx, "")
if err != nil {
return nil, err
}
// cache the result for 10s
if len(tasks) > 0 {
taskCache.Set(key, tasks, cache.WithEx[[]thunder_browser.OfflineTask](time.Second*10))
} else {
taskCache.Del(key)
}
return tasks, nil
})
if err != nil {
return nil, err
}
return tasks, nil
}

View File

@ -2,7 +2,6 @@ package tool
import (
"context"
"net/url"
stdpath "path"
"path/filepath"
@ -10,7 +9,6 @@ import (
_115 "github.com/OpenListTeam/OpenList/drivers/115"
"github.com/OpenListTeam/OpenList/drivers/pikpak"
"github.com/OpenListTeam/OpenList/drivers/thunder"
"github.com/OpenListTeam/OpenList/drivers/thunder_browser"
"github.com/OpenListTeam/OpenList/internal/conf"
"github.com/OpenListTeam/OpenList/internal/errs"
"github.com/OpenListTeam/OpenList/internal/fs"
@ -105,13 +103,6 @@ func AddURL(ctx context.Context, args *AddURLArgs) (task.TaskExtensionInfo, erro
} else {
tempDir = filepath.Join(setting.GetStr(conf.ThunderTempDir), uid)
}
case "ThunderBrowser":
switch storage.(type) {
case *thunder_browser.ThunderBrowser, *thunder_browser.ThunderBrowserExpert:
tempDir = args.DstDirPath
default:
tempDir = filepath.Join(setting.GetStr(conf.ThunderBrowserTempDir), uid)
}
}
taskCreator, _ := ctx.Value("user").(*model.User) // taskCreator is nil when convert failed

View File

@ -87,9 +87,6 @@ outer:
if t.tool.Name() == "Thunder" {
return nil
}
if t.tool.Name() == "ThunderBrowser" {
return nil
}
if t.tool.Name() == "115 Cloud" {
// hack for 115
<-time.After(time.Second * 1)
@ -162,7 +159,7 @@ func (t *DownloadTask) Update() (bool, error) {
func (t *DownloadTask) Transfer() error {
toolName := t.tool.Name()
if toolName == "115 Cloud" || toolName == "PikPak" || toolName == "Thunder" || toolName == "ThunderBrowser" {
if toolName == "115 Cloud" || toolName == "PikPak" || toolName == "Thunder" {
// if the download did not go directly to the destination path, transfer it there
if t.TempDir != t.DstDirPath {
return transferObj(t.Ctx(), t.TempDir, t.DstDirPath, t.DeletePolicy)

View File

@ -88,12 +88,17 @@ func FsMove(c *gin.Context) {
common.ErrorResp(c, err, 403)
return
}
// Create all tasks immediately without any synchronous validation
// All validation will be done asynchronously in the background
if !req.Overwrite {
for _, name := range req.Names {
if res, _ := fs.Get(c, stdpath.Join(dstDir, name), &fs.GetArgs{NoLog: true}); res != nil {
common.ErrorStrResp(c, fmt.Sprintf("file [%s] exists", name), 403)
return
}
}
}
var addedTasks []task.TaskExtensionInfo
for i, name := range req.Names {
t, err := fs.MoveWithTaskAndValidation(c, stdpath.Join(srcDir, name), dstDir, !req.Overwrite, len(req.Names) > i+1)
t, err := fs.MoveWithTask(c, stdpath.Join(srcDir, name), dstDir, len(req.Names) > i+1)
if t != nil {
addedTasks = append(addedTasks, t)
}
@@ -102,17 +107,12 @@ func FsMove(c *gin.Context) {
return
}
}
// Return immediately with task information
if len(addedTasks) > 0 {
common.SuccessResp(c, gin.H{
"message": fmt.Sprintf("Successfully created %d move task(s)", len(addedTasks)),
"tasks": getTaskInfos(addedTasks),
})
} else {
common.SuccessResp(c, gin.H{
"message": "Move operations completed immediately",
})
common.SuccessResp(c)
}
}
@@ -141,9 +141,14 @@ func FsCopy(c *gin.Context) {
common.ErrorResp(c, err, 403)
return
}
// Create all tasks immediately without any synchronous validation
// All validation will be done asynchronously in the background
if !req.Overwrite {
for _, name := range req.Names {
if res, _ := fs.Get(c, stdpath.Join(dstDir, name), &fs.GetArgs{NoLog: true}); res != nil {
common.ErrorStrResp(c, fmt.Sprintf("file [%s] exists", name), 403)
return
}
}
}
var addedTasks []task.TaskExtensionInfo
for i, name := range req.Names {
t, err := fs.Copy(c, stdpath.Join(srcDir, name), dstDir, len(req.Names) > i+1)
@@ -155,18 +160,9 @@ func FsCopy(c *gin.Context) {
return
}
}
// Return immediately with task information
if len(addedTasks) > 0 {
common.SuccessResp(c, gin.H{
"message": fmt.Sprintf("Successfully created %d copy task(s)", len(addedTasks)),
"tasks": getTaskInfos(addedTasks),
})
} else {
common.SuccessResp(c, gin.H{
"message": "Copy operations completed immediately",
})
}
}
type RenameReq struct {
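
Both the FsMove and FsCopy hunks apply the same synchronous guard: when overwrite is not requested, every destination name is checked first and the request fails with 403 if the target already exists, rather than queuing tasks immediately and validating in the background. A rough standalone sketch of that guard follows; checkConflicts and the existsFn callback are illustrative names, not code from this repository.

package movesketch

import (
	"fmt"
	stdpath "path"
)

// checkConflicts reports the first destination entry that already exists,
// or nil when every name is free or overwriting is allowed. existsFn stands
// in for the fs.Get lookup used by the handlers above.
func checkConflicts(dstDir string, names []string, overwrite bool, existsFn func(path string) bool) error {
	if overwrite {
		return nil
	}
	for _, name := range names {
		if existsFn(stdpath.Join(dstDir, name)) {
			return fmt.Errorf("file [%s] exists", name)
		}
	}
	return nil
}

Only after this guard passes does each handler create one task per name, so a conflict is reported before any work is queued.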

View File

@@ -4,7 +4,6 @@ import (
_115 "github.com/OpenListTeam/OpenList/drivers/115"
"github.com/OpenListTeam/OpenList/drivers/pikpak"
"github.com/OpenListTeam/OpenList/drivers/thunder"
"github.com/OpenListTeam/OpenList/drivers/thunder_browser"
"github.com/OpenListTeam/OpenList/internal/conf"
"github.com/OpenListTeam/OpenList/internal/model"
"github.com/OpenListTeam/OpenList/internal/offline_download/tool"
@@ -240,51 +239,6 @@ func SetThunder(c *gin.Context) {
common.SuccessResp(c, "ok")
}
type SetThunderBrowserReq struct {
TempDir string `json:"temp_dir" form:"temp_dir"`
}
func SetThunderBrowser(c *gin.Context) {
var req SetThunderBrowserReq
if err := c.ShouldBind(&req); err != nil {
common.ErrorResp(c, err, 400)
return
}
if req.TempDir != "" {
storage, _, err := op.GetStorageAndActualPath(req.TempDir)
if err != nil {
common.ErrorStrResp(c, "storage does not exists", 400)
return
}
if storage.Config().CheckStatus && storage.GetStorage().Status != op.WORK {
common.ErrorStrResp(c, "storage not init: "+storage.GetStorage().Status, 400)
return
}
switch storage.(type) {
case *thunder_browser.ThunderBrowser, *thunder_browser.ThunderBrowserExpert:
default:
common.ErrorStrResp(c, "unsupported storage driver for offline download, only ThunderBrowser is supported", 400)
}
}
items := []model.SettingItem{
{Key: conf.ThunderBrowserTempDir, Value: req.TempDir, Type: conf.TypeString, Group: model.OFFLINE_DOWNLOAD, Flag: model.PRIVATE},
}
if err := op.SaveSettingItems(items); err != nil {
common.ErrorResp(c, err, 500)
return
}
_tool, err := tool.Tools.Get("ThunderBrowser")
if err != nil {
common.ErrorResp(c, err, 500)
return
}
if _, err := _tool.Init(); err != nil {
common.ErrorResp(c, err, 500)
return
}
common.SuccessResp(c, "ok")
}
func OfflineDownloadTools(c *gin.Context) {
tools := tool.Tools.Names()
common.SuccessResp(c, tools)
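
The removed SetThunderBrowser handler above follows the same shape as the per-tool setters that remain: bind the request, verify that the configured temp dir resolves to a working storage of the expected driver type, persist the setting item, then re-initialize the tool. A generic outline of that flow is sketched below; every identifier in it is invented for the sketch and not taken from the project.

package toolsketch

import "fmt"

// toolSetup bundles the three steps the per-tool setting handlers share.
// All of these callbacks are placeholders for this sketch.
type toolSetup struct {
	validateStorage func(tempDir string) error // e.g. resolve the path, check driver type and status
	saveSetting     func(tempDir string) error // e.g. persist the temp-dir setting item
	reinitTool      func() error               // e.g. look the tool up again and call Init
}

// configureTempDir validates an optional temp dir, saves it, and
// re-initializes the tool so the new setting takes effect.
func configureTempDir(s toolSetup, tempDir string) error {
	if tempDir != "" {
		if err := s.validateStorage(tempDir); err != nil {
			return fmt.Errorf("invalid temp dir: %w", err)
		}
	}
	if err := s.saveSetting(tempDir); err != nil {
		return err
	}
	return s.reinitTool()
}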

View File

@@ -147,7 +147,6 @@ func admin(g *gin.RouterGroup) {
setting.POST("/set_115", handles.Set115)
setting.POST("/set_pikpak", handles.SetPikPak)
setting.POST("/set_thunder", handles.SetThunder)
setting.POST("/set_thunder_browser", handles.SetThunderBrowser)
// retain /admin/task API to ensure compatibility with legacy automation scripts
_task(g.Group("/task"))

View File

@@ -15,7 +15,7 @@ type SiteConfig struct {
func getSiteConfig() SiteConfig {
siteConfig := SiteConfig{
BasePath: conf.URL.Path,
Cdn: strings.ReplaceAll(strings.TrimSuffix(conf.Conf.Cdn, "/"), "$version", strings.TrimPrefix(conf.WebVersion, "v"),),
Cdn: strings.ReplaceAll(strings.TrimSuffix(conf.Conf.Cdn, "/"), "$version", conf.WebVersion),
}
if siteConfig.BasePath != "" {
siteConfig.BasePath = utils.FixAndCleanPath(siteConfig.BasePath)
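
The Cdn change in this hunk swaps the substituted value: judging by the diff order, the old line trimmed the leading v from conf.WebVersion before filling the $version placeholder, while the new line inserts conf.WebVersion as-is. A quick illustration follows; the CDN URL and version string are made up for the example.

package cdnsketch

import "strings"

// expandCdn fills the $version placeholder the same way getSiteConfig does:
// trim a trailing slash from the configured CDN, then substitute the version.
func expandCdn(cdn, webVersion string) string {
	return strings.ReplaceAll(strings.TrimSuffix(cdn, "/"), "$version", webVersion)
}

// expandCdn("https://cdn.example.net/openlist-web/$version/", "v4.0.0")
// yields "https://cdn.example.net/openlist-web/v4.0.0"; the previous code,
// which applied strings.TrimPrefix(conf.WebVersion, "v") first, would have
// produced ".../openlist-web/4.0.0" instead.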