Compare commits
67 Commits
v2.0.0-bet
...
v2.0.0
Author | SHA1 | Date | |
---|---|---|---|
1d7d37e642 | |||
b62a716267 | |||
944941db10 | |||
bd91acc5d0 | |||
cd50227835 | |||
50226f66e3 | |||
9dcaa9b07a | |||
fa6c0f78bc | |||
7f35dc6ade | |||
5d6463b75a | |||
733b38b435 | |||
50a02a7af7 | |||
71b1517de7 | |||
ffdd88ec66 | |||
4ff2756572 | |||
d955038ebc | |||
72d5e4e691 | |||
b1e662cd34 | |||
0f0e1104a4 | |||
3041da35ab | |||
9eab54a7c8 | |||
0b8d3a0a2c | |||
f9945a14a8 | |||
c39752ceb4 | |||
53b383d2cf | |||
e76fc3e616 | |||
eb21b87020 | |||
f577d82242 | |||
98691b2aa8 | |||
4fe6ed6c3e | |||
fe73ece57d | |||
59b8f1084a | |||
2f669ac45c | |||
d03d91d518 | |||
fe981f67ec | |||
8cfabfd0f5 | |||
163ee1159e | |||
e31402e94f | |||
5500980d63 | |||
b1695445e0 | |||
5db1ad4adf | |||
725f5b0c55 | |||
87a74394b3 | |||
a41c820525 | |||
cd53dc6d24 | |||
cfe16b5ed2 | |||
3d9746485d | |||
0b7f2fee7d | |||
36d52e0b75 | |||
e4d254e4b0 | |||
e8d27a30b4 | |||
69514668cc | |||
e5f8f59c87 | |||
f87ee1ed9e | |||
07155cfd01 | |||
5e982980dc | |||
8987958e26 | |||
4466cb19a5 | |||
da74e29b26 | |||
82272fcbf5 | |||
74d86f8cc4 | |||
c03646dedf | |||
c0d1888e25 | |||
f4942e89bd | |||
27e61c9eb8 | |||
aeb72320ca | |||
caddba05e9 |
2
.github/workflows/docker.yml
vendored
2
.github/workflows/docker.yml
vendored
@ -45,4 +45,4 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/ppc64le,linux/s390x
|
platforms: linux/amd64,linux/arm64,linux/arm/v7,linux/386,linux/arm/v6,linux/s390x
|
@ -32,11 +32,14 @@
|
|||||||
- 本地存储
|
- 本地存储
|
||||||
- 阿里云盘
|
- 阿里云盘
|
||||||
- Onedrive/世纪互联
|
- Onedrive/世纪互联
|
||||||
|
- 天翼云盘
|
||||||
|
- GoogleDrive
|
||||||
|
- 123pan
|
||||||
- ...
|
- ...
|
||||||
|
|
||||||
### 如何使用
|
### 如何使用
|
||||||
|
|
||||||
- https://www.nn.ci/archives/alist.html
|
- https://alist-doc.nn.ci/
|
||||||
|
|
||||||
### License
|
### License
|
||||||
|
|
||||||
|
31
alist.go
31
alist.go
@ -5,6 +5,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"github.com/Xhofe/alist/bootstrap"
|
"github.com/Xhofe/alist/bootstrap"
|
||||||
"github.com/Xhofe/alist/conf"
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
"github.com/Xhofe/alist/server"
|
"github.com/Xhofe/alist/server"
|
||||||
"github.com/gin-gonic/gin"
|
"github.com/gin-gonic/gin"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
@ -14,15 +15,28 @@ func init() {
|
|||||||
flag.StringVar(&conf.ConfigFile, "conf", "data/config.json", "config file")
|
flag.StringVar(&conf.ConfigFile, "conf", "data/config.json", "config file")
|
||||||
flag.BoolVar(&conf.Debug, "debug", false, "start with debug mode")
|
flag.BoolVar(&conf.Debug, "debug", false, "start with debug mode")
|
||||||
flag.BoolVar(&conf.Version, "version", false, "print version info")
|
flag.BoolVar(&conf.Version, "version", false, "print version info")
|
||||||
|
flag.BoolVar(&conf.Password, "password", false, "print current password")
|
||||||
flag.Parse()
|
flag.Parse()
|
||||||
}
|
}
|
||||||
|
|
||||||
func Init() {
|
func Init() bool {
|
||||||
bootstrap.InitLog()
|
bootstrap.InitLog()
|
||||||
bootstrap.InitConf()
|
bootstrap.InitConf()
|
||||||
bootstrap.InitCron()
|
bootstrap.InitCron()
|
||||||
bootstrap.InitModel()
|
bootstrap.InitModel()
|
||||||
|
if conf.Password {
|
||||||
|
pass, err := model.GetSettingByKey("password")
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(err.Error())
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
log.Infof("current password: %s", pass.Value)
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
bootstrap.InitSettings()
|
||||||
|
bootstrap.InitAccounts()
|
||||||
bootstrap.InitCache()
|
bootstrap.InitCache()
|
||||||
|
return true
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
@ -30,15 +44,22 @@ func main() {
|
|||||||
fmt.Printf("Built At: %s\nGo Version: %s\nAuthor: %s\nCommit ID: %s\nVersion: %s\n", conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.GitTag)
|
fmt.Printf("Built At: %s\nGo Version: %s\nAuthor: %s\nCommit ID: %s\nVersion: %s\n", conf.BuiltAt, conf.GoVersion, conf.GitAuthor, conf.GitCommit, conf.GitTag)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
Init()
|
if !Init() {
|
||||||
|
return
|
||||||
|
}
|
||||||
if !conf.Debug {
|
if !conf.Debug {
|
||||||
gin.SetMode(gin.ReleaseMode)
|
gin.SetMode(gin.ReleaseMode)
|
||||||
}
|
}
|
||||||
r := gin.Default()
|
r := gin.Default()
|
||||||
server.InitApiRouter(r)
|
server.InitApiRouter(r)
|
||||||
|
base := fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port)
|
||||||
log.Info("starting server")
|
log.Infof("start server @ %s", base)
|
||||||
err := r.Run(fmt.Sprintf("%s:%d", conf.Conf.Address, conf.Conf.Port))
|
var err error
|
||||||
|
if conf.Conf.Https {
|
||||||
|
err = r.RunTLS(base, conf.Conf.CertFile, conf.Conf.KeyFile)
|
||||||
|
} else {
|
||||||
|
err = r.Run(base)
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Errorf("failed to start: %s", err.Error())
|
log.Errorf("failed to start: %s", err.Error())
|
||||||
}
|
}
|
||||||
|
30
bootstrap/account.go
Normal file
30
bootstrap/account.go
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
package bootstrap
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/drivers"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
)
|
||||||
|
|
||||||
|
func InitAccounts() {
|
||||||
|
log.Infof("init accounts...")
|
||||||
|
var accounts []model.Account
|
||||||
|
if err := conf.DB.Find(&accounts).Error; err != nil {
|
||||||
|
log.Fatalf("failed sync init accounts")
|
||||||
|
}
|
||||||
|
for i, account := range accounts {
|
||||||
|
model.RegisterAccount(account)
|
||||||
|
driver, ok := drivers.GetDriver(account.Type)
|
||||||
|
if !ok {
|
||||||
|
log.Errorf("no [%s] driver", account.Type)
|
||||||
|
} else {
|
||||||
|
err := driver.Save(&accounts[i], nil)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("init account [%s] error:[%s]", account.Name, err.Error())
|
||||||
|
} else {
|
||||||
|
log.Infof("success init account: %s, type: %s", account.Name, account.Type)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -5,7 +5,7 @@ import (
|
|||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
)
|
)
|
||||||
|
|
||||||
// initLog init log
|
// InitLog init log
|
||||||
func InitLog() {
|
func InitLog() {
|
||||||
if conf.Debug {
|
if conf.Debug {
|
||||||
log.SetLevel(log.DebugLevel)
|
log.SetLevel(log.DebugLevel)
|
||||||
@ -18,4 +18,5 @@ func InitLog() {
|
|||||||
TimestampFormat: "2006-01-02 15:04:05",
|
TimestampFormat: "2006-01-02 15:04:05",
|
||||||
FullTimestamp: true,
|
FullTimestamp: true,
|
||||||
})
|
})
|
||||||
|
log.Infof("init log...")
|
||||||
}
|
}
|
@ -3,7 +3,6 @@ package bootstrap
|
|||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"github.com/Xhofe/alist/conf"
|
"github.com/Xhofe/alist/conf"
|
||||||
"github.com/Xhofe/alist/drivers"
|
|
||||||
"github.com/Xhofe/alist/model"
|
"github.com/Xhofe/alist/model"
|
||||||
log "github.com/sirupsen/logrus"
|
log "github.com/sirupsen/logrus"
|
||||||
"gorm.io/driver/mysql"
|
"gorm.io/driver/mysql"
|
||||||
@ -72,161 +71,10 @@ func InitModel() {
|
|||||||
default:
|
default:
|
||||||
log.Fatalf("not supported database type: %s", databaseConfig.Type)
|
log.Fatalf("not supported database type: %s", databaseConfig.Type)
|
||||||
}
|
}
|
||||||
log.Infof("auto migrate model")
|
log.Infof("auto migrate model...")
|
||||||
err := conf.DB.AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{})
|
err := conf.DB.AutoMigrate(&model.SettingItem{}, &model.Account{}, &model.Meta{})
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("failed to auto migrate")
|
log.Fatalf("failed to auto migrate")
|
||||||
}
|
}
|
||||||
|
|
||||||
// TODO init filetype
|
|
||||||
initAccounts()
|
|
||||||
initSettings()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func initAccounts() {
|
|
||||||
log.Infof("init accounts...")
|
|
||||||
var accounts []model.Account
|
|
||||||
if err := conf.DB.Find(&accounts).Error; err != nil {
|
|
||||||
log.Fatalf("failed sync init accounts")
|
|
||||||
}
|
|
||||||
for _, account := range accounts {
|
|
||||||
model.RegisterAccount(account)
|
|
||||||
driver, ok := drivers.GetDriver(account.Type)
|
|
||||||
if !ok {
|
|
||||||
log.Errorf("no [%s] driver", driver)
|
|
||||||
} else {
|
|
||||||
err := driver.Save(&account, nil)
|
|
||||||
if err != nil {
|
|
||||||
log.Errorf("init account [%s] error:[%s]", account.Name, err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func initSettings() {
|
|
||||||
log.Infof("init settings...")
|
|
||||||
version := model.SettingItem{
|
|
||||||
Key: "version",
|
|
||||||
Value: conf.GitTag,
|
|
||||||
Description: "version",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.CONST,
|
|
||||||
}
|
|
||||||
|
|
||||||
_ = model.SaveSetting(version)
|
|
||||||
|
|
||||||
settings := []model.SettingItem{
|
|
||||||
{
|
|
||||||
Key: "title",
|
|
||||||
Value: "Alist",
|
|
||||||
Description: "title",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "password",
|
|
||||||
Value: "alist",
|
|
||||||
Description: "password",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PRIVATE,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "logo",
|
|
||||||
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
|
||||||
Description: "logo",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "favicon",
|
|
||||||
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
|
||||||
Description: "favicon",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "icon color",
|
|
||||||
Value: "teal.300",
|
|
||||||
Description: "icon's color",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "text types",
|
|
||||||
Value: "txt,htm,html,xml,java,properties,sql,js,md,json,conf,ini,vue,php,py,bat,gitignore,yml,go,sh,c,cpp,h,hpp",
|
|
||||||
Type: "string",
|
|
||||||
Description: "text type extensions",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "hide readme file",
|
|
||||||
Value: "true",
|
|
||||||
Type: "bool",
|
|
||||||
Description: "hide readme file? ",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "music cover",
|
|
||||||
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
|
||||||
Description: "music cover image",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "site beian",
|
|
||||||
Description: "chinese beian info",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "home readme url",
|
|
||||||
Description: "when have multiple, the readme file to show",
|
|
||||||
Type: "string",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "markdown theme",
|
|
||||||
Value: "vuepress",
|
|
||||||
Description: "default | github | vuepress",
|
|
||||||
Group: model.PUBLIC,
|
|
||||||
Type: "select",
|
|
||||||
Values: "default,github,vuepress",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "autoplay video",
|
|
||||||
Value: "false",
|
|
||||||
Type: "bool",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "autoplay audio",
|
|
||||||
Value: "false",
|
|
||||||
Type: "bool",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "check parent folder",
|
|
||||||
Value: "false",
|
|
||||||
Type: "bool",
|
|
||||||
Description: "check parent folder password",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "customize style",
|
|
||||||
Value: "",
|
|
||||||
Type: "text",
|
|
||||||
Description: "customize style, don't need add <style></style>",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
Key: "customize script",
|
|
||||||
Value: "",
|
|
||||||
Type: "text",
|
|
||||||
Description: "customize script, don't need add <script></script>",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, v := range settings {
|
|
||||||
_, err := model.GetSettingByKey(v.Key)
|
|
||||||
if err == gorm.ErrRecordNotFound {
|
|
||||||
err = model.SaveSetting(v)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatalf("failed write setting: %s", err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
model.LoadSettings()
|
|
||||||
}
|
|
||||||
|
169
bootstrap/setting.go
Normal file
169
bootstrap/setting.go
Normal file
@ -0,0 +1,169 @@
|
|||||||
|
package bootstrap
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"gorm.io/gorm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func InitSettings() {
|
||||||
|
log.Infof("init settings...")
|
||||||
|
version := model.SettingItem{
|
||||||
|
Key: "version",
|
||||||
|
Value: conf.GitTag,
|
||||||
|
Description: "version",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.CONST,
|
||||||
|
}
|
||||||
|
|
||||||
|
_ = model.SaveSetting(version)
|
||||||
|
|
||||||
|
settings := []model.SettingItem{
|
||||||
|
{
|
||||||
|
Key: "title",
|
||||||
|
Value: "Alist",
|
||||||
|
Description: "title",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "password",
|
||||||
|
Value: "alist",
|
||||||
|
Description: "password",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PRIVATE,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "logo",
|
||||||
|
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
||||||
|
Description: "logo",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "favicon",
|
||||||
|
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
||||||
|
Description: "favicon",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "icon color",
|
||||||
|
Value: "teal.300",
|
||||||
|
Description: "icon's color",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "text types",
|
||||||
|
Value: "txt,htm,html,xml,java,properties,sql,js,md,json,conf,ini,vue,php,py,bat,gitignore,yml,go,sh,c,cpp,h,hpp,tsx",
|
||||||
|
Type: "string",
|
||||||
|
Description: "text type extensions",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "hide readme file",
|
||||||
|
Value: "true",
|
||||||
|
Type: "bool",
|
||||||
|
Description: "hide readme file? ",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "music cover",
|
||||||
|
Value: "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png",
|
||||||
|
Description: "music cover image",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "site beian",
|
||||||
|
Description: "chinese beian info",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "home readme url",
|
||||||
|
Description: "when have multiple, the readme file to show",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "markdown theme",
|
||||||
|
Value: "vuepress",
|
||||||
|
Description: "default | github | vuepress",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
Type: "select",
|
||||||
|
Values: "default,github,vuepress",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "autoplay video",
|
||||||
|
Value: "false",
|
||||||
|
Type: "bool",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "autoplay audio",
|
||||||
|
Value: "false",
|
||||||
|
Type: "bool",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "check parent folder",
|
||||||
|
Value: "false",
|
||||||
|
Type: "bool",
|
||||||
|
Description: "check parent folder password",
|
||||||
|
Group: model.PRIVATE,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "customize style",
|
||||||
|
Value: "",
|
||||||
|
Type: "text",
|
||||||
|
Description: "customize style, don't need add <style></style>",
|
||||||
|
Group: model.PRIVATE,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "customize script",
|
||||||
|
Value: "",
|
||||||
|
Type: "text",
|
||||||
|
Description: "customize script, don't need add <script></script>",
|
||||||
|
Group: model.PRIVATE,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "animation",
|
||||||
|
Value: "true",
|
||||||
|
Type: "bool",
|
||||||
|
Description: "when there are a lot of files, the animation will freeze when opening",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "check down link",
|
||||||
|
Value: "false",
|
||||||
|
Type: "bool",
|
||||||
|
Description: "check down link password, your link will be 'https://alist.com/d/filename?pw=xxx'",
|
||||||
|
Group: model.PUBLIC,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "WebDAV username",
|
||||||
|
Value: "alist",
|
||||||
|
Description: "WebDAV username",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PRIVATE,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Key: "WebDAV password",
|
||||||
|
Value: "alist",
|
||||||
|
Description: "WebDAV password",
|
||||||
|
Type: "string",
|
||||||
|
Group: model.PRIVATE,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for _, v := range settings {
|
||||||
|
_, err := model.GetSettingByKey(v.Key)
|
||||||
|
if err == gorm.ErrRecordNotFound {
|
||||||
|
err = model.SaveSetting(v)
|
||||||
|
if err != nil {
|
||||||
|
log.Fatalf("failed write setting: %s", err.Error())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
model.LoadSettings()
|
||||||
|
}
|
@ -14,6 +14,9 @@ type Config struct {
|
|||||||
Address string `json:"address"`
|
Address string `json:"address"`
|
||||||
Port int `json:"port"`
|
Port int `json:"port"`
|
||||||
Database Database `json:"database"`
|
Database Database `json:"database"`
|
||||||
|
Https bool `json:"https"`
|
||||||
|
CertFile string `json:"cert_file"`
|
||||||
|
KeyFile string `json:"key_file"`
|
||||||
}
|
}
|
||||||
|
|
||||||
func DefaultConfig() *Config {
|
func DefaultConfig() *Config {
|
||||||
@ -22,11 +25,7 @@ func DefaultConfig() *Config {
|
|||||||
Port: 5244,
|
Port: 5244,
|
||||||
Database: Database{
|
Database: Database{
|
||||||
Type: "sqlite3",
|
Type: "sqlite3",
|
||||||
User: "",
|
|
||||||
Password: "",
|
|
||||||
Host: "",
|
|
||||||
Port: 0,
|
Port: 0,
|
||||||
Name: "",
|
|
||||||
TablePrefix: "x_",
|
TablePrefix: "x_",
|
||||||
DBFile: "data/data.db",
|
DBFile: "data/data.db",
|
||||||
},
|
},
|
||||||
|
11
conf/var.go
11
conf/var.go
@ -20,6 +20,7 @@ var (
|
|||||||
Conf *Config
|
Conf *Config
|
||||||
Debug bool
|
Debug bool
|
||||||
Version bool
|
Version bool
|
||||||
|
Password bool
|
||||||
|
|
||||||
DB *gorm.DB
|
DB *gorm.DB
|
||||||
Cache *cache.Cache
|
Cache *cache.Cache
|
||||||
@ -37,10 +38,14 @@ var (
|
|||||||
|
|
||||||
// settings
|
// settings
|
||||||
var (
|
var (
|
||||||
RawIndexHtml string
|
RawIndexHtml string
|
||||||
IndexHtml string
|
IndexHtml string
|
||||||
CheckParent bool
|
CheckParent bool
|
||||||
//CustomizeStyle string
|
//CustomizeStyle string
|
||||||
//CustomizeScript string
|
//CustomizeScript string
|
||||||
//Favicon string
|
//Favicon string
|
||||||
|
CheckDown bool
|
||||||
|
|
||||||
|
DavUsername string
|
||||||
|
DavPassword string
|
||||||
)
|
)
|
||||||
|
148
drivers/123.go
Normal file
148
drivers/123.go
Normal file
@ -0,0 +1,148 @@
|
|||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
"path/filepath"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var pan123Client = resty.New()
|
||||||
|
|
||||||
|
type Pan123TokenResp struct {
|
||||||
|
Code int `json:"code"`
|
||||||
|
Data struct {
|
||||||
|
Token string `json:"token"`
|
||||||
|
} `json:"data"`
|
||||||
|
Message string `json:"message"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Pan123File struct {
|
||||||
|
FileName string `json:"FileName"`
|
||||||
|
Size int64 `json:"Size"`
|
||||||
|
UpdateAt *time.Time `json:"UpdateAt"`
|
||||||
|
FileId int64 `json:"FileId"`
|
||||||
|
Type int `json:"Type"`
|
||||||
|
Etag string `json:"Etag"`
|
||||||
|
S3KeyFlag string `json:"S3KeyFlag"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Pan123Files struct {
|
||||||
|
Code int `json:"code"`
|
||||||
|
Message string `json:"message"`
|
||||||
|
Data struct {
|
||||||
|
InfoList []Pan123File `json:"InfoList"`
|
||||||
|
Next string `json:"Next"`
|
||||||
|
} `json:"data"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Pan123DownResp struct {
|
||||||
|
Code int `json:"code"`
|
||||||
|
Message string `json:"message"`
|
||||||
|
Data struct {
|
||||||
|
DownloadUrl string `json:"DownloadUrl"`
|
||||||
|
} `json:"data"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Login(account *model.Account) error {
|
||||||
|
var resp Pan123TokenResp
|
||||||
|
_, err := pan123Client.R().
|
||||||
|
SetResult(&resp).
|
||||||
|
SetBody(Json{
|
||||||
|
"passport": account.Username,
|
||||||
|
"password": account.Password,
|
||||||
|
}).Post("https://www.123pan.com/api/user/sign_in")
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if resp.Code != 200 {
|
||||||
|
err = fmt.Errorf(resp.Message)
|
||||||
|
account.Status = resp.Message
|
||||||
|
} else {
|
||||||
|
account.Status = "work"
|
||||||
|
account.AccessToken = resp.Data.Token
|
||||||
|
}
|
||||||
|
_ = model.SaveAccount(account)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) FormatFile(file *Pan123File) *model.File {
|
||||||
|
f := &model.File{
|
||||||
|
Id: strconv.FormatInt(file.FileId, 10),
|
||||||
|
Name: file.FileName,
|
||||||
|
Size: file.Size,
|
||||||
|
Driver: driver.Config().Name,
|
||||||
|
UpdatedAt: file.UpdateAt,
|
||||||
|
}
|
||||||
|
if file.Type == 1 {
|
||||||
|
f.Type = conf.FOLDER
|
||||||
|
} else {
|
||||||
|
f.Type = utils.GetFileType(filepath.Ext(file.FileName))
|
||||||
|
}
|
||||||
|
return f
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) GetFiles(parentId string, account *model.Account) ([]Pan123File, error) {
|
||||||
|
next := "0"
|
||||||
|
res := make([]Pan123File, 0)
|
||||||
|
for next != "-1" {
|
||||||
|
var resp Pan123Files
|
||||||
|
_, err := pan123Client.R().SetResult(&resp).
|
||||||
|
SetHeader("authorization", "Bearer "+account.AccessToken).
|
||||||
|
SetQueryParams(map[string]string{
|
||||||
|
"driveId": "0",
|
||||||
|
"limit": "100",
|
||||||
|
"next": next,
|
||||||
|
"orderBy": account.OrderBy,
|
||||||
|
"orderDirection": account.OrderDirection,
|
||||||
|
"parentFileId": parentId,
|
||||||
|
"trashed": "false",
|
||||||
|
}).Get("https://www.123pan.com/api/file/list")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if resp.Code != 0 {
|
||||||
|
if resp.Code == 401 {
|
||||||
|
err := driver.Login(account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return driver.GetFiles(parentId, account)
|
||||||
|
}
|
||||||
|
return nil, fmt.Errorf(resp.Message)
|
||||||
|
}
|
||||||
|
next = resp.Data.Next
|
||||||
|
res = append(res, resp.Data.InfoList...)
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) GetFile(path string, account *model.Account) (*Pan123File, error) {
|
||||||
|
dir, name := filepath.Split(path)
|
||||||
|
dir = utils.ParsePath(dir)
|
||||||
|
_, err := driver.Files(dir, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
parentFiles_, _ := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, dir))
|
||||||
|
parentFiles, _ := parentFiles_.([]Pan123File)
|
||||||
|
for _, file := range parentFiles {
|
||||||
|
if file.FileName == name {
|
||||||
|
if file.Type != conf.FOLDER {
|
||||||
|
return &file, err
|
||||||
|
} else {
|
||||||
|
return nil, NotFile
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, PathNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterDriver(&Pan123{})
|
||||||
|
pan123Client.SetRetryCount(3)
|
||||||
|
}
|
192
drivers/123_driver.go
Normal file
192
drivers/123_driver.go
Normal file
@ -0,0 +1,192 @@
|
|||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
url "net/url"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Pan123 struct {}
|
||||||
|
|
||||||
|
func (driver Pan123) Config() DriverConfig {
|
||||||
|
return DriverConfig{
|
||||||
|
Name: "123Pan",
|
||||||
|
OnlyProxy: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Items() []Item {
|
||||||
|
return []Item{
|
||||||
|
{
|
||||||
|
Name: "username",
|
||||||
|
Label: "username",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
Description: "account username/phone number",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "password",
|
||||||
|
Label: "password",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
Description: "account password",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "root_folder",
|
||||||
|
Label: "root folder file_id",
|
||||||
|
Type: "string",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_by",
|
||||||
|
Label: "order_by",
|
||||||
|
Type: "select",
|
||||||
|
Values: "name,fileId,updateAt,createAt",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_direction",
|
||||||
|
Label: "order_direction",
|
||||||
|
Type: "select",
|
||||||
|
Values: "asc,desc",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Save(account *model.Account, old *model.Account) error {
|
||||||
|
if account.RootFolder == "" {
|
||||||
|
account.RootFolder = "0"
|
||||||
|
}
|
||||||
|
err := driver.Login(account)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) File(path string, account *model.Account) (*model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
if path == "/" {
|
||||||
|
return &model.File{
|
||||||
|
Id: account.RootFolder,
|
||||||
|
Name: account.Name,
|
||||||
|
Size: 0,
|
||||||
|
Type: conf.FOLDER,
|
||||||
|
Driver: driver.Config().Name,
|
||||||
|
UpdatedAt: account.UpdatedAt,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
dir, name := filepath.Split(path)
|
||||||
|
files, err := driver.Files(dir, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, file := range files {
|
||||||
|
if file.Name == name {
|
||||||
|
return &file, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, PathNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Files(path string, account *model.Account) ([]model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
var rawFiles []Pan123File
|
||||||
|
cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
|
||||||
|
if err == nil {
|
||||||
|
rawFiles, _ = cache.([]Pan123File)
|
||||||
|
} else {
|
||||||
|
file, err := driver.File(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
rawFiles, err = driver.GetFiles(file.Id, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(rawFiles) > 0 {
|
||||||
|
_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), rawFiles, nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
files := make([]model.File, 0)
|
||||||
|
for _, file := range rawFiles {
|
||||||
|
files = append(files, *driver.FormatFile(&file))
|
||||||
|
}
|
||||||
|
return files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Link(path string, account *model.Account) (string, error) {
|
||||||
|
file, err := driver.GetFile(utils.ParsePath(path), account)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
var resp Pan123DownResp
|
||||||
|
_, err = pan123Client.R().SetResult(&resp).SetHeader("authorization", "Bearer "+account.AccessToken).
|
||||||
|
SetBody(Json{
|
||||||
|
"driveId": 0,
|
||||||
|
"etag": file.Etag,
|
||||||
|
"fileId": file.FileId,
|
||||||
|
"fileName": file.FileName,
|
||||||
|
"s3keyFlag": file.S3KeyFlag,
|
||||||
|
"size": file.Size,
|
||||||
|
"type": file.Type,
|
||||||
|
}).Post("https://www.123pan.com/api/file/download_info")
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if resp.Code != 0 {
|
||||||
|
if resp.Code == 401 {
|
||||||
|
err := driver.Login(account)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return driver.Link(path, account)
|
||||||
|
}
|
||||||
|
return "", fmt.Errorf(resp.Message)
|
||||||
|
}
|
||||||
|
u,err := url.Parse(resp.Data.DownloadUrl)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
u_ := fmt.Sprintf("https://%s%s",u.Host,u.Path)
|
||||||
|
res, err := NoRedirectClient.R().SetQueryParamsFromValues(u.Query()).Get(u_)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
log.Debug(res.String())
|
||||||
|
if res.StatusCode() == 302 {
|
||||||
|
return res.Header().Get("location"), nil
|
||||||
|
}
|
||||||
|
return resp.Data.DownloadUrl, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Path(path string, account *model.Account) (*model.File, []model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
log.Debugf("pan123 path: %s", path)
|
||||||
|
file, err := driver.File(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
if file.Type != conf.FOLDER {
|
||||||
|
file.Url, _ = driver.Link(path, account)
|
||||||
|
return file, nil, nil
|
||||||
|
}
|
||||||
|
files, err := driver.Files(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
return nil, files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Proxy(c *gin.Context, account *model.Account) {
|
||||||
|
c.Request.Header.Del("origin")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Pan123) Preview(path string, account *model.Account) (interface{}, error) {
|
||||||
|
return nil, NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ Driver = (*Pan123)(nil)
|
317
drivers/189.go
Normal file
317
drivers/189.go
Normal file
@ -0,0 +1,317 @@
|
|||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"crypto/rsa"
|
||||||
|
"crypto/x509"
|
||||||
|
"encoding/base64"
|
||||||
|
"encoding/json"
|
||||||
|
"encoding/pem"
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
"github.com/go-resty/resty/v2"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
mathRand "math/rand"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
var client189Map map[string]*resty.Client
|
||||||
|
|
||||||
|
func (driver Cloud189) FormatFile(file *Cloud189File) *model.File {
|
||||||
|
f := &model.File{
|
||||||
|
Id: strconv.FormatInt(file.Id, 10),
|
||||||
|
Name: file.Name,
|
||||||
|
Size: file.Size,
|
||||||
|
Driver: driver.Config().Name,
|
||||||
|
UpdatedAt: nil,
|
||||||
|
Thumbnail: file.Icon.SmallUrl,
|
||||||
|
Url: file.Url,
|
||||||
|
}
|
||||||
|
loc, _ := time.LoadLocation("Local")
|
||||||
|
lastOpTime, err := time.ParseInLocation("2006-01-02 15:04:05", file.LastOpTime, loc)
|
||||||
|
if err == nil {
|
||||||
|
f.UpdatedAt = &lastOpTime
|
||||||
|
}
|
||||||
|
if file.Size == -1 {
|
||||||
|
f.Type = conf.FOLDER
|
||||||
|
f.Size = 0
|
||||||
|
} else {
|
||||||
|
f.Type = utils.GetFileType(filepath.Ext(file.Name))
|
||||||
|
}
|
||||||
|
return f
|
||||||
|
}
|
||||||
|
|
||||||
|
//func (c Cloud189) GetFile(path string, account *model.Account) (*Cloud189File, error) {
|
||||||
|
// dir, name := filepath.Split(path)
|
||||||
|
// dir = utils.ParsePath(dir)
|
||||||
|
// _, _, err := c.Path(dir, account)
|
||||||
|
// if err != nil {
|
||||||
|
// return nil, err
|
||||||
|
// }
|
||||||
|
// parentFiles_, _ := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, dir))
|
||||||
|
// parentFiles, _ := parentFiles_.([]Cloud189File)
|
||||||
|
// for _, file := range parentFiles {
|
||||||
|
// if file.Name == name {
|
||||||
|
// if file.Size != -1 {
|
||||||
|
// return &file, err
|
||||||
|
// } else {
|
||||||
|
// return nil, NotFile
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// }
|
||||||
|
// return nil, PathNotFound
|
||||||
|
//}
|
||||||
|
|
||||||
|
type Cloud189Down struct {
|
||||||
|
ResCode int `json:"res_code"`
|
||||||
|
ResMessage string `json:"res_message"`
|
||||||
|
FileDownloadUrl string `json:"fileDownloadUrl"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type LoginResp struct {
|
||||||
|
Msg string `json:"msg"`
|
||||||
|
Result int `json:"result"`
|
||||||
|
ToUrl string `json:"toUrl"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Login refer to PanIndex
|
||||||
|
func (driver Cloud189) Login(account *model.Account) error {
|
||||||
|
client, ok := client189Map[account.Name]
|
||||||
|
if !ok {
|
||||||
|
//cookieJar, _ := cookiejar.New(&cookiejar.Options{PublicSuffixList: publicsuffix.List})
|
||||||
|
client = resty.New()
|
||||||
|
//client.SetCookieJar(cookieJar)
|
||||||
|
client.SetRetryCount(3)
|
||||||
|
}
|
||||||
|
url := "https://cloud.189.cn/api/portal/loginUrl.action?redirectURL=https%3A%2F%2Fcloud.189.cn%2Fmain.action"
|
||||||
|
b := ""
|
||||||
|
lt := ""
|
||||||
|
ltText := regexp.MustCompile(`lt = "(.+?)"`)
|
||||||
|
for i := 0; i < 3; i++ {
|
||||||
|
res, err := client.R().Get(url)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
b = res.String()
|
||||||
|
ltTextArr := ltText.FindStringSubmatch(b)
|
||||||
|
if len(ltTextArr) > 0 {
|
||||||
|
lt = ltTextArr[1]
|
||||||
|
break
|
||||||
|
} else {
|
||||||
|
<-time.After(time.Second)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if lt == "" {
|
||||||
|
return fmt.Errorf("get empty login page")
|
||||||
|
}
|
||||||
|
captchaToken := regexp.MustCompile(`captchaToken' value='(.+?)'`).FindStringSubmatch(b)[1]
|
||||||
|
returnUrl := regexp.MustCompile(`returnUrl = '(.+?)'`).FindStringSubmatch(b)[1]
|
||||||
|
paramId := regexp.MustCompile(`paramId = "(.+?)"`).FindStringSubmatch(b)[1]
|
||||||
|
//reqId := regexp.MustCompile(`reqId = "(.+?)"`).FindStringSubmatch(b)[1]
|
||||||
|
jRsakey := regexp.MustCompile(`j_rsaKey" value="(\S+)"`).FindStringSubmatch(b)[1]
|
||||||
|
vCodeID := regexp.MustCompile(`picCaptcha\.do\?token\=([A-Za-z0-9\&\=]+)`).FindStringSubmatch(b)[1]
|
||||||
|
vCodeRS := ""
|
||||||
|
if vCodeID != "" {
|
||||||
|
// need ValidateCode
|
||||||
|
}
|
||||||
|
userRsa := RsaEncode([]byte(account.Username), jRsakey)
|
||||||
|
passwordRsa := RsaEncode([]byte(account.Password), jRsakey)
|
||||||
|
url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
|
||||||
|
var loginResp LoginResp
|
||||||
|
res, err := client.R().
|
||||||
|
SetHeaders(map[string]string{
|
||||||
|
"lt": lt,
|
||||||
|
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36",
|
||||||
|
"Referer": "https://open.e.189.cn/",
|
||||||
|
"accept": "application/json;charset=UTF-8",
|
||||||
|
}).SetFormData(map[string]string{
|
||||||
|
"appKey": "cloud",
|
||||||
|
"accountType": "01",
|
||||||
|
"userName": "{RSA}" + userRsa,
|
||||||
|
"password": "{RSA}" + passwordRsa,
|
||||||
|
"validateCode": vCodeRS,
|
||||||
|
"captchaToken": captchaToken,
|
||||||
|
"returnUrl": returnUrl,
|
||||||
|
"mailSuffix": "@pan.cn",
|
||||||
|
"paramId": paramId,
|
||||||
|
"clientType": "10010",
|
||||||
|
"dynamicCheck": "FALSE",
|
||||||
|
"cb_SaveName": "1",
|
||||||
|
"isOauth2": "false",
|
||||||
|
}).Post(url)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
err = json.Unmarshal(res.Body(), &loginResp)
|
||||||
|
if err != nil {
|
||||||
|
log.Error(err.Error())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if loginResp.Result != 0 {
|
||||||
|
return fmt.Errorf(loginResp.Msg)
|
||||||
|
}
|
||||||
|
_, err = client.R().Get(loginResp.ToUrl)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf(err.Error())
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
client189Map[account.Name] = client
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type Cloud189Error struct {
|
||||||
|
ErrorCode string `json:"errorCode"`
|
||||||
|
ErrorMsg string `json:"errorMsg"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Cloud189File struct {
|
||||||
|
Id int64 `json:"id"`
|
||||||
|
LastOpTime string `json:"lastOpTime"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
Size int64 `json:"size"`
|
||||||
|
Icon struct {
|
||||||
|
SmallUrl string `json:"smallUrl"`
|
||||||
|
//LargeUrl string `json:"largeUrl"`
|
||||||
|
} `json:"icon"`
|
||||||
|
Url string `json:"url"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Cloud189Folder struct {
|
||||||
|
Id int64 `json:"id"`
|
||||||
|
LastOpTime string `json:"lastOpTime"`
|
||||||
|
Name string `json:"name"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type Cloud189Files struct {
|
||||||
|
ResCode int `json:"res_code"`
|
||||||
|
ResMessage string `json:"res_message"`
|
||||||
|
FileListAO struct {
|
||||||
|
Count int `json:"count"`
|
||||||
|
FileList []Cloud189File `json:"fileList"`
|
||||||
|
FolderList []Cloud189Folder `json:"folderList"`
|
||||||
|
} `json:"fileListAO"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) GetFiles(fileId string, account *model.Account) ([]Cloud189File, error) {
|
||||||
|
client, ok := client189Map[account.Name]
|
||||||
|
if !ok {
|
||||||
|
return nil, fmt.Errorf("can't find [%s] client", account.Name)
|
||||||
|
}
|
||||||
|
res := make([]Cloud189File, 0)
|
||||||
|
pageNum := 1
|
||||||
|
for {
|
||||||
|
var e Cloud189Error
|
||||||
|
var resp Cloud189Files
|
||||||
|
_, err := client.R().SetResult(&resp).SetError(&e).
|
||||||
|
SetHeader("Accept", "application/json;charset=UTF-8").
|
||||||
|
SetQueryParams(map[string]string{
|
||||||
|
"noCache": random(),
|
||||||
|
"pageSize": "60",
|
||||||
|
"pageNum": strconv.Itoa(pageNum),
|
||||||
|
"mediaType": "0",
|
||||||
|
"folderId": fileId,
|
||||||
|
"iconOption": "5",
|
||||||
|
"orderBy": account.OrderBy,
|
||||||
|
"descending": account.OrderDirection,
|
||||||
|
}).Get("https://cloud.189.cn/api/open/file/listFiles.action")
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if e.ErrorCode != "" {
|
||||||
|
if e.ErrorCode == "InvalidSessionKey" {
|
||||||
|
err = driver.Login(account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return driver.GetFiles(fileId, account)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if resp.ResCode != 0 {
|
||||||
|
return nil, fmt.Errorf(resp.ResMessage)
|
||||||
|
}
|
||||||
|
if resp.FileListAO.Count == 0 {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
res = append(res, resp.FileListAO.FileList...)
|
||||||
|
for _, folder := range resp.FileListAO.FolderList {
|
||||||
|
res = append(res, Cloud189File{
|
||||||
|
Id: folder.Id,
|
||||||
|
LastOpTime: folder.LastOpTime,
|
||||||
|
Name: folder.Name,
|
||||||
|
Size: -1,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
pageNum++
|
||||||
|
}
|
||||||
|
return res, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func random() string {
|
||||||
|
return fmt.Sprintf("0.%17v", mathRand.New(mathRand.NewSource(time.Now().UnixNano())).Int63n(100000000000000000))
|
||||||
|
}
|
||||||
|
|
||||||
|
func RsaEncode(origData []byte, j_rsakey string) string {
|
||||||
|
publicKey := []byte("-----BEGIN PUBLIC KEY-----\n" + j_rsakey + "\n-----END PUBLIC KEY-----")
|
||||||
|
block, _ := pem.Decode(publicKey)
|
||||||
|
pubInterface, _ := x509.ParsePKIXPublicKey(block.Bytes)
|
||||||
|
pub := pubInterface.(*rsa.PublicKey)
|
||||||
|
b, err := rsa.EncryptPKCS1v15(rand.Reader, pub, origData)
|
||||||
|
if err != nil {
|
||||||
|
log.Errorf("err: %s", err.Error())
|
||||||
|
}
|
||||||
|
return b64tohex(base64.StdEncoding.EncodeToString(b))
|
||||||
|
}
|
||||||
|
|
||||||
|
var b64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
|
||||||
|
|
||||||
|
var BI_RM = "0123456789abcdefghijklmnopqrstuvwxyz"
|
||||||
|
|
||||||
|
func int2char(a int) string {
|
||||||
|
return strings.Split(BI_RM, "")[a]
|
||||||
|
}
|
||||||
|
|
||||||
|
func b64tohex(a string) string {
|
||||||
|
d := ""
|
||||||
|
e := 0
|
||||||
|
c := 0
|
||||||
|
for i := 0; i < len(a); i++ {
|
||||||
|
m := strings.Split(a, "")[i]
|
||||||
|
if m != "=" {
|
||||||
|
v := strings.Index(b64map, m)
|
||||||
|
if 0 == e {
|
||||||
|
e = 1
|
||||||
|
d += int2char(v >> 2)
|
||||||
|
c = 3 & v
|
||||||
|
} else if 1 == e {
|
||||||
|
e = 2
|
||||||
|
d += int2char(c<<2 | v>>4)
|
||||||
|
c = 15 & v
|
||||||
|
} else if 2 == e {
|
||||||
|
e = 3
|
||||||
|
d += int2char(c)
|
||||||
|
d += int2char(v >> 2)
|
||||||
|
c = 3 & v
|
||||||
|
} else {
|
||||||
|
e = 0
|
||||||
|
d += int2char(c<<2 | v>>4)
|
||||||
|
d += int2char(15 & v)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if e == 1 {
|
||||||
|
d += int2char(c << 2)
|
||||||
|
}
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
func init() {
|
||||||
|
RegisterDriver(&Cloud189{})
|
||||||
|
client189Map = make(map[string]*resty.Client, 0)
|
||||||
|
}
|
200
drivers/189_driver.go
Normal file
200
drivers/189_driver.go
Normal file
@ -0,0 +1,200 @@
|
|||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Cloud189 struct {}
|
||||||
|
|
||||||
|
func (driver Cloud189) Config() DriverConfig {
|
||||||
|
return DriverConfig{
|
||||||
|
Name: "189Cloud",
|
||||||
|
OnlyProxy: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Items() []Item {
|
||||||
|
return []Item{
|
||||||
|
{
|
||||||
|
Name: "username",
|
||||||
|
Label: "username",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
Description: "account username/phone number",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "password",
|
||||||
|
Label: "password",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
Description: "account password",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "root_folder",
|
||||||
|
Label: "root folder file_id",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_by",
|
||||||
|
Label: "order_by",
|
||||||
|
Type: "select",
|
||||||
|
Values: "name,size,lastOpTime,createdDate",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_direction",
|
||||||
|
Label: "desc",
|
||||||
|
Type: "select",
|
||||||
|
Values: "true,false",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Save(account *model.Account, old *model.Account) error {
|
||||||
|
if old != nil && old.Name != account.Name {
|
||||||
|
delete(client189Map, old.Name)
|
||||||
|
}
|
||||||
|
if err := driver.Login(account); err != nil {
|
||||||
|
account.Status = err.Error()
|
||||||
|
_ = model.SaveAccount(account)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
account.Status = "work"
|
||||||
|
err := model.SaveAccount(account)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) File(path string, account *model.Account) (*model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
if path == "/" {
|
||||||
|
return &model.File{
|
||||||
|
Id: account.RootFolder,
|
||||||
|
Name: account.Name,
|
||||||
|
Size: 0,
|
||||||
|
Type: conf.FOLDER,
|
||||||
|
Driver: driver.Config().Name,
|
||||||
|
UpdatedAt: account.UpdatedAt,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
dir, name := filepath.Split(path)
|
||||||
|
files, err := driver.Files(dir, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, file := range files {
|
||||||
|
if file.Name == name {
|
||||||
|
return &file, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, PathNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Files(path string, account *model.Account) ([]model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
var rawFiles []Cloud189File
|
||||||
|
cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
|
||||||
|
if err == nil {
|
||||||
|
rawFiles, _ = cache.([]Cloud189File)
|
||||||
|
} else {
|
||||||
|
file, err := driver.File(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
rawFiles, err = driver.GetFiles(file.Id, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(rawFiles) > 0 {
|
||||||
|
_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), rawFiles, nil)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
files := make([]model.File, 0)
|
||||||
|
for _, file := range rawFiles {
|
||||||
|
files = append(files, *driver.FormatFile(&file))
|
||||||
|
}
|
||||||
|
return files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Link(path string, account *model.Account) (string, error) {
|
||||||
|
file, err := driver.File(utils.ParsePath(path), account)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if file.Type == conf.FOLDER {
|
||||||
|
return "", NotFile
|
||||||
|
}
|
||||||
|
client, ok := client189Map[account.Name]
|
||||||
|
if !ok {
|
||||||
|
return "", fmt.Errorf("can't find [%s] client", account.Name)
|
||||||
|
}
|
||||||
|
var e Cloud189Error
|
||||||
|
var resp Cloud189Down
|
||||||
|
_, err = client.R().SetResult(&resp).SetError(&e).
|
||||||
|
SetHeader("Accept", "application/json;charset=UTF-8").
|
||||||
|
SetQueryParams(map[string]string{
|
||||||
|
"noCache": random(),
|
||||||
|
"fileId": file.Id,
|
||||||
|
}).Get("https://cloud.189.cn/api/open/file/getFileDownloadUrl.action")
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if e.ErrorCode != "" {
|
||||||
|
if e.ErrorCode == "InvalidSessionKey" {
|
||||||
|
err = driver.Login(account)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return driver.Link(path, account)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if resp.ResCode != 0 {
|
||||||
|
return "", fmt.Errorf(resp.ResMessage)
|
||||||
|
}
|
||||||
|
res, err := NoRedirectClient.R().Get(resp.FileDownloadUrl)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
if res.StatusCode() == 302 {
|
||||||
|
return res.Header().Get("location"), nil
|
||||||
|
}
|
||||||
|
return resp.FileDownloadUrl, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Path(path string, account *model.Account) (*model.File, []model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
log.Debugf("189 path: %s", path)
|
||||||
|
file, err := driver.File(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
if file.Type != conf.FOLDER {
|
||||||
|
file.Url, _ = driver.Link(path, account)
|
||||||
|
return file, nil, nil
|
||||||
|
}
|
||||||
|
files, err := driver.Files(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, nil, err
|
||||||
|
}
|
||||||
|
return nil, files, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Proxy(ctx *gin.Context, account *model.Account) {
|
||||||
|
ctx.Request.Header.Del("Origin")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Cloud189) Preview(path string, account *model.Account) (interface{}, error) {
|
||||||
|
return nil, NotSupport
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ Driver = (*Cloud189)(nil)
|
250
drivers/ali_driver.go
Normal file
250
drivers/ali_driver.go
Normal file
@ -0,0 +1,250 @@
|
|||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/robfig/cron/v3"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
type AliDrive struct{}
|
||||||
|
|
||||||
|
func (driver AliDrive) Config() DriverConfig {
|
||||||
|
return DriverConfig{
|
||||||
|
Name: "AliDrive",
|
||||||
|
OnlyProxy: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver AliDrive) Items() []Item {
|
||||||
|
return []Item{
|
||||||
|
{
|
||||||
|
Name: "order_by",
|
||||||
|
Label: "order_by",
|
||||||
|
Type: "select",
|
||||||
|
Values: "name,size,updated_at,created_at",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_direction",
|
||||||
|
Label: "order_direction",
|
||||||
|
Type: "select",
|
||||||
|
Values: "ASC,DESC",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "refresh_token",
|
||||||
|
Label: "refresh token",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "root_folder",
|
||||||
|
Label: "root folder file_id",
|
||||||
|
Type: "string",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "limit",
|
||||||
|
Label: "limit",
|
||||||
|
Type: "number",
|
||||||
|
Required: false,
|
||||||
|
Description: ">0 and <=200",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver AliDrive) Save(account *model.Account, old *model.Account) error {
|
||||||
|
if old != nil {
|
||||||
|
conf.Cron.Remove(cron.EntryID(old.CronId))
|
||||||
|
}
|
||||||
|
if account.RootFolder == "" {
|
||||||
|
account.RootFolder = "root"
|
||||||
|
}
|
||||||
|
if account.Limit == 0 {
|
||||||
|
account.Limit = 200
|
||||||
|
}
|
||||||
|
err := driver.RefreshToken(account)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
var resp Json
|
||||||
|
_, _ = aliClient.R().SetResult(&resp).
|
||||||
|
SetBody("{}").
|
||||||
|
SetHeader("authorization", "Bearer\t"+account.AccessToken).
|
||||||
|
Post("https://api.aliyundrive.com/v2/user/get")
|
||||||
|
log.Debugf("user info: %+v", resp)
|
||||||
|
account.DriveId = resp["default_drive_id"].(string)
|
||||||
|
cronId, err := conf.Cron.AddFunc("@every 2h", func() {
|
||||||
|
name := account.Name
|
||||||
|
log.Debugf("ali account name: %s", name)
|
||||||
|
newAccount, ok := model.GetAccount(name)
|
||||||
|
log.Debugf("ali account: %+v", newAccount)
|
||||||
|
if !ok {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
err = driver.RefreshToken(&newAccount)
|
||||||
|
_ = model.SaveAccount(&newAccount)
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
account.CronId = int(cronId)
|
||||||
|
err = model.SaveAccount(account)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver AliDrive) File(path string, account *model.Account) (*model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
if path == "/" {
|
||||||
|
return &model.File{
|
||||||
|
Id: account.RootFolder,
|
||||||
|
Name: account.Name,
|
||||||
|
Size: 0,
|
||||||
|
Type: conf.FOLDER,
|
||||||
|
Driver: driver.Config().Name,
|
||||||
|
UpdatedAt: account.UpdatedAt,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
dir, name := filepath.Split(path)
|
||||||
|
files, err := driver.Files(dir, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, file := range files {
|
||||||
|
if file.Name == name {
|
||||||
|
return &file, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil, PathNotFound
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver AliDrive) Files(path string, account *model.Account) ([]model.File, error) {
|
||||||
|
path = utils.ParsePath(path)
|
||||||
|
var rawFiles []AliFile
|
||||||
|
cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
|
||||||
|
if err == nil {
|
||||||
|
rawFiles, _ = cache.([]AliFile)
|
||||||
|
} else {
|
||||||
|
file, err := driver.File(path, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
rawFiles, err = driver.GetFiles(file.Id, account)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if len(rawFiles) > 0 {
|
||||||
|
			_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), rawFiles, nil)
		}
	}
	files := make([]model.File, 0)
	for _, file := range rawFiles {
		files = append(files, *driver.FormatFile(&file))
	}
	return files, nil
}

func (driver AliDrive) Link(path string, account *model.Account) (string, error) {
	file, err := driver.File(path, account)
	if err != nil {
		return "", err
	}
	var resp Json
	var e AliRespError
	_, err = aliClient.R().SetResult(&resp).
		SetError(&e).
		SetHeader("authorization", "Bearer\t"+account.AccessToken).
		SetBody(Json{
			"drive_id":   account.DriveId,
			"file_id":    file.Id,
			"expire_sec": 14400,
		}).Post("https://api.aliyundrive.com/v2/file/get_download_url")
	if err != nil {
		return "", err
	}
	if e.Code != "" {
		if e.Code == "AccessTokenInvalid" {
			err = driver.RefreshToken(account)
			if err != nil {
				return "", err
			} else {
				_ = model.SaveAccount(account)
				return driver.Link(path, account)
			}
		}
		return "", fmt.Errorf("%s", e.Message)
	}
	return resp["url"].(string), nil
}

func (driver AliDrive) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	path = utils.ParsePath(path)
	log.Debugf("ali path: %s", path)
	file, err := driver.File(path, account)
	if err != nil {
		return nil, nil, err
	}
	if file.Type != conf.FOLDER {
		file.Url, _ = driver.Link(path, account)
		return file, nil, nil
	}
	files, err := driver.Files(path, account)
	if err != nil {
		return nil, nil, err
	}
	return nil, files, nil
}

func (driver AliDrive) Proxy(c *gin.Context, account *model.Account) {
	c.Request.Header.Del("Origin")
	c.Request.Header.Set("Referer", "https://www.aliyundrive.com/")
}

func (driver AliDrive) Preview(path string, account *model.Account) (interface{}, error) {
	file, err := driver.GetFile(path, account)
	if err != nil {
		return nil, err
	}
	// office
	var resp Json
	var e AliRespError
	var url string
	req := Json{
		"drive_id": account.DriveId,
		"file_id":  file.FileId,
	}
	switch file.Category {
	case "doc":
		{
			url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
			req["access_token"] = account.AccessToken
		}
	case "video":
		{
			url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
			req["category"] = "live_transcoding"
		}
	default:
		return nil, NotSupport
	}
	_, err = aliClient.R().SetResult(&resp).SetError(&e).
		SetHeader("authorization", "Bearer\t"+account.AccessToken).
		SetBody(req).Post(url)
	if err != nil {
		return nil, err
	}
	if e.Code != "" {
		return nil, fmt.Errorf("%s", e.Message)
	}
	return resp, nil
}

var _ Driver = (*AliDrive)(nil)
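A note on the error handling above: both Link and GetFiles treat an AccessTokenInvalid response as a signal to refresh the token once, persist the account, and re-enter the same call. Below is a minimal, self-contained sketch of that shape only; the callAPI/refreshToken helpers are invented stand-ins for illustration, not alist APIs.

package main

import (
	"errors"
	"fmt"
)

var ErrTokenInvalid = errors.New("AccessTokenInvalid")

// callAPI stands in for the driver's HTTP call: it fails until the token is fresh.
func callAPI(token string) (string, error) {
	if token != "fresh" {
		return "", ErrTokenInvalid
	}
	return "https://example.com/download", nil
}

// refreshToken stands in for the driver's RefreshToken method.
func refreshToken(token *string) error {
	*token = "fresh"
	return nil
}

// linkWithRetry mirrors the structure used above: refresh once, then recurse into the same call.
func linkWithRetry(token *string) (string, error) {
	url, err := callAPI(*token)
	if errors.Is(err, ErrTokenInvalid) {
		if rerr := refreshToken(token); rerr != nil {
			return "", rerr
		}
		return linkWithRetry(token)
	}
	return url, err
}

func main() {
	token := "stale"
	url, err := linkWithRetry(&token)
	fmt.Println(url, err)
}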
@@ -5,9 +5,7 @@ import (
 	"github.com/Xhofe/alist/conf"
 	"github.com/Xhofe/alist/model"
 	"github.com/Xhofe/alist/utils"
-	"github.com/gin-gonic/gin"
 	"github.com/go-resty/resty/v2"
-	"github.com/robfig/cron/v3"
 	log "github.com/sirupsen/logrus"
 	"path/filepath"
 	"time"
@@ -15,99 +13,6 @@ import (
 
 var aliClient = resty.New()
 
-func init() {
-	RegisterDriver("AliDrive", &AliDrive{})
-	aliClient.
-		SetRetryCount(3).
-		SetHeader("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36").
-		SetHeader("content-type", "application/json").
-		SetHeader("origin", "https://aliyundrive.com")
-}
-
-type AliDrive struct{}
-
-func (a AliDrive) Preview(path string, account *model.Account) (interface{}, error) {
-	file, err := a.GetFile(path, account)
-	if err != nil {
-		return nil, err
-	}
-	// office
-	var resp Json
-	var e AliRespError
-	var url string
-	req := Json{
-		"drive_id": account.DriveId,
-		"file_id":  file.FileId,
-	}
-	switch file.Category {
-	case "doc":
-		{
-			url = "https://api.aliyundrive.com/v2/file/get_office_preview_url"
-			req["access_token"] = account.AccessToken
-		}
-	case "video":
-		{
-			url = "https://api.aliyundrive.com/v2/file/get_video_preview_play_info"
-			req["category"] = "live_transcoding"
-		}
-	default:
-		return nil, fmt.Errorf("don't support")
-	}
-	_, err = aliClient.R().SetResult(&resp).SetError(&e).
-		SetHeader("authorization", "Bearer\t"+account.AccessToken).
-		SetBody(req).Post(url)
-	if err != nil {
-		return nil, err
-	}
-	if e.Code != "" {
-		return nil, fmt.Errorf("%s", e.Message)
-	}
-	return resp, nil
-}
-
-func (a AliDrive) Items() []Item {
-	return []Item{
-		{
-			Name:     "order_by",
-			Label:    "order_by",
-			Type:     "select",
-			Values:   "name,size,updated_at,created_at",
-			Required: false,
-		},
-		{
-			Name:     "order_direction",
-			Label:    "order_direction",
-			Type:     "select",
-			Values:   "ASC,DESC",
-			Required: false,
-		},
-		{
-			Name:     "refresh_token",
-			Label:    "refresh token",
-			Type:     "string",
-			Required: true,
-		},
-		{
-			Name:     "root_folder",
-			Label:    "root folder file_id",
-			Type:     "string",
-			Required: false,
-		},
-		{
-			Name:        "limit",
-			Label:       "limit",
-			Type:        "number",
-			Required:    false,
-			Description: ">0 and <=200",
-		},
-	}
-}
-
-func (a AliDrive) Proxy(c *gin.Context) {
-	c.Request.Header.Del("Origin")
-	c.Request.Header.Set("Referer", "https://www.aliyundrive.com/")
-}
-
 type AliRespError struct {
 	Code    string `json:"code"`
 	Message string `json:"message"`
@@ -133,13 +38,14 @@ type AliFile struct {
 	Url string `json:"url"`
 }
 
-func (a AliDrive) FormatFile(file *AliFile) *model.File {
+func (driver AliDrive) FormatFile(file *AliFile) *model.File {
 	f := &model.File{
+		Id:        file.FileId,
 		Name:      file.Name,
 		Size:      file.Size,
 		UpdatedAt: file.UpdatedAt,
 		Thumbnail: file.Thumbnail,
-		Driver:    "AliDrive",
+		Driver:    driver.Config().Name,
 		Url:       file.Url,
 	}
 	if file.Type == "folder" {
@@ -156,7 +62,7 @@ func (a AliDrive) FormatFile(file *AliFile) *model.File {
 	return f
 }
 
-func (a AliDrive) GetFiles(fileId string, account *model.Account) ([]AliFile, error) {
+func (driver AliDrive) GetFiles(fileId string, account *model.Account) ([]AliFile, error) {
 	marker := "first"
 	res := make([]AliFile, 0)
 	for marker != "" {
@@ -187,12 +93,12 @@ func (a AliDrive) GetFiles(fileId string, account *model.Account) ([]AliFile, er
 		}
 		if e.Code != "" {
 			if e.Code == "AccessTokenInvalid" {
-				err = a.RefreshToken(account)
+				err = driver.RefreshToken(account)
 				if err != nil {
 					return nil, err
 				} else {
 					_ = model.SaveAccount(account)
-					return a.GetFiles(fileId, account)
+					return driver.GetFiles(fileId, account)
 				}
 			}
 			return nil, fmt.Errorf("%s", e.Message)
@@ -203,10 +109,10 @@ func (a AliDrive) GetFiles(fileId string, account *model.Account) ([]AliFile, er
 	return res, nil
 }
 
-func (a AliDrive) GetFile(path string, account *model.Account) (*AliFile, error) {
+func (driver AliDrive) GetFile(path string, account *model.Account) (*AliFile, error) {
 	dir, name := filepath.Split(path)
 	dir = utils.ParsePath(dir)
-	_, _, err := a.Path(dir, account)
+	_, err := driver.Files(dir, account)
 	if err != nil {
 		return nil, err
 	}
@@ -221,107 +127,10 @@ func (a AliDrive) GetFile(path string, account *model.Account) (*AliFile, error)
 			}
 		}
 	}
-	return nil, fmt.Errorf("path not found")
+	return nil, PathNotFound
 }
 
-// path: /aaa/bbb
-func (a AliDrive) Path(path string, account *model.Account) (*model.File, []*model.File, error) {
-	path = utils.ParsePath(path)
-	log.Debugf("ali path: %s", path)
-	cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
-	if err == nil {
-		file, ok := cache.(AliFile)
-		if ok {
-			return a.FormatFile(&file), nil, nil
-		} else {
-			files, _ := cache.([]AliFile)
-			if len(files) != 0 {
-				res := make([]*model.File, 0)
-				for _, file = range files {
-					res = append(res, a.FormatFile(&file))
-				}
-				return nil, res, nil
-			}
-		}
-	}
-	// no cache or len(files) == 0
-	fileId := account.RootFolder
-	if path != "/" {
-		dir, name := filepath.Split(path)
-		dir = utils.ParsePath(dir)
-		_, _, err = a.Path(dir, account)
-		if err != nil {
-			return nil, nil, err
-		}
-		parentFiles_, _ := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, dir))
-		parentFiles, _ := parentFiles_.([]AliFile)
-		found := false
-		for _, file := range parentFiles {
-			if file.Name == name {
-				found = true
-				if file.Type == "file" {
-					url, err := a.Link(path, account)
-					if err != nil {
-						return nil, nil, err
-					}
-					file.Url = url
-					return a.FormatFile(&file), nil, nil
-				} else {
-					fileId = file.FileId
-					break
-				}
-			}
-		}
-		if !found {
-			return nil, nil, fmt.Errorf("path not found")
-		}
-	}
-	files, err := a.GetFiles(fileId, account)
-	if err != nil {
-		return nil, nil, err
-	}
-	_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), files, nil)
-	res := make([]*model.File, 0)
-	for _, file := range files {
-		res = append(res, a.FormatFile(&file))
-	}
-	return nil, res, nil
-}
-
-func (a AliDrive) Link(path string, account *model.Account) (string, error) {
-	file, err := a.GetFile(utils.ParsePath(path), account)
-	if err != nil {
-		return "", err
-	}
-	var resp Json
-	var e AliRespError
-	_, err = aliClient.R().SetResult(&resp).
-		SetError(&e).
-		SetHeader("authorization", "Bearer\t"+account.AccessToken).
-		SetBody(Json{
-			"drive_id":   account.DriveId,
-			"file_id":    file.FileId,
-			"expire_sec": 14400,
-		}).Post("https://api.aliyundrive.com/v2/file/get_download_url")
-	if err != nil {
-		return "", err
-	}
-	if e.Code != "" {
-		if e.Code == "AccessTokenInvalid" {
-			err = a.RefreshToken(account)
-			if err != nil {
-				return "", err
-			} else {
-				_ = model.SaveAccount(account)
-				return a.Link(path, account)
-			}
-		}
-		return "", fmt.Errorf("%s", e.Message)
-	}
-	return resp["url"].(string), nil
-}
-
-func (a AliDrive) RefreshToken(account *model.Account) error {
+func (driver AliDrive) RefreshToken(account *model.Account) error {
 	url := "https://auth.aliyundrive.com/v2/account/token"
 	var resp TokenResp
 	var e AliRespError
@@ -339,50 +148,18 @@ func (a AliDrive) RefreshToken(account *model.Account) error {
 	if e.Code != "" {
 		account.Status = e.Message
 		return fmt.Errorf("failed to refresh token: %s", e.Message)
+	} else {
+		account.Status = "work"
 	}
 	account.RefreshToken, account.AccessToken = resp.RefreshToken, resp.AccessToken
 	return nil
 }
 
-func (a AliDrive) Save(account *model.Account, old *model.Account) error {
-	if old != nil {
-		conf.Cron.Remove(cron.EntryID(old.CronId))
-	}
-	if account.RootFolder == "" {
-		account.RootFolder = "root"
-	}
-	if account.Limit == 0 {
-		account.Limit = 200
-	}
-	err := a.RefreshToken(account)
-	if err != nil {
-		return err
-	}
-	var resp Json
-	_, _ = aliClient.R().SetResult(&resp).
-		SetBody("{}").
-		SetHeader("authorization", "Bearer\t"+account.AccessToken).
-		Post("https://api.aliyundrive.com/v2/user/get")
-	log.Debugf("user info: %+v", resp)
-	account.DriveId = resp["default_drive_id"].(string)
-	cronId, err := conf.Cron.AddFunc("@every 2h", func() {
-		name := account.Name
-		newAccount, ok := model.GetAccount(name)
-		if !ok {
-			return
-		}
-		err = a.RefreshToken(&newAccount)
-		_ = model.SaveAccount(&newAccount)
-	})
-	if err != nil {
-		return err
-	}
-	account.CronId = int(cronId)
-	err = model.SaveAccount(account)
-	if err != nil {
-		return err
-	}
-	return nil
-}
-
-var _ Driver = (*AliDrive)(nil)
+func init() {
+	RegisterDriver(&AliDrive{})
+	aliClient.
+		SetRetryCount(3).
+		SetHeader("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36").
+		SetHeader("content-type", "application/json").
+		SetHeader("origin", "https://aliyundrive.com")
+}
@@ -3,16 +3,28 @@ package drivers
 import (
 	"github.com/Xhofe/alist/model"
 	"github.com/gin-gonic/gin"
+	"github.com/go-resty/resty/v2"
+	log "github.com/sirupsen/logrus"
+	"net/http"
 )
 
+type DriverConfig struct {
+	Name      string
+	OnlyProxy bool
+}
+
 type Driver interface {
+	Config() DriverConfig
 	Items() []Item
-	Path(path string, account *model.Account) (*model.File, []*model.File, error)
-	Link(path string, account *model.Account) (string, error)
 	Save(account *model.Account, old *model.Account) error
-	Proxy(c *gin.Context)
+	File(path string, account *model.Account) (*model.File, error)
+	Files(path string, account *model.Account) ([]model.File, error)
+	Link(path string, account *model.Account) (string, error)
+	Path(path string, account *model.Account) (*model.File, []model.File, error)
+	Proxy(c *gin.Context, account *model.Account)
 	Preview(path string, account *model.Account) (interface{}, error)
 	// TODO
+	//Search(path string, keyword string, account *model.Account) ([]*model.File, error)
 	//MakeDir(path string, account *model.Account) error
 	//Move(src string, des string, account *model.Account) error
 	//Delete(path string) error
@@ -35,8 +47,9 @@ type TokenResp struct {
 
 var driversMap = map[string]Driver{}
 
-func RegisterDriver(name string, driver Driver) {
-	driversMap[name] = driver
+func RegisterDriver(driver Driver) {
+	log.Infof("register driver: [%s]", driver.Config().Name)
+	driversMap[driver.Config().Name] = driver
 }
 
 func GetDriver(name string) (driver Driver, ok bool) {
@@ -47,9 +60,39 @@ func GetDriver(name string) (driver Driver, ok bool) {
 func GetDrivers() map[string][]Item {
 	res := make(map[string][]Item, 0)
 	for k, v := range driversMap {
-		res[k] = v.Items()
+		if v.Config().OnlyProxy {
+			res[k] = v.Items()
+		} else {
+			res[k] = append([]Item{
+				{
+					Name:        "proxy",
+					Label:       "proxy",
+					Type:        "bool",
+					Required:    true,
+					Description: "allow proxy",
+				},
+				{
+					Name:        "webdav_proxy",
+					Label:       "webdav proxy",
+					Type:        "bool",
+					Required:    true,
+					Description: "Transfer the WebDAV of this account through the server",
+				},
+			}, v.Items()...)
+		}
 	}
 	return res
 }
 
 type Json map[string]interface{}
+
+var NoRedirectClient *resty.Client
+
+func init() {
+	NoRedirectClient = resty.New().SetRedirectPolicy(
+		resty.RedirectPolicyFunc(func(req *http.Request, via []*http.Request) error {
+			return http.ErrUseLastResponse
+		}),
+	)
+	NoRedirectClient.SetHeader("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36")
+}
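The hunk above reshapes the Driver contract: every driver now reports a DriverConfig, splits path handling into File/Files, and registers itself through RegisterDriver(driver Driver). As a rough illustration only, not part of this compare, the skeleton below sketches what a hypothetical extra driver has to provide; the Demo name and its trivial method bodies are invented, and it assumes it sits in the same drivers package so that Item, PathNotFound and NotSupport resolve.

package drivers

import (
	"github.com/Xhofe/alist/model"
	"github.com/gin-gonic/gin"
)

type Demo struct{}

// Config supplies the name used as the driversMap key and the OnlyProxy flag
// that GetDrivers uses to decide whether to prepend the proxy items.
func (driver Demo) Config() DriverConfig {
	return DriverConfig{Name: "Demo", OnlyProxy: true}
}

func (driver Demo) Items() []Item { return []Item{} }

func (driver Demo) Save(account *model.Account, old *model.Account) error { return nil }

func (driver Demo) File(path string, account *model.Account) (*model.File, error) {
	return nil, PathNotFound
}

func (driver Demo) Files(path string, account *model.Account) ([]model.File, error) {
	return []model.File{}, nil
}

func (driver Demo) Link(path string, account *model.Account) (string, error) {
	return "", NotSupport
}

func (driver Demo) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	return nil, nil, PathNotFound
}

func (driver Demo) Proxy(c *gin.Context, account *model.Account) {}

func (driver Demo) Preview(path string, account *model.Account) (interface{}, error) {
	return nil, NotSupport
}

// Compile-time check plus registration, mirroring the real drivers in this compare.
var _ Driver = (*Demo)(nil)

func init() {
	RegisterDriver(&Demo{})
}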
171  drivers/google_driver.go  Normal file
@@ -0,0 +1,171 @@
package drivers

import (
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/gin-gonic/gin"
	log "github.com/sirupsen/logrus"
	"path/filepath"
)

type GoogleDrive struct{}

func (driver GoogleDrive) Config() DriverConfig {
	return DriverConfig{
		Name:      "GoogleDrive",
		OnlyProxy: true,
	}
}

func (driver GoogleDrive) Items() []Item {
	return []Item{
		{
			Name:     "client_id",
			Label:    "client id",
			Type:     "string",
			Required: true,
		},
		{
			Name:     "client_secret",
			Label:    "client secret",
			Type:     "string",
			Required: true,
		},
		{
			Name:     "refresh_token",
			Label:    "refresh token",
			Type:     "string",
			Required: true,
		},
		{
			Name:     "root_folder",
			Label:    "root folder file_id",
			Type:     "string",
			Required: false,
		},
	}
}

func (driver GoogleDrive) Save(account *model.Account, old *model.Account) error {
	account.Proxy = true
	err := driver.RefreshToken(account)
	if err != nil {
		account.Status = err.Error()
		_ = model.SaveAccount(account)
		return err
	}
	if account.RootFolder == "" {
		account.RootFolder = "root"
	}
	account.Status = "work"
	_ = model.SaveAccount(account)
	return nil
}

func (driver GoogleDrive) File(path string, account *model.Account) (*model.File, error) {
	path = utils.ParsePath(path)
	if path == "/" {
		return &model.File{
			Id:        account.RootFolder,
			Name:      account.Name,
			Size:      0,
			Type:      conf.FOLDER,
			Driver:    driver.Config().Name,
			UpdatedAt: account.UpdatedAt,
		}, nil
	}
	dir, name := filepath.Split(path)
	files, err := driver.Files(dir, account)
	if err != nil {
		return nil, err
	}
	for _, file := range files {
		if file.Name == name {
			return &file, nil
		}
	}
	return nil, PathNotFound
}

func (driver GoogleDrive) Files(path string, account *model.Account) ([]model.File, error) {
	path = utils.ParsePath(path)
	var rawFiles []GoogleFile
	cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
	if err == nil {
		rawFiles, _ = cache.([]GoogleFile)
	} else {
		file, err := driver.File(path, account)
		if err != nil {
			return nil, err
		}
		rawFiles, err = driver.GetFiles(file.Id, account)
		if err != nil {
			return nil, err
		}
		if len(rawFiles) > 0 {
			_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), rawFiles, nil)
		}
	}
	files := make([]model.File, 0)
	for _, file := range rawFiles {
		files = append(files, *driver.FormatFile(&file))
	}
	return files, nil
}

func (driver GoogleDrive) Link(path string, account *model.Account) (string, error) {
	file, err := driver.File(path, account)
	if err != nil {
		return "", err
	}
	if file.Type == conf.FOLDER {
		return "", NotFile
	}
	link := fmt.Sprintf("https://www.googleapis.com/drive/v3/files/%s?includeItemsFromAllDrives=true&supportsAllDrives=true", file.Id)
	var e GoogleError
	_, _ = googleClient.R().SetError(&e).
		SetHeader("Authorization", "Bearer "+account.AccessToken).
		Get(link)
	if e.Error.Code != 0 {
		if e.Error.Code == 401 {
			err = driver.RefreshToken(account)
			if err != nil {
				_ = model.SaveAccount(account)
				return "", err
			}
			return driver.Link(path, account)
		}
		return "", fmt.Errorf("%s: %v", e.Error.Message, e.Error.Errors)
	}
	return link + "&alt=media", nil
}

func (driver GoogleDrive) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	path = utils.ParsePath(path)
	log.Debugf("google path: %s", path)
	file, err := driver.File(path, account)
	if err != nil {
		return nil, nil, err
	}
	if file.Type != conf.FOLDER {
		//file.Url, _ = driver.Link(path, account)
		return file, nil, nil
	}
	files, err := driver.Files(path, account)
	if err != nil {
		return nil, nil, err
	}
	return nil, files, nil
}

func (driver GoogleDrive) Proxy(c *gin.Context, account *model.Account) {
	c.Request.Header.Add("Authorization", "Bearer "+account.AccessToken)
}

func (driver GoogleDrive) Preview(path string, account *model.Account) (interface{}, error) {
	return nil, NotSupport
}

var _ Driver = (*GoogleDrive)(nil)
157  drivers/googledrive.go  Normal file
@@ -0,0 +1,157 @@
package drivers

import (
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/go-resty/resty/v2"
	"path/filepath"
	"strconv"
	"time"
)

var googleClient = resty.New()

type GoogleTokenError struct {
	Error            string `json:"error"`
	ErrorDescription string `json:"error_description"`
}

func (driver GoogleDrive) RefreshToken(account *model.Account) error {
	url := "https://www.googleapis.com/oauth2/v4/token"
	var resp TokenResp
	var e GoogleTokenError
	_, err := googleClient.R().SetResult(&resp).SetError(&e).
		SetFormData(map[string]string{
			"client_id":     account.ClientId,
			"client_secret": account.ClientSecret,
			"refresh_token": account.RefreshToken,
			"grant_type":    "refresh_token",
		}).Post(url)
	if err != nil {
		return err
	}
	if e.Error != "" {
		return fmt.Errorf(e.Error)
	}
	account.AccessToken = resp.AccessToken
	account.Status = "work"
	return nil
}

type GoogleFile struct {
	Id           string     `json:"id"`
	Name         string     `json:"name"`
	MimeType     string     `json:"mimeType"`
	ModifiedTime *time.Time `json:"modifiedTime"`
	Size         string     `json:"size"`
}

func (driver GoogleDrive) IsDir(mimeType string) bool {
	return mimeType == "application/vnd.google-apps.folder" || mimeType == "application/vnd.google-apps.shortcut"
}

func (driver GoogleDrive) FormatFile(file *GoogleFile) *model.File {
	f := &model.File{
		Id:        file.Id,
		Name:      file.Name,
		Driver:    driver.Config().Name,
		UpdatedAt: file.ModifiedTime,
		Thumbnail: "",
		Url:       "",
	}
	if driver.IsDir(file.MimeType) {
		f.Type = conf.FOLDER
	} else {
		size, _ := strconv.ParseInt(file.Size, 10, 64)
		f.Size = size
		f.Type = utils.GetFileType(filepath.Ext(file.Name))
	}
	return f
}

type GoogleFiles struct {
	NextPageToken string       `json:"nextPageToken"`
	Files         []GoogleFile `json:"files"`
}

type GoogleError struct {
	Error struct {
		Errors []struct {
			Domain       string `json:"domain"`
			Reason       string `json:"reason"`
			Message      string `json:"message"`
			LocationType string `json:"location_type"`
			Location     string `json:"location"`
		}
		Code    int    `json:"code"`
		Message string `json:"message"`
	} `json:"error"`
}

func (driver GoogleDrive) GetFiles(id string, account *model.Account) ([]GoogleFile, error) {
	pageToken := "first"
	res := make([]GoogleFile, 0)
	for pageToken != "" {
		if pageToken == "first" {
			pageToken = ""
		}
		var resp GoogleFiles
		var e GoogleError
		_, err := googleClient.R().SetResult(&resp).SetError(&e).
			SetHeader("Authorization", "Bearer "+account.AccessToken).
			SetQueryParams(map[string]string{
				"orderBy":                   "folder,name,modifiedTime desc",
				"fields":                    "files(id,name,mimeType,size,modifiedTime),nextPageToken",
				"pageSize":                  "1000",
				"q":                         fmt.Sprintf("'%s' in parents and trashed = false", id),
				"includeItemsFromAllDrives": "true",
				"supportsAllDrives":         "true",
				"pageToken":                 pageToken,
			}).Get("https://www.googleapis.com/drive/v3/files")
		if err != nil {
			return nil, err
		}
		if e.Error.Code != 0 {
			if e.Error.Code == 401 {
				err = driver.RefreshToken(account)
				if err != nil {
					_ = model.SaveAccount(account)
					return nil, err
				}
				return driver.GetFiles(id, account)
			}
			return nil, fmt.Errorf("%s: %v", e.Error.Message, e.Error.Errors)
		}
		pageToken = resp.NextPageToken
		res = append(res, resp.Files...)
	}
	return res, nil
}

//func (driver GoogleDrive) GetFile(path string, account *model.Account) (*GoogleFile, error) {
//	dir, name := filepath.Split(path)
//	dir = utils.ParsePath(dir)
//	_, _, err := driver.Path(dir, account)
//	if err != nil {
//		return nil, err
//	}
//	parentFiles_, _ := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, dir))
//	parentFiles, _ := parentFiles_.([]GoogleFile)
//	for _, file := range parentFiles {
//		if file.Name == name {
//			if !driver.IsDir(file.MimeType) {
//				return &file, err
//			} else {
//				return nil, drivers.NotFile
//			}
//		}
//	}
//	return nil, drivers.PathNotFound
//}

func init() {
	RegisterDriver(&GoogleDrive{})
	googleClient.SetRetryCount(3)
}
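RefreshToken above is a plain OAuth refresh-token grant against https://www.googleapis.com/oauth2/v4/token. For reference, here is a hedged standard-library sketch of the same exchange without resty; the client id, secret and refresh token values are placeholders, not values from this compare.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"net/url"
)

func main() {
	// Same form fields the driver sends above.
	form := url.Values{
		"client_id":     {"YOUR_CLIENT_ID"},
		"client_secret": {"YOUR_CLIENT_SECRET"},
		"refresh_token": {"YOUR_REFRESH_TOKEN"},
		"grant_type":    {"refresh_token"},
	}
	resp, err := http.PostForm("https://www.googleapis.com/oauth2/v4/token", form)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	var body struct {
		AccessToken string `json:"access_token"`
		Error       string `json:"error"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		panic(err)
	}
	if body.Error != "" {
		fmt.Println("refresh failed:", body.Error)
		return
	}
	fmt.Println("new access token:", body.AccessToken)
}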
238  drivers/lanzou.go  Normal file
@@ -0,0 +1,238 @@
package drivers

import (
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/go-resty/resty/v2"
	log "github.com/sirupsen/logrus"
	"path/filepath"
	"regexp"
	"strconv"
	"time"
)

var lanzouClient = resty.New()

type LanZouFile struct {
	Name    string `json:"name"`
	NameAll string `json:"name_all"`
	Id      string `json:"id"`
	FolId   string `json:"fol_id"`
	Size    string `json:"size"`
	Time    string `json:"time"`
	Folder  bool
}

func (driver *Lanzou) FormatFile(file *LanZouFile) *model.File {
	now := time.Now()
	f := &model.File{
		Id:        file.Id,
		Name:      file.Name,
		Driver:    driver.Config().Name,
		SizeStr:   file.Size,
		TimeStr:   file.Time,
		UpdatedAt: &now,
	}
	if file.Folder {
		f.Type = conf.FOLDER
		f.Id = file.FolId
	} else {
		f.Name = file.NameAll
		f.Type = utils.GetFileType(filepath.Ext(file.NameAll))
	}
	return f
}

type LanZouFilesResp struct {
	Zt   int          `json:"zt"`
	Info interface{}  `json:"info"`
	Text []LanZouFile `json:"text"`
}

func (driver *Lanzou) GetFiles(folderId string, account *model.Account) ([]LanZouFile, error) {
	if account.OnedriveType == "cookie" {
		files := make([]LanZouFile, 0)
		var resp LanZouFilesResp
		// folders
		res, err := lanzouClient.R().SetResult(&resp).SetHeader("Cookie", account.AccessToken).
			SetFormData(map[string]string{
				"task":      "47",
				"folder_id": folderId,
			}).Post("https://pc.woozooo.com/doupload.php")
		if err != nil {
			return nil, err
		}
		log.Debug(res.String())
		if resp.Zt != 1 && resp.Zt != 2 {
			return nil, fmt.Errorf("%v", resp.Info)
		}
		for _, file := range resp.Text {
			file.Folder = true
			files = append(files, file)
		}
		// files
		pg := 1
		for {
			_, err = lanzouClient.R().SetResult(&resp).SetHeader("Cookie", account.AccessToken).
				SetFormData(map[string]string{
					"task":      "5",
					"folder_id": folderId,
					"pg":        strconv.Itoa(pg),
				}).Post("https://pc.woozooo.com/doupload.php")
			if err != nil {
				return nil, err
			}
			if resp.Zt != 1 {
				return nil, fmt.Errorf("%v", resp.Info)
			}
			if len(resp.Text) == 0 {
				break
			}
			files = append(files, resp.Text...)
			pg++
		}
		return files, nil
	} else {
		return driver.GetFilesByUrl(account)
	}
}

func (driver *Lanzou) GetFilesByUrl(account *model.Account) ([]LanZouFile, error) {
	files := make([]LanZouFile, 0)
	shareUrl := account.SiteUrl
	res, err := lanzouClient.R().Get(shareUrl)
	if err != nil {
		return nil, err
	}
	lxArr := regexp.MustCompile(`'lx':(.+?),`).FindStringSubmatch(res.String())
	if len(lxArr) == 0 {
		return nil, fmt.Errorf("get empty page")
	}
	lx := lxArr[1]
	fid := regexp.MustCompile(`'fid':(.+?),`).FindStringSubmatch(res.String())[1]
	uid := regexp.MustCompile(`'uid':'(.+?)',`).FindStringSubmatch(res.String())[1]
	rep := regexp.MustCompile(`'rep':'(.+?)',`).FindStringSubmatch(res.String())[1]
	up := regexp.MustCompile(`'up':(.+?),`).FindStringSubmatch(res.String())[1]
	ls := regexp.MustCompile(`'ls':(.+?),`).FindStringSubmatch(res.String())[1]
	tName := regexp.MustCompile(`'t':(.+?),`).FindStringSubmatch(res.String())[1]
	kName := regexp.MustCompile(`'k':(.+?),`).FindStringSubmatch(res.String())[1]
	t := regexp.MustCompile(`var ` + tName + ` = '(.+?)';`).FindStringSubmatch(res.String())[1]
	k := regexp.MustCompile(`var ` + kName + ` = '(.+?)';`).FindStringSubmatch(res.String())[1]
	pg := 1
	for {
		var resp LanZouFilesResp
		res, err = lanzouClient.R().SetResult(&resp).SetFormData(map[string]string{
			"lx":  lx,
			"fid": fid,
			"uid": uid,
			"pg":  strconv.Itoa(pg),
			"rep": rep,
			"t":   t,
			"k":   k,
			"up":  up,
			"ls":  ls,
			"pwd": account.Password,
		}).Post("https://wwa.lanzouo.com/filemoreajax.php")
		if err != nil {
			log.Debug(err)
			break
		}
		log.Debug(res.String())
		if resp.Zt != 1 {
			return nil, fmt.Errorf("%v", resp.Info)
		}
		if len(resp.Text) == 0 {
			break
		}
		pg++
		files = append(files, resp.Text...)
	}
	return files, nil
}

//type LanzouDownInfo struct {
//	FId    string `json:"f_id"`
//	IsNewd string `json:"is_newd"`
//}

// Get the ID of the download page
func (driver *Lanzou) GetDownPageId(fileId string, account *model.Account) (string, error) {
	var resp LanZouFilesResp
	res, err := lanzouClient.R().SetResult(&resp).SetHeader("Cookie", account.AccessToken).
		SetFormData(map[string]string{
			"task":    "22",
			"file_id": fileId,
		}).Post("https://pc.woozooo.com/doupload.php")
	if err != nil {
		return "", err
	}
	log.Debug(res.String())
	if resp.Zt != 1 {
		return "", fmt.Errorf("%v", resp.Info)
	}
	info, ok := resp.Info.(map[string]interface{})
	if !ok {
		return "", fmt.Errorf("%v", resp.Info)
	}
	fid, ok := info["f_id"].(string)
	if !ok {
		return "", fmt.Errorf("%v", info["f_id"])
	}
	return fid, nil
}

type LanzouLinkResp struct {
	Dom string `json:"dom"`
	Url string `json:"url"`
	Zt  int    `json:"zt"`
}

func (driver *Lanzou) GetLink(downId string) (string, error) {
	res, err := lanzouClient.R().Get("https://wwa.lanzouo.com/" + downId)
	if err != nil {
		return "", err
	}
	iframe := regexp.MustCompile(`<iframe class="ifr2" name=".{2,20}" src="(.+?)"`).FindStringSubmatch(res.String())
	if len(iframe) == 0 {
		return "", fmt.Errorf("get down empty page")
	}
	iframeUrl := "https://wwa.lanzouo.com" + iframe[1]
	res, err = lanzouClient.R().Get(iframeUrl)
	if err != nil {
		return "", err
	}
	ajaxdata := regexp.MustCompile(`var ajaxdata = '(.+?)'`).FindStringSubmatch(res.String())
	if len(ajaxdata) == 0 {
		return "", fmt.Errorf("get iframe empty page")
	}
	signs := ajaxdata[1]
	sign := regexp.MustCompile(`var ispostdowns = '(.+?)';`).FindStringSubmatch(res.String())[1]
	websignkey := regexp.MustCompile(`'websignkey':'(.+?)'`).FindStringSubmatch(res.String())[1]
	var resp LanzouLinkResp
	form := map[string]string{
		"action":     "downprocess",
		"signs":      signs,
		"sign":       sign,
		"ves":        "1",
		"websign":    "",
		"websignkey": websignkey,
	}
	log.Debugf("form: %+v", form)
	_, err = lanzouClient.R().SetResult(&resp).
		SetHeader("origin", "https://wwa.lanzouo.com").
		SetHeader("referer", iframeUrl).
		SetFormData(form).Post("https://wwa.lanzouo.com/ajaxm.php")
	if resp.Zt == 1 {
		return resp.Dom + "/file/" + resp.Url, nil
	}
	return "", fmt.Errorf("can't get link")
}

func init() {
	RegisterDriver(&Lanzou{})
	lanzouClient.
		SetRetryCount(3).
		SetHeader("user-agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/87.0.4280.88 Safari/537.36")
}
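GetFilesByUrl and GetLink above drive everything off regexp captures pulled out of the share-page HTML. A small self-contained sketch of that primitive (standard library only; the page string is invented sample text, not real Lanzou output): regexp.FindStringSubmatch returns the full match at index 0 and each capture group after it, so index 1 is the captured value.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	page := `... 'uid':'123456', 'rep':'0', ...`
	uidRe := regexp.MustCompile(`'uid':'(.+?)',`)
	m := uidRe.FindStringSubmatch(page)
	if len(m) == 0 {
		// the driver treats a missing match as an empty page
		fmt.Println("pattern not found")
		return
	}
	fmt.Println(m[1]) // prints 123456
}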
163  drivers/lanzou_driver.go  Normal file
@@ -0,0 +1,163 @@
package drivers

import (
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/gin-gonic/gin"
	log "github.com/sirupsen/logrus"
	"path/filepath"
)

type Lanzou struct{}

func (driver Lanzou) Config() DriverConfig {
	return DriverConfig{
		Name:      "Lanzou",
		OnlyProxy: false,
	}
}

func (driver Lanzou) Items() []Item {
	return []Item{
		{
			Name:     "onedrive_type",
			Label:    "lanzou type",
			Type:     SELECT,
			Required: true,
			Values:   "cookie,url",
		},
		{
			Name:        "access_token",
			Label:       "cookie",
			Type:        STRING,
			Description: "about 15 days valid",
		},
		{
			Name:  "root_folder",
			Label: "root folder file_id",
			Type:  STRING,
		},
		{
			Name:  "site_url",
			Label: "share url",
			Type:  STRING,
		},
		{
			Name:  "password",
			Label: "share password",
			Type:  STRING,
		},
	}
}

func (driver Lanzou) Save(account *model.Account, old *model.Account) error {
	if account.OnedriveType == "cookie" {
		if account.RootFolder == "" {
			account.RootFolder = "-1"
		}
	}
	account.Status = "work"
	_ = model.SaveAccount(account)
	return nil
}

func (driver Lanzou) File(path string, account *model.Account) (*model.File, error) {
	path = utils.ParsePath(path)
	if path == "/" {
		return &model.File{
			Id:        account.RootFolder,
			Name:      account.Name,
			Size:      0,
			Type:      conf.FOLDER,
			Driver:    driver.Config().Name,
			UpdatedAt: account.UpdatedAt,
		}, nil
	}
	dir, name := filepath.Split(path)
	files, err := driver.Files(dir, account)
	if err != nil {
		return nil, err
	}
	for _, file := range files {
		if file.Name == name {
			return &file, nil
		}
	}
	return nil, PathNotFound
}

func (driver Lanzou) Files(path string, account *model.Account) ([]model.File, error) {
	path = utils.ParsePath(path)
	var rawFiles []LanZouFile
	cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
	if err == nil {
		rawFiles, _ = cache.([]LanZouFile)
	} else {
		file, err := driver.File(path, account)
		if err != nil {
			return nil, err
		}
		rawFiles, err = driver.GetFiles(file.Id, account)
		if err != nil {
			return nil, err
		}
		if len(rawFiles) > 0 {
			_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), rawFiles, nil)
		}
	}
	files := make([]model.File, 0)
	for _, file := range rawFiles {
		files = append(files, *driver.FormatFile(&file))
	}
	return files, nil
}

func (driver Lanzou) Link(path string, account *model.Account) (string, error) {
	file, err := driver.File(path, account)
	if err != nil {
		return "", err
	}
	log.Debugf("down file: %+v", file)
	downId := file.Id
	if account.OnedriveType == "cookie" {
		downId, err = driver.GetDownPageId(file.Id, account)
		if err != nil {
			return "", err
		}
	}
	link, err := driver.GetLink(downId)
	if err != nil {
		return "", err
	}
	return link, nil
}

func (driver Lanzou) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	path = utils.ParsePath(path)
	log.Debugf("lanzou path: %s", path)
	file, err := driver.File(path, account)
	if err != nil {
		return nil, nil, err
	}
	if file.Type != conf.FOLDER {
		file.Url, _ = driver.Link(path, account)
		return file, nil, nil
	}
	files, err := driver.Files(path, account)
	if err != nil {
		return nil, nil, err
	}
	return nil, files, nil
}

func (driver Lanzou) Proxy(c *gin.Context, account *model.Account) {
	c.Request.Header.Del("Origin")
}

func (driver Lanzou) Preview(path string, account *model.Account) (interface{}, error) {
	return nil, NotSupport
}

var _ Driver = (*Lanzou)(nil)
@@ -1,111 +1,5 @@
 package drivers
 
-import (
-	"fmt"
-	"github.com/Xhofe/alist/conf"
-	"github.com/Xhofe/alist/model"
-	"github.com/Xhofe/alist/utils"
-	"github.com/gin-gonic/gin"
-	log "github.com/sirupsen/logrus"
-	"io/ioutil"
-	"os"
-	"path/filepath"
-	"strings"
-)
-
-type Native struct {
-}
-
-func (n Native) Preview(path string, account *model.Account) (interface{}, error) {
-	return nil, fmt.Errorf("no need")
-}
-
-func (n Native) Items() []Item {
-	return []Item{
-		{
-			Name:     "root_folder",
-			Label:    "root folder path",
-			Type:     "string",
-			Required: true,
-		},
-	}
-}
-
-func (n Native) Proxy(c *gin.Context) {
-	// unnecessary
-}
-
-func (n Native) Save(account *model.Account, old *model.Account) error {
-	log.Debugf("save a account: [%s]", account.Name)
-	if !utils.Exists(account.RootFolder) {
-		return fmt.Errorf("[%s] not exist", account.RootFolder)
-	}
-	return nil
-}
-
-// TODO sort files
-func (n Native) Path(path string, account *model.Account) (*model.File, []*model.File, error) {
-	fullPath := filepath.Join(account.RootFolder, path)
-	log.Debugf("%s-%s-%s", account.RootFolder, path, fullPath)
-	if !utils.Exists(fullPath) {
-		return nil, nil, fmt.Errorf("path not found")
-	}
-	if utils.IsDir(fullPath) {
-		result := make([]*model.File, 0)
-		files, err := ioutil.ReadDir(fullPath)
-		if err != nil {
-			return nil, nil, err
-		}
-		for _, f := range files {
-			if strings.HasPrefix(f.Name(), ".") {
-				continue
-			}
-			time := f.ModTime()
-			file := &model.File{
-				Name:      f.Name(),
-				Size:      f.Size(),
-				Type:      0,
-				UpdatedAt: &time,
-				Driver:    "Native",
-			}
-			if f.IsDir() {
-				file.Type = conf.FOLDER
-			} else {
-				file.Type = utils.GetFileType(filepath.Ext(f.Name()))
-			}
-			result = append(result, file)
-		}
-		return nil, result, nil
-	}
-	f, err := os.Stat(fullPath)
-	if err != nil {
-		return nil, nil, err
-	}
-	time := f.ModTime()
-	file := &model.File{
-		Name:      f.Name(),
-		Size:      f.Size(),
-		Type:      utils.GetFileType(filepath.Ext(f.Name())),
-		UpdatedAt: &time,
-		Driver:    "Native",
-	}
-	return file, nil, nil
-}
-
-func (n Native) Link(path string, account *model.Account) (string, error) {
-	fullPath := filepath.Join(account.RootFolder, path)
-	s, err := os.Stat(fullPath)
-	if err != nil {
-		return "", err
-	}
-	if s.IsDir() {
-		return "", fmt.Errorf("can't down folder")
-	}
-	return fullPath, nil
-}
-
-var _ Driver = (*Native)(nil)
-
 func init() {
-	RegisterDriver("Native", &Native{})
+	RegisterDriver(&Native{})
 }
161  drivers/native_driver.go  Normal file
@@ -0,0 +1,161 @@
package drivers

import (
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	"github.com/gin-gonic/gin"
	log "github.com/sirupsen/logrus"
	"io/ioutil"
	"os"
	"path/filepath"
	"strings"
)

type Native struct{}

func (driver Native) Config() DriverConfig {
	return DriverConfig{
		Name:      "Native",
		OnlyProxy: true,
	}
}

func (driver Native) Items() []Item {
	return []Item{
		{
			Name:     "root_folder",
			Label:    "root folder path",
			Type:     "string",
			Required: true,
		},
		{
			Name:     "order_by",
			Label:    "order_by",
			Type:     "select",
			Values:   "name,size,updated_at",
			Required: false,
		},
		{
			Name:     "order_direction",
			Label:    "order_direction",
			Type:     "select",
			Values:   "ASC,DESC",
			Required: false,
		},
	}
}

func (driver Native) Save(account *model.Account, old *model.Account) error {
	log.Debugf("save a account: [%s]", account.Name)
	if !utils.Exists(account.RootFolder) {
		account.Status = fmt.Sprintf("[%s] not exist", account.RootFolder)
		_ = model.SaveAccount(account)
		return fmt.Errorf("[%s] not exist", account.RootFolder)
	}
	account.Status = "work"
	account.Proxy = true
	err := model.SaveAccount(account)
	if err != nil {
		return err
	}
	return nil
}

func (driver Native) File(path string, account *model.Account) (*model.File, error) {
	fullPath := filepath.Join(account.RootFolder, path)
	if !utils.Exists(fullPath) {
		return nil, PathNotFound
	}
	f, err := os.Stat(fullPath)
	if err != nil {
		return nil, err
	}
	time := f.ModTime()
	file := &model.File{
		Name:      f.Name(),
		Size:      f.Size(),
		UpdatedAt: &time,
		Driver:    driver.Config().Name,
	}
	if f.IsDir() {
		file.Type = conf.FOLDER
	} else {
		file.Type = utils.GetFileType(filepath.Ext(f.Name()))
	}
	return file, nil
}

func (driver Native) Files(path string, account *model.Account) ([]model.File, error) {
	fullPath := filepath.Join(account.RootFolder, path)
	if !utils.Exists(fullPath) {
		return nil, PathNotFound
	}
	files := make([]model.File, 0)
	rawFiles, err := ioutil.ReadDir(fullPath)
	if err != nil {
		return nil, err
	}
	for _, f := range rawFiles {
		if strings.HasPrefix(f.Name(), ".") {
			continue
		}
		time := f.ModTime()
		file := model.File{
			Name:      f.Name(),
			Size:      f.Size(),
			Type:      0,
			UpdatedAt: &time,
			Driver:    driver.Config().Name,
		}
		if f.IsDir() {
			file.Type = conf.FOLDER
		} else {
			file.Type = utils.GetFileType(filepath.Ext(f.Name()))
		}
		files = append(files, file)
	}
	model.SortFiles(files, account)
	return files, nil
}

func (driver Native) Link(path string, account *model.Account) (string, error) {
	fullPath := filepath.Join(account.RootFolder, path)
	s, err := os.Stat(fullPath)
	if err != nil {
		return "", err
	}
	if s.IsDir() {
		return "", fmt.Errorf("can't down folder")
	}
	return fullPath, nil
}

func (driver Native) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	log.Debugf("native path: %s", path)
	file, err := driver.File(path, account)
	if err != nil {
		return nil, nil, err
	}
	if file.Type != conf.FOLDER {
		//file.Url, _ = driver.Link(path, account)
		return file, nil, nil
	}
	files, err := driver.Files(path, account)
	if err != nil {
		return nil, nil, err
	}
	model.SortFiles(files, account)
	return nil, files, nil
}

func (driver Native) Proxy(c *gin.Context, account *model.Account) {
	// unnecessary
}

func (driver Native) Preview(path string, account *model.Account) (interface{}, error) {
	return nil, NotSupport
}

var _ Driver = (*Native)(nil)
210
drivers/one_driver.go
Normal file
210
drivers/one_driver.go
Normal file
@ -0,0 +1,210 @@
|
|||||||
|
package drivers
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/conf"
|
||||||
|
"github.com/Xhofe/alist/model"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
"github.com/gin-gonic/gin"
|
||||||
|
"github.com/robfig/cron/v3"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"path/filepath"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Onedrive struct{}
|
||||||
|
|
||||||
|
func (driver Onedrive) Config() DriverConfig {
|
||||||
|
return DriverConfig{
|
||||||
|
Name: "Onedrive",
|
||||||
|
OnlyProxy: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Onedrive) Items() []Item {
|
||||||
|
return []Item{
|
||||||
|
{
|
||||||
|
Name: "zone",
|
||||||
|
Label: "zone",
|
||||||
|
Type: "select",
|
||||||
|
Required: true,
|
||||||
|
Values: "global,cn,us,de",
|
||||||
|
Description: "",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "onedrive_type",
|
||||||
|
Label: "onedrive type",
|
||||||
|
Type: "select",
|
||||||
|
Required: true,
|
||||||
|
Values: "onedrive,sharepoint",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "client_id",
|
||||||
|
Label: "client id",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "client_secret",
|
||||||
|
Label: "client secret",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "redirect_uri",
|
||||||
|
Label: "redirect uri",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "refresh_token",
|
||||||
|
Label: "refresh token",
|
||||||
|
Type: "string",
|
||||||
|
Required: true,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "site_id",
|
||||||
|
Label: "site id",
|
||||||
|
Type: "string",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "root_folder",
|
||||||
|
Label: "root folder path",
|
||||||
|
Type: "string",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_by",
|
||||||
|
Label: "order_by",
|
||||||
|
Type: "select",
|
||||||
|
Values: "name,size,lastModifiedDateTime",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
Name: "order_direction",
|
||||||
|
Label: "order_direction",
|
||||||
|
Type: "select",
|
||||||
|
Values: "asc,desc",
|
||||||
|
Required: false,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (driver Onedrive) Save(account *model.Account, old *model.Account) error {
	_, ok := onedriveHostMap[account.Zone]
	if !ok {
		return fmt.Errorf("no [%s] zone", account.Zone)
	}
	if old != nil {
		conf.Cron.Remove(cron.EntryID(old.CronId))
	}
	account.RootFolder = utils.ParsePath(account.RootFolder)
	err := driver.RefreshToken(account)
	if err != nil {
		return err
	}
	cronId, err := conf.Cron.AddFunc("@every 1h", func() {
		name := account.Name
		log.Debugf("onedrive account name: %s", name)
		newAccount, ok := model.GetAccount(name)
		log.Debugf("onedrive account: %+v", newAccount)
		if !ok {
			return
		}
		err = driver.RefreshToken(&newAccount)
		_ = model.SaveAccount(&newAccount)
	})
	if err != nil {
		return err
	}
	account.CronId = int(cronId)
	err = model.SaveAccount(account)
	if err != nil {
		return err
	}
	return nil
}

func (driver Onedrive) File(path string, account *model.Account) (*model.File, error) {
	path = utils.ParsePath(path)
	if path == "/" {
		return &model.File{
			Id:        account.RootFolder,
			Name:      account.Name,
			Size:      0,
			Type:      conf.FOLDER,
			Driver:    driver.Config().Name,
			UpdatedAt: account.UpdatedAt,
		}, nil
	}
	dir, name := filepath.Split(path)
	files, err := driver.Files(dir, account)
	if err != nil {
		return nil, err
	}
	for _, file := range files {
		if file.Name == name {
			return &file, nil
		}
	}
	return nil, PathNotFound
}

func (driver Onedrive) Files(path string, account *model.Account) ([]model.File, error) {
	path = utils.ParsePath(path)
	cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
	if err == nil {
		files, _ := cache.([]model.File)
		return files, nil
	}
	rawFiles, err := driver.GetFiles(account, path)
	if err != nil {
		return nil, err
	}
	files := make([]model.File, 0)
	for _, file := range rawFiles {
		files = append(files, *driver.FormatFile(&file))
	}
	if len(files) > 0 {
		_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), files, nil)
	}
	return files, nil
}

func (driver Onedrive) Link(path string, account *model.Account) (string, error) {
	file, err := driver.GetFile(account, path)
	if err != nil {
		return "", err
	}
	if file.File.MimeType == "" {
		return "", fmt.Errorf("can't down folder")
	}
	return file.Url, nil
}

func (driver Onedrive) Path(path string, account *model.Account) (*model.File, []model.File, error) {
	log.Debugf("onedrive path: %s", path)
	file, err := driver.File(path, account)
	if err != nil {
		return nil, nil, err
	}
	if file.Type != conf.FOLDER {
		//file.Url, _ = driver.Link(path, account)
		return file, nil, nil
	}
	files, err := driver.Files(path, account)
	if err != nil {
		return nil, nil, err
	}
	return nil, files, nil
}

func (driver Onedrive) Proxy(c *gin.Context, account *model.Account) {
	c.Request.Header.Del("Origin")
}

func (driver Onedrive) Preview(path string, account *model.Account) (interface{}, error) {
	return nil, NotSupport
}

var _ Driver = (*Onedrive)(nil)
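The Save method above keeps OneDrive access tokens fresh by registering an hourly job on the shared cron instance and remembering its entry id, so a later Save can replace the old job. A minimal standalone sketch of that pattern with github.com/robfig/cron/v3 (the refresh body here is a placeholder, not alist's RefreshToken/SaveAccount code):

package main

import (
	"log"
	"time"

	"github.com/robfig/cron/v3"
)

func main() {
	c := cron.New()
	c.Start()
	defer c.Stop()

	// Register the refresh job and keep its id, as driver.Save does with account.CronId.
	id, err := c.AddFunc("@every 1h", func() {
		log.Println("refreshing token...") // stand-in for RefreshToken + SaveAccount
	})
	if err != nil {
		log.Fatal(err)
	}

	// When the account is saved again, the previous entry is removed before a new one is added.
	c.Remove(id)

	time.Sleep(time.Second)
}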
@ -5,23 +5,20 @@ import (
 	"github.com/Xhofe/alist/conf"
 	"github.com/Xhofe/alist/model"
 	"github.com/Xhofe/alist/utils"
-	"github.com/gin-gonic/gin"
 	"github.com/go-resty/resty/v2"
-	"github.com/robfig/cron/v3"
+	log "github.com/sirupsen/logrus"
 	"path/filepath"
 	"time"
 )
 
-type Onedrive struct{}
-
 var oneClient = resty.New()
 
-type OnedriveHost struct {
+type Host struct {
 	Oauth string
 	Api   string
 }
 
-var onedriveHostMap = map[string]OnedriveHost{
+var onedriveHostMap = map[string]Host{
 	"global": {
 		Oauth: "https://login.microsoftonline.com",
 		Api:   "https://graph.microsoft.com",
@ -40,13 +37,9 @@ var onedriveHostMap = map[string]Host{
 	},
 }
 
-func init() {
-	RegisterDriver("Onedrive", &Onedrive{})
-	oneClient.SetRetryCount(3)
-}
-
-func (o Onedrive) GetMetaUrl(account *model.Account, auth bool, path string) string {
+func (driver Onedrive) GetMetaUrl(account *model.Account, auth bool, path string) string {
 	path = filepath.Join(account.RootFolder, path)
+	log.Debugf(path)
 	host, _ := onedriveHostMap[account.Zone]
 	if auth {
 		return host.Oauth
@ -54,7 +47,7 @@ func (o Onedrive) GetMetaUrl(account *model.Account, auth bool, path string) str
 	switch account.OnedriveType {
 	case "onedrive":
 		{
-			if path == "/" {
+			if path == "/" || path == "\\" {
 				return fmt.Sprintf("%s/v1.0/me/drive/root", host.Api)
 			} else {
 				return fmt.Sprintf("%s/v1.0/me/drive/root:%s:", host.Api, path)
@ -62,7 +55,7 @@ func (o Onedrive) GetMetaUrl(account *model.Account, auth bool, path string) str
 		}
 	case "sharepoint":
 		{
-			if path == "/" {
+			if path == "/" || path == "\\" {
 				return fmt.Sprintf("%s/v1.0/sites/%s/drive/root", host.Api, account.SiteId)
 			} else {
 				return fmt.Sprintf("%s/v1.0/sites/%s/drive/root:%s:", host.Api, account.SiteId, path)
@ -73,69 +66,13 @@ func (o Onedrive) GetMetaUrl(account *model.Account, auth bool, path string) str
 	}
 }
 
-func (o Onedrive) Items() []Item {
-	return []Item{
-		{
-			Name:        "zone",
-			Label:       "zone",
-			Type:        "select",
-			Required:    true,
-			Values:      "global,cn,us,de",
-			Description: "",
-		},
-		{
-			Name:     "onedrive_type",
-			Label:    "onedrive type",
-			Type:     "select",
-			Required: true,
-			Values:   "onedrive,sharepoint",
-		},
-		{
-			Name:     "client_id",
-			Label:    "client id",
-			Type:     "string",
-			Required: true,
-		},
-		{
-			Name:     "client_secret",
-			Label:    "client secret",
-			Type:     "string",
-			Required: true,
-		},
-		{
-			Name:     "redirect_uri",
-			Label:    "redirect uri",
-			Type:     "string",
-			Required: true,
-		},
-		{
-			Name:     "refresh_token",
-			Label:    "refresh token",
-			Type:     "string",
-			Required: true,
-		},
-		{
-			Name:     "site_id",
-			Label:    "site id",
-			Type:     "string",
-			Required: false,
-		},
-		{
-			Name:     "root_folder",
-			Label:    "root folder path",
-			Type:     "string",
-			Required: false,
-		},
-	}
-}
-
 type OneTokenErr struct {
 	Error            string `json:"error"`
 	ErrorDescription string `json:"error_description"`
 }
 
-func (o Onedrive) RefreshToken(account *model.Account) error {
-	url := o.GetMetaUrl(account, true, "") + "/common/oauth2/v2.0/token"
+func (driver Onedrive) RefreshToken(account *model.Account) error {
+	url := driver.GetMetaUrl(account, true, "") + "/common/oauth2/v2.0/token"
 	var resp TokenResp
 	var e OneTokenErr
 	_, err := oneClient.R().SetResult(&resp).SetError(&e).SetFormData(map[string]string{
@ -152,6 +89,8 @@ func (o Onedrive) RefreshToken(account *model.Account) error {
 	if e.Error != "" {
 		account.Status = e.ErrorDescription
 		return fmt.Errorf("%s", e.ErrorDescription)
+	} else {
+		account.Status = "work"
 	}
 	account.RefreshToken, account.AccessToken = resp.RefreshToken, resp.AccessToken
 	return nil
@ -165,10 +104,16 @@ type OneFile struct {
 	File struct {
 		MimeType string `json:"mimeType"`
 	} `json:"file"`
+	Thumbnails []struct {
+		Medium struct {
+			Url string `json:"url"`
+		} `json:"medium"`
+	} `json:"thumbnails"`
 }
 
 type OneFiles struct {
 	Value    []OneFile `json:"value"`
+	NextLink string    `json:"@odata.nextLink"`
 }
 
 type OneRespErr struct {
@ -178,14 +123,17 @@ type OneRespErr struct {
 	} `json:"error"`
 }
 
-func (o Onedrive) FormatFile(file *OneFile) *model.File {
+func (driver Onedrive) FormatFile(file *OneFile) *model.File {
 	f := &model.File{
 		Name:      file.Name,
 		Size:      file.Size,
 		UpdatedAt: file.LastModifiedDateTime,
-		Driver:    "OneDrive",
+		Driver:    driver.Config().Name,
 		Url:       file.Url,
 	}
+	if len(file.Thumbnails) > 0 {
+		f.Thumbnail = file.Thumbnails[0].Medium.Url
+	}
 	if file.File.MimeType == "" {
 		f.Type = conf.FOLDER
 	} else {
@ -194,27 +142,39 @@ func (o Onedrive) FormatFile(file *OneFile) *model.File {
 	return f
 }
 
-func (o Onedrive) GetFiles(account *model.Account, path string) ([]OneFile, error) {
-	var files OneFiles
-	var e OneRespErr
-	_, err := oneClient.R().SetResult(&files).SetError(&e).
-		SetHeader("Authorization", "Bearer "+account.AccessToken).
-		Get(o.GetMetaUrl(account, false, path) + "/children")
-	if err != nil {
-		return nil, err
-	}
-	if e.Error.Code != "" {
-		return nil, fmt.Errorf("%s", e.Error.Message)
+func (driver Onedrive) GetFiles(account *model.Account, path string) ([]OneFile, error) {
+	var res []OneFile
+	nextLink := driver.GetMetaUrl(account, false, path) + "/children?$expand=thumbnails"
+	if account.OrderBy != "" {
+		nextLink += fmt.Sprintf("&orderby=%s", account.OrderBy)
+		if account.OrderDirection != "" {
+			nextLink += fmt.Sprintf("%%20%s", account.OrderDirection)
+		}
+	}
+	for nextLink != "" {
+		var files OneFiles
+		var e OneRespErr
+		_, err := oneClient.R().SetResult(&files).SetError(&e).
+			SetHeader("Authorization", "Bearer "+account.AccessToken).
+			Get(nextLink)
+		if err != nil {
+			return nil, err
+		}
+		if e.Error.Code != "" {
+			return nil, fmt.Errorf("%s", e.Error.Message)
+		}
+		res = append(res, files.Value...)
+		nextLink = files.NextLink
 	}
-	return files.Value, nil
+	return res, nil
 }
 
-func (o Onedrive) GetFile(account *model.Account, path string) (*OneFile, error) {
+func (driver Onedrive) GetFile(account *model.Account, path string) (*OneFile, error) {
 	var file OneFile
 	var e OneRespErr
 	_, err := oneClient.R().SetResult(&file).SetError(&e).
 		SetHeader("Authorization", "Bearer "+account.AccessToken).
-		Get(o.GetMetaUrl(account, false, path))
+		Get(driver.GetMetaUrl(account, false, path))
 	if err != nil {
 		return nil, err
 	}
@ -224,83 +184,7 @@ func (o Onedrive) GetFile(account *model.Account, path string) (*OneFile, error)
 	return &file, nil
 }
 
-func (o Onedrive) Path(path string, account *model.Account) (*model.File, []*model.File, error) {
-	path = utils.ParsePath(path)
-	cache, err := conf.Cache.Get(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path))
-	if err == nil {
-		files, _ := cache.([]*model.File)
-		return nil, files, nil
-	}
-	file, err := o.GetFile(account, path)
-	if err != nil {
-		return nil, nil, err
-	}
-	if file.File.MimeType != "" {
-		return o.FormatFile(file), nil, nil
-	} else {
-		files, err := o.GetFiles(account, path)
-		if err != nil {
-			return nil, nil, err
-		}
-		res := make([]*model.File, 0)
-		for _, file := range files {
-			res = append(res, o.FormatFile(&file))
-		}
-		_ = conf.Cache.Set(conf.Ctx, fmt.Sprintf("%s%s", account.Name, path), res, nil)
-		return nil, res, nil
-	}
-}
-
-func (o Onedrive) Link(path string, account *model.Account) (string, error) {
-	file, err := o.GetFile(account, path)
-	if err != nil {
-		return "", err
-	}
-	if file.File.MimeType == "" {
-		return "", fmt.Errorf("can't down folder")
-	}
-	return file.Url, nil
-}
-
-func (o Onedrive) Save(account *model.Account, old *model.Account) error {
-	_, ok := onedriveHostMap[account.Zone]
-	if !ok {
-		return fmt.Errorf("no [%s] zone", account.Zone)
-	}
-	if old != nil {
-		conf.Cron.Remove(cron.EntryID(old.CronId))
-	}
-	account.RootFolder = utils.ParsePath(account.RootFolder)
-	err := o.RefreshToken(account)
-	if err != nil {
-		return err
-	}
-	cronId, err := conf.Cron.AddFunc("@every 1h", func() {
-		name := account.Name
-		newAccount, ok := model.GetAccount(name)
-		if !ok {
-			return
-		}
-		err = o.RefreshToken(&newAccount)
-		_ = model.SaveAccount(&newAccount)
-	})
-	if err != nil {
-		return err
-	}
-	account.CronId = int(cronId)
-	err = model.SaveAccount(account)
-	if err != nil {
-		return err
-	}
-	return nil
-}
-
-func (o Onedrive) Proxy(c *gin.Context) {
-	c.Request.Header.Del("Origin")
-}
-
-func (o Onedrive) Preview(path string, account *model.Account) (interface{}, error) {
-	return nil, nil
-}
-
-var _ Driver = (*Onedrive)(nil)
+func init() {
+	RegisterDriver(&Onedrive{})
+	oneClient.SetRetryCount(3)
+}
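The reworked GetFiles above pages through the Graph children listing by following @odata.nextLink until it is empty. A standalone sketch of the same loop with only net/http and encoding/json (the endpoint and field names follow the Microsoft Graph API; the token is a placeholder):

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

type page struct {
	Value    []struct{ Name string } `json:"value"`
	NextLink string                  `json:"@odata.nextLink"`
}

func listChildren(token string) ([]string, error) {
	next := "https://graph.microsoft.com/v1.0/me/drive/root/children"
	var names []string
	for next != "" {
		req, err := http.NewRequest("GET", next, nil)
		if err != nil {
			return nil, err
		}
		req.Header.Set("Authorization", "Bearer "+token)
		resp, err := http.DefaultClient.Do(req)
		if err != nil {
			return nil, err
		}
		var p page
		err = json.NewDecoder(resp.Body).Decode(&p)
		resp.Body.Close()
		if err != nil {
			return nil, err
		}
		for _, v := range p.Value {
			names = append(names, v.Name)
		}
		next = p.NextLink // empty on the last page, which ends the loop
	}
	return names, nil
}

func main() {
	names, err := listChildren("placeholder-token")
	fmt.Println(names, err)
}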
16
drivers/types.go
Normal file

@ -0,0 +1,16 @@
package drivers

import "fmt"

var (
	PathNotFound = fmt.Errorf("path not found")
	NotFile      = fmt.Errorf("not file")
	NotImplement = fmt.Errorf("not implement")
	NotSupport   = fmt.Errorf("not support")
)

const (
	STRING = "string"
	SELECT = "select"
	BOOL   = "bool"
)
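These sentinel errors give the drivers a shared vocabulary, so callers can compare against them directly (errors.Is also works, since they are fixed values). A small illustrative sketch with a hypothetical stub standing in for a driver method, not alist's actual handler code:

package main

import (
	"errors"
	"fmt"
)

var NotSupport = fmt.Errorf("not support") // same shape as drivers.NotSupport

// preview is a hypothetical stand-in for a driver's Preview method.
func preview(path string) (interface{}, error) {
	return nil, NotSupport
}

func main() {
	if _, err := preview("/some/file"); errors.Is(err, NotSupport) {
		fmt.Println("preview not supported, fall back to a plain download link")
	}
}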
@ -32,6 +32,7 @@ type Account struct {
 	SiteUrl      string `json:"site_url"`
 	SiteId       string `json:"site_id"`
 	OnedriveType string `json:"onedrive_type"`
+	WebdavProxy  bool   `json:"webdav_proxy"`
 }
 
 var accountsMap = map[string]Account{}
@ -68,6 +69,10 @@ func DeleteAccount(id uint) error {
 	return nil
 }
 
+func DeleteAccountFromMap(name string) {
+	delete(accountsMap, name)
+}
+
 func AccountsCount() int {
 	return len(accountsMap)
 }
@ -86,14 +91,23 @@ func GetAccount(name string) (Account, bool) {
 	return account, ok
 }
 
-func GetAccountFiles() ([]*File, error) {
-	files := make([]*File, 0)
+func GetAccountById(id uint) (*Account, error) {
+	var account Account
+	account.ID = id
+	if err := conf.DB.First(&account).Error; err != nil {
+		return nil, err
+	}
+	return &account, nil
+}
+
+func GetAccountFiles() ([]File, error) {
+	files := make([]File, 0)
 	var accounts []Account
 	if err := conf.DB.Order("`index`").Find(&accounts).Error; err != nil {
 		return nil, err
 	}
 	for _, v := range accounts {
-		files = append(files, &File{
+		files = append(files, File{
 			Name: v.Name,
 			Size: 0,
 			Type: conf.FOLDER,
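GetAccountById relies on gorm's behaviour of using a populated primary key on the destination struct as the query condition for First. A minimal sketch of that pattern (the Account model and the in-memory sqlite driver here are placeholders chosen to make the snippet self-contained, not alist's own initialisation code):

package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

type Account struct {
	gorm.Model
	Name string
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:?cache=shared"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	_ = db.AutoMigrate(&Account{})
	db.Create(&Account{Name: "demo"})

	// Setting the primary key before First makes gorm query "WHERE id = 1".
	var got Account
	got.ID = 1
	if err := db.First(&got).Error; err != nil {
		fmt.Println("not found:", err)
		return
	}
	fmt.Println(got.Name)
}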
@ -1,8 +1,14 @@
 package model
 
-import "time"
+import (
+	"github.com/Xhofe/alist/conf"
+	"sort"
+	"strings"
+	"time"
+)
 
 type File struct {
+	Id        string     `json:"-"`
 	Name      string     `json:"name"`
 	Size      int64      `json:"size"`
 	Type      int        `json:"type"`
@ -10,4 +16,53 @@ type File struct {
 	UpdatedAt *time.Time `json:"updated_at"`
 	Thumbnail string     `json:"thumbnail"`
 	Url       string     `json:"url"`
+	SizeStr   string     `json:"size_str"`
+	TimeStr   string     `json:"time_str"`
+}
+
+func SortFiles(files []File, account *Account) {
+	if account.OrderBy == "" {
+		return
+	}
+	sort.Slice(files, func(i, j int) bool {
+		switch account.OrderBy {
+		case "name":
+			{
+				c := strings.Compare(files[i].Name, files[j].Name)
+				if account.OrderDirection == "DESC" {
+					return c >= 0
+				}
+				return c <= 0
+			}
+		case "size":
+			{
+				if account.OrderDirection == "DESC" {
+					return files[i].Size >= files[j].Size
+				}
+				return files[i].Size <= files[j].Size
+			}
+		case "updated_at":
+			if account.OrderDirection == "DESC" {
+				return files[i].UpdatedAt.After(*files[j].UpdatedAt)
+			}
+			return files[i].UpdatedAt.Before(*files[j].UpdatedAt)
+		}
+		return false
+	})
+}
+
+func (f File) GetSize() uint64 {
+	return uint64(f.Size)
+}
+
+func (f File) GetName() string {
+	return f.Name
+}
+
+func (f File) ModTime() time.Time {
+	return *f.UpdatedAt
+}
+
+func (f File) IsDir() bool {
+	return f.Type == conf.FOLDER
 }
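SortFiles sorts a listing in place according to the account's OrderBy and OrderDirection settings. A small usage sketch that reimplements the "name" case with simplified stand-ins for the model types (this mirrors the code above but is not alist's own test code):

package main

import (
	"fmt"
	"sort"
	"strings"
)

type File struct{ Name string }
type Account struct{ OrderBy, OrderDirection string }

// sortFiles mirrors model.SortFiles for the "name" case only, as an illustration.
func sortFiles(files []File, account *Account) {
	if account.OrderBy == "" {
		return
	}
	sort.Slice(files, func(i, j int) bool {
		c := strings.Compare(files[i].Name, files[j].Name)
		if account.OrderDirection == "DESC" {
			return c >= 0
		}
		return c <= 0
	})
}

func main() {
	files := []File{{"b.txt"}, {"a.txt"}, {"c.txt"}}
	sortFiles(files, &Account{OrderBy: "name", OrderDirection: "DESC"})
	fmt.Println(files) // [{c.txt} {b.txt} {a.txt}]
}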
@ -61,11 +61,20 @@ func LoadSettings() {
 	if err == nil {
 		conf.CheckParent = checkParent.Value == "true"
 	}
+	checkDown, err := GetSettingByKey("check down link")
+	if err == nil {
+		conf.CheckDown = checkDown.Value == "true"
+	}
 	favicon, err := GetSettingByKey("favicon")
 	if err == nil {
 		//conf.Favicon = favicon.Value
 		conf.IndexHtml = strings.Replace(conf.RawIndexHtml, "https://store.heytapimage.com/cdo-portal/feedback/202110/30/d43c41c5d257c9bc36366e310374fb19.png", favicon.Value, 1)
 	}
+	title, err := GetSettingByKey("title")
+	if err == nil {
+		//conf.CustomizeStyle = customizeStyle.Value
+		conf.IndexHtml = strings.Replace(conf.IndexHtml, "Loading...", title.Value, 1)
+	}
 	customizeStyle, err := GetSettingByKey("customize style")
 	if err == nil {
 		//conf.CustomizeStyle = customizeStyle.Value
@ -76,4 +85,13 @@ func LoadSettings() {
 		//conf.CustomizeStyle = customizeScript.Value
 		conf.IndexHtml = strings.Replace(conf.IndexHtml, "// customize-js", customizeScript.Value, 1)
 	}
+
+	davUsername, err := GetSettingByKey("WebDAV username")
+	if err == nil {
+		conf.DavUsername = davUsername.Value
+	}
+	davPassword, err := GetSettingByKey("WebDAV password")
+	if err == nil {
+		conf.DavPassword = davPassword.Value
+	}
 }
@ -56,18 +56,21 @@ func SaveAccount(c *gin.Context) {
 		ErrorResp(c, fmt.Errorf("no [%s] driver", req.Type), 400)
 		return
 	}
-	old, ok := model.GetAccount(req.Name)
+	old, err := model.GetAccountById(req.ID)
+	if err != nil {
+		ErrorResp(c, err, 400)
+		return
+	}
 	now := time.Now()
 	req.UpdatedAt = &now
+	if old.Name != req.Name {
+		model.DeleteAccountFromMap(old.Name)
+	}
 	if err := model.SaveAccount(&req); err != nil {
 		ErrorResp(c, err, 500)
 	} else {
 		log.Debugf("save account: %+v", req)
-		if ok {
-			err = driver.Save(&req, &old)
-		} else {
-			err = driver.Save(&req, nil)
-		}
+		err = driver.Save(&req, old)
 		if err != nil {
 			ErrorResp(c, err, 500)
 			return
@ -2,11 +2,12 @@ package server
 
 import (
 	"fmt"
+	"github.com/Xhofe/alist/conf"
 	"github.com/Xhofe/alist/model"
 	"github.com/Xhofe/alist/utils"
 	"github.com/gin-gonic/gin"
+	log "github.com/sirupsen/logrus"
 	"gorm.io/gorm"
-	"path/filepath"
 )
 
 func Auth(c *gin.Context) {
@ -50,6 +51,29 @@ func CheckParent(path string, password string) bool {
 		if path == "/" {
 			return true
 		}
-		return CheckParent(filepath.Dir(path), password)
+		return CheckParent(utils.Dir(path), password)
+	}
+}
+
+func CheckDownLink(path string, passwordMd5 string) bool {
+	if !conf.CheckDown {
+		return true
+	}
+	meta, err := model.GetMetaByPath(path)
+	log.Debugf("check down path: %s", path)
+	if err == nil {
+		log.Debugf("check down link: %s,%s", meta.Password, passwordMd5)
+		if meta.Password != "" && utils.Get16MD5Encode(meta.Password) != passwordMd5 {
+			return false
+		}
+		return true
+	} else {
+		if !conf.CheckParent {
+			return true
+		}
+		if path == "/" {
+			return true
+		}
+		return CheckDownLink(utils.Dir(path), passwordMd5)
 	}
 }
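CheckDownLink compares the pw query parameter against a 16-character MD5 of the folder password, so a client that knows the password sends that digest rather than the plain text. A hedged sketch of building such a link; the slice of the middle 16 hex characters is an assumption about how utils.Get16MD5Encode works, and the host and path are placeholders:

package main

import (
	"crypto/md5"
	"encoding/hex"
	"fmt"
)

// get16MD5Encode is an assumed equivalent of utils.Get16MD5Encode:
// the middle 16 hex characters of the MD5 digest.
func get16MD5Encode(s string) string {
	sum := md5.Sum([]byte(s))
	return hex.EncodeToString(sum[:])[8:24]
}

func main() {
	password := "secret"
	link := fmt.Sprintf("https://example.com/d/demo/file.txt?pw=%s", get16MD5Encode(password))
	fmt.Println(link)
}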
@ -5,6 +5,7 @@ import (
 	"github.com/Xhofe/alist/drivers"
 	"github.com/Xhofe/alist/model"
 	"github.com/gin-gonic/gin"
+	log "github.com/sirupsen/logrus"
 	"strings"
 )
 
@ -39,6 +40,7 @@ func ParsePath(rawPath string) (*model.Account, string, drivers.Driver, error) {
 }
 
 func ErrorResp(c *gin.Context, err error, code int) {
+	log.Error(err.Error())
 	c.JSON(200, Resp{
 		Code:    code,
 		Message: err.Error(),
@ -5,6 +5,7 @@ import (
 	"github.com/Xhofe/alist/conf"
 	"github.com/Xhofe/alist/utils"
 	"github.com/gin-gonic/gin"
+	"github.com/go-resty/resty/v2"
 	log "github.com/sirupsen/logrus"
 	"net/http/httputil"
 	"net/url"
@ -13,16 +14,21 @@ import (
 )
 
 func Down(c *gin.Context) {
-	rawPath, err := url.PathUnescape(c.Param("path"))
+	rawPath := c.Param("path")
+	rawPath = utils.ParsePath(rawPath)
+	log.Debugf("down: %s", rawPath)
+	pw := c.Query("pw")
+	if !CheckDownLink(utils.Dir(rawPath), pw) {
+		ErrorResp(c, fmt.Errorf("wrong password"), 401)
+		return
+	}
+	account, path, driver, err := ParsePath(rawPath)
 	if err != nil {
 		ErrorResp(c, err, 500)
 		return
 	}
-	rawPath = utils.ParsePath(rawPath)
-	log.Debugf("down: %s", rawPath)
-	account, path, driver, err := ParsePath(rawPath)
-	if err != nil {
-		ErrorResp(c, err, 500)
+	if account.Type == "GoogleDrive" {
+		Proxy(c)
 		return
 	}
 	link, err := driver.Link(path, account)
@ -40,13 +46,14 @@ func Down(c *gin.Context) {
 }
 
 func Proxy(c *gin.Context) {
-	rawPath, err := url.PathUnescape(c.Param("path"))
-	if err != nil {
-		ErrorResp(c, err, 500)
-		return
-	}
+	rawPath := c.Param("path")
 	rawPath = utils.ParsePath(rawPath)
 	log.Debugf("proxy: %s", rawPath)
+	pw := c.Query("pw")
+	if !CheckDownLink(utils.Dir(rawPath), pw) {
+		ErrorResp(c, fmt.Errorf("wrong password"), 401)
+		return
+	}
 	account, path, driver, err := ParsePath(rawPath)
 	if err != nil {
 		ErrorResp(c, err, 500)
@ -65,7 +72,11 @@ func Proxy(c *gin.Context) {
 		c.File(link)
 		return
 	} else {
-		driver.Proxy(c)
+		if utils.GetFileType(filepath.Ext(rawPath)) == conf.TEXT {
+			Text(c, link)
+			return
+		}
+		driver.Proxy(c, account)
 		r := c.Request
 		w := c.Writer
 		target, err := url.Parse(link)
@ -84,3 +95,30 @@ func Proxy(c *gin.Context) {
 		proxy.ServeHTTP(w, r)
 	}
 }
+
+var client *resty.Client
+
+func init() {
+	client = resty.New()
+	client.SetRetryCount(3)
+}
+
+func Text(c *gin.Context, link string) {
+	res, err := client.R().Get(link)
+	if err != nil {
+		ErrorResp(c, err, 500)
+		return
+	}
+	text := res.String()
+	t := utils.GetStrCoding(res.Body())
+	log.Debugf("text type: %s", t)
+	if t != utils.UTF8 {
+		body, err := utils.GbkToUtf8(res.Body())
+		if err != nil {
+			ErrorResp(c, err, 500)
+			return
+		}
+		text = string(body)
+	}
+	c.String(200, text)
+}
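The new Text handler decodes GBK responses before returning them, relying on utils.GetStrCoding and utils.GbkToUtf8. A standalone sketch of what such a GBK to UTF-8 conversion typically looks like with golang.org/x/text; this is an illustrative implementation, not necessarily the one in alist's utils package:

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"

	"golang.org/x/text/encoding/simplifiedchinese"
	"golang.org/x/text/transform"
)

// gbkToUtf8 converts GBK-encoded bytes to UTF-8.
func gbkToUtf8(s []byte) ([]byte, error) {
	reader := transform.NewReader(bytes.NewReader(s), simplifiedchinese.GBK.NewDecoder())
	return ioutil.ReadAll(reader)
}

func main() {
	gbk := []byte{0xc4, 0xe3, 0xba, 0xc3} // "你好" encoded as GBK
	utf8Bytes, err := gbkToUtf8(gbk)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(utf8Bytes))
}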
@ -7,7 +7,6 @@ import (
 	"github.com/Xhofe/alist/utils"
 	"github.com/gin-gonic/gin"
 	log "github.com/sirupsen/logrus"
-	"path/filepath"
 	"strings"
 )
 
@ -31,9 +30,8 @@ func Path(c *gin.Context) {
 			return
 		}
 		// TODO hide or ignore?
-	}
-	if conf.CheckParent {
-		if !CheckParent(filepath.Dir(req.Path), req.Password) {
+	} else if conf.CheckParent {
+		if !CheckParent(utils.Dir(req.Path), req.Password) {
 			ErrorResp(c, fmt.Errorf("wrong password"), 401)
 			return
 		}
@ -72,7 +70,7 @@ func Path(c *gin.Context) {
 		})
 	} else {
 		if meta != nil && meta.Hide != "" {
-			tmpFiles := make([]*model.File, 0)
+			tmpFiles := make([]model.File, 0)
 			hideFiles := strings.Split(meta.Hide, ",")
 			for _, item := range files {
 				if !utils.IsContain(hideFiles, item.Name) {
@ -40,6 +40,7 @@ func InitApiRouter(r *gin.Engine) {
 		admin.DELETE("/meta", DeleteMeta)
 	}
 	Static(r)
+	WebDav(r)
 }
 
 func Cors(r *gin.Engine) {
@ -12,7 +12,11 @@ import (
 
 
 func init() {
-	index, _ := public.Public.Open("index.html")
+	index, err := public.Public.Open("index.html")
+	if err != nil {
+		log.Errorf(err.Error())
+		return
+	}
 	data, _ := ioutil.ReadAll(index)
 	conf.RawIndexHtml = string(data)
 }
@ -28,5 +32,6 @@ func Static(r *gin.Engine) {
 		c.Header("Content-Type", "text/html")
 		_, _ = c.Writer.WriteString(conf.IndexHtml)
 		c.Writer.Flush()
+		c.Writer.WriteHeaderNow()
 	})
 }
60
server/webdav.go
Normal file

@ -0,0 +1,60 @@
package server

import (
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/server/webdav"
	"github.com/gin-gonic/gin"
	"net/http"
)

var handler *webdav.Handler

func init() {
	handler = &webdav.Handler{
		Prefix:     "/dav",
		LockSystem: webdav.NewMemLS(),
	}
}

func WebDav(r *gin.Engine) {
	dav := r.Group("/dav")
	dav.Use(WebDAVAuth)
	dav.Any("/*path", ServeWebDAV)
	dav.Any("", ServeWebDAV)
	dav.Handle("PROPFIND", "/*path", ServeWebDAV)
	dav.Handle("PROPFIND", "", ServeWebDAV)
	dav.Handle("MKCOL", "/*path", ServeWebDAV)
	dav.Handle("LOCK", "/*path", ServeWebDAV)
	dav.Handle("UNLOCK", "/*path", ServeWebDAV)
	dav.Handle("PROPPATCH", "/*path", ServeWebDAV)
	dav.Handle("COPY", "/*path", ServeWebDAV)
	dav.Handle("MOVE", "/*path", ServeWebDAV)
}

func ServeWebDAV(c *gin.Context) {
	fs := webdav.FileSystem{}
	handler.ServeHTTP(c.Writer, c.Request, &fs)
}

func WebDAVAuth(c *gin.Context) {
	if c.Request.Method == "OPTIONS" {
		c.Next()
		return
	}
	username, password, ok := c.Request.BasicAuth()
	if !ok {
		c.Writer.Header()["WWW-Authenticate"] = []string{`Basic realm="alist"`}
		c.Status(http.StatusUnauthorized)
		c.Abort()
		return
	}
	if conf.DavUsername != "" && conf.DavUsername != username {
		c.Status(http.StatusUnauthorized)
		c.Abort()
	}
	if conf.DavPassword != "" && conf.DavPassword != password {
		c.Status(http.StatusUnauthorized)
		c.Abort()
	}
	c.Next()
}
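With WebDav mounted on /dav and guarded by WebDAVAuth, a client authenticates with HTTP Basic credentials taken from the "WebDAV username" and "WebDAV password" settings. A quick client-side sketch issuing a PROPFIND; the host, port and credentials are placeholders:

package main

import (
	"fmt"
	"net/http"
)

func main() {
	req, err := http.NewRequest("PROPFIND", "http://localhost:5244/dav/", nil)
	if err != nil {
		panic(err)
	}
	req.SetBasicAuth("admin", "password") // placeholder credentials
	req.Header.Set("Depth", "1")          // list only the immediate children
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // 207 Multi-Status on success, 401 if auth fails
}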
202
server/webdav/file.go
Normal file

@ -0,0 +1,202 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package webdav

import (
	"context"
	"fmt"
	"github.com/Xhofe/alist/conf"
	"github.com/Xhofe/alist/drivers"
	"github.com/Xhofe/alist/model"
	"github.com/Xhofe/alist/utils"
	log "github.com/sirupsen/logrus"
	"net/http"
	"path"
	"path/filepath"
	"strings"
	"time"
)

type FileSystem struct{}

func ParsePath(rawPath string) (*model.Account, string, drivers.Driver, error) {
	var path, name string
	switch model.AccountsCount() {
	case 0:
		return nil, "", nil, fmt.Errorf("no accounts,please add one first")
	case 1:
		path = rawPath
		break
	default:
		paths := strings.Split(rawPath, "/")
		path = "/" + strings.Join(paths[2:], "/")
		name = paths[1]
	}
	account, ok := model.GetAccount(name)
	if !ok {
		return nil, "", nil, fmt.Errorf("no [%s] account", name)
	}
	driver, ok := drivers.GetDriver(account.Type)
	if !ok {
		return nil, "", nil, fmt.Errorf("no [%s] driver", account.Type)
	}
	return &account, path, driver, nil
}

func (fs *FileSystem) File(rawPath string) (*model.File, error) {
	rawPath = utils.ParsePath(rawPath)
	if model.AccountsCount() > 1 && rawPath == "/" {
		now := time.Now()
		return &model.File{
			Name:      "root",
			Size:      0,
			Type:      conf.FOLDER,
			Driver:    "root",
			UpdatedAt: &now,
		}, nil
	}
	account, path_, driver, err := ParsePath(rawPath)
	if err != nil {
		return nil, err
	}
	return driver.File(path_, account)
}

func (fs *FileSystem) Files(rawPath string) ([]model.File, error) {
	rawPath = utils.ParsePath(rawPath)
	if model.AccountsCount() > 1 && rawPath == "/" {
		files, err := model.GetAccountFiles()
		if err != nil {
			return nil, err
		}
		return files, nil
	}
	account, path_, driver, err := ParsePath(rawPath)
	if err != nil {
		return nil, err
	}
	return driver.Files(path_, account)
}

func GetPW(path string) string {
	if !conf.CheckDown {
		return ""
	}
	meta, err := model.GetMetaByPath(path)
	if err == nil {
		if meta.Password != "" {
			utils.Get16MD5Encode(meta.Password)
		}
		return ""
	} else {
		if !conf.CheckParent {
			return ""
		}
		if path == "/" {
			return ""
		}
		return GetPW(utils.Dir(path))
	}
}

func (fs *FileSystem) Link(r *http.Request, rawPath string) (string, error) {
	rawPath = utils.ParsePath(rawPath)
	log.Debugf("get link path: %s", rawPath)
	if model.AccountsCount() > 1 && rawPath == "/" {
		// error
	}
	account, path_, driver, err := ParsePath(rawPath)
	if err != nil {
		return "", err
	}
	link := ""
	protocol := "http"
	if r.TLS != nil {
		protocol = "https"
	}
	if driver.Config().OnlyProxy || account.WebdavProxy {
		link = fmt.Sprintf("%s://%s/p%s", protocol, r.Host, rawPath)
		if conf.CheckDown {
			pw := GetPW(utils.Dir(rawPath))
			link += "?pw" + pw
		}
	} else {
		link, err = driver.Link(path_, account)
	}
	log.Debugf("webdav get link: %s", link)
	return link, err
}

func (fs *FileSystem) CreateDirectory(ctx context.Context, reqPath string) (interface{}, error) {
	return nil, nil
}

// slashClean is equivalent to but slightly more efficient than
// path.Clean("/" + name).
func slashClean(name string) string {
	if name == "" || name[0] != '/' {
		name = "/" + name
	}
	return path.Clean(name)
}

// moveFiles moves files and/or directories from src to dst.
//
// See section 9.9.4 for when various HTTP status codes apply.
func moveFiles(ctx context.Context, fs *FileSystem, src FileInfo, dst string, overwrite bool) (status int, err error) {

	return http.StatusNoContent, nil
}

// copyFiles copies files and/or directories from src to dst.
//
// See section 9.8.5 for when various HTTP status codes apply.
func copyFiles(ctx context.Context, fs *FileSystem, src FileInfo, dst string, overwrite bool, depth int, recursion int) (status int, err error) {

	return http.StatusNoContent, nil
}

// walkFS traverses filesystem fs starting at name up to depth levels.
//
// Allowed values for depth are 0, 1 or infiniteDepth. For each visited node,
// walkFS calls walkFn. If a visited file system node is a directory and
// walkFn returns filepath.SkipDir, walkFS will skip traversal of this node.
func walkFS(
	ctx context.Context,
	fs *FileSystem,
	depth int,
	name string,
	info FileInfo,
	walkFn func(reqPath string, info FileInfo, err error) error) error {
	// This implementation is based on Walk's code in the standard path/filepath package.
	err := walkFn(name, info, nil)
	if err != nil {
		if info.IsDir() && err == filepath.SkipDir {
			return nil
		}
		return err
	}
	if !info.IsDir() || depth == 0 {
		return nil
	}
	if depth == 1 {
		depth = 0
	}

	files, err := fs.Files(name)
	if err != nil {
		return err
	}
	for _, fileInfo := range files {
		filename := path.Join(name, fileInfo.Name)
		err = walkFS(ctx, fs, depth, filename, &fileInfo, walkFn)
		if err != nil {
			if !fileInfo.IsDir() || err != filepath.SkipDir {
				return err
			}
		}
	}
	return nil
}
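walkFS mirrors path/filepath.Walk but reads directory entries through FileSystem.Files, which is how PROPFIND with Depth 1 or infinity is served. The same pattern in miniature, over an in-memory tree, to show how the depth parameter and filepath.SkipDir interact (the node type and names are illustrative, not part of alist):

package main

import (
	"fmt"
	"path"
	"path/filepath"
)

// node is a toy stand-in for a WebDAV collection entry.
type node struct {
	name     string
	children []node
}

func (n node) isDir() bool { return len(n.children) > 0 }

// walk visits name and, while depth is not zero, its children,
// treating depth 1 as "this level only", like walkFS above.
func walk(depth int, name string, n node, fn func(string, node) error) error {
	if err := fn(name, n); err != nil {
		if n.isDir() && err == filepath.SkipDir {
			return nil
		}
		return err
	}
	if !n.isDir() || depth == 0 {
		return nil
	}
	if depth == 1 {
		depth = 0
	}
	for _, c := range n.children {
		if err := walk(depth, path.Join(name, c.name), c, fn); err != nil {
			if !c.isDir() || err != filepath.SkipDir {
				return err
			}
		}
	}
	return nil
}

func main() {
	root := node{name: "/", children: []node{
		{name: "docs", children: []node{{name: "a.txt"}}},
		{name: "b.txt"},
	}}
	_ = walk(1, "/", root, func(p string, _ node) error {
		fmt.Println(p)
		return nil
	})
}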
173
server/webdav/if.go
Normal file

@ -0,0 +1,173 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package webdav

// The If header is covered by Section 10.4.
// http://www.webdav.org/specs/rfc4918.html#HEADER_If

import (
	"strings"
)

// ifHeader is a disjunction (OR) of ifLists.
type ifHeader struct {
	lists []ifList
}

// ifList is a conjunction (AND) of Conditions, and an optional resource tag.
type ifList struct {
	resourceTag string
	conditions  []Condition
}

// parseIfHeader parses the "If: foo bar" HTTP header. The httpHeader string
// should omit the "If:" prefix and have any "\r\n"s collapsed to a " ", as is
// returned by req.Header.Get("If") for a http.Request req.
func parseIfHeader(httpHeader string) (h ifHeader, ok bool) {
	s := strings.TrimSpace(httpHeader)
	switch tokenType, _, _ := lex(s); tokenType {
	case '(':
		return parseNoTagLists(s)
	case angleTokenType:
		return parseTaggedLists(s)
	default:
		return ifHeader{}, false
	}
}

func parseNoTagLists(s string) (h ifHeader, ok bool) {
	for {
		l, remaining, ok := parseList(s)
		if !ok {
			return ifHeader{}, false
		}
		h.lists = append(h.lists, l)
		if remaining == "" {
			return h, true
		}
		s = remaining
	}
}

func parseTaggedLists(s string) (h ifHeader, ok bool) {
	resourceTag, n := "", 0
	for first := true; ; first = false {
		tokenType, tokenStr, remaining := lex(s)
		switch tokenType {
		case angleTokenType:
			if !first && n == 0 {
				return ifHeader{}, false
			}
			resourceTag, n = tokenStr, 0
			s = remaining
		case '(':
			n++
			l, remaining, ok := parseList(s)
			if !ok {
				return ifHeader{}, false
			}
			l.resourceTag = resourceTag
			h.lists = append(h.lists, l)
			if remaining == "" {
				return h, true
			}
			s = remaining
		default:
			return ifHeader{}, false
		}
	}
}

func parseList(s string) (l ifList, remaining string, ok bool) {
	tokenType, _, s := lex(s)
	if tokenType != '(' {
		return ifList{}, "", false
	}
	for {
		tokenType, _, remaining = lex(s)
		if tokenType == ')' {
			if len(l.conditions) == 0 {
				return ifList{}, "", false
			}
			return l, remaining, true
		}
		c, remaining, ok := parseCondition(s)
		if !ok {
			return ifList{}, "", false
		}
		l.conditions = append(l.conditions, c)
		s = remaining
	}
}

func parseCondition(s string) (c Condition, remaining string, ok bool) {
	tokenType, tokenStr, s := lex(s)
	if tokenType == notTokenType {
		c.Not = true
		tokenType, tokenStr, s = lex(s)
	}
	switch tokenType {
	case strTokenType, angleTokenType:
		c.Token = tokenStr
	case squareTokenType:
		c.ETag = tokenStr
	default:
		return Condition{}, "", false
	}
	return c, s, true
}

// Single-rune tokens like '(' or ')' have a token type equal to their rune.
// All other tokens have a negative token type.
const (
	errTokenType    = rune(-1)
	eofTokenType    = rune(-2)
	strTokenType    = rune(-3)
	notTokenType    = rune(-4)
	angleTokenType  = rune(-5)
	squareTokenType = rune(-6)
)

func lex(s string) (tokenType rune, tokenStr string, remaining string) {
	// The net/textproto Data that parses the HTTP header will collapse
	// Linear White Space that spans multiple "\r\n" lines to a single " ",
	// so we don't need to look for '\r' or '\n'.
	for len(s) > 0 && (s[0] == '\t' || s[0] == ' ') {
		s = s[1:]
	}
	if len(s) == 0 {
		return eofTokenType, "", ""
	}
	i := 0
loop:
	for ; i < len(s); i++ {
		switch s[i] {
		case '\t', ' ', '(', ')', '<', '>', '[', ']':
			break loop
		}
	}

	if i != 0 {
		tokenStr, remaining = s[:i], s[i:]
		if tokenStr == "Not" {
			return notTokenType, "", remaining
		}
		return strTokenType, tokenStr, remaining
	}

	j := 0
	switch s[0] {
	case '<':
		j, tokenType = strings.IndexByte(s, '>'), angleTokenType
	case '[':
		j, tokenType = strings.IndexByte(s, ']'), squareTokenType
	default:
		return rune(s[0]), "", s[1:]
	}
	if j < 0 {
		return errTokenType, "", ""
	}
	return tokenType, s[1:j], s[j+1:]
}
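parseIfHeader turns a WebDAV If header into OR-ed lists of AND-ed conditions. Since the function is unexported, the quickest way to see its behaviour is from inside the package; the sketch below is a test-style snippet written for illustration (it assumes it sits in the same webdav package and is not part of the upstream test suite):

package webdav

import "testing"

// TestParseIfHeaderShape is an illustrative test, not part of the upstream suite.
func TestParseIfHeaderShape(t *testing.T) {
	h, ok := parseIfHeader(`<http://example.com/locked/> (<opaquelocktoken:abc-123> ["etag-1"])`)
	if !ok {
		t.Fatal("expected the header to parse")
	}
	if len(h.lists) != 1 || h.lists[0].resourceTag != "http://example.com/locked/" {
		t.Fatalf("unexpected lists: %+v", h.lists)
	}
	// Two ANDed conditions: a lock token and an ETag.
	if got := len(h.lists[0].conditions); got != 2 {
		t.Fatalf("want 2 conditions, got %d", got)
	}
}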
11
server/webdav/internal/xml/README
Normal file

@ -0,0 +1,11 @@
This is a fork of the encoding/xml package at ca1d6c4, the last commit before
https://go.googlesource.com/go/+/c0d6d33 "encoding/xml: restore Go 1.4 name
space behavior" made late in the lead-up to the Go 1.5 release.

The list of encoding/xml changes is at
https://go.googlesource.com/go/+log/master/src/encoding/xml

This fork is temporary, and I (nigeltao) expect to revert it after Go 1.6 is
released.

See http://golang.org/issue/11841

1223
server/webdav/internal/xml/marshal.go
Normal file
File diff suppressed because it is too large

692
server/webdav/internal/xml/read.go
Normal file
@ -0,0 +1,692 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package xml

import (
	"bytes"
	"encoding"
	"errors"
	"fmt"
	"reflect"
	"strconv"
	"strings"
)

// BUG(rsc): Mapping between XML elements and data structures is inherently flawed:
// an XML element is an order-dependent collection of anonymous
// values, while a data structure is an order-independent collection
// of named values.
// See package json for a textual representation more suitable
// to data structures.

// Unmarshal parses the XML-encoded data and stores the result in
// the value pointed to by v, which must be an arbitrary struct,
// slice, or string. Well-formed data that does not fit into v is
// discarded.
//
// Because Unmarshal uses the reflect package, it can only assign
// to exported (upper case) fields. Unmarshal uses a case-sensitive
// comparison to match XML element names to tag values and struct
// field names.
//
// Unmarshal maps an XML element to a struct using the following rules.
// In the rules, the tag of a field refers to the value associated with the
// key 'xml' in the struct field's tag (see the example above).
//
//   * If the struct has a field of type []byte or string with tag
//      ",innerxml", Unmarshal accumulates the raw XML nested inside the
//      element in that field. The rest of the rules still apply.
//
//   * If the struct has a field named XMLName of type xml.Name,
//      Unmarshal records the element name in that field.
//
//   * If the XMLName field has an associated tag of the form
//      "name" or "namespace-URL name", the XML element must have
//      the given name (and, optionally, name space) or else Unmarshal
//      returns an error.
//
//   * If the XML element has an attribute whose name matches a
//      struct field name with an associated tag containing ",attr" or
//      the explicit name in a struct field tag of the form "name,attr",
//      Unmarshal records the attribute value in that field.
//
//   * If the XML element contains character data, that data is
//      accumulated in the first struct field that has tag ",chardata".
//      The struct field may have type []byte or string.
//      If there is no such field, the character data is discarded.
//
//   * If the XML element contains comments, they are accumulated in
//      the first struct field that has tag ",comment".  The struct
//      field may have type []byte or string. If there is no such
//      field, the comments are discarded.
//
//   * If the XML element contains a sub-element whose name matches
//      the prefix of a tag formatted as "a" or "a>b>c", unmarshal
//      will descend into the XML structure looking for elements with the
//      given names, and will map the innermost elements to that struct
//      field. A tag starting with ">" is equivalent to one starting
//      with the field name followed by ">".
//
//   * If the XML element contains a sub-element whose name matches
//      a struct field's XMLName tag and the struct field has no
//      explicit name tag as per the previous rule, unmarshal maps
//      the sub-element to that struct field.
//
//   * If the XML element contains a sub-element whose name matches a
//      field without any mode flags (",attr", ",chardata", etc), Unmarshal
//      maps the sub-element to that struct field.
//
//   * If the XML element contains a sub-element that hasn't matched any
//      of the above rules and the struct has a field with tag ",any",
//      unmarshal maps the sub-element to that struct field.
//
//   * An anonymous struct field is handled as if the fields of its
//      value were part of the outer struct.
//
//   * A struct field with tag "-" is never unmarshalled into.
//
// Unmarshal maps an XML element to a string or []byte by saving the
// concatenation of that element's character data in the string or
// []byte. The saved []byte is never nil.
//
// Unmarshal maps an attribute value to a string or []byte by saving
// the value in the string or slice.
//
// Unmarshal maps an XML element to a slice by extending the length of
// the slice and mapping the element to the newly created value.
//
// Unmarshal maps an XML element or attribute value to a bool by
// setting it to the boolean value represented by the string.
//
// Unmarshal maps an XML element or attribute value to an integer or
// floating-point field by setting the field to the result of
// interpreting the string value in decimal. There is no check for
// overflow.
//
// Unmarshal maps an XML element to an xml.Name by recording the
// element name.
//
// Unmarshal maps an XML element to a pointer by setting the pointer
// to a freshly allocated value and then mapping the element to that value.
//
func Unmarshal(data []byte, v interface{}) error {
	return NewDecoder(bytes.NewReader(data)).Decode(v)
}

// Decode works like xml.Unmarshal, except it reads the decoder
// stream to find the start element.
func (d *Decoder) Decode(v interface{}) error {
	return d.DecodeElement(v, nil)
}

// DecodeElement works like xml.Unmarshal except that it takes
// a pointer to the start XML element to decode into v.
// It is useful when a client reads some raw XML tokens itself
// but also wants to defer to Unmarshal for some elements.
func (d *Decoder) DecodeElement(v interface{}, start *StartElement) error {
	val := reflect.ValueOf(v)
	if val.Kind() != reflect.Ptr {
		return errors.New("non-pointer passed to Unmarshal")
	}
	return d.unmarshal(val.Elem(), start)
}

// An UnmarshalError represents an error in the unmarshalling process.
type UnmarshalError string

func (e UnmarshalError) Error() string { return string(e) }

// Unmarshaler is the interface implemented by objects that can unmarshal
// an XML element description of themselves.
//
// UnmarshalXML decodes a single XML element
// beginning with the given start element.
// If it returns an error, the outer call to Unmarshal stops and
// returns that error.
// UnmarshalXML must consume exactly one XML element.
// One common implementation strategy is to unmarshal into
// a separate value with a layout matching the expected XML
// using d.DecodeElement, and then to copy the data from
// that value into the receiver.
// Another common strategy is to use d.Token to process the
// XML object one token at a time.
// UnmarshalXML may not use d.RawToken.
type Unmarshaler interface {
	UnmarshalXML(d *Decoder, start StartElement) error
}

// UnmarshalerAttr is the interface implemented by objects that can unmarshal
// an XML attribute description of themselves.
//
// UnmarshalXMLAttr decodes a single XML attribute.
// If it returns an error, the outer call to Unmarshal stops and
// returns that error.
// UnmarshalXMLAttr is used only for struct fields with the
// "attr" option in the field tag.
type UnmarshalerAttr interface {
	UnmarshalXMLAttr(attr Attr) error
}

// receiverType returns the receiver type to use in an expression like "%s.MethodName".
func receiverType(val interface{}) string {
	t := reflect.TypeOf(val)
	if t.Name() != "" {
		return t.String()
	}
	return "(" + t.String() + ")"
}

// unmarshalInterface unmarshals a single XML element into val.
// start is the opening tag of the element.
func (p *Decoder) unmarshalInterface(val Unmarshaler, start *StartElement) error {
	// Record that decoder must stop at end tag corresponding to start.
	p.pushEOF()

	p.unmarshalDepth++
	err := val.UnmarshalXML(p, *start)
	p.unmarshalDepth--
	if err != nil {
		p.popEOF()
		return err
	}

	if !p.popEOF() {
		return fmt.Errorf("xml: %s.UnmarshalXML did not consume entire <%s> element", receiverType(val), start.Name.Local)
	}

	return nil
}

// unmarshalTextInterface unmarshals a single XML element into val.
// The chardata contained in the element (but not its children)
// is passed to the text unmarshaler.
func (p *Decoder) unmarshalTextInterface(val encoding.TextUnmarshaler, start *StartElement) error {
	var buf []byte
	depth := 1
	for depth > 0 {
		t, err := p.Token()
		if err != nil {
			return err
		}
		switch t := t.(type) {
		case CharData:
			if depth == 1 {
				buf = append(buf, t...)
			}
		case StartElement:
			depth++
		case EndElement:
			depth--
		}
	}
	return val.UnmarshalText(buf)
}

// unmarshalAttr unmarshals a single XML attribute into val.
func (p *Decoder) unmarshalAttr(val reflect.Value, attr Attr) error {
	if val.Kind() == reflect.Ptr {
		if val.IsNil() {
			val.Set(reflect.New(val.Type().Elem()))
		}
		val = val.Elem()
	}

	if val.CanInterface() && val.Type().Implements(unmarshalerAttrType) {
		// This is an unmarshaler with a non-pointer receiver,
		// so it's likely to be incorrect, but we do what we're told.
		return val.Interface().(UnmarshalerAttr).UnmarshalXMLAttr(attr)
	}
	if val.CanAddr() {
		pv := val.Addr()
		if pv.CanInterface() && pv.Type().Implements(unmarshalerAttrType) {
			return pv.Interface().(UnmarshalerAttr).UnmarshalXMLAttr(attr)
		}
	}

	// Not an UnmarshalerAttr; try encoding.TextUnmarshaler.
	if val.CanInterface() && val.Type().Implements(textUnmarshalerType) {
		// This is an unmarshaler with a non-pointer receiver,
		// so it's likely to be incorrect, but we do what we're told.
		return val.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(attr.Value))
	}
	if val.CanAddr() {
		pv := val.Addr()
		if pv.CanInterface() && pv.Type().Implements(textUnmarshalerType) {
			return pv.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(attr.Value))
		}
	}

	copyValue(val, []byte(attr.Value))
	return nil
}

var (
	unmarshalerType     = reflect.TypeOf((*Unmarshaler)(nil)).Elem()
	unmarshalerAttrType = reflect.TypeOf((*UnmarshalerAttr)(nil)).Elem()
	textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
)

// Unmarshal a single XML element into val.
func (p *Decoder) unmarshal(val reflect.Value, start *StartElement) error {
	// Find start element if we need it.
	if start == nil {
		for {
			tok, err := p.Token()
			if err != nil {
				return err
			}
			if t, ok := tok.(StartElement); ok {
				start = &t
				break
			}
		}
	}

	// Load value from interface, but only if the result will be
	// usefully addressable.
	if val.Kind() == reflect.Interface && !val.IsNil() {
		e := val.Elem()
		if e.Kind() == reflect.Ptr && !e.IsNil() {
			val = e
		}
	}

	if val.Kind() == reflect.Ptr {
		if val.IsNil() {
			val.Set(reflect.New(val.Type().Elem()))
		}
		val = val.Elem()
	}

	if val.CanInterface() && val.Type().Implements(unmarshalerType) {
		// This is an unmarshaler with a non-pointer receiver,
		// so it's likely to be incorrect, but we do what we're told.
		return p.unmarshalInterface(val.Interface().(Unmarshaler), start)
	}

	if val.CanAddr() {
		pv := val.Addr()
		if pv.CanInterface() && pv.Type().Implements(unmarshalerType) {
			return p.unmarshalInterface(pv.Interface().(Unmarshaler), start)
		}
	}

	if val.CanInterface() && val.Type().Implements(textUnmarshalerType) {
		return p.unmarshalTextInterface(val.Interface().(encoding.TextUnmarshaler), start)
	}

	if val.CanAddr() {
		pv := val.Addr()
		if pv.CanInterface() && pv.Type().Implements(textUnmarshalerType) {
			return p.unmarshalTextInterface(pv.Interface().(encoding.TextUnmarshaler), start)
		}
	}

	var (
		data         []byte
		saveData     reflect.Value
		comment      []byte
		saveComment  reflect.Value
		saveXML      reflect.Value
		saveXMLIndex int
|
||||||
|
saveXMLData []byte
|
||||||
|
saveAny reflect.Value
|
||||||
|
sv reflect.Value
|
||||||
|
tinfo *typeInfo
|
||||||
|
err error
|
||||||
|
)
|
||||||
|
|
||||||
|
switch v := val; v.Kind() {
|
||||||
|
default:
|
||||||
|
return errors.New("unknown type " + v.Type().String())
|
||||||
|
|
||||||
|
case reflect.Interface:
|
||||||
|
// TODO: For now, simply ignore the field. In the near
|
||||||
|
// future we may choose to unmarshal the start
|
||||||
|
// element on it, if not nil.
|
||||||
|
return p.Skip()
|
||||||
|
|
||||||
|
case reflect.Slice:
|
||||||
|
typ := v.Type()
|
||||||
|
if typ.Elem().Kind() == reflect.Uint8 {
|
||||||
|
// []byte
|
||||||
|
saveData = v
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
// Slice of element values.
|
||||||
|
// Grow slice.
|
||||||
|
n := v.Len()
|
||||||
|
if n >= v.Cap() {
|
||||||
|
ncap := 2 * n
|
||||||
|
if ncap < 4 {
|
||||||
|
ncap = 4
|
||||||
|
}
|
||||||
|
new := reflect.MakeSlice(typ, n, ncap)
|
||||||
|
reflect.Copy(new, v)
|
||||||
|
v.Set(new)
|
||||||
|
}
|
||||||
|
v.SetLen(n + 1)
|
||||||
|
|
||||||
|
// Recur to read element into slice.
|
||||||
|
if err := p.unmarshal(v.Index(n), start); err != nil {
|
||||||
|
v.SetLen(n)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
|
||||||
|
case reflect.Bool, reflect.Float32, reflect.Float64, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, reflect.String:
|
||||||
|
saveData = v
|
||||||
|
|
||||||
|
case reflect.Struct:
|
||||||
|
typ := v.Type()
|
||||||
|
if typ == nameType {
|
||||||
|
v.Set(reflect.ValueOf(start.Name))
|
||||||
|
break
|
||||||
|
}
|
||||||
|
|
||||||
|
sv = v
|
||||||
|
tinfo, err = getTypeInfo(typ)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate and assign element name.
|
||||||
|
if tinfo.xmlname != nil {
|
||||||
|
finfo := tinfo.xmlname
|
||||||
|
if finfo.name != "" && finfo.name != start.Name.Local {
|
||||||
|
return UnmarshalError("expected element type <" + finfo.name + "> but have <" + start.Name.Local + ">")
|
||||||
|
}
|
||||||
|
if finfo.xmlns != "" && finfo.xmlns != start.Name.Space {
|
||||||
|
e := "expected element <" + finfo.name + "> in name space " + finfo.xmlns + " but have "
|
||||||
|
if start.Name.Space == "" {
|
||||||
|
e += "no name space"
|
||||||
|
} else {
|
||||||
|
e += start.Name.Space
|
||||||
|
}
|
||||||
|
return UnmarshalError(e)
|
||||||
|
}
|
||||||
|
fv := finfo.value(sv)
|
||||||
|
if _, ok := fv.Interface().(Name); ok {
|
||||||
|
fv.Set(reflect.ValueOf(start.Name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Assign attributes.
|
||||||
|
// Also, determine whether we need to save character data or comments.
|
||||||
|
for i := range tinfo.fields {
|
||||||
|
finfo := &tinfo.fields[i]
|
||||||
|
switch finfo.flags & fMode {
|
||||||
|
case fAttr:
|
||||||
|
strv := finfo.value(sv)
|
||||||
|
// Look for attribute.
|
||||||
|
for _, a := range start.Attr {
|
||||||
|
if a.Name.Local == finfo.name && (finfo.xmlns == "" || finfo.xmlns == a.Name.Space) {
|
||||||
|
if err := p.unmarshalAttr(strv, a); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case fCharData:
|
||||||
|
if !saveData.IsValid() {
|
||||||
|
saveData = finfo.value(sv)
|
||||||
|
}
|
||||||
|
|
||||||
|
case fComment:
|
||||||
|
if !saveComment.IsValid() {
|
||||||
|
saveComment = finfo.value(sv)
|
||||||
|
}
|
||||||
|
|
||||||
|
case fAny, fAny | fElement:
|
||||||
|
if !saveAny.IsValid() {
|
||||||
|
saveAny = finfo.value(sv)
|
||||||
|
}
|
||||||
|
|
||||||
|
case fInnerXml:
|
||||||
|
if !saveXML.IsValid() {
|
||||||
|
saveXML = finfo.value(sv)
|
||||||
|
if p.saved == nil {
|
||||||
|
saveXMLIndex = 0
|
||||||
|
p.saved = new(bytes.Buffer)
|
||||||
|
} else {
|
||||||
|
saveXMLIndex = p.savedOffset()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find end element.
|
||||||
|
// Process sub-elements along the way.
|
||||||
|
Loop:
|
||||||
|
for {
|
||||||
|
var savedOffset int
|
||||||
|
if saveXML.IsValid() {
|
||||||
|
savedOffset = p.savedOffset()
|
||||||
|
}
|
||||||
|
tok, err := p.Token()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
switch t := tok.(type) {
|
||||||
|
case StartElement:
|
||||||
|
consumed := false
|
||||||
|
if sv.IsValid() {
|
||||||
|
consumed, err = p.unmarshalPath(tinfo, sv, nil, &t)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if !consumed && saveAny.IsValid() {
|
||||||
|
consumed = true
|
||||||
|
if err := p.unmarshal(saveAny, &t); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !consumed {
|
||||||
|
if err := p.Skip(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case EndElement:
|
||||||
|
if saveXML.IsValid() {
|
||||||
|
saveXMLData = p.saved.Bytes()[saveXMLIndex:savedOffset]
|
||||||
|
if saveXMLIndex == 0 {
|
||||||
|
p.saved = nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break Loop
|
||||||
|
|
||||||
|
case CharData:
|
||||||
|
if saveData.IsValid() {
|
||||||
|
data = append(data, t...)
|
||||||
|
}
|
||||||
|
|
||||||
|
case Comment:
|
||||||
|
if saveComment.IsValid() {
|
||||||
|
comment = append(comment, t...)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if saveData.IsValid() && saveData.CanInterface() && saveData.Type().Implements(textUnmarshalerType) {
|
||||||
|
if err := saveData.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
saveData = reflect.Value{}
|
||||||
|
}
|
||||||
|
|
||||||
|
if saveData.IsValid() && saveData.CanAddr() {
|
||||||
|
pv := saveData.Addr()
|
||||||
|
if pv.CanInterface() && pv.Type().Implements(textUnmarshalerType) {
|
||||||
|
if err := pv.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
saveData = reflect.Value{}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := copyValue(saveData, data); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch t := saveComment; t.Kind() {
|
||||||
|
case reflect.String:
|
||||||
|
t.SetString(string(comment))
|
||||||
|
case reflect.Slice:
|
||||||
|
t.Set(reflect.ValueOf(comment))
|
||||||
|
}
|
||||||
|
|
||||||
|
switch t := saveXML; t.Kind() {
|
||||||
|
case reflect.String:
|
||||||
|
t.SetString(string(saveXMLData))
|
||||||
|
case reflect.Slice:
|
||||||
|
t.Set(reflect.ValueOf(saveXMLData))
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func copyValue(dst reflect.Value, src []byte) (err error) {
|
||||||
|
dst0 := dst
|
||||||
|
|
||||||
|
if dst.Kind() == reflect.Ptr {
|
||||||
|
if dst.IsNil() {
|
||||||
|
dst.Set(reflect.New(dst.Type().Elem()))
|
||||||
|
}
|
||||||
|
dst = dst.Elem()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save accumulated data.
|
||||||
|
switch dst.Kind() {
|
||||||
|
case reflect.Invalid:
|
||||||
|
// Probably a comment.
|
||||||
|
default:
|
||||||
|
return errors.New("cannot unmarshal into " + dst0.Type().String())
|
||||||
|
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||||
|
itmp, err := strconv.ParseInt(string(src), 10, dst.Type().Bits())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dst.SetInt(itmp)
|
||||||
|
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
|
||||||
|
utmp, err := strconv.ParseUint(string(src), 10, dst.Type().Bits())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dst.SetUint(utmp)
|
||||||
|
case reflect.Float32, reflect.Float64:
|
||||||
|
ftmp, err := strconv.ParseFloat(string(src), dst.Type().Bits())
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dst.SetFloat(ftmp)
|
||||||
|
case reflect.Bool:
|
||||||
|
value, err := strconv.ParseBool(strings.TrimSpace(string(src)))
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
dst.SetBool(value)
|
||||||
|
case reflect.String:
|
||||||
|
dst.SetString(string(src))
|
||||||
|
case reflect.Slice:
|
||||||
|
if len(src) == 0 {
|
||||||
|
// non-nil to flag presence
|
||||||
|
src = []byte{}
|
||||||
|
}
|
||||||
|
dst.SetBytes(src)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// unmarshalPath walks down an XML structure looking for wanted
|
||||||
|
// paths, and calls unmarshal on them.
|
||||||
|
// The consumed result tells whether XML elements have been consumed
|
||||||
|
// from the Decoder until start's matching end element, or if it's
|
||||||
|
// still untouched because start is uninteresting for sv's fields.
|
||||||
|
func (p *Decoder) unmarshalPath(tinfo *typeInfo, sv reflect.Value, parents []string, start *StartElement) (consumed bool, err error) {
|
||||||
|
recurse := false
|
||||||
|
Loop:
|
||||||
|
for i := range tinfo.fields {
|
||||||
|
finfo := &tinfo.fields[i]
|
||||||
|
if finfo.flags&fElement == 0 || len(finfo.parents) < len(parents) || finfo.xmlns != "" && finfo.xmlns != start.Name.Space {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for j := range parents {
|
||||||
|
if parents[j] != finfo.parents[j] {
|
||||||
|
continue Loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(finfo.parents) == len(parents) && finfo.name == start.Name.Local {
|
||||||
|
// It's a perfect match, unmarshal the field.
|
||||||
|
return true, p.unmarshal(finfo.value(sv), start)
|
||||||
|
}
|
||||||
|
if len(finfo.parents) > len(parents) && finfo.parents[len(parents)] == start.Name.Local {
|
||||||
|
// It's a prefix for the field. Break and recurse
|
||||||
|
// since it's not ok for one field path to be itself
|
||||||
|
// the prefix for another field path.
|
||||||
|
recurse = true
|
||||||
|
|
||||||
|
// We can reuse the same slice as long as we
|
||||||
|
// don't try to append to it.
|
||||||
|
parents = finfo.parents[:len(parents)+1]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !recurse {
|
||||||
|
// We have no business with this element.
|
||||||
|
return false, nil
|
||||||
|
}
|
||||||
|
// The element is not a perfect match for any field, but one
|
||||||
|
// or more fields have the path to this element as a parent
|
||||||
|
// prefix. Recurse and attempt to match these.
|
||||||
|
for {
|
||||||
|
var tok Token
|
||||||
|
tok, err = p.Token()
|
||||||
|
if err != nil {
|
||||||
|
return true, err
|
||||||
|
}
|
||||||
|
switch t := tok.(type) {
|
||||||
|
case StartElement:
|
||||||
|
consumed2, err := p.unmarshalPath(tinfo, sv, parents, &t)
|
||||||
|
if err != nil {
|
||||||
|
return true, err
|
||||||
|
}
|
||||||
|
if !consumed2 {
|
||||||
|
if err := p.Skip(); err != nil {
|
||||||
|
return true, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
case EndElement:
|
||||||
|
return true, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Skip reads tokens until it has consumed the end element
// matching the most recent start element already consumed.
// It recurs if it encounters a start element, so it can be used to
// skip nested structures.
// It returns nil if it finds an end element matching the start
// element; otherwise it returns an error describing the problem.
func (d *Decoder) Skip() error {
    for {
        tok, err := d.Token()
        if err != nil {
            return err
        }
        switch tok.(type) {
        case StartElement:
            if err := d.Skip(); err != nil {
                return err
            }
        case EndElement:
            return nil
        }
    }
}
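A sketch of the token-at-a-time strategy mentioned in the Unmarshaler comment, using Skip to discard child elements that are not of interest. It is written against the standard encoding/xml package; the thing type and the element names are hypothetical.

package main

import (
    "encoding/xml"
    "fmt"
)

type thing struct {
    Name string
}

func (v *thing) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error {
    for {
        tok, err := d.Token()
        if err != nil {
            return err
        }
        switch t := tok.(type) {
        case xml.StartElement:
            if t.Name.Local == "name" {
                if err := d.DecodeElement(&v.Name, &t); err != nil {
                    return err
                }
                continue
            }
            // Skip consumes the whole unknown child, however deeply nested.
            if err := d.Skip(); err != nil {
                return err
            }
        case xml.EndElement:
            return nil // this is the element's own end tag; it is fully consumed
        }
    }
}

func main() {
    var v thing
    err := xml.Unmarshal([]byte(`<thing><junk><nested/></junk><name>x</name></thing>`), &v)
    fmt.Println(v.Name, err) // x <nil>
}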
|
371
server/webdav/internal/xml/typeinfo.go
Normal file
@ -0,0 +1,371 @@
|
|||||||
|
// Copyright 2011 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package xml
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"reflect"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
)
|
||||||
|
|
||||||
|
// typeInfo holds details for the xml representation of a type.
|
||||||
|
type typeInfo struct {
|
||||||
|
xmlname *fieldInfo
|
||||||
|
fields []fieldInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
// fieldInfo holds details for the xml representation of a single field.
|
||||||
|
type fieldInfo struct {
|
||||||
|
idx []int
|
||||||
|
name string
|
||||||
|
xmlns string
|
||||||
|
flags fieldFlags
|
||||||
|
parents []string
|
||||||
|
}
|
||||||
|
|
||||||
|
type fieldFlags int

const (
    fElement fieldFlags = 1 << iota
    fAttr
    fCharData
    fInnerXml
    fComment
    fAny

    fOmitEmpty

    fMode = fElement | fAttr | fCharData | fInnerXml | fComment | fAny
)
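These flag bits correspond to the options accepted in xml:"..." struct tags. A purely illustrative struct (the item type and its fields are not part of this change) showing how each option maps onto a flag:

package example

import "encoding/xml"

type item struct {
    XMLName xml.Name `xml:"item"`          // element name of the struct itself
    ID      string   `xml:"id,attr"`       // fAttr: read from an attribute
    Body    string   `xml:",chardata"`     // fCharData: the element's character data
    Raw     string   `xml:",innerxml"`     // fInnerXml: verbatim inner XML
    Note    string   `xml:",comment"`      // fComment: XML comments inside the element
    Title   string   `xml:"meta>title"`    // fElement with parents ["meta"]
    Tag     string   `xml:"tag,omitempty"` // fElement | fOmitEmpty
}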
|
||||||
|
|
||||||
|
var tinfoMap = make(map[reflect.Type]*typeInfo)
|
||||||
|
var tinfoLock sync.RWMutex
|
||||||
|
|
||||||
|
var nameType = reflect.TypeOf(Name{})
|
||||||
|
|
||||||
|
// getTypeInfo returns the typeInfo structure with details necessary
|
||||||
|
// for marshalling and unmarshalling typ.
|
||||||
|
func getTypeInfo(typ reflect.Type) (*typeInfo, error) {
|
||||||
|
tinfoLock.RLock()
|
||||||
|
tinfo, ok := tinfoMap[typ]
|
||||||
|
tinfoLock.RUnlock()
|
||||||
|
if ok {
|
||||||
|
return tinfo, nil
|
||||||
|
}
|
||||||
|
tinfo = &typeInfo{}
|
||||||
|
if typ.Kind() == reflect.Struct && typ != nameType {
|
||||||
|
n := typ.NumField()
|
||||||
|
for i := 0; i < n; i++ {
|
||||||
|
f := typ.Field(i)
|
||||||
|
if f.PkgPath != "" || f.Tag.Get("xml") == "-" {
|
||||||
|
continue // Private field
|
||||||
|
}
|
||||||
|
|
||||||
|
// For embedded structs, embed its fields.
|
||||||
|
if f.Anonymous {
|
||||||
|
t := f.Type
|
||||||
|
if t.Kind() == reflect.Ptr {
|
||||||
|
t = t.Elem()
|
||||||
|
}
|
||||||
|
if t.Kind() == reflect.Struct {
|
||||||
|
inner, err := getTypeInfo(t)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if tinfo.xmlname == nil {
|
||||||
|
tinfo.xmlname = inner.xmlname
|
||||||
|
}
|
||||||
|
for _, finfo := range inner.fields {
|
||||||
|
finfo.idx = append([]int{i}, finfo.idx...)
|
||||||
|
if err := addFieldInfo(typ, tinfo, &finfo); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
finfo, err := structFieldInfo(typ, &f)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.Name == "XMLName" {
|
||||||
|
tinfo.xmlname = finfo
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the field if it doesn't conflict with other fields.
|
||||||
|
if err := addFieldInfo(typ, tinfo, finfo); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
tinfoLock.Lock()
|
||||||
|
tinfoMap[typ] = tinfo
|
||||||
|
tinfoLock.Unlock()
|
||||||
|
return tinfo, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// structFieldInfo builds and returns a fieldInfo for f.
|
||||||
|
func structFieldInfo(typ reflect.Type, f *reflect.StructField) (*fieldInfo, error) {
|
||||||
|
finfo := &fieldInfo{idx: f.Index}
|
||||||
|
|
||||||
|
// Split the tag from the xml namespace if necessary.
|
||||||
|
tag := f.Tag.Get("xml")
|
||||||
|
if i := strings.Index(tag, " "); i >= 0 {
|
||||||
|
finfo.xmlns, tag = tag[:i], tag[i+1:]
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse flags.
|
||||||
|
tokens := strings.Split(tag, ",")
|
||||||
|
if len(tokens) == 1 {
|
||||||
|
finfo.flags = fElement
|
||||||
|
} else {
|
||||||
|
tag = tokens[0]
|
||||||
|
for _, flag := range tokens[1:] {
|
||||||
|
switch flag {
|
||||||
|
case "attr":
|
||||||
|
finfo.flags |= fAttr
|
||||||
|
case "chardata":
|
||||||
|
finfo.flags |= fCharData
|
||||||
|
case "innerxml":
|
||||||
|
finfo.flags |= fInnerXml
|
||||||
|
case "comment":
|
||||||
|
finfo.flags |= fComment
|
||||||
|
case "any":
|
||||||
|
finfo.flags |= fAny
|
||||||
|
case "omitempty":
|
||||||
|
finfo.flags |= fOmitEmpty
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate the flags used.
|
||||||
|
valid := true
|
||||||
|
switch mode := finfo.flags & fMode; mode {
|
||||||
|
case 0:
|
||||||
|
finfo.flags |= fElement
|
||||||
|
case fAttr, fCharData, fInnerXml, fComment, fAny:
|
||||||
|
if f.Name == "XMLName" || tag != "" && mode != fAttr {
|
||||||
|
valid = false
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
// This will also catch multiple modes in a single field.
|
||||||
|
valid = false
|
||||||
|
}
|
||||||
|
if finfo.flags&fMode == fAny {
|
||||||
|
finfo.flags |= fElement
|
||||||
|
}
|
||||||
|
if finfo.flags&fOmitEmpty != 0 && finfo.flags&(fElement|fAttr) == 0 {
|
||||||
|
valid = false
|
||||||
|
}
|
||||||
|
if !valid {
|
||||||
|
return nil, fmt.Errorf("xml: invalid tag in field %s of type %s: %q",
|
||||||
|
f.Name, typ, f.Tag.Get("xml"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use of xmlns without a name is not allowed.
|
||||||
|
if finfo.xmlns != "" && tag == "" {
|
||||||
|
return nil, fmt.Errorf("xml: namespace without name in field %s of type %s: %q",
|
||||||
|
f.Name, typ, f.Tag.Get("xml"))
|
||||||
|
}
|
||||||
|
|
||||||
|
if f.Name == "XMLName" {
|
||||||
|
// The XMLName field records the XML element name. Don't
|
||||||
|
// process it as usual because its name should default to
|
||||||
|
// empty rather than to the field name.
|
||||||
|
finfo.name = tag
|
||||||
|
return finfo, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if tag == "" {
|
||||||
|
// If the name part of the tag is completely empty, get
|
||||||
|
// default from XMLName of underlying struct if feasible,
|
||||||
|
// or field name otherwise.
|
||||||
|
if xmlname := lookupXMLName(f.Type); xmlname != nil {
|
||||||
|
finfo.xmlns, finfo.name = xmlname.xmlns, xmlname.name
|
||||||
|
} else {
|
||||||
|
finfo.name = f.Name
|
||||||
|
}
|
||||||
|
return finfo, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if finfo.xmlns == "" && finfo.flags&fAttr == 0 {
|
||||||
|
// If it's an element no namespace specified, get the default
|
||||||
|
// from the XMLName of enclosing struct if possible.
|
||||||
|
if xmlname := lookupXMLName(typ); xmlname != nil {
|
||||||
|
finfo.xmlns = xmlname.xmlns
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Prepare field name and parents.
|
||||||
|
parents := strings.Split(tag, ">")
|
||||||
|
if parents[0] == "" {
|
||||||
|
parents[0] = f.Name
|
||||||
|
}
|
||||||
|
if parents[len(parents)-1] == "" {
|
||||||
|
return nil, fmt.Errorf("xml: trailing '>' in field %s of type %s", f.Name, typ)
|
||||||
|
}
|
||||||
|
finfo.name = parents[len(parents)-1]
|
||||||
|
if len(parents) > 1 {
|
||||||
|
if (finfo.flags & fElement) == 0 {
|
||||||
|
return nil, fmt.Errorf("xml: %s chain not valid with %s flag", tag, strings.Join(tokens[1:], ","))
|
||||||
|
}
|
||||||
|
finfo.parents = parents[:len(parents)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the field type has an XMLName field, the names must match
|
||||||
|
// so that the behavior of both marshalling and unmarshalling
|
||||||
|
// is straightforward and unambiguous.
|
||||||
|
if finfo.flags&fElement != 0 {
|
||||||
|
ftyp := f.Type
|
||||||
|
xmlname := lookupXMLName(ftyp)
|
||||||
|
if xmlname != nil && xmlname.name != finfo.name {
|
||||||
|
return nil, fmt.Errorf("xml: name %q in tag of %s.%s conflicts with name %q in %s.XMLName",
|
||||||
|
finfo.name, typ, f.Name, xmlname.name, ftyp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return finfo, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// lookupXMLName returns the fieldInfo for typ's XMLName field
|
||||||
|
// in case it exists and has a valid xml field tag, otherwise
|
||||||
|
// it returns nil.
|
||||||
|
func lookupXMLName(typ reflect.Type) (xmlname *fieldInfo) {
|
||||||
|
for typ.Kind() == reflect.Ptr {
|
||||||
|
typ = typ.Elem()
|
||||||
|
}
|
||||||
|
if typ.Kind() != reflect.Struct {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for i, n := 0, typ.NumField(); i < n; i++ {
|
||||||
|
f := typ.Field(i)
|
||||||
|
if f.Name != "XMLName" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
finfo, err := structFieldInfo(typ, &f)
|
||||||
|
if finfo.name != "" && err == nil {
|
||||||
|
return finfo
|
||||||
|
}
|
||||||
|
// Also consider errors as a non-existent field tag
|
||||||
|
// and let getTypeInfo itself report the error.
|
||||||
|
break
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func min(a, b int) int {
|
||||||
|
if a <= b {
|
||||||
|
return a
|
||||||
|
}
|
||||||
|
return b
|
||||||
|
}
|
||||||
|
|
||||||
|
// addFieldInfo adds finfo to tinfo.fields if there are no
|
||||||
|
// conflicts, or if conflicts arise from previous fields that were
|
||||||
|
// obtained from deeper embedded structures than finfo. In the latter
|
||||||
|
// case, the conflicting entries are dropped.
|
||||||
|
// A conflict occurs when the path (parent + name) to a field is
|
||||||
|
// itself a prefix of another path, or when two paths match exactly.
|
||||||
|
// It is okay for field paths to share a common, shorter prefix.
|
||||||
|
func addFieldInfo(typ reflect.Type, tinfo *typeInfo, newf *fieldInfo) error {
|
||||||
|
var conflicts []int
|
||||||
|
Loop:
|
||||||
|
// First, figure all conflicts. Most working code will have none.
|
||||||
|
for i := range tinfo.fields {
|
||||||
|
oldf := &tinfo.fields[i]
|
||||||
|
if oldf.flags&fMode != newf.flags&fMode {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if oldf.xmlns != "" && newf.xmlns != "" && oldf.xmlns != newf.xmlns {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
minl := min(len(newf.parents), len(oldf.parents))
|
||||||
|
for p := 0; p < minl; p++ {
|
||||||
|
if oldf.parents[p] != newf.parents[p] {
|
||||||
|
continue Loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(oldf.parents) > len(newf.parents) {
|
||||||
|
if oldf.parents[len(newf.parents)] == newf.name {
|
||||||
|
conflicts = append(conflicts, i)
|
||||||
|
}
|
||||||
|
} else if len(oldf.parents) < len(newf.parents) {
|
||||||
|
if newf.parents[len(oldf.parents)] == oldf.name {
|
||||||
|
conflicts = append(conflicts, i)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if newf.name == oldf.name {
|
||||||
|
conflicts = append(conflicts, i)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Without conflicts, add the new field and return.
|
||||||
|
if conflicts == nil {
|
||||||
|
tinfo.fields = append(tinfo.fields, *newf)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// If any conflict is shallower, ignore the new field.
|
||||||
|
// This matches the Go field resolution on embedding.
|
||||||
|
for _, i := range conflicts {
|
||||||
|
if len(tinfo.fields[i].idx) < len(newf.idx) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, if any of them is at the same depth level, it's an error.
|
||||||
|
for _, i := range conflicts {
|
||||||
|
oldf := &tinfo.fields[i]
|
||||||
|
if len(oldf.idx) == len(newf.idx) {
|
||||||
|
f1 := typ.FieldByIndex(oldf.idx)
|
||||||
|
f2 := typ.FieldByIndex(newf.idx)
|
||||||
|
return &TagPathError{typ, f1.Name, f1.Tag.Get("xml"), f2.Name, f2.Tag.Get("xml")}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, the new field is shallower, and thus takes precedence,
|
||||||
|
// so drop the conflicting fields from tinfo and append the new one.
|
||||||
|
for c := len(conflicts) - 1; c >= 0; c-- {
|
||||||
|
i := conflicts[c]
|
||||||
|
copy(tinfo.fields[i:], tinfo.fields[i+1:])
|
||||||
|
tinfo.fields = tinfo.fields[:len(tinfo.fields)-1]
|
||||||
|
}
|
||||||
|
tinfo.fields = append(tinfo.fields, *newf)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// A TagPathError represents an error in the unmarshalling process
// caused by the use of field tags with conflicting paths.
type TagPathError struct {
    Struct       reflect.Type
    Field1, Tag1 string
    Field2, Tag2 string
}

func (e *TagPathError) Error() string {
    return fmt.Sprintf("%s field %q with tag %q conflicts with field %q with tag %q", e.Struct, e.Field1, e.Tag1, e.Field2, e.Tag2)
}
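An illustrative way (not part of this change) to trigger this error with the standard encoding/xml package: the path "meta" is a prefix of "meta>name", so the two field paths conflict.

package main

import (
    "encoding/xml"
    "fmt"
)

type config struct {
    Meta string `xml:"meta"`
    Name string `xml:"meta>name"` // "meta" is a prefix of this path -> conflict
}

func main() {
    err := xml.Unmarshal([]byte(`<config><meta>x</meta></config>`), new(config))
    fmt.Println(err) // a *xml.TagPathError naming both fields and tags
}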
|
||||||
|
|
||||||
|
// value returns v's field value corresponding to finfo.
|
||||||
|
// It's equivalent to v.FieldByIndex(finfo.idx), but initializes
|
||||||
|
// and dereferences pointers as necessary.
|
||||||
|
func (finfo *fieldInfo) value(v reflect.Value) reflect.Value {
|
||||||
|
for i, x := range finfo.idx {
|
||||||
|
if i > 0 {
|
||||||
|
t := v.Type()
|
||||||
|
if t.Kind() == reflect.Ptr && t.Elem().Kind() == reflect.Struct {
|
||||||
|
if v.IsNil() {
|
||||||
|
v.Set(reflect.New(v.Type().Elem()))
|
||||||
|
}
|
||||||
|
v = v.Elem()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
v = v.Field(x)
|
||||||
|
}
|
||||||
|
return v
|
||||||
|
}
|
1998
server/webdav/internal/xml/xml.go
Normal file
File diff suppressed because it is too large
445
server/webdav/lock.go
Normal file
@ -0,0 +1,445 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package webdav
|
||||||
|
|
||||||
|
import (
|
||||||
|
"container/heap"
|
||||||
|
"errors"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
|
||||||
|
// ErrConfirmationFailed is returned by a LockSystem's Confirm method.
|
||||||
|
ErrConfirmationFailed = errors.New("webdav: confirmation failed")
|
||||||
|
// ErrForbidden is returned by a LockSystem's Unlock method.
|
||||||
|
ErrForbidden = errors.New("webdav: forbidden")
|
||||||
|
// ErrLocked is returned by a LockSystem's Create, Refresh and Unlock methods.
|
||||||
|
ErrLocked = errors.New("webdav: locked")
|
||||||
|
// ErrNoSuchLock is returned by a LockSystem's Refresh and Unlock methods.
|
||||||
|
ErrNoSuchLock = errors.New("webdav: no such lock")
|
||||||
|
)
|
||||||
|
|
||||||
|
// Condition can match a WebDAV resource, based on a token or ETag.
|
||||||
|
// Exactly one of Token and ETag should be non-empty.
|
||||||
|
type Condition struct {
|
||||||
|
Not bool
|
||||||
|
Token string
|
||||||
|
ETag string
|
||||||
|
}
|
||||||
|
|
||||||
|
// LockSystem manages access to a collection of named resources. The elements
|
||||||
|
// in a lock name are separated by slash ('/', U+002F) characters, regardless
|
||||||
|
// of host operating system convention.
|
||||||
|
type LockSystem interface {
|
||||||
|
// Confirm confirms that the caller can claim all of the locks specified by
|
||||||
|
// the given conditions, and that holding the union of all of those locks
|
||||||
|
// gives exclusive access to all of the named resources. Up to two resources
|
||||||
|
// can be named. Empty names are ignored.
|
||||||
|
//
|
||||||
|
// Exactly one of release and err will be non-nil. If release is non-nil,
|
||||||
|
// all of the requested locks are held until release is called. Calling
|
||||||
|
// release does not unlock the lock, in the WebDAV UNLOCK sense, but once
|
||||||
|
// Confirm has confirmed that a lock claim is valid, that lock cannot be
|
||||||
|
// Confirmed again until it has been released.
|
||||||
|
//
|
||||||
|
// If Confirm returns ErrConfirmationFailed then the Handler will continue
|
||||||
|
// to try any other set of locks presented (a WebDAV HTTP request can
|
||||||
|
// present more than one set of locks). If it returns any other non-nil
|
||||||
|
// error, the Handler will write a "500 Internal Server Error" HTTP status.
|
||||||
|
Confirm(now time.Time, name0, name1 string, conditions ...Condition) (release func(), err error)
|
||||||
|
|
||||||
|
// Create creates a lock with the given depth, duration, owner and root
|
||||||
|
// (name). The depth will either be negative (meaning infinite) or zero.
|
||||||
|
//
|
||||||
|
// If Create returns ErrLocked then the Handler will write a "423 Locked"
|
||||||
|
// HTTP status. If it returns any other non-nil error, the Handler will
|
||||||
|
// write a "500 Internal Server Error" HTTP status.
|
||||||
|
//
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#rfc.section.9.10.6 for
|
||||||
|
// when to use each error.
|
||||||
|
//
|
||||||
|
// The token returned identifies the created lock. It should be an absolute
|
||||||
|
// URI as defined by RFC 3986, Section 4.3. In particular, it should not
|
||||||
|
// contain whitespace.
|
||||||
|
Create(now time.Time, details LockDetails) (token string, err error)
|
||||||
|
|
||||||
|
// Refresh refreshes the lock with the given token.
|
||||||
|
//
|
||||||
|
// If Refresh returns ErrLocked then the Handler will write a "423 Locked"
|
||||||
|
// HTTP Status. If Refresh returns ErrNoSuchLock then the Handler will write
|
||||||
|
// a "412 Precondition Failed" HTTP Status. If it returns any other non-nil
|
||||||
|
// error, the Handler will write a "500 Internal Server Error" HTTP status.
|
||||||
|
//
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#rfc.section.9.10.6 for
|
||||||
|
// when to use each error.
|
||||||
|
Refresh(now time.Time, token string, duration time.Duration) (LockDetails, error)
|
||||||
|
|
||||||
|
// Unlock unlocks the lock with the given token.
|
||||||
|
//
|
||||||
|
// If Unlock returns ErrForbidden then the Handler will write a "403
|
||||||
|
// Forbidden" HTTP Status. If Unlock returns ErrLocked then the Handler
|
||||||
|
// will write a "423 Locked" HTTP status. If Unlock returns ErrNoSuchLock
|
||||||
|
// then the Handler will write a "409 Conflict" HTTP Status. If it returns
|
||||||
|
// any other non-nil error, the Handler will write a "500 Internal Server
|
||||||
|
// Error" HTTP status.
|
||||||
|
//
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#rfc.section.9.11.1 for
|
||||||
|
// when to use each error.
|
||||||
|
Unlock(now time.Time, token string) error
|
||||||
|
}
|
||||||
|
|
||||||
|
// LockDetails are a lock's metadata.
|
||||||
|
type LockDetails struct {
|
||||||
|
// Root is the root resource name being locked. For a zero-depth lock, the
|
||||||
|
// root is the only resource being locked.
|
||||||
|
Root string
|
||||||
|
// Duration is the lock timeout. A negative duration means infinite.
|
||||||
|
Duration time.Duration
|
||||||
|
// OwnerXML is the verbatim <owner> XML given in a LOCK HTTP request.
|
||||||
|
//
|
||||||
|
// TODO: does the "verbatim" nature play well with XML namespaces?
|
||||||
|
// Does the OwnerXML field need to have more structure? See
|
||||||
|
// https://codereview.appspot.com/175140043/#msg2
|
||||||
|
OwnerXML string
|
||||||
|
// ZeroDepth is whether the lock has zero depth. If it does not have zero
|
||||||
|
// depth, it has infinite depth.
|
||||||
|
ZeroDepth bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewMemLS returns a new in-memory LockSystem.
func NewMemLS() LockSystem {
    return &memLS{
        byName:  make(map[string]*memLSNode),
        byToken: make(map[string]*memLSNode),
        gen:     uint64(time.Now().Unix()),
    }
}
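A hypothetical test-style sketch (not part of this change) of the Create/Confirm/Unlock round trip on the in-memory implementation, written as if it lived inside this package; the test name and resource path are illustrative.

package webdav

import (
    "testing"
    "time"
)

func TestMemLSSketch(t *testing.T) {
    ls := NewMemLS()
    now := time.Now()

    // Take a zero-depth lock on a single resource for one minute.
    token, err := ls.Create(now, LockDetails{
        Root:      "/data/report.txt",
        Duration:  time.Minute,
        ZeroDepth: true,
    })
    if err != nil {
        t.Fatal(err)
    }

    // Confirm claims the lock while an operation runs; release undoes the
    // claim but does not unlock the resource.
    release, err := ls.Confirm(now, "/data/report.txt", "", Condition{Token: token})
    if err != nil {
        t.Fatal(err)
    }
    release()

    // Unlock removes the lock entirely, as a WebDAV UNLOCK request would.
    if err := ls.Unlock(now, token); err != nil {
        t.Fatal(err)
    }
}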
|
||||||
|
|
||||||
|
type memLS struct {
|
||||||
|
mu sync.Mutex
|
||||||
|
byName map[string]*memLSNode
|
||||||
|
byToken map[string]*memLSNode
|
||||||
|
gen uint64
|
||||||
|
// byExpiry only contains those nodes whose LockDetails have a finite
|
||||||
|
// Duration and are yet to expire.
|
||||||
|
byExpiry byExpiry
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) nextToken() string {
|
||||||
|
m.gen++
|
||||||
|
return strconv.FormatUint(m.gen, 10)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) collectExpiredNodes(now time.Time) {
|
||||||
|
for len(m.byExpiry) > 0 {
|
||||||
|
if now.Before(m.byExpiry[0].expiry) {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
m.remove(m.byExpiry[0])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) Confirm(now time.Time, name0, name1 string, conditions ...Condition) (func(), error) {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
m.collectExpiredNodes(now)
|
||||||
|
|
||||||
|
var n0, n1 *memLSNode
|
||||||
|
if name0 != "" {
|
||||||
|
if n0 = m.lookup(slashClean(name0), conditions...); n0 == nil {
|
||||||
|
return nil, ErrConfirmationFailed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if name1 != "" {
|
||||||
|
if n1 = m.lookup(slashClean(name1), conditions...); n1 == nil {
|
||||||
|
return nil, ErrConfirmationFailed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Don't hold the same node twice.
|
||||||
|
if n1 == n0 {
|
||||||
|
n1 = nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if n0 != nil {
|
||||||
|
m.hold(n0)
|
||||||
|
}
|
||||||
|
if n1 != nil {
|
||||||
|
m.hold(n1)
|
||||||
|
}
|
||||||
|
return func() {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
if n1 != nil {
|
||||||
|
m.unhold(n1)
|
||||||
|
}
|
||||||
|
if n0 != nil {
|
||||||
|
m.unhold(n0)
|
||||||
|
}
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// lookup returns the node n that locks the named resource, provided that n
|
||||||
|
// matches at least one of the given conditions and that lock isn't held by
|
||||||
|
// another party. Otherwise, it returns nil.
|
||||||
|
//
|
||||||
|
// n may be a parent of the named resource, if n is an infinite depth lock.
|
||||||
|
func (m *memLS) lookup(name string, conditions ...Condition) (n *memLSNode) {
|
||||||
|
// TODO: support Condition.Not and Condition.ETag.
|
||||||
|
for _, c := range conditions {
|
||||||
|
n = m.byToken[c.Token]
|
||||||
|
if n == nil || n.held {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if name == n.details.Root {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
if n.details.ZeroDepth {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if n.details.Root == "/" || strings.HasPrefix(name, n.details.Root+"/") {
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) hold(n *memLSNode) {
|
||||||
|
if n.held {
|
||||||
|
panic("webdav: memLS inconsistent held state")
|
||||||
|
}
|
||||||
|
n.held = true
|
||||||
|
if n.details.Duration >= 0 && n.byExpiryIndex >= 0 {
|
||||||
|
heap.Remove(&m.byExpiry, n.byExpiryIndex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) unhold(n *memLSNode) {
|
||||||
|
if !n.held {
|
||||||
|
panic("webdav: memLS inconsistent held state")
|
||||||
|
}
|
||||||
|
n.held = false
|
||||||
|
if n.details.Duration >= 0 {
|
||||||
|
heap.Push(&m.byExpiry, n)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) Create(now time.Time, details LockDetails) (string, error) {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
m.collectExpiredNodes(now)
|
||||||
|
details.Root = slashClean(details.Root)
|
||||||
|
|
||||||
|
if !m.canCreate(details.Root, details.ZeroDepth) {
|
||||||
|
return "", ErrLocked
|
||||||
|
}
|
||||||
|
n := m.create(details.Root)
|
||||||
|
n.token = m.nextToken()
|
||||||
|
m.byToken[n.token] = n
|
||||||
|
n.details = details
|
||||||
|
if n.details.Duration >= 0 {
|
||||||
|
n.expiry = now.Add(n.details.Duration)
|
||||||
|
heap.Push(&m.byExpiry, n)
|
||||||
|
}
|
||||||
|
return n.token, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) Refresh(now time.Time, token string, duration time.Duration) (LockDetails, error) {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
m.collectExpiredNodes(now)
|
||||||
|
|
||||||
|
n := m.byToken[token]
|
||||||
|
if n == nil {
|
||||||
|
return LockDetails{}, ErrNoSuchLock
|
||||||
|
}
|
||||||
|
if n.held {
|
||||||
|
return LockDetails{}, ErrLocked
|
||||||
|
}
|
||||||
|
if n.byExpiryIndex >= 0 {
|
||||||
|
heap.Remove(&m.byExpiry, n.byExpiryIndex)
|
||||||
|
}
|
||||||
|
n.details.Duration = duration
|
||||||
|
if n.details.Duration >= 0 {
|
||||||
|
n.expiry = now.Add(n.details.Duration)
|
||||||
|
heap.Push(&m.byExpiry, n)
|
||||||
|
}
|
||||||
|
return n.details, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) Unlock(now time.Time, token string) error {
|
||||||
|
m.mu.Lock()
|
||||||
|
defer m.mu.Unlock()
|
||||||
|
m.collectExpiredNodes(now)
|
||||||
|
|
||||||
|
n := m.byToken[token]
|
||||||
|
if n == nil {
|
||||||
|
return ErrNoSuchLock
|
||||||
|
}
|
||||||
|
if n.held {
|
||||||
|
return ErrLocked
|
||||||
|
}
|
||||||
|
m.remove(n)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) canCreate(name string, zeroDepth bool) bool {
|
||||||
|
return walkToRoot(name, func(name0 string, first bool) bool {
|
||||||
|
n := m.byName[name0]
|
||||||
|
if n == nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if first {
|
||||||
|
if n.token != "" {
|
||||||
|
// The target node is already locked.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if !zeroDepth {
|
||||||
|
// The requested lock depth is infinite, and the fact that n exists
|
||||||
|
// (n != nil) means that a descendent of the target node is locked.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
} else if n.token != "" && !n.details.ZeroDepth {
|
||||||
|
// An ancestor of the target node is locked with infinite depth.
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) create(name string) (ret *memLSNode) {
|
||||||
|
walkToRoot(name, func(name0 string, first bool) bool {
|
||||||
|
n := m.byName[name0]
|
||||||
|
if n == nil {
|
||||||
|
n = &memLSNode{
|
||||||
|
details: LockDetails{
|
||||||
|
Root: name0,
|
||||||
|
},
|
||||||
|
byExpiryIndex: -1,
|
||||||
|
}
|
||||||
|
m.byName[name0] = n
|
||||||
|
}
|
||||||
|
n.refCount++
|
||||||
|
if first {
|
||||||
|
ret = n
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
return ret
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *memLS) remove(n *memLSNode) {
|
||||||
|
delete(m.byToken, n.token)
|
||||||
|
n.token = ""
|
||||||
|
walkToRoot(n.details.Root, func(name0 string, first bool) bool {
|
||||||
|
x := m.byName[name0]
|
||||||
|
x.refCount--
|
||||||
|
if x.refCount == 0 {
|
||||||
|
delete(m.byName, name0)
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
})
|
||||||
|
if n.byExpiryIndex >= 0 {
|
||||||
|
heap.Remove(&m.byExpiry, n.byExpiryIndex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func walkToRoot(name string, f func(name0 string, first bool) bool) bool {
|
||||||
|
for first := true; ; first = false {
|
||||||
|
if !f(name, first) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if name == "/" {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
name = name[:strings.LastIndex(name, "/")]
|
||||||
|
if name == "" {
|
||||||
|
name = "/"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
type memLSNode struct {
|
||||||
|
// details are the lock metadata. Even if this node's name is not explicitly locked,
|
||||||
|
// details.Root will still equal the node's name.
|
||||||
|
details LockDetails
|
||||||
|
// token is the unique identifier for this node's lock. An empty token means that
|
||||||
|
// this node is not explicitly locked.
|
||||||
|
token string
|
||||||
|
// refCount is the number of self-or-descendent nodes that are explicitly locked.
|
||||||
|
refCount int
|
||||||
|
// expiry is when this node's lock expires.
|
||||||
|
expiry time.Time
|
||||||
|
// byExpiryIndex is the index of this node in memLS.byExpiry. It is -1
|
||||||
|
// if this node does not expire, or has expired.
|
||||||
|
byExpiryIndex int
|
||||||
|
// held is whether this node's lock is actively held by a Confirm call.
|
||||||
|
held bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type byExpiry []*memLSNode
|
||||||
|
|
||||||
|
func (b *byExpiry) Len() int {
|
||||||
|
return len(*b)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *byExpiry) Less(i, j int) bool {
|
||||||
|
return (*b)[i].expiry.Before((*b)[j].expiry)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *byExpiry) Swap(i, j int) {
|
||||||
|
(*b)[i], (*b)[j] = (*b)[j], (*b)[i]
|
||||||
|
(*b)[i].byExpiryIndex = i
|
||||||
|
(*b)[j].byExpiryIndex = j
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *byExpiry) Push(x interface{}) {
|
||||||
|
n := x.(*memLSNode)
|
||||||
|
n.byExpiryIndex = len(*b)
|
||||||
|
*b = append(*b, n)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (b *byExpiry) Pop() interface{} {
|
||||||
|
i := len(*b) - 1
|
||||||
|
n := (*b)[i]
|
||||||
|
(*b)[i] = nil
|
||||||
|
n.byExpiryIndex = -1
|
||||||
|
*b = (*b)[:i]
|
||||||
|
return n
|
||||||
|
}
|
||||||
|
|
||||||
|
const infiniteTimeout = -1

// parseTimeout parses the Timeout HTTP header, as per section 10.7. If s is
// empty, an infiniteTimeout is returned.
func parseTimeout(s string) (time.Duration, error) {
    if s == "" {
        return infiniteTimeout, nil
    }
    if i := strings.IndexByte(s, ','); i >= 0 {
        s = s[:i]
    }
    s = strings.TrimSpace(s)
    if s == "Infinite" {
        return infiniteTimeout, nil
    }
    const pre = "Second-"
    if !strings.HasPrefix(s, pre) {
        return 0, errInvalidTimeout
    }
    s = s[len(pre):]
    if s == "" || s[0] < '0' || '9' < s[0] {
        return 0, errInvalidTimeout
    }
    n, err := strconv.ParseInt(s, 10, 64)
    if err != nil || 1<<32-1 < n {
        return 0, errInvalidTimeout
    }
    return time.Duration(n) * time.Second, nil
}
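A hypothetical helper (not part of this change), written as if inside this package, illustrating the header forms handled above.

package webdav

import "fmt"

func sketchParseTimeout() {
    for _, s := range []string{"", "Infinite", "Second-3600", "Second-5, Infinite"} {
        d, err := parseTimeout(s)
        fmt.Println(s, "->", d, err)
    }
    // "" and "Infinite" yield the negative infiniteTimeout sentinel,
    // "Second-3600" yields 1h0m0s, and only the first comma-separated
    // value of "Second-5, Infinite" is used, giving 5s.
}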
|
413
server/webdav/prop.go
Normal file
@ -0,0 +1,413 @@
|
|||||||
|
// Copyright 2015 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package webdav
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"encoding/xml"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"strconv"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type FileInfo interface {
    GetSize() uint64
    GetName() string
    ModTime() time.Time
    IsDir() bool
    //GetPosition() string
}
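This FileInfo interface is the package's slimmed-down view of a file that the property helpers below work with. A hypothetical minimal implementation, e.g. for tests (the memFileInfo name and fields are illustrative, not part of this change):

package webdav

import "time"

type memFileInfo struct {
    name    string
    size    uint64
    modTime time.Time
    dir     bool
}

func (f memFileInfo) GetSize() uint64    { return f.size }
func (f memFileInfo) GetName() string    { return f.name }
func (f memFileInfo) ModTime() time.Time { return f.modTime }
func (f memFileInfo) IsDir() bool        { return f.dir }

// memFileInfo satisfies FileInfo, so it can be handed to props/propnames/allprop.
var _ FileInfo = memFileInfo{}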
|
||||||
|
|
||||||
|
// Proppatch describes a property update instruction as defined in RFC 4918.
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#METHOD_PROPPATCH
|
||||||
|
type Proppatch struct {
|
||||||
|
// Remove specifies whether this patch removes properties. If it does not
|
||||||
|
// remove them, it sets them.
|
||||||
|
Remove bool
|
||||||
|
// Props contains the properties to be set or removed.
|
||||||
|
Props []Property
|
||||||
|
}
|
||||||
|
|
||||||
|
// Propstat describes an XML propstat element as defined in RFC 4918.
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#ELEMENT_propstat
|
||||||
|
type Propstat struct {
|
||||||
|
// Props contains the properties for which Status applies.
|
||||||
|
Props []Property
|
||||||
|
|
||||||
|
// Status defines the HTTP status code of the properties in Prop.
|
||||||
|
// Allowed values include, but are not limited to the WebDAV status
|
||||||
|
// code extensions for HTTP/1.1.
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#status.code.extensions.to.http11
|
||||||
|
Status int
|
||||||
|
|
||||||
|
// XMLError contains the XML representation of the optional error element.
|
||||||
|
// XML content within this field must not rely on any predefined
|
||||||
|
// namespace declarations or prefixes. If empty, the XML error element
|
||||||
|
// is omitted.
|
||||||
|
XMLError string
|
||||||
|
|
||||||
|
// ResponseDescription contains the contents of the optional
|
||||||
|
// responsedescription field. If empty, the XML element is omitted.
|
||||||
|
ResponseDescription string
|
||||||
|
}
|
||||||
|
|
||||||
|
// makePropstats returns a slice containing those of x and y whose Props slice
|
||||||
|
// is non-empty. If both are empty, it returns a slice containing an otherwise
|
||||||
|
// zero Propstat whose HTTP status code is 200 OK.
|
||||||
|
func makePropstats(x, y Propstat) []Propstat {
|
||||||
|
pstats := make([]Propstat, 0, 2)
|
||||||
|
if len(x.Props) != 0 {
|
||||||
|
pstats = append(pstats, x)
|
||||||
|
}
|
||||||
|
if len(y.Props) != 0 {
|
||||||
|
pstats = append(pstats, y)
|
||||||
|
}
|
||||||
|
if len(pstats) == 0 {
|
||||||
|
pstats = append(pstats, Propstat{
|
||||||
|
Status: http.StatusOK,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return pstats
|
||||||
|
}
|
||||||
|
|
||||||
|
// DeadPropsHolder holds the dead properties of a resource.
|
||||||
|
//
|
||||||
|
// Dead properties are those properties that are explicitly defined. In
|
||||||
|
// comparison, live properties, such as DAV:getcontentlength, are implicitly
|
||||||
|
// defined by the underlying resource, and cannot be explicitly overridden or
|
||||||
|
// removed. See the Terminology section of
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#rfc.section.3
|
||||||
|
//
|
||||||
|
// There is a whitelist of the names of live properties. This package handles
|
||||||
|
// all live properties, and will only pass non-whitelisted names to the Patch
|
||||||
|
// method of DeadPropsHolder implementations.
|
||||||
|
type DeadPropsHolder interface {
|
||||||
|
// DeadProps returns a copy of the dead properties held.
|
||||||
|
DeadProps() (map[xml.Name]Property, error)
|
||||||
|
|
||||||
|
// Patch patches the dead properties held.
|
||||||
|
//
|
||||||
|
// Patching is atomic; either all or no patches succeed. It returns (nil,
|
||||||
|
// non-nil) if an internal server error occurred, otherwise the Propstats
|
||||||
|
// collectively contain one Property for each proposed patch Property. If
|
||||||
|
// all patches succeed, Patch returns a slice of length one and a Propstat
|
||||||
|
// element with a 200 OK HTTP status code. If none succeed, for reasons
|
||||||
|
// other than an internal server error, no Propstat has status 200 OK.
|
||||||
|
//
|
||||||
|
// For more details on when various HTTP status codes apply, see
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#PROPPATCH-status
|
||||||
|
Patch([]Proppatch) ([]Propstat, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
// liveProps contains all supported, protected DAV: properties.
|
||||||
|
var liveProps = map[xml.Name]struct {
|
||||||
|
// findFn implements the propfind function of this property. If nil,
|
||||||
|
// it indicates a hidden property.
|
||||||
|
findFn func(context.Context, *FileSystem, LockSystem, string, FileInfo) (string, error)
|
||||||
|
// dir is true if the property applies to directories.
|
||||||
|
dir bool
|
||||||
|
}{
|
||||||
|
{Space: "DAV:", Local: "resourcetype"}: {
|
||||||
|
findFn: findResourceType,
|
||||||
|
dir: true,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "displayname"}: {
|
||||||
|
findFn: findDisplayName,
|
||||||
|
dir: true,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "getcontentlength"}: {
|
||||||
|
findFn: findContentLength,
|
||||||
|
dir: false,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "getlastmodified"}: {
|
||||||
|
findFn: findLastModified,
|
||||||
|
// http://webdav.org/specs/rfc4918.html#PROPERTY_getlastmodified
|
||||||
|
// suggests that getlastmodified should only apply to GETable
|
||||||
|
// resources, and this package does not support GET on directories.
|
||||||
|
//
|
||||||
|
// Nonetheless, some WebDAV clients expect child directories to be
|
||||||
|
// sortable by getlastmodified date, so this value is true, not false.
|
||||||
|
// See golang.org/issue/15334.
|
||||||
|
dir: true,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "creationdate"}: {
|
||||||
|
findFn: nil,
|
||||||
|
dir: false,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "getcontentlanguage"}: {
|
||||||
|
findFn: nil,
|
||||||
|
dir: false,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "getcontenttype"}: {
|
||||||
|
findFn: findContentType,
|
||||||
|
dir: false,
|
||||||
|
},
|
||||||
|
{Space: "DAV:", Local: "getetag"}: {
|
||||||
|
findFn: findETag,
|
||||||
|
// findETag implements ETag as the concatenated hex values of a file's
|
||||||
|
// modification time and size. This is not a reliable synchronization
|
||||||
|
// mechanism for directories, so we do not advertise getetag for DAV
|
||||||
|
// collections.
|
||||||
|
dir: false,
|
||||||
|
},
|
||||||
|
|
||||||
|
// TODO: The lockdiscovery property requires LockSystem to list the
|
||||||
|
// active locks on a resource.
|
||||||
|
{Space: "DAV:", Local: "lockdiscovery"}: {},
|
||||||
|
{Space: "DAV:", Local: "supportedlock"}: {
|
||||||
|
findFn: findSupportedLock,
|
||||||
|
dir: true,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO(nigeltao) merge props and allprop?
|
||||||
|
|
||||||
|
// Props returns the status of the properties named pnames for resource name.
|
||||||
|
//
|
||||||
|
// Each Propstat has a unique status and each property name will only be part
|
||||||
|
// of one Propstat element.
|
||||||
|
func props(ctx context.Context, fs *FileSystem, ls LockSystem, fi FileInfo, pnames []xml.Name) ([]Propstat, error) {
|
||||||
|
isDir := fi.IsDir()
|
||||||
|
|
||||||
|
var deadProps map[xml.Name]Property
|
||||||
|
|
||||||
|
pstatOK := Propstat{Status: http.StatusOK}
|
||||||
|
pstatNotFound := Propstat{Status: http.StatusNotFound}
|
||||||
|
for _, pn := range pnames {
|
||||||
|
// If this file has dead properties, check if they contain pn.
|
||||||
|
if dp, ok := deadProps[pn]; ok {
|
||||||
|
pstatOK.Props = append(pstatOK.Props, dp)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Otherwise, it must either be a live property or we don't know it.
|
||||||
|
if prop := liveProps[pn]; prop.findFn != nil && (prop.dir || !isDir) {
|
||||||
|
innerXML, err := prop.findFn(ctx, fs, ls, fi.GetName(), fi)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
pstatOK.Props = append(pstatOK.Props, Property{
|
||||||
|
XMLName: pn,
|
||||||
|
InnerXML: []byte(innerXML),
|
||||||
|
})
|
||||||
|
} else {
|
||||||
|
pstatNotFound.Props = append(pstatNotFound.Props, Property{
|
||||||
|
XMLName: pn,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return makePropstats(pstatOK, pstatNotFound), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Propnames returns the property names defined for resource name.
|
||||||
|
func propnames(ctx context.Context, fs *FileSystem, ls LockSystem, fi FileInfo) ([]xml.Name, error) {
|
||||||
|
isDir := fi.IsDir()
|
||||||
|
|
||||||
|
var deadProps map[xml.Name]Property
|
||||||
|
|
||||||
|
pnames := make([]xml.Name, 0, len(liveProps)+len(deadProps))
|
||||||
|
for pn, prop := range liveProps {
|
||||||
|
if prop.findFn != nil && (prop.dir || !isDir) {
|
||||||
|
pnames = append(pnames, pn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return pnames, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Allprop returns the properties defined for resource name and the properties
|
||||||
|
// named in include.
|
||||||
|
//
|
||||||
|
// Note that RFC 4918 defines 'allprop' to return the DAV: properties defined
|
||||||
|
// within the RFC plus dead properties. Other live properties should only be
|
||||||
|
// returned if they are named in 'include'.
|
||||||
|
//
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#METHOD_PROPFIND
|
||||||
|
func allprop(ctx context.Context, fs *FileSystem, ls LockSystem, info FileInfo, include []xml.Name) ([]Propstat, error) {
|
||||||
|
pnames, err := propnames(ctx, fs, ls, info)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// Add names from include if they are not already covered in pnames.
|
||||||
|
nameset := make(map[xml.Name]bool)
|
||||||
|
for _, pn := range pnames {
|
||||||
|
nameset[pn] = true
|
||||||
|
}
|
||||||
|
for _, pn := range include {
|
||||||
|
if !nameset[pn] {
|
||||||
|
pnames = append(pnames, pn)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return props(ctx, fs, ls, info, pnames)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Patch patches the properties of resource name. The return values are
|
||||||
|
// constrained in the same manner as DeadPropsHolder.Patch.
|
||||||
|
func patch(ctx context.Context, fs *FileSystem, ls LockSystem, name string, patches []Proppatch) ([]Propstat, error) {
|
||||||
|
conflict := false
|
||||||
|
loop:
|
||||||
|
for _, patch := range patches {
|
||||||
|
for _, p := range patch.Props {
|
||||||
|
if _, ok := liveProps[p.XMLName]; ok {
|
||||||
|
conflict = true
|
||||||
|
break loop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if conflict {
|
||||||
|
pstatForbidden := Propstat{
|
||||||
|
Status: http.StatusForbidden,
|
||||||
|
XMLError: `<D:cannot-modify-protected-property xmlns:D="DAV:"/>`,
|
||||||
|
}
|
||||||
|
pstatFailedDep := Propstat{
|
||||||
|
Status: StatusFailedDependency,
|
||||||
|
}
|
||||||
|
for _, patch := range patches {
|
||||||
|
for _, p := range patch.Props {
|
||||||
|
if _, ok := liveProps[p.XMLName]; ok {
|
||||||
|
pstatForbidden.Props = append(pstatForbidden.Props, Property{XMLName: p.XMLName})
|
||||||
|
} else {
|
||||||
|
pstatFailedDep.Props = append(pstatFailedDep.Props, Property{XMLName: p.XMLName})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return makePropstats(pstatForbidden, pstatFailedDep), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// The file doesn't implement the optional DeadPropsHolder interface, so
|
||||||
|
// all patches are forbidden.
|
||||||
|
pstat := Propstat{Status: http.StatusOK}
|
||||||
|
for _, patch := range patches {
|
||||||
|
for _, p := range patch.Props {
|
||||||
|
pstat.Props = append(pstat.Props, Property{XMLName: p.XMLName})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return []Propstat{pstat}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func escapeXML(s string) string {
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
// As an optimization, if s contains only ASCII letters, digits or a
|
||||||
|
// few special characters, the escaped value is s itself and we don't
|
||||||
|
// need to allocate a buffer and convert between string and []byte.
|
||||||
|
switch c := s[i]; {
|
||||||
|
case c == ' ' || c == '_' ||
|
||||||
|
('+' <= c && c <= '9') || // Digits as well as + , - . and /
|
||||||
|
('A' <= c && c <= 'Z') ||
|
||||||
|
('a' <= c && c <= 'z'):
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
// Otherwise, go through the full escaping process.
|
||||||
|
var buf bytes.Buffer
|
||||||
|
xml.EscapeText(&buf, []byte(s))
|
||||||
|
return buf.String()
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
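
A quick illustrative sketch of escapeXML's fast path, written as a test assumed to live alongside the code above in the same webdav package (not part of the original change): strings made only of the safe ASCII subset come back unchanged, anything else is run through xml.EscapeText.

package webdav

import "testing"

// TestEscapeXMLSketch is an illustrative sketch of the optimization above.
func TestEscapeXMLSketch(t *testing.T) {
    // Safe subset (letters, digits, space, '_', '+'..'/'): returned as-is,
    // no buffer is allocated.
    if got := escapeXML("hello_world-1.txt"); got != "hello_world-1.txt" {
        t.Fatalf("fast path: got %q", got)
    }
    // Anything outside that subset goes through xml.EscapeText.
    if got := escapeXML("a<b&c"); got != "a&lt;b&amp;c" {
        t.Fatalf("escaped path: got %q", got)
    }
}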
|
||||||
|
|
||||||
|
func findResourceType(ctx context.Context, fs *FileSystem, ls LockSystem, name string, fi FileInfo) (string, error) {
|
||||||
|
if fi.IsDir() {
|
||||||
|
return `<D:collection xmlns:D="DAV:"/>`, nil
|
||||||
|
}
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findDisplayName(ctx context.Context, fs *FileSystem, ls LockSystem, name string, fi FileInfo) (string, error) {
|
||||||
|
if slashClean(name) == "/" {
|
||||||
|
// Hide the real name of a possibly prefixed root directory.
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
return escapeXML(fi.GetName()), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findContentLength(ctx context.Context, fs *FileSystem, ls LockSystem, name string, fi FileInfo) (string, error) {
|
||||||
|
return strconv.FormatUint(fi.GetSize(), 10), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func findLastModified(ctx context.Context, fs *FileSystem, ls LockSystem, name string, fi FileInfo) (string, error) {
|
||||||
|
return fi.ModTime().UTC().Format(http.TimeFormat), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrNotImplemented should be returned by optional interfaces if they
|
||||||
|
// want the original implementation to be used.
|
||||||
|
var ErrNotImplemented = errors.New("not implemented")
|
||||||
|
|
||||||
|
// ContentTyper is an optional interface for the os.FileInfo
|
||||||
|
// objects returned by the FileSystem.
|
||||||
|
//
|
||||||
|
// If this interface is defined then it will be used to read the
|
||||||
|
// content type from the object.
|
||||||
|
//
|
||||||
|
// If this interface is not defined the file will be opened and the
|
||||||
|
// content type will be guessed from the initial contents of the file.
|
||||||
|
type ContentTyper interface {
|
||||||
|
// ContentType returns the content type for the file.
|
||||||
|
//
|
||||||
|
// If this returns error ErrNotImplemented then the error will
|
||||||
|
// be ignored and the base implementation will be used
|
||||||
|
// instead.
|
||||||
|
ContentType(ctx context.Context) (string, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func findContentType(ctx context.Context, fs *FileSystem, ls LockSystem, name string, fi FileInfo) (string, error) {
|
||||||
|
//if do, ok := fi.(ContentTyper); ok {
|
||||||
|
// ctype, err := do.ContentType(ctx)
|
||||||
|
// if err != ErrNotImplemented {
|
||||||
|
// return ctype, err
|
||||||
|
// }
|
||||||
|
//}
|
||||||
|
//f, err := fs.OpenFile(ctx, name, os.O_RDONLY, 0)
|
||||||
|
//if err != nil {
|
||||||
|
// return "", err
|
||||||
|
//}
|
||||||
|
//defer f.Close()
|
||||||
|
//// This implementation is based on serveContent's code in the standard net/http package.
|
||||||
|
//ctype := mime.TypeByExtension(filepath.Ext(name))
|
||||||
|
//if ctype != "" {
|
||||||
|
// return ctype, nil
|
||||||
|
//}
|
||||||
|
//// Read a chunk to decide between utf-8 text and binary.
|
||||||
|
//var buf [512]byte
|
||||||
|
//n, err := io.ReadFull(f, buf[:])
|
||||||
|
//if err != nil && err != io.EOF && err != io.ErrUnexpectedEOF {
|
||||||
|
// return "", err
|
||||||
|
//}
|
||||||
|
//ctype = http.DetectContentType(buf[:n])
|
||||||
|
//// Rewind file.
|
||||||
|
//_, err = f.Seek(0, os.SEEK_SET)
|
||||||
|
//return ctype, err
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ETager is an optional interface for the os.FileInfo objects
|
||||||
|
// returned by the FileSystem.
|
||||||
|
//
|
||||||
|
// If this interface is defined then it will be used to read the ETag
|
||||||
|
// for the object.
|
||||||
|
//
|
||||||
|
// If this interface is not defined an ETag will be computed using the
|
||||||
|
// ModTime() and the Size() methods of the os.FileInfo object.
|
||||||
|
type ETager interface {
|
||||||
|
// ETag returns an ETag for the file. This should be of the
|
||||||
|
// form "value" or W/"value"
|
||||||
|
//
|
||||||
|
// If this returns error ErrNotImplemented then the error will
|
||||||
|
// be ignored and the base implementation will be used
|
||||||
|
// instead.
|
||||||
|
ETag(ctx context.Context) (string, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func findETag(ctx context.Context, fs *FileSystem, ls LockSystem, reqPath string, fi FileInfo) (string, error) {
|
||||||
|
return fmt.Sprintf(`"%x%x"`, fi.ModTime().UnixNano(), fi.GetSize()), nil
|
||||||
|
}
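
findETag above derives its validator from the modification time and size rather than from file contents. A standalone sketch of the same format, with a made-up time and size purely for illustration:

package main

import (
    "fmt"
    "time"
)

func main() {
    // Same shape as findETag: hex of ModTime().UnixNano() followed by hex of
    // the size, wrapped in double quotes.
    modTime := time.Date(2021, 11, 1, 12, 0, 0, 0, time.UTC)
    size := uint64(1024)
    etag := fmt.Sprintf(`"%x%x"`, modTime.UnixNano(), size)
    fmt.Println(etag) // hex mod-time followed by hex size (0x400)
}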
|
||||||
|
|
||||||
|
func findSupportedLock(ctx context.Context, fs *FileSystem, ls LockSystem, name string, fi FileInfo) (string, error) {
|
||||||
|
return `` +
|
||||||
|
`<D:lockentry xmlns:D="DAV:">` +
|
||||||
|
`<D:lockscope><D:exclusive/></D:lockscope>` +
|
||||||
|
`<D:locktype><D:write/></D:locktype>` +
|
||||||
|
`</D:lockentry>`, nil
|
||||||
|
}
|
730 server/webdav/webdav.go Normal file
@ -0,0 +1,730 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
// Package webdav provides a WebDAV server implementation.
|
||||||
|
package webdav
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"github.com/Xhofe/alist/utils"
|
||||||
|
log "github.com/sirupsen/logrus"
|
||||||
|
"net/http"
|
||||||
|
"net/url"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
"time"
|
||||||
|
)
|
||||||
|
|
||||||
|
type Handler struct {
|
||||||
|
// Prefix is the URL path prefix to strip from WebDAV resource paths.
|
||||||
|
Prefix string
|
||||||
|
// LockSystem is the lock management system.
|
||||||
|
LockSystem LockSystem
|
||||||
|
// Logger is an optional error logger. If non-nil, it will be called
|
||||||
|
// for all HTTP requests.
|
||||||
|
Logger func(*http.Request, error)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) stripPrefix(p string) (string, int, error) {
|
||||||
|
if h.Prefix == "" {
|
||||||
|
return p, http.StatusOK, nil
|
||||||
|
}
|
||||||
|
prefix := h.Prefix
|
||||||
|
if r := strings.TrimPrefix(p, prefix); len(r) < len(p) {
|
||||||
|
if len(r) == 0 {
|
||||||
|
r = "/"
|
||||||
|
}
|
||||||
|
return utils.RemoveLastSlash(r), http.StatusOK, nil
|
||||||
|
}
|
||||||
|
return p, http.StatusNotFound, errPrefixMismatch
|
||||||
|
}
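
A minimal standalone sketch of what stripPrefix does, assuming a hypothetical "/dav" prefix (stripPrefixSketch is an illustrative helper, not part of the handler): trim the configured prefix, map an empty remainder to "/", and drop a trailing slash, which is the role utils.RemoveLastSlash plays above.

package main

import (
    "fmt"
    "strings"
)

// stripPrefixSketch mirrors Handler.stripPrefix for illustration only.
func stripPrefixSketch(prefix, p string) (string, bool) {
    if prefix == "" {
        return p, true
    }
    if r := strings.TrimPrefix(p, prefix); len(r) < len(p) {
        if r == "" {
            r = "/"
        }
        if len(r) > 1 {
            r = strings.TrimSuffix(r, "/") // same effect as utils.RemoveLastSlash
        }
        return r, true
    }
    return p, false // prefix mismatch
}

func main() {
    fmt.Println(stripPrefixSketch("/dav", "/dav/folder/")) // "/folder" true
    fmt.Println(stripPrefixSketch("/dav", "/dav"))         // "/" true
    fmt.Println(stripPrefixSketch("/dav", "/other/file"))  // unchanged, false
}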
|
||||||
|
|
||||||
|
// isPathExist reports whether path exists in fs, returning its FileInfo if it does.
func isPathExist(ctx context.Context, fs *FileSystem, path string) (bool, FileInfo) {
	file, err := fs.File(path)
	if err != nil {
		return false, nil
	}
	return true, file
}
|
||||||
|
|
||||||
|
func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request, fs *FileSystem) {
|
||||||
|
status, err := http.StatusBadRequest, errUnsupportedMethod
|
||||||
|
if h.LockSystem == nil {
|
||||||
|
status, err = http.StatusInternalServerError, errNoLockSystem
|
||||||
|
} else {
|
||||||
|
switch r.Method {
|
||||||
|
case "OPTIONS":
|
||||||
|
status, err = h.handleOptions(w, r, fs)
|
||||||
|
case "GET", "HEAD", "POST":
|
||||||
|
status, err = h.handleGetHeadPost(w, r, fs)
|
||||||
|
case "DELETE":
|
||||||
|
status, err = h.handleDelete(w, r, fs)
|
||||||
|
case "PUT":
|
||||||
|
status, err = h.handlePut(w, r, fs)
|
||||||
|
case "MKCOL":
|
||||||
|
status, err = h.handleMkcol(w, r, fs)
|
||||||
|
case "COPY", "MOVE":
|
||||||
|
status, err = h.handleCopyMove(w, r, fs)
|
||||||
|
case "LOCK":
|
||||||
|
status, err = h.handleLock(w, r, fs)
|
||||||
|
case "UNLOCK":
|
||||||
|
status, err = h.handleUnlock(w, r, fs)
|
||||||
|
case "PROPFIND":
|
||||||
|
status, err = h.handlePropfind(w, r, fs)
|
||||||
|
case "PROPPATCH":
|
||||||
|
status, err = h.handleProppatch(w, r, fs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
log.Error(err)
|
||||||
|
}
|
||||||
|
if status != 0 {
|
||||||
|
w.WriteHeader(status)
|
||||||
|
if status != http.StatusNoContent {
|
||||||
|
w.Write([]byte(StatusText(status)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if h.Logger != nil {
|
||||||
|
h.Logger(r, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) lock(now time.Time, root string, fs *FileSystem) (token string, status int, err error) {
|
||||||
|
token, err = h.LockSystem.Create(now, LockDetails{
|
||||||
|
Root: root,
|
||||||
|
Duration: infiniteTimeout,
|
||||||
|
ZeroDepth: true,
|
||||||
|
})
|
||||||
|
if err != nil {
|
||||||
|
if err == ErrLocked {
|
||||||
|
return "", StatusLocked, err
|
||||||
|
}
|
||||||
|
return "", http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
return token, 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// ok
|
||||||
|
func (h *Handler) confirmLocks(r *http.Request, src, dst string, fs *FileSystem) (release func(), status int, err error) {
|
||||||
|
hdr := r.Header.Get("If")
|
||||||
|
if hdr == "" {
|
||||||
|
// An empty If header means that the client hasn't previously created locks.
|
||||||
|
// Even if this client doesn't care about locks, we still need to check that
|
||||||
|
// the resources aren't locked by another client, so we create temporary
|
||||||
|
// locks that would conflict with another client's locks. These temporary
|
||||||
|
// locks are unlocked at the end of the HTTP request.
|
||||||
|
now, srcToken, dstToken := time.Now(), "", ""
|
||||||
|
if src != "" {
|
||||||
|
srcToken, status, err = h.lock(now, src, fs)
|
||||||
|
if err != nil {
|
||||||
|
return nil, status, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if dst != "" {
|
||||||
|
dstToken, status, err = h.lock(now, dst, fs)
|
||||||
|
if err != nil {
|
||||||
|
if srcToken != "" {
|
||||||
|
h.LockSystem.Unlock(now, srcToken)
|
||||||
|
}
|
||||||
|
return nil, status, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return func() {
|
||||||
|
if dstToken != "" {
|
||||||
|
h.LockSystem.Unlock(now, dstToken)
|
||||||
|
}
|
||||||
|
if srcToken != "" {
|
||||||
|
h.LockSystem.Unlock(now, srcToken)
|
||||||
|
}
|
||||||
|
}, 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
ih, ok := parseIfHeader(hdr)
|
||||||
|
if !ok {
|
||||||
|
return nil, http.StatusBadRequest, errInvalidIfHeader
|
||||||
|
}
|
||||||
|
// ih is a disjunction (OR) of ifLists, so any ifList will do.
|
||||||
|
for _, l := range ih.lists {
|
||||||
|
lsrc := l.resourceTag
|
||||||
|
if lsrc == "" {
|
||||||
|
lsrc = src
|
||||||
|
} else {
|
||||||
|
u, err := url.Parse(lsrc)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
//if u.Host != r.Host {
|
||||||
|
// continue
|
||||||
|
//}
|
||||||
|
lsrc, status, err = h.stripPrefix(u.Path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, status, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
release, err = h.LockSystem.Confirm(
|
||||||
|
time.Now(),
|
||||||
|
lsrc,
|
||||||
|
dst,
|
||||||
|
l.conditions...,
|
||||||
|
)
|
||||||
|
if err == ErrConfirmationFailed {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return nil, http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
return release, 0, nil
|
||||||
|
}
|
||||||
|
// Section 10.4.1 says that "If this header is evaluated and all state lists
|
||||||
|
// fail, then the request must fail with a 412 (Precondition Failed) status."
|
||||||
|
// We follow the spec even though the cond_put_corrupt_token test case from
|
||||||
|
// the litmus test warns on seeing a 412 instead of a 423 (Locked).
|
||||||
|
return nil, http.StatusPreconditionFailed, ErrLocked
|
||||||
|
}
|
||||||
|
|
||||||
|
//OK
|
||||||
|
func (h *Handler) handleOptions(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
ctx := r.Context()
|
||||||
|
allow := "OPTIONS, LOCK, PUT, MKCOL"
|
||||||
|
if exist, fi := isPathExist(ctx, fs, reqPath); exist {
|
||||||
|
log.Debugf("fi: %+v", fi)
|
||||||
|
if fi.IsDir() {
|
||||||
|
allow = "OPTIONS, LOCK, DELETE, PROPPATCH, COPY, MOVE, UNLOCK, PROPFIND"
|
||||||
|
} else {
|
||||||
|
allow = "OPTIONS, LOCK, GET, HEAD, POST, DELETE, PROPPATCH, COPY, MOVE, UNLOCK, PROPFIND, PUT"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
w.Header().Set("Allow", allow)
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#dav.compliance.classes
|
||||||
|
w.Header().Set("DAV", "1, 2")
|
||||||
|
// http://msdn.microsoft.com/en-au/library/cc250217.aspx
|
||||||
|
w.Header().Set("MS-Author-Via", "DAV")
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handleGetHeadPost(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
exist, file := isPathExist(ctx, fs, reqPath)
|
||||||
|
if !exist {
|
||||||
|
return http.StatusNotFound, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
etag, err := findETag(ctx, fs, h.LockSystem, reqPath, file)
|
||||||
|
if err != nil {
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
w.Header().Set("ETag", etag)
|
||||||
|
log.Debugf("url: %+v", r.URL)
|
||||||
|
link, err := fs.Link(r, reqPath)
|
||||||
|
if err != nil {
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
http.Redirect(w, r, link, http.StatusFound)
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handleDelete(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
release, status, err := h.confirmLocks(r, reqPath, "", fs)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
//ctx := r.Context()
|
||||||
|
|
||||||
|
//// Try to delete it as a file
|
||||||
|
//if ok, file := fs.IsFileExist(reqPath); ok {
|
||||||
|
// if err := fs.Delete(ctx, []uint{}, []uint{file.ID}, false); err != nil {
|
||||||
|
// return http.StatusMethodNotAllowed, err
|
||||||
|
// }
|
||||||
|
// return http.StatusNoContent, nil
|
||||||
|
//}
|
||||||
|
//
|
||||||
|
//// Try to delete it as a directory
|
||||||
|
//if ok, folder := fs.IsPathExist(reqPath); ok {
|
||||||
|
// if err := fs.Delete(ctx, []uint{folder.ID}, []uint{}, false); err != nil {
|
||||||
|
// return http.StatusMethodNotAllowed, err
|
||||||
|
// }
|
||||||
|
// return http.StatusNoContent, nil
|
||||||
|
//}
|
||||||
|
|
||||||
|
return http.StatusNotFound, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handlePut(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
release, status, err := h.confirmLocks(r, reqPath, "", fs)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
defer release()
|
||||||
|
// TODO(rost): Support the If-Match, If-None-Match headers? See bradfitz'
|
||||||
|
// comments in http.checkEtag.
|
||||||
|
ctx, cancel := context.WithCancel(context.Background())
|
||||||
|
defer cancel()
|
||||||
|
|
||||||
|
etag, err := findETag(ctx, fs, h.LockSystem, reqPath, nil)
|
||||||
|
if err != nil {
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
w.Header().Set("ETag", etag)
|
||||||
|
return http.StatusCreated, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handleMkcol(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
release, status, err := h.confirmLocks(r, reqPath, "", fs)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
if r.ContentLength > 0 {
|
||||||
|
return http.StatusUnsupportedMediaType, nil
|
||||||
|
}
|
||||||
|
if strings.Contains(r.UserAgent(), "rclone") {
|
||||||
|
//if _, ok := ctx.Value(fsctx.IgnoreDirectoryConflictCtx).(bool); !ok {
|
||||||
|
// ctx = context.WithValue(ctx, fsctx.IgnoreDirectoryConflictCtx, true)
|
||||||
|
//}
|
||||||
|
}
|
||||||
|
if _, err := fs.CreateDirectory(ctx, reqPath); err != nil {
|
||||||
|
return http.StatusConflict, err
|
||||||
|
}
|
||||||
|
return http.StatusCreated, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handleCopyMove(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
|
||||||
|
hdr := r.Header.Get("Destination")
|
||||||
|
if hdr == "" {
|
||||||
|
return http.StatusBadRequest, errInvalidDestination
|
||||||
|
}
|
||||||
|
u, err := url.Parse(hdr)
|
||||||
|
if err != nil {
|
||||||
|
return http.StatusBadRequest, errInvalidDestination
|
||||||
|
}
|
||||||
|
//if u.Host != "" && u.Host != r.Host {
|
||||||
|
// return http.StatusBadGateway, errInvalidDestination
|
||||||
|
//}
|
||||||
|
|
||||||
|
src, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
|
||||||
|
dst, status, err := h.stripPrefix(u.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if dst == "" {
|
||||||
|
return http.StatusBadGateway, errInvalidDestination
|
||||||
|
}
|
||||||
|
if dst == src {
|
||||||
|
return http.StatusForbidden, errDestinationEqualsSource
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
isExist, target := isPathExist(ctx, fs, src)
|
||||||
|
|
||||||
|
if !isExist {
|
||||||
|
return http.StatusNotFound, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if r.Method == "COPY" {
|
||||||
|
// Section 7.5.1 says that a COPY only needs to lock the destination,
|
||||||
|
// not both destination and source. Strictly speaking, this is racy,
|
||||||
|
// even though a COPY doesn't modify the source, if a concurrent
|
||||||
|
// operation modifies the source. However, the litmus test explicitly
|
||||||
|
// checks that COPYing a locked-by-another source is OK.
|
||||||
|
release, status, err := h.confirmLocks(r, "", dst, fs)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
// Section 9.8.3 says that "The COPY method on a collection without a Depth
|
||||||
|
// header must act as if a Depth header with value "infinity" was included".
|
||||||
|
depth := infiniteDepth
|
||||||
|
if hdr := r.Header.Get("Depth"); hdr != "" {
|
||||||
|
depth = parseDepth(hdr)
|
||||||
|
if depth != 0 && depth != infiniteDepth {
|
||||||
|
// Section 9.8.3 says that "A client may submit a Depth header on a
|
||||||
|
// COPY on a collection with a value of "0" or "infinity"."
|
||||||
|
return http.StatusBadRequest, errInvalidDepth
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return copyFiles(ctx, fs, target, dst, r.Header.Get("Overwrite") != "F", depth, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
// On Windows, in some cases (at the drive root) the lock token Office sends
// when saving a file only covers the source file, so the check on the dst
// lock is skipped here for now.
|
||||||
|
release, status, err := h.confirmLocks(r, src, "", fs)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
// Section 9.9.2 says that "The MOVE method on a collection must act as if
|
||||||
|
// a "Depth: infinity" header was used on it. A client must not submit a
|
||||||
|
// Depth header on a MOVE on a collection with any value but "infinity"."
|
||||||
|
if hdr := r.Header.Get("Depth"); hdr != "" {
|
||||||
|
if parseDepth(hdr) != infiniteDepth {
|
||||||
|
return http.StatusBadRequest, errInvalidDepth
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return moveFiles(ctx, fs, target, dst, r.Header.Get("Overwrite") == "T")
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handleLock(w http.ResponseWriter, r *http.Request, fs *FileSystem) (retStatus int, retErr error) {
|
||||||
|
|
||||||
|
duration, err := parseTimeout(r.Header.Get("Timeout"))
|
||||||
|
if err != nil {
|
||||||
|
return http.StatusBadRequest, err
|
||||||
|
}
|
||||||
|
li, status, err := readLockInfo(r.Body)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
|
||||||
|
//ctx := r.Context()
|
||||||
|
token, ld, now, created := "", LockDetails{}, time.Now(), false
|
||||||
|
if li == (lockInfo{}) {
|
||||||
|
// An empty lockInfo means to refresh the lock.
|
||||||
|
ih, ok := parseIfHeader(r.Header.Get("If"))
|
||||||
|
if !ok {
|
||||||
|
return http.StatusBadRequest, errInvalidIfHeader
|
||||||
|
}
|
||||||
|
if len(ih.lists) == 1 && len(ih.lists[0].conditions) == 1 {
|
||||||
|
token = ih.lists[0].conditions[0].Token
|
||||||
|
}
|
||||||
|
if token == "" {
|
||||||
|
return http.StatusBadRequest, errInvalidLockToken
|
||||||
|
}
|
||||||
|
ld, err = h.LockSystem.Refresh(now, token, duration)
|
||||||
|
if err != nil {
|
||||||
|
if err == ErrNoSuchLock {
|
||||||
|
return http.StatusPreconditionFailed, err
|
||||||
|
}
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
|
||||||
|
} else {
|
||||||
|
// Section 9.10.3 says that "If no Depth header is submitted on a LOCK request,
|
||||||
|
// then the request MUST act as if a "Depth:infinity" had been submitted."
|
||||||
|
depth := infiniteDepth
|
||||||
|
if hdr := r.Header.Get("Depth"); hdr != "" {
|
||||||
|
depth = parseDepth(hdr)
|
||||||
|
if depth != 0 && depth != infiniteDepth {
|
||||||
|
// Section 9.10.3 says that "Values other than 0 or infinity must not be
|
||||||
|
// used with the Depth header on a LOCK method".
|
||||||
|
return http.StatusBadRequest, errInvalidDepth
|
||||||
|
}
|
||||||
|
}
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
ld = LockDetails{
|
||||||
|
Root: reqPath,
|
||||||
|
Duration: duration,
|
||||||
|
OwnerXML: li.Owner.InnerXML,
|
||||||
|
ZeroDepth: depth == 0,
|
||||||
|
}
|
||||||
|
token, err = h.LockSystem.Create(now, ld)
|
||||||
|
if err != nil {
|
||||||
|
if err == ErrLocked {
|
||||||
|
return StatusLocked, err
|
||||||
|
}
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
defer func() {
|
||||||
|
if retErr != nil {
|
||||||
|
h.LockSystem.Unlock(now, token)
|
||||||
|
}
|
||||||
|
}()
|
||||||
|
|
||||||
|
// Create the resource if it didn't previously exist.
|
||||||
|
//if _, err := h.FileSystem.Stat(ctx, reqPath); err != nil {
|
||||||
|
// f, err := h.FileSystem.OpenFile(ctx, reqPath, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
|
||||||
|
// if err != nil {
|
||||||
|
// // TODO: detect missing intermediate dirs and return http.StatusConflict?
|
||||||
|
// return http.StatusInternalServerError, err
|
||||||
|
// }
|
||||||
|
// f.Close()
|
||||||
|
// created = true
|
||||||
|
//}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#HEADER_Lock-Token says that the
|
||||||
|
// Lock-Token value is a Coded-URL. We add angle brackets.
|
||||||
|
w.Header().Set("Lock-Token", "<"+token+">")
|
||||||
|
}
|
||||||
|
|
||||||
|
w.Header().Set("Content-Type", "application/xml; charset=utf-8")
|
||||||
|
if created {
|
||||||
|
// This is "w.WriteHeader(http.StatusCreated)" and not "return
|
||||||
|
// http.StatusCreated, nil" because we write our own (XML) response to w
|
||||||
|
// and Handler.ServeHTTP would otherwise write "Created".
|
||||||
|
w.WriteHeader(http.StatusCreated)
|
||||||
|
}
|
||||||
|
writeLockInfo(w, token, ld)
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handleUnlock(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#HEADER_Lock-Token says that the
|
||||||
|
// Lock-Token value is a Coded-URL. We strip its angle brackets.
|
||||||
|
t := r.Header.Get("Lock-Token")
|
||||||
|
if len(t) < 2 || t[0] != '<' || t[len(t)-1] != '>' {
|
||||||
|
return http.StatusBadRequest, errInvalidLockToken
|
||||||
|
}
|
||||||
|
t = t[1 : len(t)-1]
|
||||||
|
|
||||||
|
switch err = h.LockSystem.Unlock(time.Now(), t); err {
|
||||||
|
case nil:
|
||||||
|
return http.StatusNoContent, err
|
||||||
|
case ErrForbidden:
|
||||||
|
return http.StatusForbidden, err
|
||||||
|
case ErrLocked:
|
||||||
|
return StatusLocked, err
|
||||||
|
case ErrNoSuchLock:
|
||||||
|
return http.StatusConflict, err
|
||||||
|
default:
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// OK
|
||||||
|
func (h *Handler) handlePropfind(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
ctx := r.Context()
|
||||||
|
ok, fi := isPathExist(ctx, fs, reqPath)
|
||||||
|
if !ok {
|
||||||
|
return http.StatusNotFound, err
|
||||||
|
}
|
||||||
|
|
||||||
|
depth := infiniteDepth
|
||||||
|
if hdr := r.Header.Get("Depth"); hdr != "" {
|
||||||
|
depth = parseDepth(hdr)
|
||||||
|
if depth == invalidDepth {
|
||||||
|
return http.StatusBadRequest, errInvalidDepth
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pf, status, err := readPropfind(r.Body)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
|
||||||
|
mw := multistatusWriter{w: w}
|
||||||
|
|
||||||
|
walkFn := func(reqPath string, info FileInfo, err error) error {
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
var pstats []Propstat
|
||||||
|
if pf.Propname != nil {
|
||||||
|
pnames, err := propnames(ctx, fs, h.LockSystem, info)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
pstat := Propstat{Status: http.StatusOK}
|
||||||
|
for _, xmlname := range pnames {
|
||||||
|
pstat.Props = append(pstat.Props, Property{XMLName: xmlname})
|
||||||
|
}
|
||||||
|
pstats = append(pstats, pstat)
|
||||||
|
} else if pf.Allprop != nil {
|
||||||
|
pstats, err = allprop(ctx, fs, h.LockSystem, info, pf.Prop)
|
||||||
|
} else {
|
||||||
|
pstats, err = props(ctx, fs, h.LockSystem, info, pf.Prop)
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
href := path.Join(h.Prefix, reqPath)
|
||||||
|
if href != "/" && info.IsDir() {
|
||||||
|
href += "/"
|
||||||
|
}
|
||||||
|
return mw.write(makePropstatResponse(href, pstats))
|
||||||
|
}
|
||||||
|
|
||||||
|
walkErr := walkFS(ctx, fs, depth, reqPath, fi, walkFn)
|
||||||
|
closeErr := mw.close()
|
||||||
|
if walkErr != nil {
|
||||||
|
return http.StatusInternalServerError, walkErr
|
||||||
|
}
|
||||||
|
if closeErr != nil {
|
||||||
|
return http.StatusInternalServerError, closeErr
|
||||||
|
}
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (h *Handler) handleProppatch(w http.ResponseWriter, r *http.Request, fs *FileSystem) (status int, err error) {
|
||||||
|
|
||||||
|
reqPath, status, err := h.stripPrefix(r.URL.Path)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
release, status, err := h.confirmLocks(r, reqPath, "", fs)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
defer release()
|
||||||
|
|
||||||
|
ctx := r.Context()
|
||||||
|
|
||||||
|
if exist, _ := isPathExist(ctx, fs, reqPath); !exist {
|
||||||
|
return http.StatusNotFound, nil
|
||||||
|
}
|
||||||
|
patches, status, err := readProppatch(r.Body)
|
||||||
|
if err != nil {
|
||||||
|
return status, err
|
||||||
|
}
|
||||||
|
pstats, err := patch(ctx, fs, h.LockSystem, reqPath, patches)
|
||||||
|
if err != nil {
|
||||||
|
return http.StatusInternalServerError, err
|
||||||
|
}
|
||||||
|
mw := multistatusWriter{w: w}
|
||||||
|
writeErr := mw.write(makePropstatResponse(r.URL.Path, pstats))
|
||||||
|
closeErr := mw.close()
|
||||||
|
if writeErr != nil {
|
||||||
|
return http.StatusInternalServerError, writeErr
|
||||||
|
}
|
||||||
|
if closeErr != nil {
|
||||||
|
return http.StatusInternalServerError, closeErr
|
||||||
|
}
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func makePropstatResponse(href string, pstats []Propstat) *response {
|
||||||
|
resp := response{
|
||||||
|
Href: []string{(&url.URL{Path: href}).EscapedPath()},
|
||||||
|
Propstat: make([]propstat, 0, len(pstats)),
|
||||||
|
}
|
||||||
|
for _, p := range pstats {
|
||||||
|
var xmlErr *xmlError
|
||||||
|
if p.XMLError != "" {
|
||||||
|
xmlErr = &xmlError{InnerXML: []byte(p.XMLError)}
|
||||||
|
}
|
||||||
|
resp.Propstat = append(resp.Propstat, propstat{
|
||||||
|
Status: fmt.Sprintf("HTTP/1.1 %d %s", p.Status, StatusText(p.Status)),
|
||||||
|
Prop: p.Props,
|
||||||
|
ResponseDescription: p.ResponseDescription,
|
||||||
|
Error: xmlErr,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
return &resp
|
||||||
|
}
|
||||||
|
|
||||||
|
const (
|
||||||
|
infiniteDepth = -1
|
||||||
|
invalidDepth = -2
|
||||||
|
)
|
||||||
|
|
||||||
|
// parseDepth maps the strings "0", "1" and "infinity" to 0, 1 and
|
||||||
|
// infiniteDepth. Parsing any other string returns invalidDepth.
|
||||||
|
//
|
||||||
|
// Different WebDAV methods have further constraints on valid depths:
|
||||||
|
// - PROPFIND has no further restrictions, as per section 9.1.
|
||||||
|
// - COPY accepts only "0" or "infinity", as per section 9.8.3.
|
||||||
|
// - MOVE accepts only "infinity", as per section 9.9.2.
|
||||||
|
// - LOCK accepts only "0" or "infinity", as per section 9.10.3.
|
||||||
|
// These constraints are enforced by the handleXxx methods.
|
||||||
|
func parseDepth(s string) int {
|
||||||
|
switch s {
|
||||||
|
case "0":
|
||||||
|
return 0
|
||||||
|
case "1":
|
||||||
|
return 1
|
||||||
|
case "infinity":
|
||||||
|
return infiniteDepth
|
||||||
|
}
|
||||||
|
return invalidDepth
|
||||||
|
}
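
An illustrative test (assumed to sit in the same webdav package, not part of the original change) of how Depth header values map to the constants above; note that parsing is case-sensitive.

package webdav

import "testing"

func TestParseDepthSketch(t *testing.T) {
    // "0", "1" and "infinity" are the only recognized Depth header values.
    cases := map[string]int{
        "0":        0,
        "1":        1,
        "infinity": infiniteDepth,
        "Infinity": invalidDepth, // case-sensitive
        "":         invalidDepth,
    }
    for in, want := range cases {
        if got := parseDepth(in); got != want {
            t.Fatalf("parseDepth(%q) = %d, want %d", in, got, want)
        }
    }
}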
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#status.code.extensions.to.http11
|
||||||
|
const (
|
||||||
|
StatusMulti = 207
|
||||||
|
StatusUnprocessableEntity = 422
|
||||||
|
StatusLocked = 423
|
||||||
|
StatusFailedDependency = 424
|
||||||
|
StatusInsufficientStorage = 507
|
||||||
|
)
|
||||||
|
|
||||||
|
func StatusText(code int) string {
|
||||||
|
switch code {
|
||||||
|
case StatusMulti:
|
||||||
|
return "Multi-Status"
|
||||||
|
case StatusUnprocessableEntity:
|
||||||
|
return "Unprocessable Entity"
|
||||||
|
case StatusLocked:
|
||||||
|
return "Locked"
|
||||||
|
case StatusFailedDependency:
|
||||||
|
return "Failed Dependency"
|
||||||
|
case StatusInsufficientStorage:
|
||||||
|
return "Insufficient Storage"
|
||||||
|
}
|
||||||
|
return http.StatusText(code)
|
||||||
|
}
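
A small sketch, again as a test assumed to live in the same package, showing that the RFC 4918 extension codes get their own text while everything else falls back to net/http:

package webdav

import "testing"

func TestStatusTextSketch(t *testing.T) {
    // WebDAV extension codes use the RFC 4918 names...
    if got := StatusText(StatusLocked); got != "Locked" {
        t.Fatalf("got %q", got)
    }
    // ...and any other code falls back to http.StatusText.
    if got := StatusText(404); got != "Not Found" {
        t.Fatalf("got %q", got)
    }
}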
|
||||||
|
|
||||||
|
var (
|
||||||
|
errDestinationEqualsSource = errors.New("webdav: destination equals source")
|
||||||
|
errDirectoryNotEmpty = errors.New("webdav: directory not empty")
|
||||||
|
errInvalidDepth = errors.New("webdav: invalid depth")
|
||||||
|
errInvalidDestination = errors.New("webdav: invalid destination")
|
||||||
|
errInvalidIfHeader = errors.New("webdav: invalid If header")
|
||||||
|
errInvalidLockInfo = errors.New("webdav: invalid lock info")
|
||||||
|
errInvalidLockToken = errors.New("webdav: invalid lock token")
|
||||||
|
errInvalidPropfind = errors.New("webdav: invalid propfind")
|
||||||
|
errInvalidProppatch = errors.New("webdav: invalid proppatch")
|
||||||
|
errInvalidResponse = errors.New("webdav: invalid response")
|
||||||
|
errInvalidTimeout = errors.New("webdav: invalid timeout")
|
||||||
|
errNoFileSystem = errors.New("webdav: no file system")
|
||||||
|
errNoLockSystem = errors.New("webdav: no lock system")
|
||||||
|
errNotADirectory = errors.New("webdav: not a directory")
|
||||||
|
errPrefixMismatch = errors.New("webdav: prefix mismatch")
|
||||||
|
errRecursionTooDeep = errors.New("webdav: recursion too deep")
|
||||||
|
errUnsupportedLockInfo = errors.New("webdav: unsupported lock info")
|
||||||
|
errUnsupportedMethod = errors.New("webdav: unsupported method")
|
||||||
|
)
|
519 server/webdav/xml.go Normal file
@ -0,0 +1,519 @@
|
|||||||
|
// Copyright 2014 The Go Authors. All rights reserved.
|
||||||
|
// Use of this source code is governed by a BSD-style
|
||||||
|
// license that can be found in the LICENSE file.
|
||||||
|
|
||||||
|
package webdav
|
||||||
|
|
||||||
|
// The XML encoding is covered by Section 14.
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#xml.element.definitions
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"encoding/xml"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"net/http"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
// As of https://go-review.googlesource.com/#/c/12772/ which was submitted
|
||||||
|
// in July 2015, this package uses an internal fork of the standard
|
||||||
|
// library's encoding/xml package, due to changes in the way namespaces
|
||||||
|
// were encoded. Such changes were introduced in the Go 1.5 cycle, but were
|
||||||
|
// rolled back in response to https://github.com/golang/go/issues/11841
|
||||||
|
//
|
||||||
|
// However, this package's exported API, specifically the Property and
|
||||||
|
// DeadPropsHolder types, need to refer to the standard library's version
|
||||||
|
// of the xml.Name type, as code that imports this package cannot refer to
|
||||||
|
// the internal version.
|
||||||
|
//
|
||||||
|
// This file therefore imports both the internal and external versions, as
|
||||||
|
// ixml and xml, and converts between them.
|
||||||
|
//
|
||||||
|
// In the long term, this package should use the standard library's version
|
||||||
|
// only, and the internal fork deleted, once
|
||||||
|
// https://github.com/golang/go/issues/13400 is resolved.
|
||||||
|
ixml "github.com/Xhofe/alist/server/webdav/internal/xml"
|
||||||
|
)
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_lockinfo
|
||||||
|
type lockInfo struct {
|
||||||
|
XMLName ixml.Name `xml:"lockinfo"`
|
||||||
|
Exclusive *struct{} `xml:"lockscope>exclusive"`
|
||||||
|
Shared *struct{} `xml:"lockscope>shared"`
|
||||||
|
Write *struct{} `xml:"locktype>write"`
|
||||||
|
Owner owner `xml:"owner"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_owner
|
||||||
|
type owner struct {
|
||||||
|
InnerXML string `xml:",innerxml"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func readLockInfo(r io.Reader) (li lockInfo, status int, err error) {
|
||||||
|
c := &countingReader{r: r}
|
||||||
|
if err = ixml.NewDecoder(c).Decode(&li); err != nil {
|
||||||
|
if err == io.EOF {
|
||||||
|
if c.n == 0 {
|
||||||
|
// An empty body means to refresh the lock.
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#refreshing-locks
|
||||||
|
return lockInfo{}, 0, nil
|
||||||
|
}
|
||||||
|
err = errInvalidLockInfo
|
||||||
|
}
|
||||||
|
return lockInfo{}, http.StatusBadRequest, err
|
||||||
|
}
|
||||||
|
// We only support exclusive (non-shared) write locks. In practice, these are
|
||||||
|
// the only types of locks that seem to matter.
|
||||||
|
if li.Exclusive == nil || li.Shared != nil || li.Write == nil {
|
||||||
|
return lockInfo{}, http.StatusNotImplemented, errUnsupportedLockInfo
|
||||||
|
}
|
||||||
|
return li, 0, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type countingReader struct {
|
||||||
|
n int
|
||||||
|
r io.Reader
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c *countingReader) Read(p []byte) (int, error) {
|
||||||
|
n, err := c.r.Read(p)
|
||||||
|
c.n += n
|
||||||
|
return n, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func writeLockInfo(w io.Writer, token string, ld LockDetails) (int, error) {
|
||||||
|
depth := "infinity"
|
||||||
|
if ld.ZeroDepth {
|
||||||
|
depth = "0"
|
||||||
|
}
|
||||||
|
timeout := ld.Duration / time.Second
|
||||||
|
return fmt.Fprintf(w, "<?xml version=\"1.0\" encoding=\"utf-8\"?>\n"+
|
||||||
|
"<D:prop xmlns:D=\"DAV:\"><D:lockdiscovery><D:activelock>\n"+
|
||||||
|
" <D:locktype><D:write/></D:locktype>\n"+
|
||||||
|
" <D:lockscope><D:exclusive/></D:lockscope>\n"+
|
||||||
|
" <D:depth>%s</D:depth>\n"+
|
||||||
|
" <D:owner>%s</D:owner>\n"+
|
||||||
|
" <D:timeout>Second-%d</D:timeout>\n"+
|
||||||
|
" <D:locktoken><D:href>%s</D:href></D:locktoken>\n"+
|
||||||
|
" <D:lockroot><D:href>%s</D:href></D:lockroot>\n"+
|
||||||
|
"</D:activelock></D:lockdiscovery></D:prop>",
|
||||||
|
depth, ld.OwnerXML, timeout, escape(token), escape(ld.Root),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
func escape(s string) string {
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
switch s[i] {
|
||||||
|
case '"', '&', '\'', '<', '>':
|
||||||
|
b := bytes.NewBuffer(nil)
|
||||||
|
ixml.EscapeText(b, []byte(s))
|
||||||
|
return b.String()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
|
||||||
|
// Next returns the next token, if any, in the XML stream of d.
|
||||||
|
// RFC 4918 requires to ignore comments, processing instructions
|
||||||
|
// and directives.
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#property_values
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#xml-extensibility
|
||||||
|
func next(d *ixml.Decoder) (ixml.Token, error) {
|
||||||
|
for {
|
||||||
|
t, err := d.Token()
|
||||||
|
if err != nil {
|
||||||
|
return t, err
|
||||||
|
}
|
||||||
|
switch t.(type) {
|
||||||
|
case ixml.Comment, ixml.Directive, ixml.ProcInst:
|
||||||
|
continue
|
||||||
|
default:
|
||||||
|
return t, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_prop (for propfind)
|
||||||
|
type propfindProps []xml.Name
|
||||||
|
|
||||||
|
// UnmarshalXML appends the property names enclosed within start to pn.
|
||||||
|
//
|
||||||
|
// It returns an error if start does not contain any properties or if
|
||||||
|
// properties contain values. Character data between properties is ignored.
|
||||||
|
func (pn *propfindProps) UnmarshalXML(d *ixml.Decoder, start ixml.StartElement) error {
|
||||||
|
for {
|
||||||
|
t, err := next(d)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
switch t.(type) {
|
||||||
|
case ixml.EndElement:
|
||||||
|
if len(*pn) == 0 {
|
||||||
|
return fmt.Errorf("%s must not be empty", start.Name.Local)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case ixml.StartElement:
|
||||||
|
name := t.(ixml.StartElement).Name
|
||||||
|
t, err = next(d)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if _, ok := t.(ixml.EndElement); !ok {
|
||||||
|
return fmt.Errorf("unexpected token %T", t)
|
||||||
|
}
|
||||||
|
*pn = append(*pn, xml.Name(name))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_propfind
|
||||||
|
type propfind struct {
|
||||||
|
XMLName ixml.Name `xml:"DAV: propfind"`
|
||||||
|
Allprop *struct{} `xml:"DAV: allprop"`
|
||||||
|
Propname *struct{} `xml:"DAV: propname"`
|
||||||
|
Prop propfindProps `xml:"DAV: prop"`
|
||||||
|
Include propfindProps `xml:"DAV: include"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func readPropfind(r io.Reader) (pf propfind, status int, err error) {
|
||||||
|
c := countingReader{r: r}
|
||||||
|
if err = ixml.NewDecoder(&c).Decode(&pf); err != nil {
|
||||||
|
if err == io.EOF {
|
||||||
|
if c.n == 0 {
|
||||||
|
// An empty body means to propfind allprop.
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#METHOD_PROPFIND
|
||||||
|
return propfind{Allprop: new(struct{})}, 0, nil
|
||||||
|
}
|
||||||
|
err = errInvalidPropfind
|
||||||
|
}
|
||||||
|
return propfind{}, http.StatusBadRequest, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if pf.Allprop == nil && pf.Include != nil {
|
||||||
|
return propfind{}, http.StatusBadRequest, errInvalidPropfind
|
||||||
|
}
|
||||||
|
if pf.Allprop != nil && (pf.Prop != nil || pf.Propname != nil) {
|
||||||
|
return propfind{}, http.StatusBadRequest, errInvalidPropfind
|
||||||
|
}
|
||||||
|
if pf.Prop != nil && pf.Propname != nil {
|
||||||
|
return propfind{}, http.StatusBadRequest, errInvalidPropfind
|
||||||
|
}
|
||||||
|
if pf.Propname == nil && pf.Allprop == nil && pf.Prop == nil {
|
||||||
|
return propfind{}, http.StatusBadRequest, errInvalidPropfind
|
||||||
|
}
|
||||||
|
return pf, 0, nil
|
||||||
|
}
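
A hedged example of request bodies readPropfind accepts, written as a test assumed to live in the same package: a PROPFIND naming two properties, and an empty body, which is treated as allprop per RFC 4918.

package webdav

import (
    "strings"
    "testing"
)

func TestReadPropfindSketch(t *testing.T) {
    // A PROPFIND asking for two named properties.
    body := `<?xml version="1.0" encoding="utf-8"?>
<D:propfind xmlns:D="DAV:">
  <D:prop>
    <D:displayname/>
    <D:getcontentlength/>
  </D:prop>
</D:propfind>`
    pf, status, err := readPropfind(strings.NewReader(body))
    if err != nil || status != 0 || len(pf.Prop) != 2 {
        t.Fatalf("named props: %+v %d %v", pf, status, err)
    }

    // An empty body means allprop.
    pf, status, err = readPropfind(strings.NewReader(""))
    if err != nil || status != 0 || pf.Allprop == nil {
        t.Fatalf("empty body: %+v %d %v", pf, status, err)
    }
}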
|
||||||
|
|
||||||
|
// Property represents a single DAV resource property as defined in RFC 4918.
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#data.model.for.resource.properties
|
||||||
|
type Property struct {
|
||||||
|
// XMLName is the fully qualified name that identifies this property.
|
||||||
|
XMLName xml.Name
|
||||||
|
|
||||||
|
// Lang is an optional xml:lang attribute.
|
||||||
|
Lang string `xml:"xml:lang,attr,omitempty"`
|
||||||
|
|
||||||
|
// InnerXML contains the XML representation of the property value.
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#property_values
|
||||||
|
//
|
||||||
|
// Property values of complex type or mixed-content must have fully
|
||||||
|
// expanded XML namespaces or be self-contained with according
|
||||||
|
// XML namespace declarations. They must not rely on any XML
|
||||||
|
// namespace declarations within the scope of the XML document,
|
||||||
|
// even including the DAV: namespace.
|
||||||
|
InnerXML []byte `xml:",innerxml"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ixmlProperty is the same as the Property type except it holds an ixml.Name
|
||||||
|
// instead of an xml.Name.
|
||||||
|
type ixmlProperty struct {
|
||||||
|
XMLName ixml.Name
|
||||||
|
Lang string `xml:"xml:lang,attr,omitempty"`
|
||||||
|
InnerXML []byte `xml:",innerxml"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_error
|
||||||
|
// See multistatusWriter for the "D:" namespace prefix.
|
||||||
|
type xmlError struct {
|
||||||
|
XMLName ixml.Name `xml:"D:error"`
|
||||||
|
InnerXML []byte `xml:",innerxml"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_propstat
|
||||||
|
// See multistatusWriter for the "D:" namespace prefix.
|
||||||
|
type propstat struct {
|
||||||
|
Prop []Property `xml:"D:prop>_ignored_"`
|
||||||
|
Status string `xml:"D:status"`
|
||||||
|
Error *xmlError `xml:"D:error"`
|
||||||
|
ResponseDescription string `xml:"D:responsedescription,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ixmlPropstat is the same as the propstat type except it holds an ixml.Name
|
||||||
|
// instead of an xml.Name.
|
||||||
|
type ixmlPropstat struct {
|
||||||
|
Prop []ixmlProperty `xml:"D:prop>_ignored_"`
|
||||||
|
Status string `xml:"D:status"`
|
||||||
|
Error *xmlError `xml:"D:error"`
|
||||||
|
ResponseDescription string `xml:"D:responsedescription,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// MarshalXML prepends the "D:" namespace prefix on properties in the DAV: namespace
|
||||||
|
// before encoding. See multistatusWriter.
|
||||||
|
func (ps propstat) MarshalXML(e *ixml.Encoder, start ixml.StartElement) error {
|
||||||
|
// Convert from a propstat to an ixmlPropstat.
|
||||||
|
ixmlPs := ixmlPropstat{
|
||||||
|
Prop: make([]ixmlProperty, len(ps.Prop)),
|
||||||
|
Status: ps.Status,
|
||||||
|
Error: ps.Error,
|
||||||
|
ResponseDescription: ps.ResponseDescription,
|
||||||
|
}
|
||||||
|
for k, prop := range ps.Prop {
|
||||||
|
ixmlPs.Prop[k] = ixmlProperty{
|
||||||
|
XMLName: ixml.Name(prop.XMLName),
|
||||||
|
Lang: prop.Lang,
|
||||||
|
InnerXML: prop.InnerXML,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for k, prop := range ixmlPs.Prop {
|
||||||
|
if prop.XMLName.Space == "DAV:" {
|
||||||
|
prop.XMLName = ixml.Name{Space: "", Local: "D:" + prop.XMLName.Local}
|
||||||
|
ixmlPs.Prop[k] = prop
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Distinct type to avoid infinite recursion of MarshalXML.
|
||||||
|
type newpropstat ixmlPropstat
|
||||||
|
return e.EncodeElement(newpropstat(ixmlPs), start)
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_response
|
||||||
|
// See multistatusWriter for the "D:" namespace prefix.
|
||||||
|
type response struct {
|
||||||
|
XMLName ixml.Name `xml:"D:response"`
|
||||||
|
Href []string `xml:"D:href"`
|
||||||
|
Propstat []propstat `xml:"D:propstat"`
|
||||||
|
Status string `xml:"D:status,omitempty"`
|
||||||
|
Error *xmlError `xml:"D:error"`
|
||||||
|
ResponseDescription string `xml:"D:responsedescription,omitempty"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// MultistatusWriter marshals one or more Responses into a XML
|
||||||
|
// multistatus response.
|
||||||
|
// See http://www.webdav.org/specs/rfc4918.html#ELEMENT_multistatus
|
||||||
|
// TODO(rsto, mpl): As a workaround, the "D:" namespace prefix, defined as
|
||||||
|
// "DAV:" on this element, is prepended on the nested response, as well as on all
|
||||||
|
// its nested elements. All property names in the DAV: namespace are prefixed as
|
||||||
|
// well. This is because some versions of Mini-Redirector (on windows 7) ignore
|
||||||
|
// elements with a default namespace (no prefixed namespace). A less intrusive fix
|
||||||
|
// should be possible after golang.org/cl/11074. See https://golang.org/issue/11177
|
||||||
|
type multistatusWriter struct {
|
||||||
|
// ResponseDescription contains the optional responsedescription
|
||||||
|
// of the multistatus XML element. Only the latest content before
|
||||||
|
// close will be emitted. Empty response descriptions are not
|
||||||
|
// written.
|
||||||
|
responseDescription string
|
||||||
|
|
||||||
|
w http.ResponseWriter
|
||||||
|
enc *ixml.Encoder
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write validates and emits a DAV response as part of a multistatus response
|
||||||
|
// element.
|
||||||
|
//
|
||||||
|
// It sets the HTTP status code of its underlying http.ResponseWriter to 207
|
||||||
|
// (Multi-Status) and populates the Content-Type header. If r is the
|
||||||
|
// first, valid response to be written, Write prepends the XML representation
|
||||||
|
// of r with a multistatus tag. Callers must call close after the last response
|
||||||
|
// has been written.
|
||||||
|
func (w *multistatusWriter) write(r *response) error {
|
||||||
|
switch len(r.Href) {
|
||||||
|
case 0:
|
||||||
|
return errInvalidResponse
|
||||||
|
case 1:
|
||||||
|
if len(r.Propstat) > 0 != (r.Status == "") {
|
||||||
|
return errInvalidResponse
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
if len(r.Propstat) > 0 || r.Status == "" {
|
||||||
|
return errInvalidResponse
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err := w.writeHeader()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
return w.enc.Encode(r)
|
||||||
|
}
|
||||||
|
|
||||||
|
// writeHeader writes a XML multistatus start element on w's underlying
|
||||||
|
// http.ResponseWriter and returns the result of the write operation.
|
||||||
|
// After the first write attempt, writeHeader becomes a no-op.
|
||||||
|
func (w *multistatusWriter) writeHeader() error {
|
||||||
|
if w.enc != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
w.w.Header().Add("Content-Type", "text/xml; charset=utf-8")
|
||||||
|
w.w.WriteHeader(StatusMulti)
|
||||||
|
_, err := fmt.Fprintf(w.w, `<?xml version="1.0" encoding="UTF-8"?>`)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
w.enc = ixml.NewEncoder(w.w)
|
||||||
|
return w.enc.EncodeToken(ixml.StartElement{
|
||||||
|
Name: ixml.Name{
|
||||||
|
Space: "DAV:",
|
||||||
|
Local: "multistatus",
|
||||||
|
},
|
||||||
|
Attr: []ixml.Attr{{
|
||||||
|
Name: ixml.Name{Space: "xmlns", Local: "D"},
|
||||||
|
Value: "DAV:",
|
||||||
|
}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close completes the marshalling of the multistatus response. It returns
|
||||||
|
// an error if the multistatus response could not be completed. If both the
|
||||||
|
// return value and field enc of w are nil, then no multistatus response has
|
||||||
|
// been written.
|
||||||
|
func (w *multistatusWriter) close() error {
|
||||||
|
if w.enc == nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
var end []ixml.Token
|
||||||
|
if w.responseDescription != "" {
|
||||||
|
name := ixml.Name{Space: "DAV:", Local: "responsedescription"}
|
||||||
|
end = append(end,
|
||||||
|
ixml.StartElement{Name: name},
|
||||||
|
ixml.CharData(w.responseDescription),
|
||||||
|
ixml.EndElement{Name: name},
|
||||||
|
)
|
||||||
|
}
|
||||||
|
end = append(end, ixml.EndElement{
|
||||||
|
Name: ixml.Name{Space: "DAV:", Local: "multistatus"},
|
||||||
|
})
|
||||||
|
for _, t := range end {
|
||||||
|
err := w.enc.EncodeToken(t)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return w.enc.Flush()
|
||||||
|
}
|
||||||
|
|
||||||
|
var xmlLangName = ixml.Name{Space: "http://www.w3.org/XML/1998/namespace", Local: "lang"}
|
||||||
|
|
||||||
|
func xmlLang(s ixml.StartElement, d string) string {
|
||||||
|
for _, attr := range s.Attr {
|
||||||
|
if attr.Name == xmlLangName {
|
||||||
|
return attr.Value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return d
|
||||||
|
}
|
||||||
|
|
||||||
|
type xmlValue []byte
|
||||||
|
|
||||||
|
func (v *xmlValue) UnmarshalXML(d *ixml.Decoder, start ixml.StartElement) error {
|
||||||
|
// The XML value of a property can be arbitrary, mixed-content XML.
|
||||||
|
// To make sure that the unmarshalled value contains all required
|
||||||
|
// namespaces, we encode all the property value XML tokens into a
|
||||||
|
// buffer. This forces the encoder to redeclare any used namespaces.
|
||||||
|
var b bytes.Buffer
|
||||||
|
e := ixml.NewEncoder(&b)
|
||||||
|
for {
|
||||||
|
t, err := next(d)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if e, ok := t.(ixml.EndElement); ok && e.Name == start.Name {
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err = e.EncodeToken(t); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
err := e.Flush()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*v = b.Bytes()
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_prop (for proppatch)
|
||||||
|
type proppatchProps []Property
|
||||||
|
|
||||||
|
// UnmarshalXML appends the property names and values enclosed within start
|
||||||
|
// to ps.
|
||||||
|
//
|
||||||
|
// An xml:lang attribute that is defined either on the DAV:prop or property
|
||||||
|
// name XML element is propagated to the property's Lang field.
|
||||||
|
//
|
||||||
|
// UnmarshalXML returns an error if start does not contain any properties or if
|
||||||
|
// property values contain syntactically incorrect XML.
|
||||||
|
func (ps *proppatchProps) UnmarshalXML(d *ixml.Decoder, start ixml.StartElement) error {
|
||||||
|
lang := xmlLang(start, "")
|
||||||
|
for {
|
||||||
|
t, err := next(d)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
switch elem := t.(type) {
|
||||||
|
case ixml.EndElement:
|
||||||
|
if len(*ps) == 0 {
|
||||||
|
return fmt.Errorf("%s must not be empty", start.Name.Local)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
case ixml.StartElement:
|
||||||
|
p := Property{
|
||||||
|
XMLName: xml.Name(t.(ixml.StartElement).Name),
|
||||||
|
Lang: xmlLang(t.(ixml.StartElement), lang),
|
||||||
|
}
|
||||||
|
err = d.DecodeElement(((*xmlValue)(&p.InnerXML)), &elem)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
*ps = append(*ps, p)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_set
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_remove
|
||||||
|
type setRemove struct {
|
||||||
|
XMLName ixml.Name
|
||||||
|
Lang string `xml:"xml:lang,attr,omitempty"`
|
||||||
|
Prop proppatchProps `xml:"DAV: prop"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// http://www.webdav.org/specs/rfc4918.html#ELEMENT_propertyupdate
|
||||||
|
type propertyupdate struct {
|
||||||
|
XMLName ixml.Name `xml:"DAV: propertyupdate"`
|
||||||
|
Lang string `xml:"xml:lang,attr,omitempty"`
|
||||||
|
SetRemove []setRemove `xml:",any"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func readProppatch(r io.Reader) (patches []Proppatch, status int, err error) {
|
||||||
|
var pu propertyupdate
|
||||||
|
if err = ixml.NewDecoder(r).Decode(&pu); err != nil {
|
||||||
|
return nil, http.StatusBadRequest, err
|
||||||
|
}
|
||||||
|
for _, op := range pu.SetRemove {
|
||||||
|
remove := false
|
||||||
|
switch op.XMLName {
|
||||||
|
case ixml.Name{Space: "DAV:", Local: "set"}:
|
||||||
|
// No-op.
|
||||||
|
case ixml.Name{Space: "DAV:", Local: "remove"}:
|
||||||
|
for _, p := range op.Prop {
|
||||||
|
if len(p.InnerXML) > 0 {
|
||||||
|
return nil, http.StatusBadRequest, errInvalidProppatch
|
||||||
|
}
|
||||||
|
}
|
||||||
|
remove = true
|
||||||
|
default:
|
||||||
|
return nil, http.StatusBadRequest, errInvalidProppatch
|
||||||
|
}
|
||||||
|
patches = append(patches, Proppatch{Remove: remove, Props: op.Prop})
|
||||||
|
}
|
||||||
|
return patches, 0, nil
|
||||||
|
}
|
59 utils/code.go Normal file
@ -0,0 +1,59 @@
package utils

import (
	"bytes"
	"golang.org/x/text/encoding/simplifiedchinese"
	"golang.org/x/text/transform"
	"io/ioutil"
	"unicode/utf8"
)

// IsGBK reports whether data looks like GBK-encoded text.
func IsGBK(data []byte) bool {
	length := len(data)
	var i = 0
	for i < length {
		if data[i] <= 0x7f {
			// 0~127 is a single-byte encoding, compatible with ASCII.
			i++
			continue
		} else {
			// Bytes above 127 use a two-byte encoding; the pair must fall
			// within the GBK range (and a second byte must actually exist).
			if i+1 < length &&
				data[i] >= 0x81 &&
				data[i] <= 0xfe &&
				data[i+1] >= 0x40 &&
				data[i+1] <= 0xfe &&
				data[i+1] != 0xf7 {
				i += 2
				continue
			} else {
				return false
			}
		}
	}
	return true
}

const (
	GBK     string = "GBK"
	UTF8    string = "UTF8"
	UNKNOWN string = "UNKNOWN"
)

// GetStrCoding guesses the encoding of data: UTF8, GBK or UNKNOWN.
func GetStrCoding(data []byte) string {
	if utf8.Valid(data) {
		return UTF8
	} else if IsGBK(data) {
		return GBK
	} else {
		return UNKNOWN
	}
}

// GbkToUtf8 converts a GBK-encoded byte slice to UTF-8.
func GbkToUtf8(s []byte) ([]byte, error) {
	reader := transform.NewReader(bytes.NewReader(s), simplifiedchinese.GBK.NewDecoder())
	d, e := ioutil.ReadAll(reader)
	if e != nil {
		return nil, e
	}
	return d, nil
}
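
A usage sketch of the helpers above (the input bytes are made up for illustration): detect the encoding and convert to UTF-8 when the data looks like GBK.

package main

import (
    "fmt"

    "github.com/Xhofe/alist/utils"
)

func main() {
    data := []byte("plain ascii is valid UTF-8")
    switch utils.GetStrCoding(data) {
    case utils.UTF8:
        fmt.Println(string(data))
    case utils.GBK:
        // Convert GBK bytes to UTF-8 before further use.
        if out, err := utils.GbkToUtf8(data); err == nil {
            fmt.Println(string(out))
        }
    default:
        fmt.Println("unknown encoding")
    }
}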
|
@ -88,3 +88,21 @@ func ParsePath(path string) string {
	}
	return path
}

func RemoveLastSlash(path string) string {
	if len(path) > 1 {
		return strings.TrimSuffix(path, "/")
	}
	return path
}

func Dir(path string) string {
	idx := strings.LastIndex(path, "/")
	if idx == 0 {
		return "/"
	}
	if idx == -1 {
		return path
	}
	return path[:idx]
}
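
A brief usage sketch of the two helpers added here, with the expected results in comments:

package main

import (
    "fmt"

    "github.com/Xhofe/alist/utils"
)

func main() {
    fmt.Println(utils.RemoveLastSlash("/a/b/")) // "/a/b"
    fmt.Println(utils.RemoveLastSlash("/"))     // "/" (a bare root is kept)
    fmt.Println(utils.Dir("/a/b"))              // "/a"
    fmt.Println(utils.Dir("/a"))                // "/"
    fmt.Println(utils.Dir("a"))                 // "a" (no slash: returned unchanged)
}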