forked from TrueCloudLab/rclone
acd: Download files >= 9GB with their tempLink direct from s3
This fixes the problem downloading files > 10GB. Fixes #204 Fixes #313
This commit is contained in:
parent 8ae424c5a3
commit d4df3f2154

2 changed files with 40 additions and 10 deletions
@@ -27,6 +27,7 @@ import (
 	"github.com/ncw/rclone/fs"
 	"github.com/ncw/rclone/oauthutil"
 	"github.com/ncw/rclone/pacer"
+	"github.com/spf13/pflag"
 	"golang.org/x/oauth2"
 )

@@ -44,6 +45,8 @@ const (

 // Globals
 var (
+	// Flags
+	tempLinkThreshold = fs.SizeSuffix(9 << 30) // Download files bigger than this via the tempLink
 	// Description of how to auth for this app
 	acdConfig = &oauth2.Config{
 		Scopes: []string{"clouddrive:read_all", "clouddrive:write"},

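For context: 9 << 30 is nine times 2^30 bytes, i.e. 9 GiB (9663676416 bytes, roughly 9.7 GB in decimal units), which keeps the default threshold just under the ~10GB limit the commit works around. A minimal sketch of that arithmetic:

package main

import "fmt"

func main() {
	// 1 << 30 is 2^30 bytes (1 GiB), so 9 << 30 is the 9 GiB default threshold.
	const tempLinkThreshold int64 = 9 << 30
	fmt.Println(tempLinkThreshold)       // 9663676416 bytes
	fmt.Println(tempLinkThreshold >> 30) // 9 (GiB)
}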
@@ -76,15 +79,17 @@ func init() {
 			Help: "Amazon Application Client Secret - leave blank normally.",
 		}},
 	})
+	pflag.VarP(&tempLinkThreshold, "acd-templink-threshold", "", "Files >= this size will be downloaded via their tempLink.")
 }

 // Fs represents a remote acd server
 type Fs struct {
 	name         string             // name of this remote
 	c            *acd.Client        // the connection to the acd server
+	noAuthClient *http.Client       // unauthenticated http client
 	root         string             // the path we are working on
 	dirCache     *dircache.DirCache // Map of directory path to directory id
 	pacer        *pacer.Pacer       // pacer for API calls
 }

 // Object describes a acd object

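pflag.VarP accepts any value implementing the pflag.Value interface (String, Set and Type), which is why &tempLinkThreshold can be registered directly; rclone's fs.SizeSuffix supplies those methods and also understands suffixed values such as "9G". The sketch below uses a hypothetical, simplified stand-in type called sizeFlag rather than the real fs.SizeSuffix:

package main

import (
	"fmt"
	"strconv"

	"github.com/spf13/pflag"
)

// sizeFlag is a hypothetical, simplified stand-in for fs.SizeSuffix: any type
// with String, Set and Type methods satisfies pflag.Value and can be passed
// to pflag.VarP.
type sizeFlag int64

func (s *sizeFlag) String() string { return strconv.FormatInt(int64(*s), 10) }
func (s *sizeFlag) Type() string   { return "size" }
func (s *sizeFlag) Set(v string) error {
	n, err := strconv.ParseInt(v, 10, 64)
	if err != nil {
		return err
	}
	*s = sizeFlag(n)
	return nil
}

func main() {
	threshold := sizeFlag(9 << 30) // default mirrors tempLinkThreshold (9 GiB)
	pflag.VarP(&threshold, "acd-templink-threshold", "", "Files >= this size will be downloaded via their tempLink.")
	pflag.Parse()
	fmt.Println("threshold in bytes:", int64(threshold))
}

Unlike the real flag, this stand-in only accepts a plain byte count.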
@@ -146,10 +151,11 @@ func NewFs(name, root string) (fs.Fs, error) {
 	c := acd.NewClient(oAuthClient)
 	c.UserAgent = fs.UserAgent
 	f := &Fs{
 		name:  name,
 		root:  root,
 		c:     c,
 		pacer: pacer.New().SetMinSleep(minSleep).SetPacer(pacer.AmazonCloudDrivePacer),
+		noAuthClient: fs.Config.Client(),
 	}

 	// Update endpoints

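The new noAuthClient field is initialised from fs.Config.Client(), a plain HTTP client without the OAuth transport, presumably because a tempLink is a pre-signed URL that carries its credentials in the query string and should not also receive an Authorization header. A minimal sketch of fetching such a URL with an unauthenticated client (the URL below is a placeholder, not a real tempLink):

package main

import (
	"io"
	"net/http"
	"os"
)

func main() {
	// Plain client with no OAuth transport, standing in for fs.Config.Client():
	// no Authorization header is attached to the request.
	noAuth := &http.Client{}

	// Placeholder pre-signed URL: the credentials live in the query string.
	resp, err := noAuth.Get("https://example-bucket.s3.amazonaws.com/object?X-Amz-Signature=placeholder")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	_, _ = io.Copy(os.Stdout, resp.Body)
}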
@@ -741,10 +747,18 @@ func (o *Object) Storable() bool {

 // Open an object for read
 func (o *Object) Open() (in io.ReadCloser, err error) {
+	bigObject := o.Size() >= int64(tempLinkThreshold)
+	if bigObject {
+		fs.Debug(o, "Downloading large object via tempLink")
+	}
 	file := acd.File{Node: o.info}
 	var resp *http.Response
 	err = o.fs.pacer.Call(func() (bool, error) {
-		in, resp, err = file.Open()
+		if !bigObject {
+			in, resp, err = file.Open()
+		} else {
+			in, resp, err = file.OpenTempURL(o.fs.noAuthClient)
+		}
 		return shouldRetry(resp, err)
 	})
 	return in, err

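The change keeps the existing pacer retry loop and only swaps which call runs inside it: objects below the threshold still use file.Open(), while larger ones fetch their tempLink through the unauthenticated client. A self-contained sketch of the size-based branch, with hypothetical helpers openDirect and openViaTempLink standing in for the acd library calls:

package main

import (
	"fmt"
	"io"
	"strings"
)

const tempLinkThreshold int64 = 9 << 30 // 9 GiB, matching the default above

// Hypothetical helpers standing in for acd.File.Open and acd.File.OpenTempURL.
func openDirect() (io.ReadCloser, error)      { return io.NopCloser(strings.NewReader("small")), nil }
func openViaTempLink() (io.ReadCloser, error) { return io.NopCloser(strings.NewReader("big")), nil }

// open mirrors the branch added to Object.Open: pick the download path by size.
func open(size int64) (io.ReadCloser, error) {
	if size >= tempLinkThreshold {
		fmt.Println("Downloading large object via tempLink")
		return openViaTempLink()
	}
	return openDirect()
}

func main() {
	in, err := open(10 << 30) // a 10 GiB object takes the tempLink path
	if err != nil {
		panic(err)
	}
	defer in.Close()
	_, _ = io.Copy(io.Discard, in)
}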
@@ -96,6 +96,22 @@
 don't provide an API to permanently delete files, nor to empty the
 trash, so you will have to do that with one of Amazon's apps or via
 the Amazon cloud drive website.
+
+### Specific options ###
+
+Here are the command line options specific to this cloud storage
+system.
+
+#### --acd-templink-threshold=SIZE ####
+
+Files this size or more will be downloaded via their `tempLink`. This
+is to work around a problem with Amazon Cloud Drive which blocks
+downloads of files bigger than about 10GB. The default for this is
+9GB which shouldn't need to be changed.
+
+To download files above this threshold, rclone requests a `tempLink`
+which downloads the file through a temporary URL directly from the
+underlying S3 storage.
+
 ### Limitations ###

 Note that Amazon cloud drive is case insensitive so you can't have a