forked from TrueCloudLab/rclone
s3: retry RequestTimeout errors
See: https://forum.rclone.org/t/s3-failed-upload-large-files-bad-request-400/27695
This commit is contained in:
parent
4ac875a811
commit
fa48b880c2
1 changed file with 4 additions and 0 deletions
|
@ -2121,6 +2121,10 @@ func (f *Fs) shouldRetry(ctx context.Context, err error) (bool, error) {
|
||||||
if fserrors.ShouldRetry(awsError.OrigErr()) {
|
if fserrors.ShouldRetry(awsError.OrigErr()) {
|
||||||
return true, err
|
return true, err
|
||||||
}
|
}
|
||||||
|
// If it is a timeout then we want to retry that
|
||||||
|
if awsError.Code() == "RequestTimeout" {
|
||||||
|
return true, err
|
||||||
|
}
|
||||||
// Failing that, if it's a RequestFailure it's probably got an http status code we can check
|
// Failing that, if it's a RequestFailure it's probably got an http status code we can check
|
||||||
if reqErr, ok := err.(awserr.RequestFailure); ok {
|
if reqErr, ok := err.(awserr.RequestFailure); ok {
|
||||||
// 301 if wrong region for bucket - can only update if running from a bucket
|
// 301 if wrong region for bucket - can only update if running from a bucket
|
||||||
|
|
Loading…
Add table
Reference in a new issue