forked from TrueCloudLab/rclone
swift: ensure partially uploaded large files are removed unless --swift-leave-parts-on-error
This makes sure that partially uploaded large files are removed unless the `--swift-leave-parts-on-error` flag is supplied. - refactor swift.go - add unit test for swift with chunk - add unit test for large object with fail case - add "-" to the whitelist of characters during encoding.
This commit is contained in:
parent
6272ca74bc
commit
671dd047f7
2 changed files with 152 additions and 87 deletions
|
@ -4,15 +4,19 @@ package swift
|
|||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
|
||||
"github.com/ncw/swift"
|
||||
"github.com/rclone/rclone/fs"
|
||||
"github.com/rclone/rclone/fs/hash"
|
||||
"github.com/rclone/rclone/fs/object"
|
||||
"github.com/rclone/rclone/fstest"
|
||||
"github.com/rclone/rclone/fstest/fstests"
|
||||
"github.com/rclone/rclone/lib/random"
|
||||
"github.com/rclone/rclone/lib/readers"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
@ -74,6 +78,80 @@ func (f *Fs) testNoChunk(t *testing.T) {
|
|||
// Additional tests that aren't in the framework
|
||||
func (f *Fs) InternalTest(t *testing.T) {
|
||||
t.Run("NoChunk", f.testNoChunk)
|
||||
t.Run("WithChunk", f.testWithChunk)
|
||||
t.Run("WithChunkFail", f.testWithChunkFail)
|
||||
}
|
||||
|
||||
func (f *Fs) testWithChunk(t *testing.T) {
|
||||
preConfChunkSize := f.opt.ChunkSize
|
||||
preConfChunk := f.opt.NoChunk
|
||||
f.opt.NoChunk = false
|
||||
f.opt.ChunkSize = 1024 * fs.Byte
|
||||
defer func() {
|
||||
//restore old config after test
|
||||
f.opt.ChunkSize = preConfChunkSize
|
||||
f.opt.NoChunk = preConfChunk
|
||||
}()
|
||||
|
||||
file := fstest.Item{
|
||||
ModTime: fstest.Time("2020-12-31T04:05:06.499999999Z"),
|
||||
Path: "piped data chunk.txt",
|
||||
Size: -1, // use unknown size during upload
|
||||
}
|
||||
const contentSize = 2048
|
||||
contents := random.String(contentSize)
|
||||
buf := bytes.NewBufferString(contents)
|
||||
uploadHash := hash.NewMultiHasher()
|
||||
in := io.TeeReader(buf, uploadHash)
|
||||
|
||||
file.Size = -1
|
||||
obji := object.NewStaticObjectInfo(file.Path, file.ModTime, file.Size, true, nil, nil)
|
||||
ctx := context.TODO()
|
||||
obj, err := f.Features().PutStream(ctx, in, obji)
|
||||
require.NoError(t, err)
|
||||
require.NotEmpty(t, obj)
|
||||
}
|
||||
|
||||
func (f *Fs) testWithChunkFail(t *testing.T) {
|
||||
preConfChunkSize := f.opt.ChunkSize
|
||||
preConfChunk := f.opt.NoChunk
|
||||
f.opt.NoChunk = false
|
||||
f.opt.ChunkSize = 1024 * fs.Byte
|
||||
segmentContainer := f.root + "_segments"
|
||||
defer func() {
|
||||
//restore config
|
||||
f.opt.ChunkSize = preConfChunkSize
|
||||
f.opt.NoChunk = preConfChunk
|
||||
}()
|
||||
path := "piped data chunk with error.txt"
|
||||
file := fstest.Item{
|
||||
ModTime: fstest.Time("2021-01-04T03:46:00.499999999Z"),
|
||||
Path: path,
|
||||
Size: -1, // use unknown size during upload
|
||||
}
|
||||
const contentSize = 4096
|
||||
const errPosition = 3072
|
||||
contents := random.String(contentSize)
|
||||
buf := bytes.NewBufferString(contents[:errPosition])
|
||||
errMessage := "potato"
|
||||
er := &readers.ErrorReader{Err: errors.New(errMessage)}
|
||||
in := ioutil.NopCloser(io.MultiReader(buf, er))
|
||||
|
||||
file.Size = contentSize
|
||||
obji := object.NewStaticObjectInfo(file.Path, file.ModTime, file.Size, true, nil, nil)
|
||||
ctx := context.TODO()
|
||||
_, err := f.Features().PutStream(ctx, in, obji)
|
||||
// error is potato
|
||||
require.NotNil(t, err)
|
||||
require.Equal(t, errMessage, err.Error())
|
||||
_, _, err = f.c.Object(f.rootContainer, path)
|
||||
assert.Equal(t, swift.ObjectNotFound, err)
|
||||
prefix := path
|
||||
objs, err := f.c.Objects(segmentContainer, &swift.ObjectsOpts{
|
||||
Prefix: prefix,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
require.Empty(t, objs)
|
||||
}
|
||||
|
||||
// Check the interfaces are satisfied: *Fs must implement
// fstests.InternalTester so the framework calls InternalTest.
var _ fstests.InternalTester = (*Fs)(nil)
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue