Merge pull request #1131 from restic/test-cleanups

Rework withTestEnvironment
Alexander Neumann 2017-07-26 22:06:06 +02:00
commit b6790c491b
4 changed files with 759 additions and 735 deletions
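The commit replaces the callback-style test helper with a constructor that returns the environment together with a cleanup closure, and moves the GlobalOptions onto the testEnvironment struct as env.gopts. A minimal sketch of the two calling conventions, based on the signatures that appear in the diffs below (the test body lines are illustrative only):

// Before: the helper ran the test body as a callback and removed the
// temporary directory after the callback returned.
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) {
	testRunInit(t, gopts)
	// ... test body ...
})

// After: the helper returns the environment plus a cleanup function to defer,
// and the options travel with the environment as env.gopts.
env, cleanup := withTestEnvironment(t)
defer cleanup()
testRunInit(t, env.gopts)
// ... test body ...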


@ -142,44 +142,45 @@ func TestMount(t *testing.T) {
t.Skip("Skipping fuse tests") t.Skip("Skipping fuse tests")
} }
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
repo, err := OpenRepository(gopts) testRunInit(t, env.gopts)
repo, err := OpenRepository(env.gopts)
OK(t, err) OK(t, err)
// We remove the mountpoint now to check that cmdMount creates it // We remove the mountpoint now to check that cmdMount creates it
RemoveAll(t, env.mountpoint) RemoveAll(t, env.mountpoint)
checkSnapshots(t, gopts, repo, env.mountpoint, env.repo, []restic.ID{}) checkSnapshots(t, env.gopts, repo, env.mountpoint, env.repo, []restic.ID{})
SetupTarTestFixture(t, env.testdata, filepath.Join("testdata", "backup-data.tar.gz")) SetupTarTestFixture(t, env.testdata, filepath.Join("testdata", "backup-data.tar.gz"))
// first backup // first backup
testRunBackup(t, []string{env.testdata}, BackupOptions{}, gopts) testRunBackup(t, []string{env.testdata}, BackupOptions{}, env.gopts)
snapshotIDs := testRunList(t, "snapshots", gopts) snapshotIDs := testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 1, Assert(t, len(snapshotIDs) == 1,
"expected one snapshot, got %v", snapshotIDs) "expected one snapshot, got %v", snapshotIDs)
checkSnapshots(t, gopts, repo, env.mountpoint, env.repo, snapshotIDs) checkSnapshots(t, env.gopts, repo, env.mountpoint, env.repo, snapshotIDs)
// second backup, implicit incremental // second backup, implicit incremental
testRunBackup(t, []string{env.testdata}, BackupOptions{}, gopts) testRunBackup(t, []string{env.testdata}, BackupOptions{}, env.gopts)
snapshotIDs = testRunList(t, "snapshots", gopts) snapshotIDs = testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 2, Assert(t, len(snapshotIDs) == 2,
"expected two snapshots, got %v", snapshotIDs) "expected two snapshots, got %v", snapshotIDs)
checkSnapshots(t, gopts, repo, env.mountpoint, env.repo, snapshotIDs) checkSnapshots(t, env.gopts, repo, env.mountpoint, env.repo, snapshotIDs)
// third backup, explicit incremental // third backup, explicit incremental
bopts := BackupOptions{Parent: snapshotIDs[0].String()} bopts := BackupOptions{Parent: snapshotIDs[0].String()}
testRunBackup(t, []string{env.testdata}, bopts, gopts) testRunBackup(t, []string{env.testdata}, bopts, env.gopts)
snapshotIDs = testRunList(t, "snapshots", gopts) snapshotIDs = testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 3, Assert(t, len(snapshotIDs) == 3,
"expected three snapshots, got %v", snapshotIDs) "expected three snapshots, got %v", snapshotIDs)
checkSnapshots(t, gopts, repo, env.mountpoint, env.repo, snapshotIDs) checkSnapshots(t, env.gopts, repo, env.mountpoint, env.repo, snapshotIDs)
})
} }
func TestMountSameTimestamps(t *testing.T) { func TestMountSameTimestamps(t *testing.T) {
@ -187,10 +188,12 @@ func TestMountSameTimestamps(t *testing.T) {
t.Skip("Skipping fuse tests") t.Skip("Skipping fuse tests")
} }
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
SetupTarTestFixture(t, env.base, filepath.Join("testdata", "repo-same-timestamps.tar.gz")) SetupTarTestFixture(t, env.base, filepath.Join("testdata", "repo-same-timestamps.tar.gz"))
repo, err := OpenRepository(gopts) repo, err := OpenRepository(env.gopts)
OK(t, err) OK(t, err)
ids := []restic.ID{ ids := []restic.ID{
@ -199,6 +202,5 @@ func TestMountSameTimestamps(t *testing.T) {
restic.TestParseID("5fd0d8b2ef0fa5d23e58f1e460188abb0f525c0f0c4af8365a1280c807a80a1b"), restic.TestParseID("5fd0d8b2ef0fa5d23e58f1e460188abb0f525c0f0c4af8365a1280c807a80a1b"),
} }
checkSnapshots(t, gopts, repo, env.mountpoint, env.repo, ids) checkSnapshots(t, env.gopts, repo, env.mountpoint, env.repo, ids)
})
} }


@@ -168,11 +168,12 @@ func dirStats(dir string) (stat dirStat) {
 type testEnvironment struct {
 	base, cache, repo, mountpoint, testdata string
+	gopts GlobalOptions
 }
 
-// withTestEnvironment creates a test environment and calls f with it. After f has
-// returned, the temporary directory is removed.
-func withTestEnvironment(t testing.TB, f func(*testEnvironment, GlobalOptions)) {
+// withTestEnvironment creates a test environment and returns a cleanup
+// function which removes it.
+func withTestEnvironment(t testing.TB) (env *testEnvironment, cleanup func()) {
 	if !RunIntegrationTest {
 		t.Skip("integration tests disabled")
 	}
@@ -182,7 +183,7 @@ func withTestEnvironment(t testing.TB, f func(*testEnvironment, GlobalOptions))
 	tempdir, err := ioutil.TempDir(TestTempDir, "restic-test-")
 	OK(t, err)
 
-	env := testEnvironment{
+	env = &testEnvironment{
 		base: tempdir,
 		cache: filepath.Join(tempdir, "cache"),
 		repo: filepath.Join(tempdir, "repo"),
@@ -195,7 +196,7 @@ func withTestEnvironment(t testing.TB, f func(*testEnvironment, GlobalOptions))
 	OK(t, os.MkdirAll(env.cache, 0700))
 	OK(t, os.MkdirAll(env.repo, 0700))
 
-	gopts := GlobalOptions{
+	env.gopts = GlobalOptions{
 		Repo: env.repo,
 		Quiet: true,
 		ctx: context.Background(),
@@ -206,14 +207,15 @@ func withTestEnvironment(t testing.TB, f func(*testEnvironment, GlobalOptions))
 	}
 
 	// always overwrite global options
-	globalOptions = gopts
-
-	f(&env, gopts)
-
-	if !TestCleanupTempDirs {
-		t.Logf("leaving temporary directory %v used for test", tempdir)
-		return
-	}
-
-	RemoveAll(t, tempdir)
+	globalOptions = env.gopts
+
+	cleanup = func() {
+		if !TestCleanupTempDirs {
+			t.Logf("leaving temporary directory %v used for test", tempdir)
+			return
+		}
+
+		RemoveAll(t, tempdir)
+	}
+	return env, cleanup
 }
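With the helper above returning env and a cleanup closure, the tests in the remaining files all follow the same skeleton: acquire the environment, defer the cleanup (which only removes the temporary directory when TestCleanupTempDirs is set), and pass env.gopts to the run helpers. A short sketch of that pattern; TestExample is a placeholder name, the helpers are the ones used throughout this commit:

// TestExample is an illustrative skeleton, not a test from this commit.
func TestExample(t *testing.T) {
	env, cleanup := withTestEnvironment(t)
	defer cleanup()

	testRunInit(t, env.gopts)
	testRunBackup(t, []string{env.testdata}, BackupOptions{}, env.gopts)
	testRunCheck(t, env.gopts)
}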


@ -201,7 +201,9 @@ func testRunPrune(t testing.TB, gopts GlobalOptions) {
} }
func TestBackup(t *testing.T) { func TestBackup(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -211,23 +213,23 @@ func TestBackup(t *testing.T) {
OK(t, err) OK(t, err)
OK(t, fd.Close()) OK(t, fd.Close())
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
opts := BackupOptions{} opts := BackupOptions{}
// first backup // first backup
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
snapshotIDs := testRunList(t, "snapshots", gopts) snapshotIDs := testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 1, Assert(t, len(snapshotIDs) == 1,
"expected one snapshot, got %v", snapshotIDs) "expected one snapshot, got %v", snapshotIDs)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
stat1 := dirStats(env.repo) stat1 := dirStats(env.repo)
// second backup, implicit incremental // second backup, implicit incremental
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
snapshotIDs = testRunList(t, "snapshots", gopts) snapshotIDs = testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 2, Assert(t, len(snapshotIDs) == 2,
"expected two snapshots, got %v", snapshotIDs) "expected two snapshots, got %v", snapshotIDs)
@ -237,11 +239,11 @@ func TestBackup(t *testing.T) {
} }
t.Logf("repository grown by %d bytes", stat2.size-stat1.size) t.Logf("repository grown by %d bytes", stat2.size-stat1.size)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
// third backup, explicit incremental // third backup, explicit incremental
opts.Parent = snapshotIDs[0].String() opts.Parent = snapshotIDs[0].String()
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
snapshotIDs = testRunList(t, "snapshots", gopts) snapshotIDs = testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 3, Assert(t, len(snapshotIDs) == 3,
"expected three snapshots, got %v", snapshotIDs) "expected three snapshots, got %v", snapshotIDs)
@ -255,17 +257,18 @@ func TestBackup(t *testing.T) {
for i, snapshotID := range snapshotIDs { for i, snapshotID := range snapshotIDs {
restoredir := filepath.Join(env.base, fmt.Sprintf("restore%d", i)) restoredir := filepath.Join(env.base, fmt.Sprintf("restore%d", i))
t.Logf("restoring snapshot %v to %v", snapshotID.Str(), restoredir) t.Logf("restoring snapshot %v to %v", snapshotID.Str(), restoredir)
testRunRestore(t, gopts, restoredir, snapshotIDs[0]) testRunRestore(t, env.gopts, restoredir, snapshotIDs[0])
Assert(t, directoriesEqualContents(env.testdata, filepath.Join(restoredir, "testdata")), Assert(t, directoriesEqualContents(env.testdata, filepath.Join(restoredir, "testdata")),
"directories are not equal") "directories are not equal")
} }
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
})
} }
func TestBackupNonExistingFile(t *testing.T) { func TestBackupNonExistingFile(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -277,7 +280,7 @@ func TestBackupNonExistingFile(t *testing.T) {
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
testRunInit(t, gopts) testRunInit(t, env.gopts)
globalOptions.stderr = ioutil.Discard globalOptions.stderr = ioutil.Discard
defer func() { defer func() {
globalOptions.stderr = os.Stderr globalOptions.stderr = os.Stderr
@ -293,12 +296,13 @@ func TestBackupNonExistingFile(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, dirs, opts, gopts) testRunBackup(t, dirs, opts, env.gopts)
})
} }
func TestBackupMissingFile1(t *testing.T) { func TestBackupMissingFile1(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -310,7 +314,7 @@ func TestBackupMissingFile1(t *testing.T) {
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
testRunInit(t, gopts) testRunInit(t, env.gopts)
globalOptions.stderr = ioutil.Discard globalOptions.stderr = ioutil.Discard
defer func() { defer func() {
globalOptions.stderr = os.Stderr globalOptions.stderr = os.Stderr
@ -332,16 +336,17 @@ func TestBackupMissingFile1(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
Assert(t, ranHook, "hook did not run") Assert(t, ranHook, "hook did not run")
debug.RemoveHook("pipe.walk1") debug.RemoveHook("pipe.walk1")
})
} }
func TestBackupMissingFile2(t *testing.T) { func TestBackupMissingFile2(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -353,7 +358,7 @@ func TestBackupMissingFile2(t *testing.T) {
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
testRunInit(t, gopts) testRunInit(t, env.gopts)
globalOptions.stderr = ioutil.Discard globalOptions.stderr = ioutil.Discard
defer func() { defer func() {
@ -376,16 +381,17 @@ func TestBackupMissingFile2(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
Assert(t, ranHook, "hook did not run") Assert(t, ranHook, "hook did not run")
debug.RemoveHook("pipe.walk2") debug.RemoveHook("pipe.walk2")
})
} }
func TestBackupChangedFile(t *testing.T) { func TestBackupChangedFile(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -397,7 +403,7 @@ func TestBackupChangedFile(t *testing.T) {
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
testRunInit(t, gopts) testRunInit(t, env.gopts)
globalOptions.stderr = ioutil.Discard globalOptions.stderr = ioutil.Discard
defer func() { defer func() {
@ -422,16 +428,17 @@ func TestBackupChangedFile(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
Assert(t, ranHook, "hook did not run") Assert(t, ranHook, "hook did not run")
debug.RemoveHook("archiver.SaveFile") debug.RemoveHook("archiver.SaveFile")
})
} }
func TestBackupDirectoryError(t *testing.T) { func TestBackupDirectoryError(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -443,7 +450,7 @@ func TestBackupDirectoryError(t *testing.T) {
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
testRunInit(t, gopts) testRunInit(t, env.gopts)
globalOptions.stderr = ioutil.Discard globalOptions.stderr = ioutil.Discard
defer func() { defer func() {
@ -468,20 +475,19 @@ func TestBackupDirectoryError(t *testing.T) {
OK(t, os.RemoveAll(testdir)) OK(t, os.RemoveAll(testdir))
}) })
testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0")}, BackupOptions{}, gopts) testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0")}, BackupOptions{}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
Assert(t, ranHook, "hook did not run") Assert(t, ranHook, "hook did not run")
debug.RemoveHook("pipe.walk2") debug.RemoveHook("pipe.walk2")
snapshots := testRunList(t, "snapshots", gopts) snapshots := testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshots) > 0, Assert(t, len(snapshots) > 0,
"no snapshots found in repo (%v)", datafile) "no snapshots found in repo (%v)", datafile)
files := testRunLs(t, gopts, snapshots[0].String()) files := testRunLs(t, env.gopts, snapshots[0].String())
Assert(t, len(files) > 1, "snapshot is empty") Assert(t, len(files) > 1, "snapshot is empty")
})
} }
func includes(haystack []string, needle string) bool { func includes(haystack []string, needle string) bool {
@ -524,8 +530,10 @@ var backupExcludeFilenames = []string{
} }
func TestBackupExclude(t *testing.T) { func TestBackupExclude(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
testRunInit(t, env.gopts)
datadir := filepath.Join(env.base, "testdata") datadir := filepath.Join(env.base, "testdata")
@ -544,28 +552,27 @@ func TestBackupExclude(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{datadir}, opts, gopts) testRunBackup(t, []string{datadir}, opts, env.gopts)
snapshots, snapshotID := lastSnapshot(snapshots, loadSnapshotMap(t, gopts)) snapshots, snapshotID := lastSnapshot(snapshots, loadSnapshotMap(t, env.gopts))
files := testRunLs(t, gopts, snapshotID) files := testRunLs(t, env.gopts, snapshotID)
Assert(t, includes(files, filepath.Join(string(filepath.Separator), "testdata", "foo.tar.gz")), Assert(t, includes(files, filepath.Join(string(filepath.Separator), "testdata", "foo.tar.gz")),
"expected file %q in first snapshot, but it's not included", "foo.tar.gz") "expected file %q in first snapshot, but it's not included", "foo.tar.gz")
opts.Excludes = []string{"*.tar.gz"} opts.Excludes = []string{"*.tar.gz"}
testRunBackup(t, []string{datadir}, opts, gopts) testRunBackup(t, []string{datadir}, opts, env.gopts)
snapshots, snapshotID = lastSnapshot(snapshots, loadSnapshotMap(t, gopts)) snapshots, snapshotID = lastSnapshot(snapshots, loadSnapshotMap(t, env.gopts))
files = testRunLs(t, gopts, snapshotID) files = testRunLs(t, env.gopts, snapshotID)
Assert(t, !includes(files, filepath.Join(string(filepath.Separator), "testdata", "foo.tar.gz")), Assert(t, !includes(files, filepath.Join(string(filepath.Separator), "testdata", "foo.tar.gz")),
"expected file %q not in first snapshot, but it's included", "foo.tar.gz") "expected file %q not in first snapshot, but it's included", "foo.tar.gz")
opts.Excludes = []string{"*.tar.gz", "private/secret"} opts.Excludes = []string{"*.tar.gz", "private/secret"}
testRunBackup(t, []string{datadir}, opts, gopts) testRunBackup(t, []string{datadir}, opts, env.gopts)
_, snapshotID = lastSnapshot(snapshots, loadSnapshotMap(t, gopts)) _, snapshotID = lastSnapshot(snapshots, loadSnapshotMap(t, env.gopts))
files = testRunLs(t, gopts, snapshotID) files = testRunLs(t, env.gopts, snapshotID)
Assert(t, !includes(files, filepath.Join(string(filepath.Separator), "testdata", "foo.tar.gz")), Assert(t, !includes(files, filepath.Join(string(filepath.Separator), "testdata", "foo.tar.gz")),
"expected file %q not in first snapshot, but it's included", "foo.tar.gz") "expected file %q not in first snapshot, but it's included", "foo.tar.gz")
Assert(t, !includes(files, filepath.Join(string(filepath.Separator), "testdata", "private", "secret", "passwords.txt")), Assert(t, !includes(files, filepath.Join(string(filepath.Separator), "testdata", "private", "secret", "passwords.txt")),
"expected file %q not in first snapshot, but it's included", "passwords.txt") "expected file %q not in first snapshot, but it's included", "passwords.txt")
})
} }
const ( const (
@ -597,8 +604,10 @@ func appendRandomData(filename string, bytes uint) error {
} }
func TestIncrementalBackup(t *testing.T) { func TestIncrementalBackup(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
testRunInit(t, env.gopts)
datadir := filepath.Join(env.base, "testdata") datadir := filepath.Join(env.base, "testdata")
testfile := filepath.Join(datadir, "testfile") testfile := filepath.Join(datadir, "testfile")
@ -607,14 +616,14 @@ func TestIncrementalBackup(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{datadir}, opts, gopts) testRunBackup(t, []string{datadir}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
stat1 := dirStats(env.repo) stat1 := dirStats(env.repo)
OK(t, appendRandomData(testfile, incrementalSecondWrite)) OK(t, appendRandomData(testfile, incrementalSecondWrite))
testRunBackup(t, []string{datadir}, opts, gopts) testRunBackup(t, []string{datadir}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
stat2 := dirStats(env.repo) stat2 := dirStats(env.repo)
if stat2.size-stat1.size > incrementalFirstWrite { if stat2.size-stat1.size > incrementalFirstWrite {
t.Errorf("repository size has grown by more than %d bytes", incrementalFirstWrite) t.Errorf("repository size has grown by more than %d bytes", incrementalFirstWrite)
@ -623,39 +632,39 @@ func TestIncrementalBackup(t *testing.T) {
OK(t, appendRandomData(testfile, incrementalThirdWrite)) OK(t, appendRandomData(testfile, incrementalThirdWrite))
testRunBackup(t, []string{datadir}, opts, gopts) testRunBackup(t, []string{datadir}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
stat3 := dirStats(env.repo) stat3 := dirStats(env.repo)
if stat3.size-stat2.size > incrementalFirstWrite { if stat3.size-stat2.size > incrementalFirstWrite {
t.Errorf("repository size has grown by more than %d bytes", incrementalFirstWrite) t.Errorf("repository size has grown by more than %d bytes", incrementalFirstWrite)
} }
t.Logf("repository grown by %d bytes", stat3.size-stat2.size) t.Logf("repository grown by %d bytes", stat3.size-stat2.size)
})
} }
func TestBackupTags(t *testing.T) { func TestBackupTags(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ := testRunSnapshots(t, gopts) newest, _ := testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 0, Assert(t, len(newest.Tags) == 0,
"expected no tags, got %v", newest.Tags) "expected no tags, got %v", newest.Tags)
opts.Tags = []string{"NL"} opts.Tags = []string{"NL"}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ = testRunSnapshots(t, gopts) newest, _ = testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 1 && newest.Tags[0] == "NL", Assert(t, len(newest.Tags) == 1 && newest.Tags[0] == "NL",
"expected one NL tag, got %v", newest.Tags) "expected one NL tag, got %v", newest.Tags)
})
} }
func testRunTag(t testing.TB, opts TagOptions, gopts GlobalOptions) { func testRunTag(t testing.TB, opts TagOptions, gopts GlobalOptions) {
@ -663,14 +672,16 @@ func testRunTag(t testing.TB, opts TagOptions, gopts GlobalOptions) {
} }
func TestTag(t *testing.T) { func TestTag(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
testRunBackup(t, []string{env.testdata}, BackupOptions{}, gopts) testRunBackup(t, []string{env.testdata}, BackupOptions{}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ := testRunSnapshots(t, gopts) newest, _ := testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 0, Assert(t, len(newest.Tags) == 0,
"expected no tags, got %v", newest.Tags) "expected no tags, got %v", newest.Tags)
@ -678,9 +689,9 @@ func TestTag(t *testing.T) {
"expected original ID to be nil, got %v", newest.Original) "expected original ID to be nil, got %v", newest.Original)
originalID := *newest.ID originalID := *newest.ID
testRunTag(t, TagOptions{SetTags: []string{"NL"}}, gopts) testRunTag(t, TagOptions{SetTags: []string{"NL"}}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ = testRunSnapshots(t, gopts) newest, _ = testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 1 && newest.Tags[0] == "NL", Assert(t, len(newest.Tags) == 1 && newest.Tags[0] == "NL",
"set failed, expected one NL tag, got %v", newest.Tags) "set failed, expected one NL tag, got %v", newest.Tags)
@ -688,9 +699,9 @@ func TestTag(t *testing.T) {
Assert(t, *newest.Original == originalID, Assert(t, *newest.Original == originalID,
"expected original ID to be set to the first snapshot id") "expected original ID to be set to the first snapshot id")
testRunTag(t, TagOptions{AddTags: []string{"CH"}}, gopts) testRunTag(t, TagOptions{AddTags: []string{"CH"}}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ = testRunSnapshots(t, gopts) newest, _ = testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 2 && newest.Tags[0] == "NL" && newest.Tags[1] == "CH", Assert(t, len(newest.Tags) == 2 && newest.Tags[0] == "NL" && newest.Tags[1] == "CH",
"add failed, expected CH,NL tags, got %v", newest.Tags) "add failed, expected CH,NL tags, got %v", newest.Tags)
@ -698,9 +709,9 @@ func TestTag(t *testing.T) {
Assert(t, *newest.Original == originalID, Assert(t, *newest.Original == originalID,
"expected original ID to be set to the first snapshot id") "expected original ID to be set to the first snapshot id")
testRunTag(t, TagOptions{RemoveTags: []string{"NL"}}, gopts) testRunTag(t, TagOptions{RemoveTags: []string{"NL"}}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ = testRunSnapshots(t, gopts) newest, _ = testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 1 && newest.Tags[0] == "CH", Assert(t, len(newest.Tags) == 1 && newest.Tags[0] == "CH",
"remove failed, expected one CH tag, got %v", newest.Tags) "remove failed, expected one CH tag, got %v", newest.Tags)
@ -708,10 +719,10 @@ func TestTag(t *testing.T) {
Assert(t, *newest.Original == originalID, Assert(t, *newest.Original == originalID,
"expected original ID to be set to the first snapshot id") "expected original ID to be set to the first snapshot id")
testRunTag(t, TagOptions{AddTags: []string{"US", "RU"}}, gopts) testRunTag(t, TagOptions{AddTags: []string{"US", "RU"}}, env.gopts)
testRunTag(t, TagOptions{RemoveTags: []string{"CH", "US", "RU"}}, gopts) testRunTag(t, TagOptions{RemoveTags: []string{"CH", "US", "RU"}}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ = testRunSnapshots(t, gopts) newest, _ = testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 0, Assert(t, len(newest.Tags) == 0,
"expected no tags, got %v", newest.Tags) "expected no tags, got %v", newest.Tags)
@ -720,16 +731,15 @@ func TestTag(t *testing.T) {
"expected original ID to be set to the first snapshot id") "expected original ID to be set to the first snapshot id")
// Check special case of removing all tags. // Check special case of removing all tags.
testRunTag(t, TagOptions{SetTags: []string{""}}, gopts) testRunTag(t, TagOptions{SetTags: []string{""}}, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
newest, _ = testRunSnapshots(t, gopts) newest, _ = testRunSnapshots(t, env.gopts)
Assert(t, newest != nil, "expected a new backup, got nil") Assert(t, newest != nil, "expected a new backup, got nil")
Assert(t, len(newest.Tags) == 0, Assert(t, len(newest.Tags) == 0,
"expected no tags, got %v", newest.Tags) "expected no tags, got %v", newest.Tags)
Assert(t, newest.Original != nil, "expected original snapshot id, got nil") Assert(t, newest.Original != nil, "expected original snapshot id, got nil")
Assert(t, *newest.Original == originalID, Assert(t, *newest.Original == originalID,
"expected original ID to be set to the first snapshot id") "expected original ID to be set to the first snapshot id")
})
} }
func testRunKeyListOtherIDs(t testing.TB, gopts GlobalOptions) []string { func testRunKeyListOtherIDs(t testing.TB, gopts GlobalOptions) []string {
@ -786,25 +796,26 @@ func TestKeyAddRemove(t *testing.T) {
"raicneirvOjEfEigonOmLasOd", "raicneirvOjEfEigonOmLasOd",
} }
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
testRunKeyPasswd(t, "geheim2", gopts) testRunInit(t, env.gopts)
gopts.password = "geheim2"
t.Logf("changed password to %q", gopts.password) testRunKeyPasswd(t, "geheim2", env.gopts)
env.gopts.password = "geheim2"
t.Logf("changed password to %q", env.gopts.password)
for _, newPassword := range passwordList { for _, newPassword := range passwordList {
testRunKeyAddNewKey(t, newPassword, gopts) testRunKeyAddNewKey(t, newPassword, env.gopts)
t.Logf("added new password %q", newPassword) t.Logf("added new password %q", newPassword)
gopts.password = newPassword env.gopts.password = newPassword
testRunKeyRemove(t, gopts, testRunKeyListOtherIDs(t, gopts)) testRunKeyRemove(t, env.gopts, testRunKeyListOtherIDs(t, env.gopts))
} }
gopts.password = passwordList[len(passwordList)-1] env.gopts.password = passwordList[len(passwordList)-1]
t.Logf("testing access with last password %q\n", gopts.password) t.Logf("testing access with last password %q\n", env.gopts.password)
OK(t, runKey(gopts, []string{"list"})) OK(t, runKey(env.gopts, []string{"list"}))
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
})
} }
func testFileSize(filename string, size int64) error { func testFileSize(filename string, size int64) error {
@ -831,8 +842,10 @@ func TestRestoreFilter(t *testing.T) {
{"subdir1/subdir2/testfile4.c", 102}, {"subdir1/subdir2/testfile4.c", 102},
} }
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
testRunInit(t, env.gopts)
for _, test := range testfiles { for _, test := range testfiles {
p := filepath.Join(env.testdata, test.name) p := filepath.Join(env.testdata, test.name)
@ -842,20 +855,20 @@ func TestRestoreFilter(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
snapshotID := testRunList(t, "snapshots", gopts)[0] snapshotID := testRunList(t, "snapshots", env.gopts)[0]
// no restore filter should restore all files // no restore filter should restore all files
testRunRestore(t, gopts, filepath.Join(env.base, "restore0"), snapshotID) testRunRestore(t, env.gopts, filepath.Join(env.base, "restore0"), snapshotID)
for _, test := range testfiles { for _, test := range testfiles {
OK(t, testFileSize(filepath.Join(env.base, "restore0", "testdata", test.name), int64(test.size))) OK(t, testFileSize(filepath.Join(env.base, "restore0", "testdata", test.name), int64(test.size)))
} }
for i, pat := range []string{"*.c", "*.exe", "*", "*file3*"} { for i, pat := range []string{"*.c", "*.exe", "*", "*file3*"} {
base := filepath.Join(env.base, fmt.Sprintf("restore%d", i+1)) base := filepath.Join(env.base, fmt.Sprintf("restore%d", i+1))
testRunRestoreExcludes(t, gopts, base, snapshotID, []string{pat}) testRunRestoreExcludes(t, env.gopts, base, snapshotID, []string{pat})
for _, test := range testfiles { for _, test := range testfiles {
err := testFileSize(filepath.Join(base, "testdata", test.name), int64(test.size)) err := testFileSize(filepath.Join(base, "testdata", test.name), int64(test.size))
if ok, _ := filter.Match(pat, filepath.Base(test.name)); !ok { if ok, _ := filter.Match(pat, filepath.Base(test.name)); !ok {
@ -866,13 +879,13 @@ func TestRestoreFilter(t *testing.T) {
} }
} }
} }
})
} }
func TestRestore(t *testing.T) { func TestRestore(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
testRunInit(t, env.gopts)
for i := 0; i < 10; i++ { for i := 0; i < 10; i++ {
p := filepath.Join(env.testdata, fmt.Sprintf("foo/bar/testfile%v", i)) p := filepath.Join(env.testdata, fmt.Sprintf("foo/bar/testfile%v", i))
@ -882,22 +895,22 @@ func TestRestore(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
// Restore latest without any filters // Restore latest without any filters
restoredir := filepath.Join(env.base, "restore") restoredir := filepath.Join(env.base, "restore")
testRunRestoreLatest(t, gopts, restoredir, nil, "") testRunRestoreLatest(t, env.gopts, restoredir, nil, "")
Assert(t, directoriesEqualContents(env.testdata, filepath.Join(restoredir, filepath.Base(env.testdata))), Assert(t, directoriesEqualContents(env.testdata, filepath.Join(restoredir, filepath.Base(env.testdata))),
"directories are not equal") "directories are not equal")
})
} }
func TestRestoreLatest(t *testing.T) { func TestRestoreLatest(t *testing.T) {
env, cleanup := withTestEnvironment(t)
defer cleanup()
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { testRunInit(t, env.gopts)
testRunInit(t, gopts)
p := filepath.Join(env.testdata, "testfile.c") p := filepath.Join(env.testdata, "testfile.c")
OK(t, os.MkdirAll(filepath.Dir(p), 0755)) OK(t, os.MkdirAll(filepath.Dir(p), 0755))
@ -905,57 +918,57 @@ func TestRestoreLatest(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
os.Remove(p) os.Remove(p)
OK(t, appendRandomData(p, 101)) OK(t, appendRandomData(p, 101))
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
// Restore latest without any filters // Restore latest without any filters
testRunRestoreLatest(t, gopts, filepath.Join(env.base, "restore0"), nil, "") testRunRestoreLatest(t, env.gopts, filepath.Join(env.base, "restore0"), nil, "")
OK(t, testFileSize(filepath.Join(env.base, "restore0", "testdata", "testfile.c"), int64(101))) OK(t, testFileSize(filepath.Join(env.base, "restore0", "testdata", "testfile.c"), int64(101)))
// Setup test files in different directories backed up in different snapshots // Setup test files in different directories backed up in different snapshots
p1 := filepath.Join(env.testdata, "p1/testfile.c") p1 := filepath.Join(env.testdata, "p1/testfile.c")
OK(t, os.MkdirAll(filepath.Dir(p1), 0755)) OK(t, os.MkdirAll(filepath.Dir(p1), 0755))
OK(t, appendRandomData(p1, 102)) OK(t, appendRandomData(p1, 102))
testRunBackup(t, []string{filepath.Dir(p1)}, opts, gopts) testRunBackup(t, []string{filepath.Dir(p1)}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
p2 := filepath.Join(env.testdata, "p2/testfile.c") p2 := filepath.Join(env.testdata, "p2/testfile.c")
OK(t, os.MkdirAll(filepath.Dir(p2), 0755)) OK(t, os.MkdirAll(filepath.Dir(p2), 0755))
OK(t, appendRandomData(p2, 103)) OK(t, appendRandomData(p2, 103))
testRunBackup(t, []string{filepath.Dir(p2)}, opts, gopts) testRunBackup(t, []string{filepath.Dir(p2)}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
p1rAbs := filepath.Join(env.base, "restore1", "p1/testfile.c") p1rAbs := filepath.Join(env.base, "restore1", "p1/testfile.c")
p2rAbs := filepath.Join(env.base, "restore2", "p2/testfile.c") p2rAbs := filepath.Join(env.base, "restore2", "p2/testfile.c")
testRunRestoreLatest(t, gopts, filepath.Join(env.base, "restore1"), []string{filepath.Dir(p1)}, "") testRunRestoreLatest(t, env.gopts, filepath.Join(env.base, "restore1"), []string{filepath.Dir(p1)}, "")
OK(t, testFileSize(p1rAbs, int64(102))) OK(t, testFileSize(p1rAbs, int64(102)))
if _, err := os.Stat(p2rAbs); os.IsNotExist(errors.Cause(err)) { if _, err := os.Stat(p2rAbs); os.IsNotExist(errors.Cause(err)) {
Assert(t, os.IsNotExist(errors.Cause(err)), Assert(t, os.IsNotExist(errors.Cause(err)),
"expected %v to not exist in restore, but it exists, err %v", p2rAbs, err) "expected %v to not exist in restore, but it exists, err %v", p2rAbs, err)
} }
testRunRestoreLatest(t, gopts, filepath.Join(env.base, "restore2"), []string{filepath.Dir(p2)}, "") testRunRestoreLatest(t, env.gopts, filepath.Join(env.base, "restore2"), []string{filepath.Dir(p2)}, "")
OK(t, testFileSize(p2rAbs, int64(103))) OK(t, testFileSize(p2rAbs, int64(103)))
if _, err := os.Stat(p1rAbs); os.IsNotExist(errors.Cause(err)) { if _, err := os.Stat(p1rAbs); os.IsNotExist(errors.Cause(err)) {
Assert(t, os.IsNotExist(errors.Cause(err)), Assert(t, os.IsNotExist(errors.Cause(err)),
"expected %v to not exist in restore, but it exists, err %v", p1rAbs, err) "expected %v to not exist in restore, but it exists, err %v", p1rAbs, err)
} }
})
} }
func TestRestoreWithPermissionFailure(t *testing.T) { func TestRestoreWithPermissionFailure(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "repo-restore-permissions-test.tar.gz") datafile := filepath.Join("testdata", "repo-restore-permissions-test.tar.gz")
SetupTarTestFixture(t, env.base, datafile) SetupTarTestFixture(t, env.base, datafile)
snapshots := testRunList(t, "snapshots", gopts) snapshots := testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshots) > 0, Assert(t, len(snapshots) > 0,
"no snapshots found in repo (%v)", datafile) "no snapshots found in repo (%v)", datafile)
@ -964,11 +977,11 @@ func TestRestoreWithPermissionFailure(t *testing.T) {
globalOptions.stderr = os.Stderr globalOptions.stderr = os.Stderr
}() }()
testRunRestore(t, gopts, filepath.Join(env.base, "restore"), snapshots[0]) testRunRestore(t, env.gopts, filepath.Join(env.base, "restore"), snapshots[0])
// make sure that all files have been restored, regardless of any // make sure that all files have been restored, regardless of any
// permission errors // permission errors
files := testRunLs(t, gopts, snapshots[0].String()) files := testRunLs(t, env.gopts, snapshots[0].String())
for _, filename := range files { for _, filename := range files {
fi, err := os.Lstat(filepath.Join(env.base, "restore", filename)) fi, err := os.Lstat(filepath.Join(env.base, "restore", filename))
OK(t, err) OK(t, err)
@ -976,7 +989,6 @@ func TestRestoreWithPermissionFailure(t *testing.T) {
Assert(t, !isFile(fi) || fi.Size() > 0, Assert(t, !isFile(fi) || fi.Size() > 0,
"file %v restored, but filesize is 0", filename) "file %v restored, but filesize is 0", filename)
} }
})
} }
func setZeroModTime(filename string) error { func setZeroModTime(filename string) error {
@ -989,8 +1001,10 @@ func setZeroModTime(filename string) error {
} }
func TestRestoreNoMetadataOnIgnoredIntermediateDirs(t *testing.T) { func TestRestoreNoMetadataOnIgnoredIntermediateDirs(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
testRunInit(t, gopts) defer cleanup()
testRunInit(t, env.gopts)
p := filepath.Join(env.testdata, "subdir1", "subdir2", "subdir3", "file.ext") p := filepath.Join(env.testdata, "subdir1", "subdir2", "subdir3", "file.ext")
OK(t, os.MkdirAll(filepath.Dir(p), 0755)) OK(t, os.MkdirAll(filepath.Dir(p), 0755))
@ -999,15 +1013,15 @@ func TestRestoreNoMetadataOnIgnoredIntermediateDirs(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
snapshotID := testRunList(t, "snapshots", gopts)[0] snapshotID := testRunList(t, "snapshots", env.gopts)[0]
// restore with filter "*.ext", this should restore "file.ext", but // restore with filter "*.ext", this should restore "file.ext", but
// since the directories are ignored and only created because of // since the directories are ignored and only created because of
// "file.ext", no meta data should be restored for them. // "file.ext", no meta data should be restored for them.
testRunRestoreIncludes(t, gopts, filepath.Join(env.base, "restore0"), snapshotID, []string{"*.ext"}) testRunRestoreIncludes(t, env.gopts, filepath.Join(env.base, "restore0"), snapshotID, []string{"*.ext"})
f1 := filepath.Join(env.base, "restore0", "testdata", "subdir1", "subdir2") f1 := filepath.Join(env.base, "restore0", "testdata", "subdir1", "subdir2")
fi, err := os.Stat(f1) fi, err := os.Stat(f1)
@ -1017,7 +1031,7 @@ func TestRestoreNoMetadataOnIgnoredIntermediateDirs(t *testing.T) {
"meta data of intermediate directory has been restore although it was ignored") "meta data of intermediate directory has been restore although it was ignored")
// restore with filter "*", this should restore meta data on everything. // restore with filter "*", this should restore meta data on everything.
testRunRestoreIncludes(t, gopts, filepath.Join(env.base, "restore1"), snapshotID, []string{"*"}) testRunRestoreIncludes(t, env.gopts, filepath.Join(env.base, "restore1"), snapshotID, []string{"*"})
f2 := filepath.Join(env.base, "restore1", "testdata", "subdir1", "subdir2") f2 := filepath.Join(env.base, "restore1", "testdata", "subdir1", "subdir2")
fi, err = os.Stat(f2) fi, err = os.Stat(f2)
@ -1025,31 +1039,31 @@ func TestRestoreNoMetadataOnIgnoredIntermediateDirs(t *testing.T) {
Assert(t, fi.ModTime() == time.Unix(0, 0), Assert(t, fi.ModTime() == time.Unix(0, 0),
"meta data of intermediate directory hasn't been restore") "meta data of intermediate directory hasn't been restore")
})
} }
func TestFind(t *testing.T) { func TestFind(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
results := testRunFind(t, false, gopts, "unexistingfile") results := testRunFind(t, false, env.gopts, "unexistingfile")
Assert(t, len(results) == 0, "unexisting file found in repo (%v)", datafile) Assert(t, len(results) == 0, "unexisting file found in repo (%v)", datafile)
results = testRunFind(t, false, gopts, "testfile") results = testRunFind(t, false, env.gopts, "testfile")
lines := strings.Split(string(results), "\n") lines := strings.Split(string(results), "\n")
Assert(t, len(lines) == 2, "expected one file found in repo (%v)", datafile) Assert(t, len(lines) == 2, "expected one file found in repo (%v)", datafile)
results = testRunFind(t, false, gopts, "testfile*") results = testRunFind(t, false, env.gopts, "testfile*")
lines = strings.Split(string(results), "\n") lines = strings.Split(string(results), "\n")
Assert(t, len(lines) == 4, "expected three files found in repo (%v)", datafile) Assert(t, len(lines) == 4, "expected three files found in repo (%v)", datafile)
})
} }
type testMatch struct { type testMatch struct {
@ -1068,41 +1082,44 @@ type testMatches struct {
} }
func TestFindJSON(t *testing.T) { func TestFindJSON(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
results := testRunFind(t, true, gopts, "unexistingfile") results := testRunFind(t, true, env.gopts, "unexistingfile")
matches := []testMatches{} matches := []testMatches{}
OK(t, json.Unmarshal(results, &matches)) OK(t, json.Unmarshal(results, &matches))
Assert(t, len(matches) == 0, "expected no match in repo (%v)", datafile) Assert(t, len(matches) == 0, "expected no match in repo (%v)", datafile)
results = testRunFind(t, true, gopts, "testfile") results = testRunFind(t, true, env.gopts, "testfile")
OK(t, json.Unmarshal(results, &matches)) OK(t, json.Unmarshal(results, &matches))
Assert(t, len(matches) == 1, "expected a single snapshot in repo (%v)", datafile) Assert(t, len(matches) == 1, "expected a single snapshot in repo (%v)", datafile)
Assert(t, len(matches[0].Matches) == 1, "expected a single file to match (%v)", datafile) Assert(t, len(matches[0].Matches) == 1, "expected a single file to match (%v)", datafile)
Assert(t, matches[0].Hits == 1, "expected hits to show 1 match (%v)", datafile) Assert(t, matches[0].Hits == 1, "expected hits to show 1 match (%v)", datafile)
results = testRunFind(t, true, gopts, "testfile*") results = testRunFind(t, true, env.gopts, "testfile*")
OK(t, json.Unmarshal(results, &matches)) OK(t, json.Unmarshal(results, &matches))
Assert(t, len(matches) == 1, "expected a single snapshot in repo (%v)", datafile) Assert(t, len(matches) == 1, "expected a single snapshot in repo (%v)", datafile)
Assert(t, len(matches[0].Matches) == 3, "expected 3 files to match (%v)", datafile) Assert(t, len(matches[0].Matches) == 3, "expected 3 files to match (%v)", datafile)
Assert(t, matches[0].Hits == 3, "expected hits to show 3 matches (%v)", datafile) Assert(t, matches[0].Hits == 3, "expected hits to show 3 matches (%v)", datafile)
})
} }
func TestRebuildIndex(t *testing.T) { func TestRebuildIndex(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("..", "..", "internal", "checker", "testdata", "duplicate-packs-in-index-test-repo.tar.gz") datafile := filepath.Join("..", "..", "internal", "checker", "testdata", "duplicate-packs-in-index-test-repo.tar.gz")
SetupTarTestFixture(t, env.base, datafile) SetupTarTestFixture(t, env.base, datafile)
out, err := testRunCheckOutput(gopts) out, err := testRunCheckOutput(env.gopts)
if !strings.Contains(out, "contained in several indexes") { if !strings.Contains(out, "contained in several indexes") {
t.Fatalf("did not find checker hint for packs in several indexes") t.Fatalf("did not find checker hint for packs in several indexes")
} }
@ -1115,9 +1132,9 @@ func TestRebuildIndex(t *testing.T) {
t.Fatalf("did not find hint for rebuild-index command") t.Fatalf("did not find hint for rebuild-index command")
} }
testRunRebuildIndex(t, gopts) testRunRebuildIndex(t, env.gopts)
out, err = testRunCheckOutput(gopts) out, err = testRunCheckOutput(env.gopts)
if len(out) != 0 { if len(out) != 0 {
t.Fatalf("expected no output from the checker, got: %v", out) t.Fatalf("expected no output from the checker, got: %v", out)
} }
@ -1125,7 +1142,6 @@ func TestRebuildIndex(t *testing.T) {
if err != nil { if err != nil {
t.Fatalf("expected no error from checker after rebuild-index, got: %v", err) t.Fatalf("expected no error from checker after rebuild-index, got: %v", err)
} }
})
} }
func TestRebuildIndexAlwaysFull(t *testing.T) { func TestRebuildIndexAlwaysFull(t *testing.T) {
@ -1134,7 +1150,9 @@ func TestRebuildIndexAlwaysFull(t *testing.T) {
} }
func TestCheckRestoreNoLock(t *testing.T) { func TestCheckRestoreNoLock(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "small-repo.tar.gz") datafile := filepath.Join("testdata", "small-repo.tar.gz")
SetupTarTestFixture(t, env.base, datafile) SetupTarTestFixture(t, env.base, datafile)
@ -1146,21 +1164,22 @@ func TestCheckRestoreNoLock(t *testing.T) {
}) })
OK(t, err) OK(t, err)
gopts.NoLock = true env.gopts.NoLock = true
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
snapshotIDs := testRunList(t, "snapshots", gopts) snapshotIDs := testRunList(t, "snapshots", env.gopts)
if len(snapshotIDs) == 0 { if len(snapshotIDs) == 0 {
t.Fatalf("found no snapshots") t.Fatalf("found no snapshots")
} }
testRunRestore(t, gopts, filepath.Join(env.base, "restore"), snapshotIDs[0]) testRunRestore(t, env.gopts, filepath.Join(env.base, "restore"), snapshotIDs[0])
})
} }
func TestPrune(t *testing.T) { func TestPrune(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "backup-data.tar.gz") datafile := filepath.Join("testdata", "backup-data.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -1170,32 +1189,33 @@ func TestPrune(t *testing.T) {
OK(t, err) OK(t, err)
OK(t, fd.Close()) OK(t, fd.Close())
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
opts := BackupOptions{} opts := BackupOptions{}
testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0")}, opts, gopts) testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0")}, opts, env.gopts)
firstSnapshot := testRunList(t, "snapshots", gopts) firstSnapshot := testRunList(t, "snapshots", env.gopts)
Assert(t, len(firstSnapshot) == 1, Assert(t, len(firstSnapshot) == 1,
"expected one snapshot, got %v", firstSnapshot) "expected one snapshot, got %v", firstSnapshot)
testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0", "2")}, opts, gopts) testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0", "2")}, opts, env.gopts)
testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0", "3")}, opts, gopts) testRunBackup(t, []string{filepath.Join(env.testdata, "0", "0", "3")}, opts, env.gopts)
snapshotIDs := testRunList(t, "snapshots", gopts) snapshotIDs := testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 3, Assert(t, len(snapshotIDs) == 3,
"expected 3 snapshot, got %v", snapshotIDs) "expected 3 snapshot, got %v", snapshotIDs)
testRunForget(t, gopts, firstSnapshot[0].String()) testRunForget(t, env.gopts, firstSnapshot[0].String())
testRunPrune(t, gopts) testRunPrune(t, env.gopts)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
})
} }
func TestHardLink(t *testing.T) { func TestHardLink(t *testing.T) {
// this test assumes a test set with a single directory containing hard linked files // this test assumes a test set with a single directory containing hard linked files
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
datafile := filepath.Join("testdata", "test.hl.tar.gz") datafile := filepath.Join("testdata", "test.hl.tar.gz")
fd, err := os.Open(datafile) fd, err := os.Open(datafile)
if os.IsNotExist(errors.Cause(err)) { if os.IsNotExist(errors.Cause(err)) {
@ -1205,7 +1225,7 @@ func TestHardLink(t *testing.T) {
OK(t, err) OK(t, err)
OK(t, fd.Close()) OK(t, fd.Close())
testRunInit(t, gopts) testRunInit(t, env.gopts)
SetupTarTestFixture(t, env.testdata, datafile) SetupTarTestFixture(t, env.testdata, datafile)
@ -1214,18 +1234,18 @@ func TestHardLink(t *testing.T) {
opts := BackupOptions{} opts := BackupOptions{}
// first backup // first backup
testRunBackup(t, []string{env.testdata}, opts, gopts) testRunBackup(t, []string{env.testdata}, opts, env.gopts)
snapshotIDs := testRunList(t, "snapshots", gopts) snapshotIDs := testRunList(t, "snapshots", env.gopts)
Assert(t, len(snapshotIDs) == 1, Assert(t, len(snapshotIDs) == 1,
"expected one snapshot, got %v", snapshotIDs) "expected one snapshot, got %v", snapshotIDs)
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
// restore all backups and compare // restore all backups and compare
for i, snapshotID := range snapshotIDs { for i, snapshotID := range snapshotIDs {
restoredir := filepath.Join(env.base, fmt.Sprintf("restore%d", i)) restoredir := filepath.Join(env.base, fmt.Sprintf("restore%d", i))
t.Logf("restoring snapshot %v to %v", snapshotID.Str(), restoredir) t.Logf("restoring snapshot %v to %v", snapshotID.Str(), restoredir)
testRunRestore(t, gopts, restoredir, snapshotIDs[0]) testRunRestore(t, env.gopts, restoredir, snapshotIDs[0])
Assert(t, directoriesEqualContents(env.testdata, filepath.Join(restoredir, "testdata")), Assert(t, directoriesEqualContents(env.testdata, filepath.Join(restoredir, "testdata")),
"directories are not equal") "directories are not equal")
@ -1234,8 +1254,7 @@ func TestHardLink(t *testing.T) {
"links are not equal") "links are not equal")
} }
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
})
} }
func linksEqual(source, dest map[uint64][]string) bool { func linksEqual(source, dest map[uint64][]string) bool {


@ -8,7 +8,9 @@ import (
) )
func TestRestoreLocalLayout(t *testing.T) { func TestRestoreLocalLayout(t *testing.T) {
withTestEnvironment(t, func(env *testEnvironment, gopts GlobalOptions) { env, cleanup := withTestEnvironment(t)
defer cleanup()
var tests = []struct { var tests = []struct {
filename string filename string
layout string layout string
@ -24,17 +26,16 @@ func TestRestoreLocalLayout(t *testing.T) {
SetupTarTestFixture(t, env.base, datafile) SetupTarTestFixture(t, env.base, datafile)
gopts.extended["local.layout"] = test.layout env.gopts.extended["local.layout"] = test.layout
// check the repo // check the repo
testRunCheck(t, gopts) testRunCheck(t, env.gopts)
// restore latest snapshot // restore latest snapshot
target := filepath.Join(env.base, "restore") target := filepath.Join(env.base, "restore")
testRunRestoreLatest(t, gopts, target, nil, "") testRunRestoreLatest(t, env.gopts, target, nil, "")
RemoveAll(t, filepath.Join(env.base, "repo")) RemoveAll(t, filepath.Join(env.base, "repo"))
RemoveAll(t, target) RemoveAll(t, target)
} }
})
} }