digest: Minor refactoring

Docker-DCO-1.1-Signed-off-by: Josh Hawn <josh.hawn@docker.com> (github: jlhawn)
pull/251/head
Josh Hawn 2015-03-10 14:40:58 -07:00
parent cbdc3621cf
commit 87959abe8f
9 changed files with 54 additions and 26 deletions

@@ -49,7 +49,7 @@ func NewDigestFromHex(alg, hex string) Digest {
 }
 
 // DigestRegexp matches valid digest types.
-var DigestRegexp = regexp.MustCompile(`[a-zA-Z0-9-_+.]+:[a-zA-Z0-9-_+.=]+`)
+var DigestRegexp = regexp.MustCompile(`[a-zA-Z0-9-_+.]+:[a-fA-F0-9]+`)
 
 var (
 	// ErrDigestInvalidFormat returned when digest format invalid.
@@ -158,7 +158,7 @@ func (d Digest) sepIndex() int {
 	i := strings.Index(string(d), ":")
 
 	if i < 0 {
-		panic("invalid digest: " + d)
+		panic("could not find ':' in digest: " + d)
 	}
 
 	return i
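
For context on the DigestRegexp change above: the payload portion after the colon must now be strictly hexadecimal, where the old pattern also allowed characters such as '-', '_', '+', '.' and '='. A minimal standalone sketch of the new pattern (sample values are illustrative only, not real digests):

package main

import (
	"fmt"
	"regexp"
)

// The tightened pattern from digest.go: algorithm prefix, a colon, then hex digits only.
var digestRegexp = regexp.MustCompile(`[a-zA-Z0-9-_+.]+:[a-fA-F0-9]+`)

func main() {
	// Hex payloads still match (value shortened for illustration).
	fmt.Println(digestRegexp.MatchString("sha256:abc123def456")) // true

	// A non-hex payload no longer matches; the old pattern would have accepted it.
	fmt.Println(digestRegexp.MatchString("sha256:not=valid=hex")) // false
}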

@@ -27,12 +27,10 @@ type TarSumInfo struct {
 
 // InvalidTarSumError provides informations about a TarSum that cannot be parsed
 // by ParseTarSum.
-type InvalidTarSumError struct {
-	TarSum string
-}
+type InvalidTarSumError string
 
 func (e InvalidTarSumError) Error() string {
-	return fmt.Sprintf("invalid tarsum: %q", e.TarSum)
+	return fmt.Sprintf("invalid tarsum: %q", string(e))
 }
 
 // ParseTarSum parses a tarsum string into its components of interest. For
@@ -52,7 +50,7 @@ func ParseTarSum(tarSum string) (tsi TarSumInfo, err error) {
 	components := TarsumRegexpCapturing.FindStringSubmatch(tarSum)
 
 	if len(components) != 1+TarsumRegexpCapturing.NumSubexp() {
-		return TarSumInfo{}, InvalidTarSumError{TarSum: tarSum}
+		return TarSumInfo{}, InvalidTarSumError(tarSum)
 	}
 
 	return TarSumInfo{
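
A small standalone sketch of the InvalidTarSumError change above: the error is now a bare string type instead of a struct, so values are built by plain conversion and still compare by value, which is what the updated test table below relies on.

package main

import "fmt"

// InvalidTarSumError as redefined above: the offending input is the value itself.
type InvalidTarSumError string

func (e InvalidTarSumError) Error() string {
	return fmt.Sprintf("invalid tarsum: %q", string(e))
}

func main() {
	var err error = InvalidTarSumError("purejunk")
	fmt.Println(err)                                   // invalid tarsum: "purejunk"
	fmt.Println(err == InvalidTarSumError("purejunk")) // true: still comparable in tests
}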

@@ -21,11 +21,11 @@ func TestParseTarSumComponents(t *testing.T) {
 		},
 		{
 			input: "",
-			err:   InvalidTarSumError{},
+			err:   InvalidTarSumError(""),
 		},
 		{
 			input: "purejunk",
-			err:   InvalidTarSumError{TarSum: "purejunk"},
+			err:   InvalidTarSumError("purejunk"),
 		},
 		{
 			input: "tarsum.v23+test:12341234123412341effefefe",

@@ -20,27 +20,27 @@ type Verifier interface {
 	// Verified will return true if the content written to Verifier matches
 	// the digest.
 	Verified() bool
-
-	// Planned methods:
-	// Err() error
-	// Reset()
 }
 
 // NewDigestVerifier returns a verifier that compares the written bytes
 // against a passed in digest.
-func NewDigestVerifier(d Digest) Verifier {
+func NewDigestVerifier(d Digest) (Verifier, error) {
+	if err := d.Validate(); err != nil {
+		return nil, err
+	}
+
 	alg := d.Algorithm()
 	switch alg {
 	case "sha256", "sha384", "sha512":
 		return hashVerifier{
 			hash:   newHash(alg),
 			digest: d,
-		}
+		}, nil
 	default:
 		// Assume we have a tarsum.
 		version, err := tarsum.GetVersionFromTarsum(string(d))
 		if err != nil {
-			panic(err) // Always assume valid tarsum at this point.
+			return nil, err
 		}
 
 		pr, pw := io.Pipe()
@@ -50,7 +50,7 @@ func NewDigestVerifier(d Digest) Verifier {
 		ts, err := tarsum.NewTarSum(pr, true, version)
 		if err != nil {
-			panic(err)
+			return nil, err
 		}
 
 		// TODO(sday): Ick! A goroutine per digest verification? We'll have to
@@ -65,7 +65,7 @@ func NewDigestVerifier(d Digest) Verifier {
 			ts: ts,
 			pr: pr,
 			pw: pw,
-		}
+		}, nil
 	}
 }
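
A caller-side sketch of the new NewDigestVerifier contract, mirroring the updated tests below: invalid or unsupported digests are now reported through the returned error instead of a panic. The import path and the sample digest value are assumptions for illustration.

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/docker/distribution/digest"
)

func main() {
	payload := []byte("hello world")

	// Assumed sha256 of the payload; any digest of the bytes under test works here.
	dgst := digest.Digest("sha256:b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9")

	// Malformed digests now surface as an error rather than a panic.
	verifier, err := digest.NewDigestVerifier(dgst)
	if err != nil {
		fmt.Println("could not build verifier:", err)
		return
	}

	io.Copy(verifier, bytes.NewReader(payload))
	fmt.Println("verified:", verifier.Verified())
}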

@@ -18,7 +18,11 @@ func TestDigestVerifier(t *testing.T) {
 		t.Fatalf("unexpected error digesting bytes: %#v", err)
 	}
 
-	verifier := NewDigestVerifier(digest)
+	verifier, err := NewDigestVerifier(digest)
+	if err != nil {
+		t.Fatalf("unexpected error getting digest verifier: %s", err)
+	}
+
 	io.Copy(verifier, bytes.NewReader(p))
 
 	if !verifier.Verified() {
@@ -45,7 +49,11 @@ func TestDigestVerifier(t *testing.T) {
 	// This is the most relevant example for the registry application. It's
 	// effectively a read through pipeline, where the final sink is the digest
 	// verifier.
-	verifier = NewDigestVerifier(digest)
+	verifier, err = NewDigestVerifier(digest)
+	if err != nil {
+		t.Fatalf("unexpected error getting digest verifier: %s", err)
+	}
+
 	lengthVerifier := NewLengthVerifier(expectedSize)
 	rd := io.TeeReader(tf, lengthVerifier)
 	io.Copy(verifier, rd)

@@ -236,7 +236,10 @@ func TestLayerAPI(t *testing.T) {
 	})
 
 	// Verify the body
-	verifier := digest.NewDigestVerifier(layerDigest)
+	verifier, err := digest.NewDigestVerifier(layerDigest)
+	if err != nil {
+		t.Fatalf("unexpected error getting digest verifier: %s", err)
+	}
 	io.Copy(verifier, resp.Body)
 
 	if !verifier.Verified() {

@@ -41,7 +41,11 @@ func TestSimpleRead(t *testing.T) {
 		t.Fatalf("error allocating file reader: %v", err)
 	}
 
-	verifier := digest.NewDigestVerifier(dgst)
+	verifier, err := digest.NewDigestVerifier(dgst)
+	if err != nil {
+		t.Fatalf("error getting digest verifier: %s", err)
+	}
+
 	io.Copy(verifier, fr)
 
 	if !verifier.Verified() {

@@ -55,7 +55,11 @@ func TestSimpleWrite(t *testing.T) {
 	}
 	defer fr.Close()
 
-	verifier := digest.NewDigestVerifier(dgst)
+	verifier, err := digest.NewDigestVerifier(dgst)
+	if err != nil {
+		t.Fatalf("unexpected error getting digest verifier: %s", err)
+	}
+
 	io.Copy(verifier, fr)
 
 	if !verifier.Verified() {
@@ -94,7 +98,11 @@ func TestSimpleWrite(t *testing.T) {
 	}
 	defer fr.Close()
 
-	verifier = digest.NewDigestVerifier(doubledgst)
+	verifier, err = digest.NewDigestVerifier(doubledgst)
+	if err != nil {
+		t.Fatalf("unexpected error getting digest verifier: %s", err)
+	}
+
 	io.Copy(verifier, fr)
 
 	if !verifier.Verified() {
@@ -141,7 +149,11 @@ func TestSimpleWrite(t *testing.T) {
 	}
 	defer fr.Close()
 
-	verifier = digest.NewDigestVerifier(doubledgst)
+	verifier, err = digest.NewDigestVerifier(doubledgst)
+	if err != nil {
+		t.Fatalf("unexpected error getting digest verifier: %s", err)
+	}
+
 	io.Copy(verifier, fr)
 
 	if !verifier.Verified() {

@@ -85,7 +85,10 @@ func (luc *layerUploadController) Cancel() error {
 // validateLayer checks the layer data against the digest, returning an error
 // if it does not match. The canonical digest is returned.
 func (luc *layerUploadController) validateLayer(dgst digest.Digest) (digest.Digest, error) {
-	digestVerifier := digest.NewDigestVerifier(dgst)
+	digestVerifier, err := digest.NewDigestVerifier(dgst)
+	if err != nil {
+		return "", err
+	}
 
 	// TODO(stevvooe): Store resumable hash calculations in upload directory
 	// in driver. Something like a file at path <uuid>/resumablehash/<offest>