[#217] Consider Copy-Source-SSE-* headers during copy

Signed-off-by: Marina Biryukova <m.biryukova@yadro.com>
Marina Biryukova 2023-10-19 17:22:26 +03:00 committed by Alexey Vanin
parent 5ee73fad6a
commit fe796ba538
15 changed files with 355 additions and 55 deletions


@@ -36,6 +36,7 @@ This document outlines major changes between releases.
 - Add new `frostfs.client_cut` config param (#192)
 - Add new `frostfs.buffer_max_size_for_put` config param and sync TZ hash for PUT operations (#197)
 - Add `X-Amz-Version-Id` header after complete multipart upload (#227)
+- Add handling of `X-Amz-Copy-Source-Server-Side-Encryption-Customer-*` headers during copy (#217)

 ### Changed
 - Update prometheus to v1.15.0 (#94)
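
For context (illustrative, not part of the diff): after this change a client copying an SSE-C encrypted object must present the source object's key via the x-amz-copy-source-server-side-encryption-customer-* headers added below. A minimal sketch using only the Go standard library; the endpoint, bucket and object names are placeholders and request signing is omitted, so it only shows which headers travel with the request (the gateway rejects SSE-C over plain HTTP).

package main

import (
	"crypto/md5"
	"encoding/base64"
	"fmt"
	"net/http"
)

func main() {
	// 32-byte SSE-C key the source object was encrypted with (placeholder value).
	key := []byte("firstencriptionkeyofsourceobject")
	keyMD5 := md5.Sum(key)

	// Copy bucket/src-object to bucket/dst-object on a hypothetical gateway endpoint.
	req, err := http.NewRequest(http.MethodPut, "https://s3.example.com/bucket/dst-object", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("X-Amz-Copy-Source", "bucket/src-object")

	// Source-side SSE-C headers handled by this commit.
	req.Header.Set("x-amz-copy-source-server-side-encryption-customer-algorithm", "AES256")
	req.Header.Set("x-amz-copy-source-server-side-encryption-customer-key", base64.StdEncoding.EncodeToString(key))
	req.Header.Set("x-amz-copy-source-server-side-encryption-customer-key-MD5", base64.StdEncoding.EncodeToString(keyMD5[:]))

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}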


@@ -149,6 +149,7 @@ const (
 	ErrInvalidEncryptionAlgorithm
 	ErrInvalidSSECustomerKey
 	ErrMissingSSECustomerKey
+	ErrMissingSSECustomerAlgorithm
 	ErrMissingSSECustomerKeyMD5
 	ErrSSECustomerKeyMD5Mismatch
 	ErrInvalidSSECustomerParameters
@@ -1062,6 +1063,12 @@ var errorCodes = errorCodeMap{
 		Description:    "Requests specifying Server Side Encryption with Customer provided keys must provide an appropriate secret key.",
 		HTTPStatusCode: http.StatusBadRequest,
 	},
+	ErrMissingSSECustomerAlgorithm: {
+		ErrCode:        ErrMissingSSECustomerAlgorithm,
+		Code:           "InvalidArgument",
+		Description:    "Requests specifying Server Side Encryption with Customer provided keys must provide a valid encryption algorithm.",
+		HTTPStatusCode: http.StatusBadRequest,
+	},
 	ErrMissingSSECustomerKeyMD5: {
 		ErrCode:        ErrMissingSSECustomerKeyMD5,
 		Code:           "InvalidArgument",


@@ -107,23 +107,36 @@ func (h *handler) CopyObjectHandler(w http.ResponseWriter, r *http.Request) {
 	}
 	srcObjInfo := extendedSrcObjInfo.ObjectInfo

-	encryptionParams, err := formEncryptionParams(r)
+	srcEncryptionParams, err := formCopySourceEncryptionParams(r)
+	if err != nil {
+		h.logAndSendError(w, "invalid sse headers", reqInfo, err)
+		return
+	}
+	dstEncryptionParams, err := formEncryptionParams(r)
 	if err != nil {
 		h.logAndSendError(w, "invalid sse headers", reqInfo, err)
 		return
 	}

-	if err = encryptionParams.MatchObjectEncryption(layer.FormEncryptionInfo(srcObjInfo.Headers)); err != nil {
+	if err = srcEncryptionParams.MatchObjectEncryption(layer.FormEncryptionInfo(srcObjInfo.Headers)); err != nil {
+		if errors.IsS3Error(err, errors.ErrInvalidEncryptionParameters) || errors.IsS3Error(err, errors.ErrSSEEncryptedObject) ||
+			errors.IsS3Error(err, errors.ErrInvalidSSECustomerParameters) {
+			h.logAndSendError(w, "encryption doesn't match object", reqInfo, err, zap.Error(err))
+			return
+		}
 		h.logAndSendError(w, "encryption doesn't match object", reqInfo, errors.GetAPIError(errors.ErrBadRequest), zap.Error(err))
 		return
 	}

+	var dstSize uint64
 	if srcSize, err := layer.GetObjectSize(srcObjInfo); err != nil {
 		h.logAndSendError(w, "failed to get source object size", reqInfo, err)
 		return
 	} else if srcSize > layer.UploadMaxSize { //https://docs.aws.amazon.com/AmazonS3/latest/API/API_CopyObject.html
 		h.logAndSendError(w, "too bid object to copy with single copy operation, use multipart upload copy instead", reqInfo, errors.GetAPIError(errors.ErrInvalidRequestLargeCopy))
 		return
+	} else {
+		dstSize = srcSize
 	}

 	args, err := parseCopyObjectArgs(r.Header)
@@ -174,20 +187,21 @@ func (h *handler) CopyObjectHandler(w http.ResponseWriter, r *http.Request) {
 			srcObjInfo.Headers[api.ContentType] = srcObjInfo.ContentType
 		}
 		metadata = makeCopyMap(srcObjInfo.Headers)
-		delete(metadata, layer.MultipartObjectSize) // object payload will be real one rather than list of compound parts
+		filterMetadataMap(metadata)
 	} else if contentType := r.Header.Get(api.ContentType); len(contentType) > 0 {
 		metadata[api.ContentType] = contentType
 	}

 	params := &layer.CopyObjectParams{
 		SrcVersioned:  srcObjPrm.Versioned(),
 		SrcObject:     srcObjInfo,
 		ScrBktInfo:    srcObjPrm.BktInfo,
 		DstBktInfo:    dstBktInfo,
 		DstObject:     reqInfo.ObjectName,
-		SrcSize:       srcObjInfo.Size,
+		DstSize:       dstSize,
 		Header:        metadata,
-		Encryption:    encryptionParams,
+		SrcEncryption: srcEncryptionParams,
+		DstEncryption: dstEncryptionParams,
 	}

 	params.CopiesNumbers, err = h.pickCopiesNumbers(metadata, dstBktInfo.LocationConstraint)
@@ -262,7 +276,7 @@ func (h *handler) CopyObjectHandler(w http.ResponseWriter, r *http.Request) {
 		h.reqLogger(ctx).Error(logs.CouldntSendNotification, zap.Error(err))
 	}

-	if encryptionParams.Enabled() {
+	if dstEncryptionParams.Enabled() {
 		addSSECHeaders(w.Header(), r.Header)
 	}
 }
@@ -275,6 +289,13 @@ func makeCopyMap(headers map[string]string) map[string]string {
 	return res
 }

+func filterMetadataMap(metadata map[string]string) {
+	delete(metadata, layer.MultipartObjectSize) // object payload will be real one rather than list of compound parts
+	for key := range layer.EncryptionMetadata {
+		delete(metadata, key)
+	}
+}
+
 func isCopyingToItselfForbidden(reqInfo *middleware.ReqInfo, srcBucket string, srcObject string, settings *data.BucketSettings, args *copyObjectArgs) bool {
 	if reqInfo.BucketName != srcBucket || reqInfo.ObjectName != srcObject {
 		return false
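
The handler above now tracks two independent parameter sets: srcEncryptionParams to decrypt the source and dstEncryptionParams to encrypt the result, so a single CopyObject can also re-encrypt an object under a different key. A hedged client-side sketch with aws-sdk-go v1 (already a dependency of the test suite); the endpoint, credentials, bucket and object names are placeholders, and it assumes the v1 SDK's usual behavior of base64-encoding SSE-C keys and filling in the -MD5 headers before signing.

package main

import (
	"log"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/credentials"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3"
)

func main() {
	// Placeholder endpoint and credentials; path-style addressing is typical for a gateway.
	sess := session.Must(session.NewSession(&aws.Config{
		Region:           aws.String("us-east-1"),
		Endpoint:         aws.String("https://s3.example.com"),
		Credentials:      credentials.NewStaticCredentials("ACCESS_KEY", "SECRET_KEY", ""),
		S3ForcePathStyle: aws.Bool(true),
	}))
	svc := s3.New(sess)

	srcKey := "firstencriptionkeyofsourceobject" // 32-byte key the source object was encrypted with
	dstKey := "anotherencriptionkeysourceobject" // 32-byte key for the destination copy

	_, err := svc.CopyObject(&s3.CopyObjectInput{
		Bucket:     aws.String("bucket"),
		Key:        aws.String("dst-object"),
		CopySource: aws.String("bucket/src-object"),

		// Source key: handled by the new formCopySourceEncryptionParams path.
		CopySourceSSECustomerAlgorithm: aws.String("AES256"),
		CopySourceSSECustomerKey:       aws.String(srcKey),

		// Destination key: handled by the existing formEncryptionParams path.
		SSECustomerAlgorithm: aws.String("AES256"),
		SSECustomerKey:       aws.String(dstKey),
	})
	if err != nil {
		log.Fatal(err)
	}
}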


@@ -1,13 +1,19 @@
 package handler

 import (
+	"crypto/md5"
+	"crypto/tls"
+	"encoding/base64"
 	"encoding/xml"
 	"net/http"
 	"net/url"
+	"strconv"
 	"testing"

 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
 	"github.com/stretchr/testify/require"
 )
@@ -98,6 +104,165 @@ func TestCopyMultipart(t *testing.T) {
 	equalDataSlices(t, data, copiedData)
 }

+func TestCopyEncryptedToUnencrypted(t *testing.T) {
+	tc := prepareHandlerContext(t)
+
+	bktName, srcObjName := "bucket-for-copy", "object-for-copy"
+	key1 := []byte("firstencriptionkeyofsourceobject")
+	key1Md5 := md5.Sum(key1)
+	key2 := []byte("anotherencriptionkeysourceobject")
+	key2Md5 := md5.Sum(key2)
+	bktInfo := createTestBucket(tc, bktName)
+
+	srcEnc, err := encryption.NewParams(key1)
+	require.NoError(t, err)
+	srcObjInfo := createTestObject(tc, bktInfo, srcObjName, *srcEnc)
+	require.True(t, containEncryptionMetadataHeaders(srcObjInfo.Headers))
+
+	dstObjName := "copy-object"
+
+	// empty copy-source-sse headers
+	w, r := prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusBadRequest)
+	assertS3Error(t, w, errors.GetAPIError(errors.ErrSSEEncryptedObject))
+
+	// empty copy-source-sse-custom-key
+	w, r = prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusBadRequest)
+	assertS3Error(t, w, errors.GetAPIError(errors.ErrMissingSSECustomerKey))
+
+	// empty copy-source-sse-custom-algorithm
+	w, r = prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key1))
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusBadRequest)
+	assertS3Error(t, w, errors.GetAPIError(errors.ErrMissingSSECustomerAlgorithm))
+
+	// invalid copy-source-sse-custom-key
+	w, r = prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key2))
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(key2Md5[:]))
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusBadRequest)
+	assertS3Error(t, w, errors.GetAPIError(errors.ErrInvalidSSECustomerParameters))
+
+	// success copy
+	w, r = prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key1))
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(key1Md5[:]))
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusOK)
+
+	dstObjInfo, err := tc.Layer().GetObjectInfo(tc.Context(), &layer.HeadObjectParams{BktInfo: bktInfo, Object: dstObjName})
+	require.NoError(t, err)
+	require.Equal(t, srcObjInfo.Headers[layer.AttributeDecryptedSize], strconv.Itoa(int(dstObjInfo.Size)))
+	require.False(t, containEncryptionMetadataHeaders(dstObjInfo.Headers))
+}
+
+func TestCopyUnencryptedToEncrypted(t *testing.T) {
+	tc := prepareHandlerContext(t)
+
+	bktName, srcObjName := "bucket-for-copy", "object-for-copy"
+	key := []byte("firstencriptionkeyofsourceobject")
+	keyMd5 := md5.Sum(key)
+	bktInfo := createTestBucket(tc, bktName)
+
+	srcObjInfo := createTestObject(tc, bktInfo, srcObjName, encryption.Params{})
+	require.False(t, containEncryptionMetadataHeaders(srcObjInfo.Headers))
+
+	dstObjName := "copy-object"
+
+	// invalid copy-source-sse headers
+	w, r := prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key))
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(keyMd5[:]))
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusBadRequest)
+	assertS3Error(t, w, errors.GetAPIError(errors.ErrInvalidEncryptionParameters))
+
+	// success copy
+	w, r = prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	r.Header.Set(api.AmzServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key))
+	r.Header.Set(api.AmzServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(keyMd5[:]))
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusOK)
+
+	dstObjInfo, err := tc.Layer().GetObjectInfo(tc.Context(), &layer.HeadObjectParams{BktInfo: bktInfo, Object: dstObjName})
+	require.NoError(t, err)
+	require.True(t, containEncryptionMetadataHeaders(dstObjInfo.Headers))
+	require.Equal(t, strconv.Itoa(int(srcObjInfo.Size)), dstObjInfo.Headers[layer.AttributeDecryptedSize])
+}
+
+func TestCopyEncryptedToEncryptedWithAnotherKey(t *testing.T) {
+	tc := prepareHandlerContext(t)
+
+	bktName, srcObjName := "bucket-for-copy", "object-for-copy"
+	key1 := []byte("firstencriptionkeyofsourceobject")
+	key1Md5 := md5.Sum(key1)
+	key2 := []byte("anotherencriptionkeysourceobject")
+	key2Md5 := md5.Sum(key2)
+	bktInfo := createTestBucket(tc, bktName)
+
+	srcEnc, err := encryption.NewParams(key1)
+	require.NoError(t, err)
+	srcObjInfo := createTestObject(tc, bktInfo, srcObjName, *srcEnc)
+	require.True(t, containEncryptionMetadataHeaders(srcObjInfo.Headers))
+
+	dstObjName := "copy-object"
+
+	w, r := prepareTestRequest(tc, bktName, dstObjName, nil)
+	r.TLS = &tls.ConnectionState{}
+	r.Header.Set(api.AmzCopySource, bktName+"/"+srcObjName)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key1))
+	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(key1Md5[:]))
+	r.Header.Set(api.AmzServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+	r.Header.Set(api.AmzServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key2))
+	r.Header.Set(api.AmzServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(key2Md5[:]))
+	tc.Handler().CopyObjectHandler(w, r)
+	assertStatus(t, w, http.StatusOK)
+
+	dstObjInfo, err := tc.Layer().GetObjectInfo(tc.Context(), &layer.HeadObjectParams{BktInfo: bktInfo, Object: dstObjName})
+	require.NoError(t, err)
+	require.True(t, containEncryptionMetadataHeaders(dstObjInfo.Headers))
+	require.Equal(t, srcObjInfo.Headers[layer.AttributeDecryptedSize], dstObjInfo.Headers[layer.AttributeDecryptedSize])
+}
+
+func containEncryptionMetadataHeaders(headers map[string]string) bool {
+	for k := range headers {
+		if _, ok := layer.EncryptionMetadata[k]; ok {
+			return true
+		}
+	}
+
+	return false
+}
+
 func copyObject(hc *handlerContext, bktName, fromObject, toObject string, copyMeta CopyMeta, statusCode int) {
 	w, r := prepareTestRequest(hc, bktName, toObject, nil)
 	r.Header.Set(api.AmzCopySource, bktName+"/"+fromObject)


@@ -11,6 +11,7 @@ import (
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
 	apiErrors "git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
 	apistatus "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/client/status"
 	oid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object/id"
 	"github.com/aws/aws-sdk-go/aws"
@@ -427,7 +428,7 @@ func TestDeleteObjectCheckMarkerReturn(t *testing.T) {
 func createBucketAndObject(tc *handlerContext, bktName, objName string) (*data.BucketInfo, *data.ObjectInfo) {
 	bktInfo := createTestBucket(tc, bktName)
-	objInfo := createTestObject(tc, bktInfo, objName)
+	objInfo := createTestObject(tc, bktInfo, objName, encryption.Params{})

 	return bktInfo, objInfo
 }
@@ -438,7 +439,7 @@ func createVersionedBucketAndObject(t *testing.T, tc *handlerContext, bktName, o
 	require.NoError(t, err)
 	putBucketVersioning(t, tc, bktName, true)
-	objInfo := createTestObject(tc, bktInfo, objName)
+	objInfo := createTestObject(tc, bktInfo, objName, encryption.Params{})

 	return bktInfo, objInfo
 }


@@ -16,6 +16,7 @@ import (
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/cache"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/middleware"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/resolver"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/pkg/service/tree"
@@ -249,7 +250,7 @@ func createTestBucketWithLock(hc *handlerContext, bktName string, conf *data.Obj
 	return bktInfo
 }

-func createTestObject(hc *handlerContext, bktInfo *data.BucketInfo, objName string) *data.ObjectInfo {
+func createTestObject(hc *handlerContext, bktInfo *data.BucketInfo, objName string, encryption encryption.Params) *data.ObjectInfo {
 	content := make([]byte, 1024)
 	_, err := rand.Read(content)
 	require.NoError(hc.t, err)
@@ -259,11 +260,12 @@ func createTestObject(hc *handlerContext, bktInfo *data.BucketInfo, objName stri
 	}

 	extObjInfo, err := hc.Layer().PutObject(hc.Context(), &layer.PutObjectParams{
 		BktInfo:    bktInfo,
 		Object:     objName,
 		Size:       uint64(len(content)),
 		Reader:     bytes.NewReader(content),
 		Header:     header,
+		Encryption: encryption,
 	})

 	require.NoError(hc.t, err)


@@ -13,6 +13,7 @@ import (
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
 	apiErrors "git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/middleware"
 	"github.com/stretchr/testify/require"
 )
@@ -426,7 +427,7 @@ func TestObjectLegalHold(t *testing.T) {
 	bktInfo := createTestBucketWithLock(hc, bktName, nil)

 	objName := "obj-for-legal-hold"
-	createTestObject(hc, bktInfo, objName)
+	createTestObject(hc, bktInfo, objName, encryption.Params{})

 	getObjectLegalHold(hc, bktName, objName, legalHoldOff)
@@ -470,7 +471,7 @@ func TestObjectRetention(t *testing.T) {
 	bktInfo := createTestBucketWithLock(hc, bktName, nil)

 	objName := "obj-for-retention"
-	createTestObject(hc, bktInfo, objName)
+	createTestObject(hc, bktInfo, objName, encryption.Params{})

 	getObjectRetention(hc, bktName, objName, nil, apiErrors.ErrNoSuchKey)


@@ -345,6 +345,17 @@ func (h *handler) UploadPartCopy(w http.ResponseWriter, r *http.Request) {
 		return
 	}

+	srcEncryptionParams, err := formCopySourceEncryptionParams(r)
+	if err != nil {
+		h.logAndSendError(w, "invalid sse headers", reqInfo, err)
+		return
+	}
+
+	if err = srcEncryptionParams.MatchObjectEncryption(layer.FormEncryptionInfo(srcInfo.Headers)); err != nil {
+		h.logAndSendError(w, "encryption doesn't match object", reqInfo, fmt.Errorf("%w: %s", errors.GetAPIError(errors.ErrBadRequest), err), additional...)
+		return
+	}
+
 	p := &layer.UploadCopyParams{
 		Versioned: headPrm.Versioned(),
 		Info: &layer.UploadInfoParams{
@@ -352,10 +363,11 @@ func (h *handler) UploadPartCopy(w http.ResponseWriter, r *http.Request) {
 			Bkt: bktInfo,
 			Key: reqInfo.ObjectName,
 		},
-		SrcObjInfo: srcInfo,
-		SrcBktInfo: srcBktInfo,
-		PartNumber: partNumber,
-		Range:      srcRange,
+		SrcObjInfo:    srcInfo,
+		SrcBktInfo:    srcBktInfo,
+		SrcEncryption: srcEncryptionParams,
+		PartNumber:    partNumber,
+		Range:         srcRange,
 	}

 	p.Info.Encryption, err = formEncryptionParams(r)
@@ -364,11 +376,6 @@ func (h *handler) UploadPartCopy(w http.ResponseWriter, r *http.Request) {
 		return
 	}

-	if err = p.Info.Encryption.MatchObjectEncryption(layer.FormEncryptionInfo(srcInfo.Headers)); err != nil {
-		h.logAndSendError(w, "encryption doesn't match object", reqInfo, fmt.Errorf("%w: %s", errors.GetAPIError(errors.ErrBadRequest), err), additional...)
-		return
-	}
-
 	info, err := h.obj.UploadPartCopy(ctx, p)
 	if err != nil {
 		h.logAndSendError(w, "could not upload part copy", reqInfo, err, additional...)
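
The same source-side validation now runs in UploadPartCopy before the layer call, validated against the stored source object rather than the destination parameters. A hedged sketch of the corresponding client call with aws-sdk-go v1; the service client, upload ID and key are assumed to come from the caller, and the SDK is again expected to base64-encode the key and derive its MD5.

package example

import (
	"fmt"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/s3"
)

// copyEncryptedPart copies part 1 of a multipart upload from an SSE-C encrypted
// source object; svc, uploadID and srcKey are supplied by the caller (placeholders here).
func copyEncryptedPart(svc *s3.S3, uploadID, srcKey string) (string, error) {
	out, err := svc.UploadPartCopy(&s3.UploadPartCopyInput{
		Bucket:     aws.String("new-bucket"),
		Key:        aws.String("new-object-multipart"),
		UploadId:   aws.String(uploadID),
		PartNumber: aws.Int64(1),
		CopySource: aws.String("new-bucket/source-object"),

		// Key of the source object; the gateway now checks it against the stored
		// object and rejects the request if it does not match.
		CopySourceSSECustomerAlgorithm: aws.String("AES256"),
		CopySourceSSECustomerKey:       aws.String(srcKey),
	})
	if err != nil {
		return "", fmt.Errorf("upload part copy: %w", err)
	}

	return aws.StringValue(out.CopyPartResult.ETag), nil
}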


@@ -2,6 +2,8 @@ package handler

 import (
 	"crypto/md5"
+	"crypto/tls"
+	"encoding/base64"
 	"encoding/hex"
 	"encoding/xml"
 	"fmt"
@@ -13,6 +15,7 @@ import (
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api"
 	s3Errors "git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
 	"github.com/stretchr/testify/require"
 )
@@ -183,6 +186,51 @@ func TestMultipartUploadSize(t *testing.T) {
 		uploadPartCopy(hc, bktName, objName2, uploadInfo.UploadID, 1, sourceCopy, 0, 0)
 		uploadPartCopy(hc, bktName, objName2, uploadInfo.UploadID, 2, sourceCopy, 0, partSize)
 	})

+	t.Run("check correct size when copy part from encrypted source", func(t *testing.T) {
+		newBucket, newObjName := "new-bucket", "new-object-multipart"
+		bktInfo := createTestBucket(hc, newBucket)
+
+		srcObjName := "source-object"
+		key := []byte("firstencriptionkeyofsourceobject")
+		keyMd5 := md5.Sum(key)
+		srcEnc, err := encryption.NewParams(key)
+		require.NoError(t, err)
+		srcObjInfo := createTestObject(hc, bktInfo, srcObjName, *srcEnc)
+
+		multipartInfo := createMultipartUpload(hc, newBucket, newObjName, headers)
+		sourceCopy := newBucket + "/" + srcObjName
+		query := make(url.Values)
+		query.Set(uploadIDQuery, multipartInfo.UploadID)
+		query.Set(partNumberQuery, "1")
+
+		// empty copy-source-sse headers
+		w, r := prepareTestRequestWithQuery(hc, newBucket, newObjName, query, nil)
+		r.TLS = &tls.ConnectionState{}
+		r.Header.Set(api.AmzCopySource, sourceCopy)
+		hc.Handler().UploadPartCopy(w, r)
+		assertStatus(t, w, http.StatusBadRequest)
+
+		// success copy
+		w, r = prepareTestRequestWithQuery(hc, newBucket, newObjName, query, nil)
+		r.TLS = &tls.ConnectionState{}
+		r.Header.Set(api.AmzCopySource, sourceCopy)
+		r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
+		r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, base64.StdEncoding.EncodeToString(key))
+		r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKeyMD5, base64.StdEncoding.EncodeToString(keyMd5[:]))
+		hc.Handler().UploadPartCopy(w, r)
+		uploadPartCopyResponse := &UploadPartCopyResponse{}
+		readResponse(hc.t, w, http.StatusOK, uploadPartCopyResponse)
+
+		completeMultipartUpload(hc, newBucket, newObjName, multipartInfo.UploadID, []string{uploadPartCopyResponse.ETag})
+		attr := getObjectAttributes(hc, newBucket, newObjName, objectParts)
+		require.Equal(t, 1, attr.ObjectParts.PartsCount)
+		require.Equal(t, srcObjInfo.Headers[layer.AttributeDecryptedSize], strconv.Itoa(attr.ObjectParts.Parts[0].Size))
+	})
 }

 func TestListParts(t *testing.T) {


@@ -8,6 +8,7 @@ import (
 	"testing"

 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
 	"github.com/stretchr/testify/require"
 )
@@ -95,7 +96,7 @@ func TestS3CompatibilityBucketListV2BothContinuationTokenStartAfter(t *testing.T
 	bktInfo, _ := createBucketAndObject(tc, bktName, objects[0])

 	for _, objName := range objects[1:] {
-		createTestObject(tc, bktInfo, objName)
+		createTestObject(tc, bktInfo, objName, encryption.Params{})
 	}

 	listV2Response1 := listObjectsV2(tc, bktName, "", "", "bar", "", 1)
@@ -120,7 +121,7 @@ func TestS3BucketListV2EncodingBasic(t *testing.T) {
 	objects := []string{"foo+1/bar", "foo/bar/xyzzy", "quux ab/thud", "asdf+b"}

 	for _, objName := range objects {
-		createTestObject(hc, bktInfo, objName)
+		createTestObject(hc, bktInfo, objName, encryption.Params{})
 	}

 	query := make(url.Values)
@@ -150,7 +151,7 @@ func TestS3BucketListDelimiterBasic(t *testing.T) {
 	bktInfo, _ := createBucketAndObject(tc, bktName, objects[0])

 	for _, objName := range objects[1:] {
-		createTestObject(tc, bktInfo, objName)
+		createTestObject(tc, bktInfo, objName, encryption.Params{})
 	}

 	listV1Response := listObjectsV1(tc, bktName, "", "/", "", -1)
@@ -169,7 +170,7 @@ func TestS3BucketListV2DelimiterPercentage(t *testing.T) {
 	bktInfo, _ := createBucketAndObject(tc, bktName, objects[0])

 	for _, objName := range objects[1:] {
-		createTestObject(tc, bktInfo, objName)
+		createTestObject(tc, bktInfo, objName, encryption.Params{})
 	}

 	listV2Response := listObjectsV2(tc, bktName, "", "%", "", "", -1)
@@ -189,7 +190,7 @@ func TestS3BucketListV2DelimiterPrefix(t *testing.T) {
 	bktInfo, _ := createBucketAndObject(tc, bktName, objects[0])

 	for _, objName := range objects[1:] {
-		createTestObject(tc, bktInfo, objName)
+		createTestObject(tc, bktInfo, objName, encryption.Params{})
 	}

 	var empty []string


@@ -6,7 +6,6 @@ import (
 	"encoding/base64"
 	"encoding/json"
 	"encoding/xml"
-	errorsStd "errors"
 	"fmt"
 	"io"
 	"net"
@@ -376,16 +375,38 @@ func (h *handler) getBodyReader(r *http.Request) (io.ReadCloser, error) {
 }

 func formEncryptionParams(r *http.Request) (enc encryption.Params, err error) {
-	sseCustomerAlgorithm := r.Header.Get(api.AmzServerSideEncryptionCustomerAlgorithm)
-	sseCustomerKey := r.Header.Get(api.AmzServerSideEncryptionCustomerKey)
-	sseCustomerKeyMD5 := r.Header.Get(api.AmzServerSideEncryptionCustomerKeyMD5)
+	return formEncryptionParamsBase(r, false)
+}
+
+func formCopySourceEncryptionParams(r *http.Request) (enc encryption.Params, err error) {
+	return formEncryptionParamsBase(r, true)
+}
+
+func formEncryptionParamsBase(r *http.Request, isCopySource bool) (enc encryption.Params, err error) {
+	var sseCustomerAlgorithm, sseCustomerKey, sseCustomerKeyMD5 string
+	if isCopySource {
+		sseCustomerAlgorithm = r.Header.Get(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm)
+		sseCustomerKey = r.Header.Get(api.AmzCopySourceServerSideEncryptionCustomerKey)
+		sseCustomerKeyMD5 = r.Header.Get(api.AmzCopySourceServerSideEncryptionCustomerKeyMD5)
+	} else {
+		sseCustomerAlgorithm = r.Header.Get(api.AmzServerSideEncryptionCustomerAlgorithm)
+		sseCustomerKey = r.Header.Get(api.AmzServerSideEncryptionCustomerKey)
+		sseCustomerKeyMD5 = r.Header.Get(api.AmzServerSideEncryptionCustomerKeyMD5)
+	}

 	if len(sseCustomerAlgorithm) == 0 && len(sseCustomerKey) == 0 && len(sseCustomerKeyMD5) == 0 {
 		return
 	}

 	if r.TLS == nil {
-		return enc, errorsStd.New("encryption available only when TLS is enabled")
+		return enc, errors.GetAPIError(errors.ErrInsecureSSECustomerRequest)
+	}
+
+	if len(sseCustomerKey) > 0 && len(sseCustomerAlgorithm) == 0 {
+		return enc, errors.GetAPIError(errors.ErrMissingSSECustomerAlgorithm)
+	}
+	if len(sseCustomerAlgorithm) > 0 && len(sseCustomerKey) == 0 {
+		return enc, errors.GetAPIError(errors.ErrMissingSSECustomerKey)
 	}

 	if sseCustomerAlgorithm != layer.AESEncryptionAlgorithm {
@@ -394,10 +415,16 @@ func formEncryptionParams(r *http.Request) (enc encryption.Params, err error) {

 	key, err := base64.StdEncoding.DecodeString(sseCustomerKey)
 	if err != nil {
+		if isCopySource {
+			return enc, errors.GetAPIError(errors.ErrInvalidSSECustomerParameters)
+		}
 		return enc, errors.GetAPIError(errors.ErrInvalidSSECustomerKey)
 	}

 	if len(key) != layer.AESKeySize {
+		if isCopySource {
+			return enc, errors.GetAPIError(errors.ErrInvalidSSECustomerParameters)
+		}
 		return enc, errors.GetAPIError(errors.ErrInvalidSSECustomerKey)
 	}
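
To summarize the validation order added above: SSE-C headers on a non-TLS request fail with ErrInsecureSSECustomerRequest, a key without an algorithm fails with ErrMissingSSECustomerAlgorithm, an algorithm without a key fails with ErrMissingSSECustomerKey, and a malformed or wrongly sized key maps to ErrInvalidSSECustomerParameters for the copy-source variant (ErrInvalidSSECustomerKey otherwise). An illustrative unit-level test sketch for the copy-source branches, not taken from the commit; its end-to-end handler tests already cover these paths.

package handler

import (
	"crypto/tls"
	"net/http"
	"net/http/httptest"
	"testing"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer"
	"github.com/stretchr/testify/require"
)

func TestFormCopySourceEncryptionParamsValidation(t *testing.T) {
	// Key without algorithm.
	r := httptest.NewRequest(http.MethodPut, "/bucket/object", nil)
	r.TLS = &tls.ConnectionState{}
	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, "a2V5")
	_, err := formCopySourceEncryptionParams(r)
	require.True(t, errors.IsS3Error(err, errors.ErrMissingSSECustomerAlgorithm))

	// Algorithm without key.
	r = httptest.NewRequest(http.MethodPut, "/bucket/object", nil)
	r.TLS = &tls.ConnectionState{}
	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
	_, err = formCopySourceEncryptionParams(r)
	require.True(t, errors.IsS3Error(err, errors.ErrMissingSSECustomerKey))

	// SSE-C headers on a request that did not arrive over TLS.
	r = httptest.NewRequest(http.MethodPut, "/bucket/object", nil)
	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerAlgorithm, layer.AESEncryptionAlgorithm)
	r.Header.Set(api.AmzCopySourceServerSideEncryptionCustomerKey, "a2V5")
	_, err = formCopySourceEncryptionParams(r)
	require.True(t, errors.IsS3Error(err, errors.ErrInsecureSSECustomerRequest))
}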


@@ -67,6 +67,10 @@ const (
 	AmzServerSideEncryptionCustomerKey    = "x-amz-server-side-encryption-customer-key"
 	AmzServerSideEncryptionCustomerKeyMD5 = "x-amz-server-side-encryption-customer-key-MD5"

+	AmzCopySourceServerSideEncryptionCustomerAlgorithm = "x-amz-copy-source-server-side-encryption-customer-algorithm"
+	AmzCopySourceServerSideEncryptionCustomerKey       = "x-amz-copy-source-server-side-encryption-customer-key"
+	AmzCopySourceServerSideEncryptionCustomerKeyMD5    = "x-amz-copy-source-server-side-encryption-customer-key-MD5"
+
 	OwnerID       = "X-Owner-Id"
 	ContainerID   = "X-Container-Id"
 	ContainerName = "X-Container-Name"


@@ -10,6 +10,7 @@ import (
 	"fmt"
 	"io"

+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
 	"github.com/minio/sio"
 )
@@ -100,8 +101,11 @@ func (p Params) HMAC() ([]byte, []byte, error) {
 // MatchObjectEncryption checks if encryption params are valid for provided object.
 func (p Params) MatchObjectEncryption(encInfo ObjectEncryption) error {
-	if p.Enabled() != encInfo.Enabled {
-		return errorsStd.New("invalid encryption view")
+	if p.Enabled() && !encInfo.Enabled {
+		return errors.GetAPIError(errors.ErrInvalidEncryptionParameters)
+	}
+	if !p.Enabled() && encInfo.Enabled {
+		return errors.GetAPIError(errors.ErrSSEEncryptedObject)
 	}

 	if !encInfo.Enabled {
@@ -122,7 +126,7 @@ func (p Params) MatchObjectEncryption(encInfo ObjectEncryption) error {
 	mac.Write(hmacSalt)
 	expectedHmacKey := mac.Sum(nil)
 	if !bytes.Equal(expectedHmacKey, hmacKey) {
-		return errorsStd.New("mismatched hmac key")
+		return errors.GetAPIError(errors.ErrInvalidSSECustomerParameters)
 	}

 	return nil


@@ -160,11 +160,12 @@ type (
 		ScrBktInfo    *data.BucketInfo
 		DstBktInfo    *data.BucketInfo
 		DstObject     string
-		SrcSize       uint64
+		DstSize       uint64
 		Header        map[string]string
 		Range         *RangeParams
 		Lock          *data.ObjectLock
-		Encryption    encryption.Params
+		SrcEncryption encryption.Params
+		DstEncryption encryption.Params
 		CopiesNumbers []uint32
 	}

 	// CreateBucketParams stores bucket create request parameters.
@@ -291,6 +292,13 @@ const (
 	AttributeFrostfsCopiesNumber = "frostfs-copies-number" // such format to match X-Amz-Meta-Frostfs-Copies-Number header
 )

+var EncryptionMetadata = map[string]struct{}{
+	AttributeEncryptionAlgorithm: {},
+	AttributeDecryptedSize:       {},
+	AttributeHMACSalt:            {},
+	AttributeHMACKey:             {},
+}
+
 func (t *VersionedObject) String() string {
 	return t.Name + ":" + t.VersionID
 }
@@ -583,7 +591,7 @@ func (n *layer) CopyObject(ctx context.Context, p *CopyObjectParams) (*data.Exte
 		Versioned:  p.SrcVersioned,
 		Range:      p.Range,
 		BucketInfo: p.ScrBktInfo,
-		Encryption: p.Encryption,
+		Encryption: p.SrcEncryption,
 	})
 	if err != nil {
 		return nil, fmt.Errorf("get object to copy: %w", err)
@@ -592,10 +600,10 @@ func (n *layer) CopyObject(ctx context.Context, p *CopyObjectParams) (*data.Exte
 	return n.PutObject(ctx, &PutObjectParams{
 		BktInfo:       p.DstBktInfo,
 		Object:        p.DstObject,
-		Size:          p.SrcSize,
+		Size:          p.DstSize,
 		Reader:        objPayload,
 		Header:        p.Header,
-		Encryption:    p.Encryption,
+		Encryption:    p.DstEncryption,
 		CopiesNumbers: p.CopiesNumbers,
 	})
 }


@@ -74,12 +74,13 @@ type (
 	}

 	UploadCopyParams struct {
 		Versioned  bool
 		Info       *UploadInfoParams
 		SrcObjInfo *data.ObjectInfo
 		SrcBktInfo *data.BucketInfo
-		PartNumber int
-		Range      *RangeParams
+		SrcEncryption encryption.Params
+		PartNumber    int
+		Range         *RangeParams
 	}

 	CompleteMultipartParams struct {
@@ -316,6 +317,7 @@ func (n *layer) UploadPartCopy(ctx context.Context, p *UploadCopyParams) (*data.
 	if objSize, err := GetObjectSize(p.SrcObjInfo); err == nil {
 		srcObjectSize = objSize
+		size = objSize
 	}

 	if p.Range != nil {
@@ -333,6 +335,7 @@ func (n *layer) UploadPartCopy(ctx context.Context, p *UploadCopyParams) (*data.
 		Versioned:  p.Versioned,
 		Range:      p.Range,
 		BucketInfo: p.SrcBktInfo,
+		Encryption: p.SrcEncryption,
 	})
 	if err != nil {
 		return nil, fmt.Errorf("get object to upload copy: %w", err)