package layer

import (
	"bytes"
	"context"
	"crypto/md5"
	"encoding/base64"
	"encoding/hex"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"sort"
	"strconv"
	"strings"
	"time"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/auth"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
	s3errors "git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/encryption"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/frostfs"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/layer/tree"
	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/internal/logs"
	oid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object/id"
	"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/user"
	"github.com/minio/sio"
	"go.uber.org/zap"
)

const (
	UploadIDAttributeName         = "S3-Upload-Id"
	UploadPartNumberAttributeName = "S3-Upload-Part-Number"
	UploadCompletedParts          = "S3-Completed-Parts"

	// MultipartObjectSize contains the real object size if the object is combined (its payload contains a list of parts).
	// This header is used to determine whether an object is combined.
	MultipartObjectSize = "S3-Multipart-Object-Size"

	metaPrefix = "meta-"

	MaxSizeUploadsList  = 1000
	MaxSizePartsList    = 1000
	UploadMinPartNumber = 1
	UploadMaxPartNumber = 10000
	UploadMinSize       = 5 * 1024 * 1024      // 5MB
	UploadMaxSize       = 1024 * UploadMinSize // 5GB
)

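// The types below are the layer-level parameters and results of the S3 multipart upload operations.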
type (
	UploadInfoParams struct {
		UploadID   string
		Bkt        *data.BucketInfo
		Key        string
		Encryption encryption.Params
	}

	CreateMultipartParams struct {
		Info          *UploadInfoParams
		Header        map[string]string
		Data          *UploadData
		CopiesNumbers []uint32
	}

	UploadData struct {
		TagSet map[string]string
	}

	UploadPartParams struct {
		Info              *UploadInfoParams
		PartNumber        int
		Size              uint64
		Reader            io.Reader
		ContentMD5        string
		ContentSHA256Hash string
	}

	UploadCopyParams struct {
		Versioned     bool
		Info          *UploadInfoParams
		SrcObjInfo    *data.ObjectInfo
		SrcBktInfo    *data.BucketInfo
		SrcEncryption encryption.Params
		PartNumber    int
		Range         *RangeParams
	}

	CompleteMultipartParams struct {
		Info  *UploadInfoParams
		Parts []*CompletedPart
	}

	CompletedPart struct {
		ETag       string
		PartNumber int
	}

	EncryptedPart struct {
		Part
		EncryptedSize int64
	}

	Part struct {
		ETag         string
		LastModified string
		PartNumber   int
		Size         uint64
	}

	ListMultipartUploadsParams struct {
		Bkt            *data.BucketInfo
		Delimiter      string
		EncodingType   string
		KeyMarker      string
		MaxUploads     int
		Prefix         string
		UploadIDMarker string
	}

	ListPartsParams struct {
		Info             *UploadInfoParams
		MaxParts         int
		PartNumberMarker int
	}

	ListPartsInfo struct {
		Parts                []*Part
		Owner                user.ID
		NextPartNumberMarker int
		IsTruncated          bool
	}

	ListMultipartUploadsInfo struct {
		Prefixes           []string
		Uploads            []*UploadInfo
		IsTruncated        bool
		NextKeyMarker      string
		NextUploadIDMarker string
	}

	UploadInfo struct {
		IsDir    bool
		Key      string
		UploadID string
		Owner    user.ID
		Created  time.Time
	}
)

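// CreateMultipartUpload initiates a multipart upload: it collects user metadata, tags,
// copies numbers and (optionally) encryption attributes into a data.MultipartInfo record
// and stores that record in the tree service.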
func (n *Layer) CreateMultipartUpload(ctx context.Context, p *CreateMultipartParams) error {
	metaSize := len(p.Header)
	if p.Data != nil {
		metaSize += len(p.Data.TagSet)
	}

	networkInfo, err := n.GetNetworkInfo(ctx)
	if err != nil {
		return err
	}

	info := &data.MultipartInfo{
		Key:           p.Info.Key,
		UploadID:      p.Info.UploadID,
		Owner:         n.gateOwner,
		Created:       TimeNow(ctx),
		Meta:          make(map[string]string, metaSize),
		CopiesNumbers: p.CopiesNumbers,
		CreationEpoch: networkInfo.CurrentEpoch(),
	}

	for key, val := range p.Header {
		info.Meta[metaPrefix+key] = val
	}

	if p.Data != nil {
		for key, val := range p.Data.TagSet {
			info.Meta[tagPrefix+key] = val
		}
	}

	if p.Info.Encryption.Enabled() {
		if err := addEncryptionHeaders(info.Meta, p.Info.Encryption); err != nil {
			return fmt.Errorf("add encryption header: %w", err)
		}
	}

	return n.treeService.CreateMultipartUpload(ctx, p.Info.Bkt, info)
}

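// UploadPart stores a single part of an active multipart upload and returns its ETag.
// The upload must exist in the tree service, and the part size must not exceed UploadMaxSize.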
func (n *Layer) UploadPart(ctx context.Context, p *UploadPartParams) (string, error) {
	multipartInfo, err := n.treeService.GetMultipartUpload(ctx, p.Info.Bkt, p.Info.Key, p.Info.UploadID)
	if err != nil {
		if errors.Is(err, tree.ErrNodeNotFound) {
			return "", fmt.Errorf("%w: %s", s3errors.GetAPIError(s3errors.ErrNoSuchUpload), err.Error())
		}
		return "", err
	}

	if p.Size > UploadMaxSize {
		return "", fmt.Errorf("%w: %d/%d", s3errors.GetAPIError(s3errors.ErrEntityTooLarge), p.Size, UploadMaxSize)
	}

	objInfo, err := n.uploadPart(ctx, multipartInfo, p)
	if err != nil {
		return "", err
	}

	return objInfo.ETag(n.features.MD5Enabled()), nil
}

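// uploadPart saves the part payload as a separate object, verifies the optional Content-MD5
// and payload SHA-256 checksums, records the part in the tree service and deletes any object
// previously stored for the same part number.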
func (n *Layer) uploadPart(ctx context.Context, multipartInfo *data.MultipartInfo, p *UploadPartParams) (*data.ObjectInfo, error) {
	encInfo := FormEncryptionInfo(multipartInfo.Meta)
	if err := p.Info.Encryption.MatchObjectEncryption(encInfo); err != nil {
		n.reqLogger(ctx).Warn(logs.MismatchedObjEncryptionInfo, zap.Error(err))
		return nil, s3errors.GetAPIError(s3errors.ErrInvalidEncryptionParameters)
	}

	bktInfo := p.Info.Bkt
	prm := frostfs.PrmObjectCreate{
		Container:    bktInfo.CID,
		Attributes:   make([][2]string, 2),
		Payload:      p.Reader,
		CreationTime: TimeNow(ctx),
		CopiesNumber: multipartInfo.CopiesNumbers,
	}

	decSize := p.Size
	if p.Info.Encryption.Enabled() {
		r, encSize, err := encryptionReader(p.Reader, p.Size, p.Info.Encryption.Key())
		if err != nil {
			return nil, fmt.Errorf("failed to create encrypted reader: %w", err)
		}
		prm.Attributes = append(prm.Attributes, [2]string{AttributeDecryptedSize, strconv.FormatUint(p.Size, 10)})
		prm.Payload = r
		p.Size = encSize
	}

	prm.Attributes[0][0], prm.Attributes[0][1] = UploadIDAttributeName, p.Info.UploadID
	prm.Attributes[1][0], prm.Attributes[1][1] = UploadPartNumberAttributeName, strconv.Itoa(p.PartNumber)

	createdObj, err := n.objectPutAndHash(ctx, prm, bktInfo)
	if err != nil {
		return nil, err
	}
	// If the client supplied Content-MD5, verify it and remove the stored object on mismatch.
	if len(p.ContentMD5) > 0 {
		hashBytes, err := base64.StdEncoding.DecodeString(p.ContentMD5)
		if err != nil {
			return nil, s3errors.GetAPIError(s3errors.ErrInvalidDigest)
		}
		if hex.EncodeToString(hashBytes) != hex.EncodeToString(createdObj.MD5Sum) {
			prm := frostfs.PrmObjectDelete{
				Object:    createdObj.ID,
				Container: bktInfo.CID,
			}
			n.prepareAuthParameters(ctx, &prm.PrmAuth, bktInfo.Owner)
			err = n.frostFS.DeleteObject(ctx, prm)
			if err != nil {
				n.reqLogger(ctx).Debug(logs.FailedToDeleteObject, zap.Stringer("cid", bktInfo.CID), zap.Stringer("oid", createdObj.ID))
			}
			return nil, s3errors.GetAPIError(s3errors.ErrInvalidDigest)
		}
	}
	if p.Info.Encryption.Enabled() {
		createdObj.Size = decSize
	}

	// For unencrypted parts, verify the declared payload SHA-256 hash.
	if !p.Info.Encryption.Enabled() && len(p.ContentSHA256Hash) > 0 && !auth.IsStandardContentSHA256(p.ContentSHA256Hash) {
		contentHashBytes, err := hex.DecodeString(p.ContentSHA256Hash)
		if err != nil {
			return nil, s3errors.GetAPIError(s3errors.ErrContentSHA256Mismatch)
		}
		if !bytes.Equal(contentHashBytes, createdObj.HashSum) {
			err = n.objectDelete(ctx, bktInfo, createdObj.ID)
			if err != nil {
				n.reqLogger(ctx).Debug(logs.FailedToDeleteObject, zap.Stringer("cid", bktInfo.CID), zap.Stringer("oid", createdObj.ID))
			}
			return nil, s3errors.GetAPIError(s3errors.ErrContentSHA256Mismatch)
		}
	}

	n.reqLogger(ctx).Debug(logs.UploadPart,
		zap.String("multipart upload", p.Info.UploadID), zap.Int("part number", p.PartNumber),
		zap.Stringer("cid", bktInfo.CID), zap.Stringer("oid", createdObj.ID))

	partInfo := &data.PartInfo{
		Key:      p.Info.Key,
		UploadID: p.Info.UploadID,
		Number:   p.PartNumber,
		OID:      createdObj.ID,
		Size:     createdObj.Size,
		ETag:     hex.EncodeToString(createdObj.HashSum),
		Created:  prm.CreationTime,
		MD5:      hex.EncodeToString(createdObj.MD5Sum),
	}

	oldPartIDs, err := n.treeService.AddPart(ctx, bktInfo, multipartInfo.ID, partInfo)
	oldPartIDNotFound := errors.Is(err, tree.ErrNoNodeToRemove)
	if err != nil && !oldPartIDNotFound {
		return nil, err
	}
	if !oldPartIDNotFound {
		for _, oldPartID := range oldPartIDs {
			if err = n.objectDelete(ctx, bktInfo, oldPartID); err != nil {
				n.reqLogger(ctx).Error(logs.CouldntDeleteOldPartObject, zap.Error(err),
					zap.String("cid", bktInfo.CID.EncodeToString()),
					zap.String("oid", oldPartID.EncodeToString()))
			}
		}
	}

	objInfo := &data.ObjectInfo{
		ID:  createdObj.ID,
		CID: bktInfo.CID,

		Owner:   bktInfo.Owner,
		Bucket:  bktInfo.Name,
		Size:    partInfo.Size,
		Created: partInfo.Created,
		HashSum: partInfo.ETag,
		MD5Sum:  partInfo.MD5,
	}

	return objInfo, nil
}

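// UploadPartCopy copies an existing object (or a byte range of it) into a part of an active
// multipart upload by streaming the source payload through uploadPart.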
func (n *Layer) UploadPartCopy(ctx context.Context, p *UploadCopyParams) (*data.ObjectInfo, error) {
	multipartInfo, err := n.treeService.GetMultipartUpload(ctx, p.Info.Bkt, p.Info.Key, p.Info.UploadID)
	if err != nil {
		if errors.Is(err, tree.ErrNodeNotFound) {
			return nil, fmt.Errorf("%w: %s", s3errors.GetAPIError(s3errors.ErrNoSuchUpload), err.Error())
		}
		return nil, err
	}

	size := p.SrcObjInfo.Size
	srcObjectSize := p.SrcObjInfo.Size

	if objSize, err := GetObjectSize(p.SrcObjInfo); err == nil {
		srcObjectSize = objSize
		size = objSize
	}

	if p.Range != nil {
		size = p.Range.End - p.Range.Start + 1
		if p.Range.End > srcObjectSize {
			return nil, fmt.Errorf("%w: %d-%d/%d", s3errors.GetAPIError(s3errors.ErrInvalidCopyPartRangeSource), p.Range.Start, p.Range.End, srcObjectSize)
		}
	}
	if size > UploadMaxSize {
		return nil, fmt.Errorf("%w: %d/%d", s3errors.GetAPIError(s3errors.ErrEntityTooLarge), size, UploadMaxSize)
	}

	objPayload, err := n.GetObject(ctx, &GetObjectParams{
		ObjectInfo: p.SrcObjInfo,
		Versioned:  p.Versioned,
		Range:      p.Range,
		BucketInfo: p.SrcBktInfo,
		Encryption: p.SrcEncryption,
	})
	if err != nil {
		return nil, fmt.Errorf("get object to upload copy: %w", err)
	}

	params := &UploadPartParams{
		Info:       p.Info,
		PartNumber: p.PartNumber,
		Size:       size,
		Reader:     objPayload,
	}

	return n.uploadPart(ctx, multipartInfo, params)
}

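// CompleteMultipartUpload assembles previously uploaded parts into the final object.
// The stored payload is the JSON-encoded list of parts, the real size is kept in the
// S3-Multipart-Object-Size attribute, and the resulting ETag is the S3-style
// "<md5-of-part-md5s>-<part count>" value. Part objects are deleted once the combined
// object has been put.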
func (n *Layer) CompleteMultipartUpload(ctx context.Context, p *CompleteMultipartParams) (*UploadData, *data.ExtendedObjectInfo, error) {
	for i := 1; i < len(p.Parts); i++ {
		if p.Parts[i].PartNumber <= p.Parts[i-1].PartNumber {
			return nil, nil, s3errors.GetAPIError(s3errors.ErrInvalidPartOrder)
		}
	}

	multipartInfo, partsInfo, err := n.getUploadParts(ctx, p.Info)
	if err != nil {
		return nil, nil, err
	}
	encInfo := FormEncryptionInfo(multipartInfo.Meta)

	if len(partsInfo) < len(p.Parts) {
		return nil, nil, fmt.Errorf("%w: found %d parts, need %d", s3errors.GetAPIError(s3errors.ErrInvalidPart), len(partsInfo), len(p.Parts))
	}

	var multipartObjectSize uint64
	var encMultipartObjectSize uint64
	parts := make([]*data.PartInfoExtended, 0, len(p.Parts))

	var completedPartsHeader strings.Builder
	// md5Hash accumulates the raw MD5s of all parts to form the final S3-style ETag.
	md5Hash := md5.New()
	for i, part := range p.Parts {
		partInfo := partsInfo.Extract(part.PartNumber, data.UnQuote(part.ETag), n.features.MD5Enabled())
		if partInfo == nil {
			return nil, nil, fmt.Errorf("%w: unknown part %d or etag mismatched", s3errors.GetAPIError(s3errors.ErrInvalidPart), part.PartNumber)
		}

		// for the last part we have no minimum size limit
		if i != len(p.Parts)-1 && partInfo.Size < UploadMinSize {
			return nil, nil, fmt.Errorf("%w: %d/%d", s3errors.GetAPIError(s3errors.ErrEntityTooSmall), partInfo.Size, UploadMinSize)
		}
		parts = append(parts, partInfo)
		multipartObjectSize += partInfo.Size // even if encryption is enabled, size is actual (decrypted)

		if encInfo.Enabled {
			encPartSize, err := sio.EncryptedSize(partInfo.Size)
			if err != nil {
				return nil, nil, fmt.Errorf("compute encrypted size: %w", err)
			}
			encMultipartObjectSize += encPartSize
		}

		partInfoStr := partInfo.ToHeaderString()
		if i != len(p.Parts)-1 {
			partInfoStr += ","
		}
		if _, err = completedPartsHeader.WriteString(partInfoStr); err != nil {
			return nil, nil, err
		}

		bytesHash, err := hex.DecodeString(partInfo.MD5)
		if err != nil {
			return nil, nil, fmt.Errorf("couldn't decode MD5 checksum of part: %w", err)
		}
		md5Hash.Write(bytesHash)
	}

	initMetadata := make(map[string]string, len(multipartInfo.Meta)+1)
	initMetadata[UploadCompletedParts] = completedPartsHeader.String()
	initMetadata[MultipartObjectSize] = strconv.FormatUint(multipartObjectSize, 10)

	uploadData := &UploadData{
		TagSet: make(map[string]string),
	}
	for key, val := range multipartInfo.Meta {
		if strings.HasPrefix(key, metaPrefix) {
			initMetadata[strings.TrimPrefix(key, metaPrefix)] = val
		} else if strings.HasPrefix(key, tagPrefix) {
			uploadData.TagSet[strings.TrimPrefix(key, tagPrefix)] = val
		}
	}

	if encInfo.Enabled {
		initMetadata[AttributeEncryptionAlgorithm] = encInfo.Algorithm
		initMetadata[AttributeHMACKey] = encInfo.HMACKey
		initMetadata[AttributeHMACSalt] = encInfo.HMACSalt
		initMetadata[AttributeDecryptedSize] = strconv.FormatUint(multipartObjectSize, 10)
		multipartObjectSize = encMultipartObjectSize
	}

	partsData, err := json.Marshal(parts)
	if err != nil {
		return nil, nil, fmt.Errorf("marshal parts for combined object: %w", err)
	}

	extObjInfo, err := n.PutObject(ctx, &PutObjectParams{
		BktInfo:         p.Info.Bkt,
		Object:          p.Info.Key,
		Reader:          bytes.NewReader(partsData),
		Header:          initMetadata,
		Size:            &multipartObjectSize,
		Encryption:      p.Info.Encryption,
		CopiesNumbers:   multipartInfo.CopiesNumbers,
		CompleteMD5Hash: hex.EncodeToString(md5Hash.Sum(nil)) + "-" + strconv.Itoa(len(p.Parts)),
	})
	if err != nil {
		n.reqLogger(ctx).Error(logs.CouldNotPutCompletedObject,
			zap.String("uploadID", p.Info.UploadID),
			zap.String("uploadKey", p.Info.Key),
			zap.Error(err))

		return nil, nil, s3errors.GetAPIError(s3errors.ErrInternalError)
	}

	var addr oid.Address
	addr.SetContainer(p.Info.Bkt.CID)
	for _, prts := range partsInfo {
		for _, partInfo := range prts {
			if err = n.objectDelete(ctx, p.Info.Bkt, partInfo.OID); err != nil {
				n.reqLogger(ctx).Warn(logs.CouldNotDeleteUploadPart,
					zap.Stringer("cid", p.Info.Bkt.CID), zap.Stringer("oid", &partInfo.OID),
					zap.Error(err))
			}
			addr.SetObject(partInfo.OID)
			n.cache.DeleteObject(addr)
		}
	}

	return uploadData, extObjInfo, n.treeService.DeleteMultipartUpload(ctx, p.Info.Bkt, multipartInfo)
}

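// ListMultipartUploads returns active multipart uploads of the bucket filtered by prefix,
// delimiter and key/upload-id markers and truncated to MaxUploads entries.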
func (n *Layer) ListMultipartUploads(ctx context.Context, p *ListMultipartUploadsParams) (*ListMultipartUploadsInfo, error) {
	var result ListMultipartUploadsInfo
	if p.MaxUploads == 0 {
		return &result, nil
	}

	multipartInfos, err := n.treeService.GetMultipartUploadsByPrefix(ctx, p.Bkt, p.Prefix)
	if err != nil {
		return nil, err
	}

	uploads := make([]*UploadInfo, 0, len(multipartInfos))
	uniqDirs := make(map[string]struct{})

	for _, multipartInfo := range multipartInfos {
		info := uploadInfoFromMultipartInfo(multipartInfo, p.Prefix, p.Delimiter)
		if info != nil {
			if info.IsDir {
				if _, ok := uniqDirs[info.Key]; ok {
					continue
				}
				uniqDirs[info.Key] = struct{}{}
			}
			uploads = append(uploads, info)
		}
	}

	sort.Slice(uploads, func(i, j int) bool {
		if uploads[i].Key == uploads[j].Key {
			return uploads[i].UploadID < uploads[j].UploadID
		}
		return uploads[i].Key < uploads[j].Key
	})

	if p.KeyMarker != "" {
		if p.UploadIDMarker != "" {
			uploads = trimAfterUploadIDAndKey(p.KeyMarker, p.UploadIDMarker, uploads)
		} else {
			uploads = trimAfterUploadKey(p.KeyMarker, uploads)
		}
	}

	if len(uploads) > p.MaxUploads {
		result.IsTruncated = true
		uploads = uploads[:p.MaxUploads]
		result.NextUploadIDMarker = uploads[len(uploads)-1].UploadID
		result.NextKeyMarker = uploads[len(uploads)-1].Key
	}

	for _, ov := range uploads {
		if ov.IsDir {
			result.Prefixes = append(result.Prefixes, ov.Key)
		} else {
			result.Uploads = append(result.Uploads, ov)
		}
	}

	return &result, nil
}

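// AbortMultipartUpload deletes all uploaded parts of the upload and removes its record
// from the tree service.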
func (n *Layer) AbortMultipartUpload(ctx context.Context, p *UploadInfoParams) error {
	multipartInfo, parts, err := n.getUploadParts(ctx, p)
	if err != nil {
		return err
	}

	for _, infos := range parts {
		for _, info := range infos {
			if err = n.objectDelete(ctx, p.Bkt, info.OID); err != nil {
				n.reqLogger(ctx).Warn(logs.CouldntDeletePart, zap.String("cid", p.Bkt.CID.EncodeToString()),
					zap.String("oid", info.OID.EncodeToString()), zap.Int("part number", info.Number), zap.Error(err))
			}
		}
	}

	return n.treeService.DeleteMultipartUpload(ctx, p.Bkt, multipartInfo)
}

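// ListParts returns the parts of the upload sorted by part number, starting after
// PartNumberMarker and truncated to MaxParts entries.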
func (n *Layer) ListParts(ctx context.Context, p *ListPartsParams) (*ListPartsInfo, error) {
	var res ListPartsInfo
	multipartInfo, partsInfo, err := n.getUploadParts(ctx, p.Info)
	if err != nil {
		return nil, err
	}

	encInfo := FormEncryptionInfo(multipartInfo.Meta)
	if err = p.Info.Encryption.MatchObjectEncryption(encInfo); err != nil {
		n.reqLogger(ctx).Warn(logs.MismatchedObjEncryptionInfo, zap.Error(err))
		return nil, s3errors.GetAPIError(s3errors.ErrInvalidEncryptionParameters)
	}

	res.Owner = multipartInfo.Owner

	parts := make([]*Part, 0, len(partsInfo))

	for _, infos := range partsInfo {
		sort.Slice(infos, func(i, j int) bool {
			return infos[i].Timestamp < infos[j].Timestamp
		})

		partInfo := infos[len(infos)-1]
		parts = append(parts, &Part{
			ETag:         data.Quote(partInfo.GetETag(n.features.MD5Enabled())),
			LastModified: partInfo.Created.UTC().Format(time.RFC3339),
			PartNumber:   partInfo.Number,
			Size:         partInfo.Size,
		})
	}

	sort.Slice(parts, func(i, j int) bool {
		return parts[i].PartNumber < parts[j].PartNumber
	})

	if len(parts) == 0 || p.PartNumberMarker >= parts[len(parts)-1].PartNumber {
		res.Parts = make([]*Part, 0)
		return &res, nil
	}
	if p.PartNumberMarker != 0 {
		for i, part := range parts {
			if part.PartNumber > p.PartNumberMarker {
				parts = parts[i:]
				break
			}
		}
	}

	if len(parts) > p.MaxParts {
		res.IsTruncated = true
		parts = parts[:p.MaxParts]
	}

	res.NextPartNumberMarker = parts[len(parts)-1].PartNumber
	res.Parts = parts

	return &res, nil
}

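// PartsInfo maps a part number to every tree service entry stored for that number.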
type PartsInfo map[int][]*data.PartInfoExtended
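// Extract removes and returns the entry with the given part number and ETag,
// or nil if no such entry exists.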
func (p PartsInfo) Extract(part int, etag string, md5Enabled bool) *data.PartInfoExtended {
	parts := p[part]

	for i, info := range parts {
		if info.GetETag(md5Enabled) == etag {
			p[part] = append(parts[:i], parts[i+1:]...)
			return info
		}
	}

	return nil
}

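// getUploadParts resolves the upload in the tree service and groups its parts by part number.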
func (n *Layer) getUploadParts(ctx context.Context, p *UploadInfoParams) (*data.MultipartInfo, PartsInfo, error) {
	multipartInfo, err := n.treeService.GetMultipartUpload(ctx, p.Bkt, p.Key, p.UploadID)
	if err != nil {
		if errors.Is(err, tree.ErrNodeNotFound) {
			return nil, nil, fmt.Errorf("%w: %s", s3errors.GetAPIError(s3errors.ErrNoSuchUpload), err.Error())
		}
		return nil, nil, err
	}

	parts, err := n.treeService.GetParts(ctx, p.Bkt, multipartInfo.ID)
	if err != nil {
		return nil, nil, err
	}

	res := make(map[int][]*data.PartInfoExtended, len(parts))
	partsNumbers := make([]int, len(parts))
	oids := make([]string, len(parts))
	for i, part := range parts {
		res[part.Number] = append(res[part.Number], part)
		partsNumbers[i] = part.Number
		oids[i] = part.OID.EncodeToString()
	}

	n.reqLogger(ctx).Debug(logs.PartDetails,
		zap.Stringer("cid", p.Bkt.CID),
		zap.String("upload id", p.UploadID),
		zap.Ints("part numbers", partsNumbers),
		zap.Strings("oids", oids))

	return multipartInfo, res, nil
}

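// trimAfterUploadIDAndKey keeps uploads whose key is not less than the key marker
// and whose upload ID is greater than the upload-id marker.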
func trimAfterUploadIDAndKey(key, id string, uploads []*UploadInfo) []*UploadInfo {
	var res []*UploadInfo
	if len(uploads) != 0 && uploads[len(uploads)-1].Key < key {
		return res
	}

	for _, obj := range uploads {
		if obj.Key >= key && obj.UploadID > id {
			res = append(res, obj)
		}
	}

	return res
}

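// trimAfterUploadKey returns the tail of the sorted uploads list that comes strictly
// after the given key marker.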
func trimAfterUploadKey(key string, objects []*UploadInfo) []*UploadInfo {
	var result []*UploadInfo
	if len(objects) != 0 && objects[len(objects)-1].Key <= key {
		return result
	}
	for i, obj := range objects {
		if obj.Key > key {
			result = objects[i:]
			break
		}
	}

	return result
}

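// uploadInfoFromMultipartInfo converts a MultipartInfo into a listing entry, collapsing keys
// that share a delimiter-terminated prefix into a single "directory" entry; it returns nil
// if the key does not match the prefix.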
func uploadInfoFromMultipartInfo(uploadInfo *data.MultipartInfo, prefix, delimiter string) *UploadInfo {
	var isDir bool
	key := uploadInfo.Key

	if !strings.HasPrefix(key, prefix) {
		return nil
	}

	if len(delimiter) > 0 {
		tail := strings.TrimPrefix(key, prefix)
		index := strings.Index(tail, delimiter)
		if index >= 0 {
			isDir = true
			key = prefix + tail[:index+1]
		}
	}

	return &UploadInfo{
		IsDir:    isDir,
		Key:      key,
		UploadID: uploadInfo.UploadID,
		Owner:    uploadInfo.Owner,
		Created:  uploadInfo.Created,
	}
}