[#369] Request reproducer
All checks were successful: DCO, Vulncheck, Builds (1.22, 1.23), Lint, Tests (1.22, 1.23)

Signed-off-by: Nikita Zinkevich <n.zinkevich@yadro.com>
Nikita Zinkevich committed 2024-08-01 11:21:20 +03:00
parent aef4c77fbb
commit 6da59eccfc
15 changed files with 468 additions and 32 deletions

View file

@@ -12,6 +12,7 @@ import (
 	"fmt"
 	"io"
 	"mime"
+	"net/http"
 	"path/filepath"
 	"strconv"
 	"strings"
@@ -21,6 +22,7 @@ import (
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/data"
 	apiErrors "git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/errors"
 	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/internal/logs"
+	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/pkg/detector"
 	"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/client"
 	cid "git.frostfs.info/TrueCloudLab/frostfs-sdk-go/container/id"
 	"git.frostfs.info/TrueCloudLab/frostfs-sdk-go/object"
@@ -245,11 +247,11 @@ func (n *Layer) PutObject(ctx context.Context, p *PutObjectParams) (*data.Extend
 	if r != nil {
 		if len(p.Header[api.ContentType]) == 0 {
 			if contentType := MimeByFilePath(p.Object); len(contentType) == 0 {
-				d := newDetector(r)
+				d := detector.NewDetector(r, http.DetectContentType)
 				if contentType, err := d.Detect(); err == nil {
 					p.Header[api.ContentType] = contentType
 				}
-				r = d.MultiReader()
+				r = d.RestoredReader()
 			} else {
 				p.Header[api.ContentType] = contentType
 			}

View file

@@ -32,10 +32,6 @@ type (
 		*zap.Logger
 		logRoller *lumberjack.Logger
 	}
-	// Implementation of zap.Sink for using lumberjack.
-	lumberjackSink struct {
-		*lumberjack.Logger
-	}
 	// responseReadWriter helps read http response body.
 	responseReadWriter struct {
 		http.ResponseWriter
@@ -49,10 +45,6 @@ const (
 	responseLabel = "response"
 )

-func (lumberjackSink) Sync() error {
-	return nil
-}

 func (lc *LogHTTPConfig) InitHTTPLogger(log *zap.Logger) {
 	if err := lc.initHTTPLogger(); err != nil {
 		log.Error(logs.FailedToInitializeHTTPLogger, zap.Error(err))

View file

@ -0,0 +1,50 @@
package playback
import (
"encoding/xml"
"fmt"
"net/http"
)
type MultipartUpload struct {
XMLName xml.Name `xml:"http://s3.amazonaws.com/doc/2006-03-01/ InitiateMultipartUploadResult" json:"-"`
Bucket string `json:"bucket" xml:"Bucket"`
Key string `json:"key" xml:"Key"`
UploadID string `json:"uploadId" xml:"UploadId"`
}
func HandleResponse(r *http.Request, mparts map[string]MultipartUpload, resp []byte, logResponse []byte) error {
var mpart, mpartOld MultipartUpload
if r.Method != "POST" || !r.URL.Query().Has("uploads") {
return nil
}
// get new uploadId from response
err := xml.Unmarshal(resp, &mpart)
if err != nil {
return fmt.Errorf("xml unmarshal error: %w", err)
}
// get old uploadId from logs
err = xml.Unmarshal(logResponse, &mpartOld)
if err != nil {
return fmt.Errorf("xml unmarshal error: %w", err)
}
if mpartOld.UploadID != "" {
mparts[mpartOld.UploadID] = mpart
}
return nil
}
func SwapUploadID(r *http.Request, settings *Settings) error {
var uploadID string
query := r.URL.Query()
uploadID = query.Get("uploadId")
mpart, ok := settings.Multiparts[uploadID]
if !ok {
return fmt.Errorf("no multipart upload with specified uploadId")
}
query.Set("uploadId", mpart.UploadID)
r.URL.RawQuery = query.Encode()
return nil
}
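Together these helpers let the playback tool carry multipart uploads across sessions: `HandleResponse` records an old→new `uploadId` mapping whenever a `CreateMultipartUpload` (`POST …?uploads`) response comes back, and `SwapUploadID` rewrites the `uploadId` query parameter of later requests before they are sent. A minimal sketch of one replay step wiring them together; the `replay` package name, the `replayOne` helper and the import path are assumptions, not code from this change:

```go
package replay

import (
	"io"
	"net/http"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/cmd/s3-playback/internal/playback" // assumed import path
)

// replayOne sends one restored request, remapping multipart upload IDs between
// the recorded session and the live one.
func replayOne(req *http.Request, loggedResponse []byte, settings *playback.Settings) error {
	// Requests recorded with an old uploadId get the live gateway's uploadId instead.
	if req.URL.Query().Has("uploadId") {
		if err := playback.SwapUploadID(req, settings); err != nil {
			return err
		}
	}

	resp, err := settings.Client.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return err
	}

	// CreateMultipartUpload responses register an old->new uploadId mapping for
	// later UploadPart/CompleteMultipartUpload calls; other requests are ignored.
	return playback.HandleResponse(req, settings.Multiparts, body, loggedResponse)
}
```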

View file

@ -0,0 +1,97 @@
package playback
import (
"context"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"strconv"
"strings"
"time"
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api"
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/api/auth"
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/pkg/detector"
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/pkg/xmlutils"
v4 "github.com/aws/aws-sdk-go-v2/aws/signer/v4"
"github.com/aws/aws-sdk-go-v2/credentials"
)
type (
httpBody []byte
LoggedRequest struct {
From string `json:"from"`
URI string `json:"URI"`
Method string `json:"method"`
Payload httpBody `json:"payload,omitempty"`
Response httpBody `json:"response,omitempty"`
Query url.Values `json:"query"`
Header http.Header `json:"headers"`
}
Credentials struct {
AccessKey string
SecretKey string
}
Settings struct {
Endpoint string
Creds Credentials
Multiparts map[string]MultipartUpload
Client *http.Client
}
)
func (h *httpBody) UnmarshalJSON(data []byte) error {
unquoted, err := strconv.Unquote(string(data))
if err != nil {
return fmt.Errorf("failed to unquote data: %w", err)
}
detect := detector.NewDetector(strings.NewReader(unquoted), xmlutils.DetectXML)
dataType, err := detect.Detect()
if err != nil {
return fmt.Errorf("failed to detect data: %w", err)
}
reader := xmlutils.ChooseReader(dataType, detect.RestoredReader())
*h, err = io.ReadAll(reader)
if err != nil {
return fmt.Errorf("failed to unmarshal httpbody: %w", err)
}
return nil
}
// Sign replaces the Authorization header with the new access key id and signature values.
func Sign(ctx context.Context, r *http.Request, creds Credentials) error {
credProvider := credentials.NewStaticCredentialsProvider(creds.AccessKey, creds.SecretKey, "")
awsCred, err := credProvider.Retrieve(ctx)
if err != nil {
return err
}
authHdr := r.Header.Get(auth.AuthorizationHdr)
authInfo, err := parseAuthHeader(authHdr)
if err != nil {
return err
}
newHeader := strings.Replace(authHdr, authInfo["access_key_id"], creds.AccessKey, 1)
r.Header.Set(auth.AuthorizationHdr, newHeader)
signer := v4.NewSigner()
signatureDateTimeStr := r.Header.Get(api.AmzDate)
signatureDateTime, err := time.Parse("20060102T150405Z", signatureDateTimeStr)
if err != nil {
return err
}
return signer.SignHTTP(ctx, awsCred, r, r.Header.Get(api.AmzContentSha256), authInfo["service"], authInfo["region"], signatureDateTime)
}
func parseAuthHeader(authHeader string) (map[string]string, error) {
authInfo := auth.NewRegexpMatcher(auth.AuthorizationFieldRegexp).GetSubmatches(authHeader)
if len(authInfo) == 0 {
return nil, errors.New("no matches found")
}
return authInfo, nil
}
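`Sign` re-signs a restored request with the playback credentials: it swaps the access key id inside the recorded `Authorization` header and recomputes the SigV4 signature using the service, region and timestamp parsed from that header, so the replayed request authenticates against the target gateway. An illustrative sketch of calling it on a hand-built request; the header values are borrowed from the test fixture below, and the import path is an assumption:

```go
package replay

import (
	"bytes"
	"context"
	"net/http"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/cmd/s3-playback/internal/playback" // assumed import path
)

func signExample(ctx context.Context) (*http.Request, error) {
	creds := playback.Credentials{
		AccessKey: "replacementAccessKeyID", // illustrative values
		SecretKey: "replacementSecretAccessKey",
	}

	// A request restored from the log: Authorization, X-Amz-Date and
	// X-Amz-Content-Sha256 must already be present before re-signing.
	req, err := http.NewRequestWithContext(ctx, http.MethodPut,
		"http://localhost:8084/bucket/object", bytes.NewReader([]byte("payload")))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Authorization",
		"AWS4-HMAC-SHA256 Credential=oid0cid/20210809/us-east-1/s3/aws4_request, "+
			"SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=2811ccb9e242f41426738fb1f")
	req.Header.Set("X-Amz-Date", "20210809T123456Z")
	req.Header.Set("X-Amz-Content-Sha256", "UNSIGNED-PAYLOAD")

	// Replace the access key id and recompute the signature in place.
	if err := playback.Sign(ctx, req, creds); err != nil {
		return nil, err
	}
	return req, nil
}
```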

View file

@ -0,0 +1,98 @@
package playback
import (
"errors"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
var errNoMatches = errors.New("no matches found")
func withoutValue(data map[string]string, field string) map[string]string {
result := make(map[string]string)
for k, v := range data {
result[k] = v
}
result[field] = ""
return result
}
func TestParseAuthHeader(t *testing.T) {
defaultHeader := "AWS4-HMAC-SHA256 Credential=oid0cid/20210809/us-east-1/s3/aws4_request, SignedHeaders=host;x-amz-content-sha256;x-amz-date, Signature=2811ccb9e242f41426738fb1f"
defaultAuthInfo := map[string]string{
"access_key_id": "oid0cid",
"service": "s3",
"region": "us-east-1",
"v4_signature": "2811ccb9e242f41426738fb1f",
"signed_header_fields": "host;x-amz-content-sha256;x-amz-date",
"date": "20210809",
}
for _, tc := range []struct {
title string
header string
err error
expected map[string]string
}{
{
title: "correct full header",
header: defaultHeader,
err: nil,
expected: defaultAuthInfo,
},
{
title: "correct with empty region",
header: strings.Replace(defaultHeader, "/us-east-1/", "//", 1),
err: nil,
expected: withoutValue(defaultAuthInfo, "region"),
},
{
title: "empty access key",
header: strings.Replace(defaultHeader, "oid0cid", "", 1),
err: errNoMatches,
expected: nil,
},
{
title: "empty service",
header: strings.Replace(defaultHeader, "/s3/", "//", 1),
err: errNoMatches,
expected: nil,
},
{
title: "empty date",
header: strings.Replace(defaultHeader, "/20210809/", "//", 1),
err: errNoMatches,
expected: nil,
},
{
title: "empty v4_signature",
header: strings.Replace(defaultHeader, "Signature=2811ccb9e242f41426738fb1f",
"Signature=", 1),
err: errNoMatches,
expected: nil,
},
{
title: "empty signed_fields",
header: strings.Replace(defaultHeader, "SignedHeaders=host;x-amz-content-sha256;x-amz-date",
"SignedHeaders=", 1),
err: errNoMatches,
expected: nil,
},
{
title: "empty signed_fields",
header: strings.Replace(defaultHeader, "SignedHeaders=host;x-amz-content-sha256;x-amz-date",
"SignedHeaders=", 1),
err: errNoMatches,
expected: nil,
},
} {
t.Run(tc.title, func(t *testing.T) {
authInfo, err := parseAuthHeader(tc.header)
require.Equal(t, err, tc.err, tc.header)
require.Equal(t, tc.expected, authInfo, tc.header)
})
}
}

cmd/s3-playback/main.go Normal file
View file

@ -0,0 +1,20 @@
package main
import (
"context"
"os"
"os/signal"
"syscall"
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/cmd/s3-playback/modules"
)
func main() {
ctx, _ := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM, syscall.SIGHUP)
if cmd, err := modules.Execute(ctx); err != nil {
cmd.PrintErrln("Error:", err.Error())
cmd.PrintErrf("Run '%v --help' for usage.\n", cmd.CommandPath())
os.Exit(1)
}
}

View file

@ -0,0 +1,66 @@
package modules
import (
"context"
"os"
"strings"
"time"
"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/internal/version"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/spf13/viper"
)
const (
defaultPrintResponseLimit = 1024
cfgConfigPath = "config"
cfgHTTPTimeoutFlag = "http-timeout"
cfgSkipVerifyTLS = "skip-verify-tls"
)
var (
cfgFile string
rootCmd = &cobra.Command{
Use: "frostfs-s3-playback",
Version: version.Version,
Short: "FrostFS S3 Traffic Playback",
Long: "Helps to reproduce s3 commands from log files",
Example: "frostfs-s3-playback [--skip-verify-tls] [--http-timeout <timeout>] " +
"[--version] --config <config_path> <command>",
SilenceUsage: true,
SilenceErrors: true,
PersistentPreRunE: func(cmd *cobra.Command, _ []string) error {
return viper.BindPFlags(cmd.Flags())
},
RunE: func(cmd *cobra.Command, _ []string) error {
return cmd.Help()
},
}
)
func Execute(ctx context.Context) (*cobra.Command, error) {
return rootCmd.ExecuteContextC(ctx)
}
func initConfig() {
viper.SetConfigFile(cfgFile)
_ = viper.ReadInConfig()
}
func init() {
cobra.OnInitialize(initConfig)
cobra.EnableTraverseRunHooks = true
rootCmd.SetGlobalNormalizationFunc(func(_ *pflag.FlagSet, name string) pflag.NormalizedName {
return pflag.NormalizedName(strings.ReplaceAll(name, "_", "-"))
})
rootCmd.PersistentFlags().StringVar(&cfgFile, cfgConfigPath, "", "configuration filepath")
_ = rootCmd.MarkPersistentFlagRequired(cfgConfigPath)
_ = rootCmd.MarkPersistentFlagFilename(cfgConfigPath)
rootCmd.PersistentFlags().Duration(cfgHTTPTimeoutFlag, time.Minute, "http request timeout")
rootCmd.PersistentFlags().Bool(cfgSkipVerifyTLS, false, "skip TLS certificate verification")
rootCmd.SetOut(os.Stdout)
rootCmd.AddCommand(runCmd)
}
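`runCmd` is registered here but defined in a file that is not part of this excerpt. Purely for orientation, a hedged sketch of how a cobra subcommand plugged into this root command can read its options; the real `run` command's flags and behaviour may differ:

```go
package modules

import (
	"github.com/spf13/cobra"
	"github.com/spf13/viper"
)

// Illustrative only: not the tool's actual run command.
var exampleCmd = &cobra.Command{
	Use:   "example",
	Short: "Show how options bound to viper reach a subcommand",
	RunE: func(cmd *cobra.Command, _ []string) error {
		// Flags are bound to viper in the root PersistentPreRunE, so a set flag
		// wins and an unset one falls back to the value from the config file.
		endpoint := viper.GetString("endpoint")
		logPath := viper.GetString("log")
		cmd.Println("replaying", logPath, "against", endpoint)
		return nil
	},
}

func init() {
	exampleCmd.Flags().String("endpoint", "", "s3-gw endpoint URL")
	exampleCmd.Flags().String("log", "./request.log", "path to the log file")
	rootCmd.AddCommand(exampleCmd)
}
```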

View file

@@ -57,7 +57,7 @@ S3_GW_HTTP_LOGGING_ENABLED=false
 # max body size to log
 S3_GW_HTTP_LOGGING_MAX_BODY=1024
 # max log size in Mb
-S3_GW_HTTP_LOGGING_MAX_LOG_SIZE: 20
+S3_GW_HTTP_LOGGING_MAX_LOG_SIZE=20
 # use log compression
 S3_GW_HTTP_LOGGING_GZIP=true
 # possible destination output values: filesystem path, url, "stdout", "stderr"

View file

@ -0,0 +1,8 @@
endpoint: http://localhost:8084
log: ./log/multipart11.log
credentials:
access_key: 5KYq6dmkwT1tnStGQi7XcjHStNuRtm8e1LmrPhDwUPk10BjfRbepqLAcTo1iizhv5ifGsdq1TUKCribFeRcATwHxh
secret_key: 632c2edcad237e50fb39a42edb22bff0831f9b55561f3b48c22a3817012cc87e
http_timeout: 60s
skip_verify_tls: false
print_response_limit: 1024

View file

@@ -380,6 +380,11 @@ logger:
 ### `http_logging` section
+Can be enabled only in builds with the `loghttp` build tag. To build with the `loghttp` tag, pass the `GOFLAGS` variable to `make`:
+```bash
+make GOFLAGS="-tags=loghttp" [target]
+```
+
 ```yaml
 http_logging:
   enabled: false
@@ -387,7 +392,6 @@ http_logging:
   max_log_size: 20
   gzip: true
   destination: stdout
-  log_response: true
 ```
 | Parameter | Type | SIGHUP reload | Default value | Description |
@@ -397,7 +401,6 @@ http_logging:
 | `max_log_size` | `int` | yes | `50` | Log file size threshold (in megabytes) to be moved in backup file. After reaching threshold, initial filename is appended with timestamp. And new empty file with initial name is created. |
 | `gzip` | `bool` | yes | `false` | Whether to enable Gzip compression to backup log files. |
 | `destination` | `string` | yes | `stdout` | Specify path for log output. Accepts log file path, or "stdout" and "stderr" reserved words to print in output streams. File and folders are created if necessary. |
-| `log_response` | bool | yes | true | Whether to attach response body to the log. |
 ### `cache` section

docs/playback.md Normal file
View file

@ -0,0 +1,48 @@
# FrostFS S3 Playback
Playback is a tool for reproducing requests to `frostfs-s3-gw` in a dev environment. Request logs can be gathered
from `s3-gw` via the HTTP logger, which is enabled by building with the `loghttp` build tag
and setting the `http_logging.enabled` option to `true` in the `s3-gw` configuration.
## Commands
`run` - reads the log file and replays the recorded requests against the specified endpoint
#### Example
```bash
frostfs-s3-playback --config <config_path> run [--endpoint=<endpoint>] [--log=<log_path>]
```
## Configuration
Playback accepts a path to a YAML configuration file with the following options:
```yaml
endpoint: http://localhost:8084
log: ./request.log
env: .env
credentials:
access_key: CAtUxDSSFtuVyVCjHTMhwx3eP3YSPo5ffwbPcnKfcdrD06WwUSn72T5EBNe3jcgjL54rmxFM6u3nUAoNBps8qJ1PD
secret_key: 560027d81c277de7378f71cbf12a32e4f7f541de724be59bcfdbfdc925425f30
http_timeout: 60s
skip_verify_tls: true
```
The configuration file path is passed via the required `--config` flag.
If a corresponding flag is set, it overrides the parameter from the config file.
### Configuration parameters
#### Global parameters
| Config parameter name | Flag name | Type | Default value | Description |
|-------------------------|-------------------|------------|---------------|-------------------------------------------------------------------------------|
| - | `config` | `string` | - | config file path (e.g. `./config/playback.yaml`) |
| `http_timeout` | `http-timeout` | `duration` | `60s` | http request timeout |
| `skip_verify_tls` | `skip-verify-tls` | `bool` | `false` | skips tls certificate verification for https endpoints |
| `credentials.access_key` | - | `string` | - | AWS access key id |
| `credentials.secret_key` | - | `string` | - | AWS secret key |
| `print_response_limit` | - | `int` | `1024` | max response length to be printed to stdout; the rest of the body is omitted |
#### `run` command parameters
| Config parameter name | Flag name | Type | Default value | Description |
|-----------------------|-----------|--------|---------------|--------------------------------------------------------|
| `endpoint` | `endpoint` | `string` | - | s3-gw endpoint URL |
| `log` | `log` | `string` | `./request.log` | path to the log file, either absolute or relative |
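The log entries consumed by `run` correspond to the `LoggedRequest` structure added in this change (fields `from`, `URI`, `method`, `payload`, `response`, `query`, `headers`). A hedged sketch of decoding them, assuming one JSON object per line; the actual reader used by the tool is not shown in this excerpt:

```go
package replay

import (
	"bufio"
	"encoding/json"
	"fmt"
	"os"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/cmd/s3-playback/internal/playback" // assumed import path
)

// listRequests prints the method and URI of every entry in a playback log.
func listRequests(path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		var entry playback.LoggedRequest
		if err := json.Unmarshal(scanner.Bytes(), &entry); err != nil {
			return fmt.Errorf("decode log entry: %w", err)
		}
		fmt.Println(entry.Method, entry.URI)
	}
	return scanner.Err()
}
```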

go.mod
View file

@@ -10,7 +10,8 @@ require (
 	git.frostfs.info/TrueCloudLab/policy-engine v0.0.0-20240821072038-a1386f6d259a
 	git.frostfs.info/TrueCloudLab/zapjournald v0.0.0-20240124114243-cb2e66427d02
 	github.com/aws/aws-sdk-go v1.44.6
-	github.com/aws/aws-sdk-go-v2 v1.18.1
+	github.com/aws/aws-sdk-go-v2 v1.30.3
+	github.com/aws/aws-sdk-go-v2/credentials v1.17.27
 	github.com/bluele/gcache v0.0.2
 	github.com/go-chi/chi/v5 v5.0.8
 	github.com/google/uuid v1.6.0
@@ -45,7 +46,7 @@ require (
 	git.frostfs.info/TrueCloudLab/tzhash v1.8.0 // indirect
 	github.com/VictoriaMetrics/easyproto v0.1.4 // indirect
 	github.com/antlr4-go/antlr/v4 v4.13.0 // indirect
-	github.com/aws/smithy-go v1.13.5 // indirect
+	github.com/aws/smithy-go v1.20.3 // indirect
 	github.com/beorn7/perks v1.0.1 // indirect
 	github.com/cenkalti/backoff/v4 v4.2.1 // indirect
 	github.com/cespare/xxhash/v2 v2.2.0 // indirect

go.sum
View file

@@ -66,10 +66,12 @@ github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8
 github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g=
 github.com/aws/aws-sdk-go v1.44.6 h1:Y+uHxmZfhRTLX2X3khkdxCoTZAyGEX21aOUHe1U6geg=
 github.com/aws/aws-sdk-go v1.44.6/go.mod h1:y4AeaBuwd2Lk+GepC1E9v0qOiTws0MIWAX4oIKwKHZo=
-github.com/aws/aws-sdk-go-v2 v1.18.1 h1:+tefE750oAb7ZQGzla6bLkOwfcQCEtC5y2RqoqCeqKo=
-github.com/aws/aws-sdk-go-v2 v1.18.1/go.mod h1:uzbQtefpm44goOPmdKyAlXSNcwlRgF3ePWVW6EtJvvw=
-github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8=
-github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
+github.com/aws/aws-sdk-go-v2 v1.30.3 h1:jUeBtG0Ih+ZIFH0F4UkmL9w3cSpaMv9tYYDbzILP8dY=
+github.com/aws/aws-sdk-go-v2 v1.30.3/go.mod h1:nIQjQVp5sfpQcTc9mPSr1B0PaWK5ByX9MOoDadSN4lc=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.27 h1:2raNba6gr2IfA0eqqiP2XiQ0UVOpGPgDSi0I9iAP+UI=
+github.com/aws/aws-sdk-go-v2/credentials v1.17.27/go.mod h1:gniiwbGahQByxan6YjQUMcW4Aov6bLC3m+evgcoN4r4=
+github.com/aws/smithy-go v1.20.3 h1:ryHwveWzPV5BIof6fyDvor6V3iUL7nTfiTKXHiW05nE=
+github.com/aws/smithy-go v1.20.3/go.mod h1:krry+ya/rV9RDcV/Q16kpu6ypI4K2czasz0NC3qS14E=
 github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
 github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
 github.com/bits-and-blooms/bitset v1.8.0 h1:FD+XqgOZDUxxZ8hzoBFuV9+cGWY9CslN6d5MS5JVb4c=
@@ -177,7 +179,6 @@ github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
 github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
 github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=

View file

@@ -1,15 +1,15 @@
-package layer
+package detector

 import (
 	"io"
-	"net/http"
 )

 type (
-	detector struct {
+	Detector struct {
 		io.Reader
 		err  error
 		data []byte
+		detectFunc func([]byte) string
 	}
 	errReader struct {
 		data []byte
@@ -36,23 +36,24 @@ func (r *errReader) Read(b []byte) (int, error) {
 	return n, nil
 }

-func newDetector(reader io.Reader) *detector {
-	return &detector{
+func NewDetector(reader io.Reader, detectFunc func([]byte) string) *Detector {
+	return &Detector{
 		data:   make([]byte, contentTypeDetectSize),
 		Reader: reader,
+		detectFunc: detectFunc,
 	}
 }

-func (d *detector) Detect() (string, error) {
+func (d *Detector) Detect() (string, error) {
 	n, err := d.Reader.Read(d.data)
 	if err != nil && err != io.EOF {
 		d.err = err
 		return "", err
 	}
 	d.data = d.data[:n]
-	return http.DetectContentType(d.data), nil
+	return d.detectFunc(d.data), nil
 }

-func (d *detector) MultiReader() io.Reader {
+func (d *Detector) RestoredReader() io.Reader {
 	return io.MultiReader(newReader(d.data, d.err), d.Reader)
 }
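The detector generalizes the old layer-local helper: it reads at most `contentTypeDetectSize` bytes, runs the supplied detect function over them, and `RestoredReader` glues the consumed prefix back in front of the remaining stream, so the caller still sees the full body. A minimal usage sketch mirroring the `PutObject` call site above:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/pkg/detector"
)

func main() {
	body := strings.NewReader("<Tagging><TagSet></TagSet></Tagging>")

	// Sniff the content type without losing the sniffed bytes.
	d := detector.NewDetector(body, http.DetectContentType)
	if contentType, err := d.Detect(); err == nil {
		fmt.Println("detected:", contentType)
	}

	// The restored reader yields the detected prefix followed by the rest of the body.
	_, _ = io.Copy(os.Stdout, d.RestoredReader())
}
```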

playback/utils/utils.go Normal file
View file

@ -0,0 +1,49 @@
package utils
import (
"bytes"
"encoding/base64"
"encoding/xml"
"io"
)
type nopCloseWriter struct {
io.Writer
}
func (b nopCloseWriter) Close() error {
return nil
}
const (
nonXML = "nonXML"
typeXML = "application/XML"
)
func DetectXML(data []byte) string {
token, err := xml.NewDecoder(bytes.NewReader(data)).RawToken()
if err != nil {
return nonXML
}
switch token.(type) {
case xml.StartElement, xml.ProcInst:
return typeXML
}
return nonXML
}
func ChooseWriter(dataType string, bodyWriter io.Writer) io.WriteCloser {
writeCloser := nopCloseWriter{bodyWriter}
if dataType == typeXML {
return writeCloser
}
return base64.NewEncoder(base64.StdEncoding, bodyWriter)
}
func ChooseReader(dataType string, bodyReader io.Reader) io.Reader {
if dataType == typeXML {
return bodyReader
}
return base64.NewDecoder(base64.StdEncoding, bodyReader)
}
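`DetectXML`, `ChooseWriter` and `ChooseReader` work as a pair: bodies that look like XML are stored verbatim, anything else is base64-encoded on write and decoded again on read, which matches how `httpBody.UnmarshalJSON` above restores payloads. A small round-trip sketch; the import path is an assumption since it is not visible in this diff:

```go
package main

import (
	"bytes"
	"fmt"

	"git.frostfs.info/TrueCloudLab/frostfs-s3-gw/cmd/s3-playback/internal/utils" // assumed import path
)

// roundTrip stores a body the way the logger would and reads it back.
func roundTrip(body []byte) ([]byte, error) {
	dataType := utils.DetectXML(body)

	// Encode: XML stays as-is, everything else is base64-encoded.
	var buf bytes.Buffer
	w := utils.ChooseWriter(dataType, &buf)
	if _, err := w.Write(body); err != nil {
		return nil, err
	}
	if err := w.Close(); err != nil { // flushes the base64 encoder if one was chosen
		return nil, err
	}

	// Decode: pick the matching reader for the stored form.
	var out bytes.Buffer
	if _, err := out.ReadFrom(utils.ChooseReader(dataType, &buf)); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}

func main() {
	got, err := roundTrip([]byte("plain text payload"))
	fmt.Println(string(got), err)
}
```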