forked from TrueCloudLab/frostfs-http-gw
[#121] Add canonicalizer
Some headers, such as 'Authorization', might be passed in a non-canonical form by proxy servers. The server does not normalize header names, so that custom object attributes can be received exactly as sent. Therefore, the app has to normalize all non-object-attribute headers by itself.

Signed-off-by: Alex Vanin <a.vanin@yadro.com>
parent 1737f1d95f
commit d9cbd302b1
1 changed file with 39 additions and 6 deletions
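For context, here is a minimal standalone sketch (not part of this commit) of the behaviour the canonicalizer addresses, using only fasthttp's public header API (DisableNormalizing, EnableNormalizing, SetBytesKV, Peek). With name normalization disabled, which is what lets custom attribute headers (those starting with utils.UserAttributeHeaderPrefix, presumably something like "X-Attribute-") keep their exact case, a header that a proxy forwards as lowercase "authorization" is not found under its canonical name until it is re-set with normalization temporarily enabled:

    package main

    import (
        "fmt"

        "github.com/valyala/fasthttp"
    )

    func main() {
        var h fasthttp.RequestHeader

        // Mimic the gateway: keep header names exactly as the client sent them.
        h.DisableNormalizing()

        // What a proxy might forward: a non-canonical auth header plus a custom
        // object attribute (illustrative name) whose case must be preserved.
        h.Set("authorization", "Bearer <token>")
        h.Set("X-Attribute-FileName", "cat.jpg")

        // Lookup by the canonical name misses the lowercase entry.
        fmt.Printf("before: %q\n", h.Peek("Authorization")) // before: ""

        // What the canonicalizer does for non-attribute headers: re-set the
        // key/value pair with normalization temporarily enabled.
        h.EnableNormalizing()
        h.SetBytesKV([]byte("authorization"), []byte("Bearer <token>"))
        h.DisableNormalizing()

        fmt.Printf("after: %q\n", h.Peek("Authorization")) // after: "Bearer <token>"
    }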
@@ -1,6 +1,7 @@
 package main
 
 import (
+    "bytes"
     "context"
     "errors"
     "fmt"
@@ -534,15 +535,15 @@ func (a *app) configureRouter(handler *handler.Handler) {
         response.Error(r, "Method Not Allowed", fasthttp.StatusMethodNotAllowed)
     }
 
-    r.POST("/upload/{cid}", a.logger(a.tokenizer(a.tracer(a.reqNamespace(handler.Upload)))))
+    r.POST("/upload/{cid}", a.logger(a.canonicalizer(a.tokenizer(a.tracer(a.reqNamespace(handler.Upload))))))
     a.log.Info(logs.AddedPathUploadCid)
-    r.GET("/get/{cid}/{oid:*}", a.logger(a.tokenizer(a.tracer(a.reqNamespace(handler.DownloadByAddressOrBucketName)))))
-    r.HEAD("/get/{cid}/{oid:*}", a.logger(a.tokenizer(a.tracer(a.reqNamespace(handler.HeadByAddressOrBucketName)))))
+    r.GET("/get/{cid}/{oid:*}", a.logger(a.canonicalizer(a.tokenizer(a.tracer(a.reqNamespace(handler.DownloadByAddressOrBucketName))))))
+    r.HEAD("/get/{cid}/{oid:*}", a.logger(a.canonicalizer(a.tokenizer(a.tracer(a.reqNamespace(handler.HeadByAddressOrBucketName))))))
     a.log.Info(logs.AddedPathGetCidOid)
-    r.GET("/get_by_attribute/{cid}/{attr_key}/{attr_val:*}", a.logger(a.tokenizer(a.tracer(a.reqNamespace(handler.DownloadByAttribute)))))
-    r.HEAD("/get_by_attribute/{cid}/{attr_key}/{attr_val:*}", a.logger(a.tokenizer(a.tracer(a.reqNamespace(handler.HeadByAttribute)))))
+    r.GET("/get_by_attribute/{cid}/{attr_key}/{attr_val:*}", a.logger(a.canonicalizer(a.tokenizer(a.tracer(a.reqNamespace(handler.DownloadByAttribute))))))
+    r.HEAD("/get_by_attribute/{cid}/{attr_key}/{attr_val:*}", a.logger(a.canonicalizer(a.tokenizer(a.tracer(a.reqNamespace(handler.HeadByAttribute))))))
     a.log.Info(logs.AddedPathGetByAttributeCidAttrKeyAttrVal)
-    r.GET("/zip/{cid}/{prefix:*}", a.logger(a.tokenizer(a.tracer(a.reqNamespace(handler.DownloadZipped)))))
+    r.GET("/zip/{cid}/{prefix:*}", a.logger(a.canonicalizer(a.tokenizer(a.tracer(a.reqNamespace(handler.DownloadZipped))))))
     a.log.Info(logs.AddedPathZipCidPrefix)
 
     a.webServer.Handler = r.Handler
@@ -559,6 +560,38 @@ func (a *app) logger(h fasthttp.RequestHandler) fasthttp.RequestHandler {
     }
 }
 
+func (a *app) canonicalizer(h fasthttp.RequestHandler) fasthttp.RequestHandler {
+    return func(req *fasthttp.RequestCtx) {
+        // Regardless of the DisableHeaderNamesNormalizing setting, some headers
+        // MUST be normalized for the request to be processed correctly. They are
+        // normalized here.
+
+        toAddKeys := make([][]byte, 0, 10)
+        toAddValues := make([][]byte, 0, 10)
+        prefix := []byte(utils.UserAttributeHeaderPrefix)
+
+        req.Request.Header.VisitAll(func(k, v []byte) {
+            if bytes.HasPrefix(k, prefix) {
+                return
+            }
+            toAddKeys = append(toAddKeys, k)
+            toAddValues = append(toAddValues, v)
+        })
+
+        // This is safe to do after all headers have been read into the header structure.
+        req.Request.Header.EnableNormalizing()
+
+        for i := range toAddKeys {
+            req.Request.Header.SetBytesKV(toAddKeys[i], toAddValues[i])
+        }
+
+        // Return the normalization setting back.
+        req.Request.Header.DisableNormalizing()
+
+        h(req)
+    }
+}
+
 func (a *app) tokenizer(h fasthttp.RequestHandler) fasthttp.RequestHandler {
     return func(req *fasthttp.RequestCtx) {
         appCtx, err := tokens.StoreBearerTokenAppCtx(a.ctx, req)
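A note on ordering, inferred from the diff rather than stated in the commit: the canonicalizer sits between the logger and the tokenizer in every chain, so by the time tokens.StoreBearerTokenAppCtx inspects the request, the bearer-token header has presumably already been re-set under its canonical name, while headers carrying the utils.UserAttributeHeaderPrefix prefix are skipped and keep exactly the case the client sent, since they end up as custom object attributes.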