Update k8s client-go to v6.0.0 (#1340)
* Update k8s client-go to v6.0.0

This fix updates k8s client-go to v6.0.0, as CoreDNS supports Kubernetes 1.9 and v6.0.0 is the recommended client version for it. Two fairly large changes are needed:

1. k8s.io/client-go/pkg/api/v1 has moved to k8s.io/api/core/v1 (the repo changed from `client-go` to `api`).
2. kubernetes.Clientset adds one extra layer, so calls such as `kubernetes.Clientset.Services()` become `kubernetes.Clientset.CoreV1().Services()`.

Also, we have to pin specific commits of `k8s.io/apimachinery` and the newly introduced `k8s.io/api`, because go dep still cannot figure out the right versions to fetch on its own.

Signed-off-by: Yong Tang <yong.tang.github@outlook.com>

* Update vendor with `dep ensure --update` and `dep prune`

Signed-off-by: Yong Tang <yong.tang.github@outlook.com>
parent bce7f5fbec
commit 7fe5b0bb1f
1278 changed files with 123970 additions and 277876 deletions
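The shape of both breaking changes at a call site, as a minimal sketch (the kubeconfig path and namespace are placeholders, not values from this commit; client-go v6.0.0 still uses the pre-context, single-argument List API):

package main

import (
	"fmt"

	// Types moved out of client-go: k8s.io/client-go/pkg/api/v1 is now k8s.io/api/core/v1.
	api "k8s.io/api/core/v1"
	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	config, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig") // placeholder path
	if err != nil {
		panic(err)
	}
	clientset, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	// The typed clients gained one extra layer: Services() is now reached via CoreV1().
	svcs, err := clientset.CoreV1().Services("kube-system").List(meta.ListOptions{})
	if err != nil {
		panic(err)
	}
	var list *api.ServiceList = svcs // the list type now comes from k8s.io/api/core/v1
	fmt.Printf("%d services\n", len(list.Items))
}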
Gopkg.lock (generated): 353 lines changed
@@ -25,17 +25,17 @@
   revision = "327ebb6c2b6df8bf075da02ef45a2a034e9b79ba"
   version = "0.11.0"
 
-[[projects]]
-  name = "github.com/asaskevich/govalidator"
-  packages = ["."]
-  revision = "521b25f4b05fd26bec69d9dedeb8f9c9a83939a8"
-  version = "v8"
-
 [[projects]]
   name = "github.com/coreos/etcd"
-  packages = ["client","pkg/pathutil","pkg/srv","pkg/types","version"]
-  revision = "1e1dbb23924672c6cd72c62ee0db2b45f778da71"
-  version = "v3.2.11"
+  packages = [
+    "client",
+    "pkg/pathutil",
+    "pkg/srv",
+    "pkg/types",
+    "version"
+  ]
+  revision = "8c43bc2c1ea98b535b652426a9b0f4344fcbda68"
+  version = "v3.2.13"
 
 [[projects]]
   name = "github.com/coreos/go-semver"
@@ -55,12 +55,6 @@
   packages = ["."]
   revision = "2cf77a2b5e11ac8d0ba3892772ac8e1f7b528344"
 
-[[projects]]
-  name = "github.com/docker/distribution"
-  packages = ["digest","reference"]
-  revision = "48294d928ced5dd9b378f7fd7c6f5da3ff3f2c89"
-  version = "v2.6.2"
-
 [[projects]]
   name = "github.com/eapache/go-resiliency"
   packages = ["breaker"]
@@ -81,16 +75,13 @@
 
 [[projects]]
   name = "github.com/emicklei/go-restful"
-  packages = [".","log"]
+  packages = [
+    ".",
+    "log"
+  ]
   revision = "5741799b275a3c4a5a9623a993576d7545cf7b5c"
   version = "v2.4.0"
 
-[[projects]]
-  name = "github.com/emicklei/go-restful-swagger12"
-  packages = ["."]
-  revision = "dcef7f55730566d41eae5db10e7d6981829720f6"
-  version = "1.0.1"
-
 [[projects]]
   branch = "master"
   name = "github.com/farsightsec/golang-framestream"
@@ -109,18 +100,6 @@
   revision = "390ab7935ee28ec6b286364bba9b4dd6410cb3d5"
   version = "v0.3.0"
 
-[[projects]]
-  branch = "master"
-  name = "github.com/go-openapi/analysis"
-  packages = ["."]
-  revision = "2bbaa248df987bc6fbd0210da05071e111dec40c"
-
-[[projects]]
-  branch = "master"
-  name = "github.com/go-openapi/errors"
-  packages = ["."]
-  revision = "03cfca65330da08a5a440053faf994a3c682b5bf"
-
 [[projects]]
   branch = "master"
   name = "github.com/go-openapi/jsonpointer"
@@ -133,23 +112,11 @@
   packages = ["."]
   revision = "36d33bfe519efae5632669801b180bf1a245da3b"
 
-[[projects]]
-  branch = "master"
-  name = "github.com/go-openapi/loads"
-  packages = ["."]
-  revision = "2a2b323bab96e6b1fdee110e57d959322446e9c9"
-
 [[projects]]
   branch = "master"
   name = "github.com/go-openapi/spec"
   packages = ["."]
-  revision = "01738944bdee0f26bf66420c5b17d54cfdf55341"
+  revision = "fa03337d7da5735229ee8f5e9d5d0b996014b7f8"
 
-[[projects]]
-  branch = "master"
-  name = "github.com/go-openapi/strfmt"
-  packages = ["."]
-  revision = "610b6cacdcde6852f4de68998bd20ce1dac85b22"
-
 [[projects]]
   branch = "master"
@@ -159,7 +126,10 @@
 
 [[projects]]
   name = "github.com/gogo/protobuf"
-  packages = ["proto","sortkeys"]
+  packages = [
+    "proto",
+    "sortkeys"
+  ]
   revision = "342cbe0a04158f6dcb03ca0079991a51a4248c02"
   version = "v0.5"
@@ -172,7 +142,13 @@
 [[projects]]
   branch = "master"
   name = "github.com/golang/protobuf"
-  packages = ["proto","ptypes","ptypes/any","ptypes/duration","ptypes/timestamp"]
+  packages = [
+    "proto",
+    "ptypes",
+    "ptypes/any",
+    "ptypes/duration",
+    "ptypes/timestamp"
+  ]
   revision = "1e59b77b52bf8e4b449a57e6f79f21226d571845"
 
 [[projects]]
@@ -181,12 +157,37 @@
   packages = ["."]
   revision = "553a641470496b2327abcac10b36396bd98e45c9"
 
+[[projects]]
+  branch = "master"
+  name = "github.com/google/btree"
+  packages = ["."]
+  revision = "316fb6d3f031ae8f4d457c6c5186b9e3ded70435"
+
 [[projects]]
   branch = "master"
   name = "github.com/google/gofuzz"
   packages = ["."]
   revision = "24818f796faf91cd76ec7bddd72458fbced7a6c1"
 
+[[projects]]
+  name = "github.com/googleapis/gnostic"
+  packages = [
+    "OpenAPIv2",
+    "compiler",
+    "extensions"
+  ]
+  revision = "ee43cbb60db7bd22502942cccbc39059117352ab"
+  version = "v0.1.0"
+
+[[projects]]
+  branch = "master"
+  name = "github.com/gregjones/httpcache"
+  packages = [
+    ".",
+    "diskcache"
+  ]
+  revision = "2bcd89a1743fd4b373f7370ce8ddc14dfbd18229"
+
 [[projects]]
   branch = "master"
   name = "github.com/grpc-ecosystem/grpc-opentracing"
@@ -196,7 +197,10 @@
 [[projects]]
   branch = "master"
   name = "github.com/hashicorp/golang-lru"
-  packages = [".","simplelru"]
+  packages = [
+    ".",
+    "simplelru"
+  ]
   revision = "0a025b7e63adc15a622f29b0b2c4c3848243bbf6"
 
 [[projects]]
@@ -211,6 +215,12 @@
   revision = "7fe0c75c13abdee74b09fcacef5ea1c6bba6a874"
   version = "0.2.4"
 
+[[projects]]
+  name = "github.com/json-iterator/go"
+  packages = ["."]
+  revision = "f7279a603edee96fe7764d3de9c6ff8cf9970994"
+  version = "1.0.4"
+
 [[projects]]
   branch = "master"
   name = "github.com/juju/ratelimit"
@@ -226,7 +236,11 @@
 [[projects]]
   branch = "master"
   name = "github.com/mailru/easyjson"
-  packages = ["buffer","jlexer","jwriter"]
+  packages = [
+    "buffer",
+    "jlexer",
+    "jwriter"
+  ]
   revision = "32fa128f234d041f196a9f3e0fea5ac9772c08e1"
 
 [[projects]]
@@ -235,12 +249,6 @@
   revision = "3247c84500bff8d9fb6d579d800f20b3e091582c"
   version = "v1.0.0"
 
-[[projects]]
-  branch = "master"
-  name = "github.com/mitchellh/mapstructure"
-  packages = ["."]
-  revision = "06020f85339e21b2478f756a78e295255ffa4d6a"
-
 [[projects]]
   branch = "master"
   name = "github.com/opentracing-contrib/go-observer"
@@ -249,21 +257,44 @@
 
 [[projects]]
   name = "github.com/opentracing/opentracing-go"
-  packages = [".","ext","log"]
+  packages = [
+    ".",
+    "ext",
+    "log"
+  ]
   revision = "1949ddbfd147afd4d964a9f00b24eb291e0e7c38"
   version = "v1.0.2"
 
 [[projects]]
   name = "github.com/openzipkin/zipkin-go-opentracing"
-  packages = [".","flag","thrift/gen-go/scribe","thrift/gen-go/zipkincore","types","wire"]
+  packages = [
+    ".",
+    "flag",
+    "thrift/gen-go/scribe",
+    "thrift/gen-go/zipkincore",
+    "types",
+    "wire"
+  ]
   revision = "45e90b00710a4c34a1a7d8a78d90f9b010b0bd4d"
   version = "v0.3.2"
 
+[[projects]]
+  branch = "master"
+  name = "github.com/petar/GoLLRB"
+  packages = ["llrb"]
+  revision = "53be0d36a84c2a886ca057d34b6aa4468df9ccb4"
+
+[[projects]]
+  name = "github.com/peterbourgon/diskv"
+  packages = ["."]
+  revision = "5f041e8faa004a95c88a202771f4cc3e991971e6"
+  version = "v2.0.1"
+
 [[projects]]
   name = "github.com/pierrec/lz4"
   packages = ["."]
-  revision = "08c27939df1bd95e881e2c2367a749964ad1fceb"
-  version = "v1.0.1"
+  revision = "2fcda4cb7018ce05a25959d2fe08c83e3329f169"
+  version = "v1.1"
 
 [[projects]]
   name = "github.com/pierrec/xxHash"
@@ -280,7 +311,11 @@
 [[projects]]
   branch = "master"
   name = "github.com/prometheus/common"
-  packages = ["expfmt","internal/bitbucket.org/ww/goautoneg","model"]
+  packages = [
+    "expfmt",
+    "internal/bitbucket.org/ww/goautoneg",
+    "model"
+  ]
   revision = "2e54d0b93cba2fd133edc32211dcc32c06ef72ca"
 
 [[projects]]
@@ -304,19 +339,30 @@
   branch = "master"
   name = "golang.org/x/crypto"
   packages = ["ssh/terminal"]
-  revision = "94eea52f7b742c7cbe0b03b22f0c4c8631ece122"
+  revision = "0fcca4842a8d74bfddc2c96a073bd2a4d2a7a2e8"
 
 [[projects]]
   branch = "master"
   name = "golang.org/x/sys"
-  packages = ["unix","windows"]
-  revision = "571f7bbbe08da2a8955aed9d4db316e78630e9a3"
+  packages = [
+    "unix",
+    "windows"
+  ]
+  revision = "83801418e1b59fb1880e363299581ee543af32ca"
 
 [[projects]]
   branch = "master"
   name = "golang.org/x/text"
-  packages = ["internal/gen","internal/triegen","internal/ucd","transform","unicode/cldr","unicode/norm","width"]
-  revision = "d5a9226ed7dd70cade6ccae9d37517fe14dd9fee"
+  packages = [
+    "internal/gen",
+    "internal/triegen",
+    "internal/ucd",
+    "transform",
+    "unicode/cldr",
+    "unicode/norm",
+    "width"
+  ]
+  revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3"
 
 [[projects]]
   branch = "master"
@@ -326,7 +372,29 @@
 
 [[projects]]
   name = "google.golang.org/grpc"
-  packages = [".","balancer","balancer/roundrobin","codes","connectivity","credentials","encoding","grpclb/grpc_lb_v1/messages","grpclog","internal","keepalive","metadata","naming","peer","resolver","resolver/dns","resolver/passthrough","stats","status","tap","transport"]
+  packages = [
+    ".",
+    "balancer",
+    "balancer/roundrobin",
+    "codes",
+    "connectivity",
+    "credentials",
+    "encoding",
+    "grpclb/grpc_lb_v1/messages",
+    "grpclog",
+    "internal",
+    "keepalive",
+    "metadata",
+    "naming",
+    "peer",
+    "resolver",
+    "resolver/dns",
+    "resolver/passthrough",
+    "stats",
+    "status",
+    "tap",
+    "transport"
+  ]
   revision = "e687fa4e6424368ece6e4fe727cea2c806a0fcb4"
   version = "v1.8.2"
@@ -336,32 +404,155 @@
   revision = "3887ee99ecf07df5b447e9b00d9c0b2adaa9f3e4"
   version = "v0.9.0"
 
-[[projects]]
-  branch = "v2"
-  name = "gopkg.in/mgo.v2"
-  packages = ["bson","internal/json"]
-  revision = "3f83fa5005286a7fe593b055f0d7771a7dce4655"
-
 [[projects]]
   branch = "v2"
   name = "gopkg.in/yaml.v2"
   packages = ["."]
-  revision = "287cf08546ab5e7e37d55a84f7ed3fd1db036de5"
+  revision = "c95af922eae69f190717a0b7148960af8c55a072"
 
+[[projects]]
+  name = "k8s.io/api"
+  packages = [
+    "admissionregistration/v1alpha1",
+    "admissionregistration/v1beta1",
+    "apps/v1",
+    "apps/v1beta1",
+    "apps/v1beta2",
+    "authentication/v1",
+    "authentication/v1beta1",
+    "authorization/v1",
+    "authorization/v1beta1",
+    "autoscaling/v1",
+    "autoscaling/v2beta1",
+    "batch/v1",
+    "batch/v1beta1",
+    "batch/v2alpha1",
+    "certificates/v1beta1",
+    "core/v1",
+    "events/v1beta1",
+    "extensions/v1beta1",
+    "networking/v1",
+    "policy/v1beta1",
+    "rbac/v1",
+    "rbac/v1alpha1",
+    "rbac/v1beta1",
+    "scheduling/v1alpha1",
+    "settings/v1alpha1",
+    "storage/v1",
+    "storage/v1alpha1",
+    "storage/v1beta1"
+  ]
+  revision = "11147472b7c934c474a2c484af3c0c5210b7a3af"
+
 [[projects]]
   name = "k8s.io/apimachinery"
-  packages = ["pkg/api/equality","pkg/api/errors","pkg/api/meta","pkg/api/resource","pkg/apimachinery","pkg/apimachinery/announced","pkg/apimachinery/registered","pkg/apis/meta/v1","pkg/apis/meta/v1/unstructured","pkg/apis/meta/v1alpha1","pkg/conversion","pkg/conversion/queryparams","pkg/conversion/unstructured","pkg/fields","pkg/labels","pkg/openapi","pkg/runtime","pkg/runtime/schema","pkg/runtime/serializer","pkg/runtime/serializer/json","pkg/runtime/serializer/protobuf","pkg/runtime/serializer/recognizer","pkg/runtime/serializer/streaming","pkg/runtime/serializer/versioning","pkg/selection","pkg/types","pkg/util/cache","pkg/util/clock","pkg/util/diff","pkg/util/errors","pkg/util/framer","pkg/util/intstr","pkg/util/json","pkg/util/net","pkg/util/rand","pkg/util/runtime","pkg/util/sets","pkg/util/validation","pkg/util/validation/field","pkg/util/wait","pkg/util/yaml","pkg/version","pkg/watch","third_party/forked/golang/reflect"]
-  revision = "1fd2e63a9a370677308a42f24fd40c86438afddf"
+  packages = [
+    "pkg/api/errors",
+    "pkg/api/meta",
+    "pkg/api/resource",
+    "pkg/apis/meta/internalversion",
+    "pkg/apis/meta/v1",
+    "pkg/apis/meta/v1/unstructured",
+    "pkg/apis/meta/v1alpha1",
+    "pkg/conversion",
+    "pkg/conversion/queryparams",
+    "pkg/fields",
+    "pkg/labels",
+    "pkg/runtime",
+    "pkg/runtime/schema",
+    "pkg/runtime/serializer",
+    "pkg/runtime/serializer/json",
+    "pkg/runtime/serializer/protobuf",
+    "pkg/runtime/serializer/recognizer",
+    "pkg/runtime/serializer/streaming",
+    "pkg/runtime/serializer/versioning",
+    "pkg/selection",
+    "pkg/types",
+    "pkg/util/cache",
+    "pkg/util/clock",
+    "pkg/util/diff",
+    "pkg/util/errors",
+    "pkg/util/framer",
+    "pkg/util/intstr",
+    "pkg/util/json",
+    "pkg/util/net",
+    "pkg/util/runtime",
+    "pkg/util/sets",
+    "pkg/util/validation",
+    "pkg/util/validation/field",
+    "pkg/util/wait",
+    "pkg/util/yaml",
+    "pkg/version",
+    "pkg/watch",
+    "third_party/forked/golang/reflect"
+  ]
+  revision = "180eddb345a5be3a157cea1c624700ad5bd27b8f"
 
 [[projects]]
   name = "k8s.io/client-go"
-  packages = ["discovery","kubernetes","kubernetes/scheme","kubernetes/typed/admissionregistration/v1alpha1","kubernetes/typed/apps/v1beta1","kubernetes/typed/authentication/v1","kubernetes/typed/authentication/v1beta1","kubernetes/typed/authorization/v1","kubernetes/typed/authorization/v1beta1","kubernetes/typed/autoscaling/v1","kubernetes/typed/autoscaling/v2alpha1","kubernetes/typed/batch/v1","kubernetes/typed/batch/v2alpha1","kubernetes/typed/certificates/v1beta1","kubernetes/typed/core/v1","kubernetes/typed/extensions/v1beta1","kubernetes/typed/networking/v1","kubernetes/typed/policy/v1beta1","kubernetes/typed/rbac/v1alpha1","kubernetes/typed/rbac/v1beta1","kubernetes/typed/settings/v1alpha1","kubernetes/typed/storage/v1","kubernetes/typed/storage/v1beta1","pkg/api","pkg/api/v1","pkg/api/v1/ref","pkg/apis/admissionregistration","pkg/apis/admissionregistration/v1alpha1","pkg/apis/apps","pkg/apis/apps/v1beta1","pkg/apis/authentication","pkg/apis/authentication/v1","pkg/apis/authentication/v1beta1","pkg/apis/authorization","pkg/apis/authorization/v1","pkg/apis/authorization/v1beta1","pkg/apis/autoscaling","pkg/apis/autoscaling/v1","pkg/apis/autoscaling/v2alpha1","pkg/apis/batch","pkg/apis/batch/v1","pkg/apis/batch/v2alpha1","pkg/apis/certificates","pkg/apis/certificates/v1beta1","pkg/apis/extensions","pkg/apis/extensions/v1beta1","pkg/apis/networking","pkg/apis/networking/v1","pkg/apis/policy","pkg/apis/policy/v1beta1","pkg/apis/rbac","pkg/apis/rbac/v1alpha1","pkg/apis/rbac/v1beta1","pkg/apis/settings","pkg/apis/settings/v1alpha1","pkg/apis/storage","pkg/apis/storage/v1","pkg/apis/storage/v1beta1","pkg/util","pkg/util/parsers","pkg/version","rest","rest/watch","tools/auth","tools/cache","tools/clientcmd","tools/clientcmd/api","tools/clientcmd/api/latest","tools/clientcmd/api/v1","tools/metrics","transport","util/cert","util/flowcontrol","util/homedir","util/integer"]
-  revision = "d92e8497f71b7b4e0494e5bd204b48d34bd6f254"
-  version = "v4.0.0"
+  packages = [
+    "discovery",
+    "kubernetes",
+    "kubernetes/scheme",
+    "kubernetes/typed/admissionregistration/v1alpha1",
+    "kubernetes/typed/admissionregistration/v1beta1",
+    "kubernetes/typed/apps/v1",
+    "kubernetes/typed/apps/v1beta1",
+    "kubernetes/typed/apps/v1beta2",
+    "kubernetes/typed/authentication/v1",
+    "kubernetes/typed/authentication/v1beta1",
+    "kubernetes/typed/authorization/v1",
+    "kubernetes/typed/authorization/v1beta1",
+    "kubernetes/typed/autoscaling/v1",
+    "kubernetes/typed/autoscaling/v2beta1",
+    "kubernetes/typed/batch/v1",
+    "kubernetes/typed/batch/v1beta1",
+    "kubernetes/typed/batch/v2alpha1",
+    "kubernetes/typed/certificates/v1beta1",
+    "kubernetes/typed/core/v1",
+    "kubernetes/typed/events/v1beta1",
+    "kubernetes/typed/extensions/v1beta1",
+    "kubernetes/typed/networking/v1",
+    "kubernetes/typed/policy/v1beta1",
+    "kubernetes/typed/rbac/v1",
+    "kubernetes/typed/rbac/v1alpha1",
+    "kubernetes/typed/rbac/v1beta1",
+    "kubernetes/typed/scheduling/v1alpha1",
+    "kubernetes/typed/settings/v1alpha1",
+    "kubernetes/typed/storage/v1",
+    "kubernetes/typed/storage/v1alpha1",
+    "kubernetes/typed/storage/v1beta1",
+    "pkg/version",
+    "rest",
+    "rest/watch",
+    "tools/auth",
+    "tools/cache",
+    "tools/clientcmd",
+    "tools/clientcmd/api",
+    "tools/clientcmd/api/latest",
+    "tools/clientcmd/api/v1",
+    "tools/metrics",
+    "tools/pager",
+    "tools/reference",
+    "transport",
+    "util/buffer",
+    "util/cert",
+    "util/flowcontrol",
+    "util/homedir",
+    "util/integer"
+  ]
+  revision = "78700dec6369ba22221b72770783300f143df150"
+  version = "v6.0.0"
 
+[[projects]]
+  branch = "master"
+  name = "k8s.io/kube-openapi"
+  packages = ["pkg/common"]
+  revision = "b16ebc07f5cad97831f961e4b5a9cc1caed33b7e"
+
 [solve-meta]
   analyzer-name = "dep"
   analyzer-version = 1
-  inputs-digest = "7449762cfec347014dce97efce332ac6a9bca1c9cbc82418b2bf2e6af6a5d6db"
+  inputs-digest = "4aeebd6b2fe24acde2f69a5dc347ba7ebead910ac1c3455a0656c00724c9b1be"
   solver-name = "gps-cdcl"
   solver-version = 1
Gopkg.toml: 11 lines changed
@@ -19,10 +19,15 @@ ignored = [
   name = "github.com/ugorji/go"
   revision = "8c0409fcbb70099c748d71f714529204975f6c3f"
 
-# client-go 4.0.0 uses apimachinery 1fd2e63a9a370677308a42f24fd40c86438afddf (see Godep.json)
+# client-go 6.0.0 uses apimachinery 180eddb345a5be3a157cea1c624700ad5bd27b8f
+# and api 11147472b7c934c474a2c484af3c0c5210b7a3af (see Godep.json). go dep
+# is unable to match Godep.json automatically so have to specify here.
 [[constraint]]
   name = "k8s.io/client-go"
-  version = "v4.0.0"
+  version = "v6.0.0"
 [[override]]
   name = "k8s.io/apimachinery"
-  revision = "1fd2e63a9a370677308a42f24fd40c86438afddf"
+  revision = "180eddb345a5be3a157cea1c624700ad5bd27b8f"
+[[override]]
+  name = "k8s.io/api"
+  revision = "11147472b7c934c474a2c484af3c0c5210b7a3af"
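One thing worth noting about the hunk above: dep's [[constraint]] only governs a direct dependency (here k8s.io/client-go), while [[override]] forces a single revision across the whole dependency graph, including transitive dependencies. That is why the apimachinery and api pins are overrides: they hold those repos at exactly the commits client-go v6.0.0 was built against (per its Godep.json). The resulting stanzas, reassembled from the hunk:

# client-go 6.0.0 uses apimachinery 180eddb345a5be3a157cea1c624700ad5bd27b8f
# and api 11147472b7c934c474a2c484af3c0c5210b7a3af (see Godep.json). go dep
# is unable to match Godep.json automatically so have to specify here.
[[constraint]]
  name = "k8s.io/client-go"
  version = "v6.0.0"
[[override]]
  name = "k8s.io/apimachinery"
  revision = "180eddb345a5be3a157cea1c624700ad5bd27b8f"
[[override]]
  name = "k8s.io/api"
  revision = "11147472b7c934c474a2c484af3c0c5210b7a3af"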
@@ -3,8 +3,8 @@ package federation
 import (
 	"github.com/coredns/coredns/plugin/kubernetes"
 
+	api "k8s.io/api/core/v1"
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
-	api "k8s.io/client-go/pkg/api/v1"
 )
 
 type APIConnFederationTest struct{}
@@ -4,7 +4,7 @@ import (
 	"github.com/coredns/coredns/plugin"
 	"github.com/coredns/coredns/request"
 
-	api "k8s.io/client-go/pkg/api/v1"
+	api "k8s.io/api/core/v1"
 )
 
 // AutoPath implements the AutoPathFunc call from the autopath plugin.
@@ -6,8 +6,8 @@ import (
 	"sync"
 	"time"
 
+	api "k8s.io/api/core/v1"
 	"k8s.io/client-go/kubernetes"
-	api "k8s.io/client-go/pkg/api/v1"
 	"k8s.io/client-go/tools/cache"
 
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -164,7 +164,7 @@ func serviceListFunc(c *kubernetes.Clientset, ns string, s *labels.Selector) fun
 		if s != nil {
 			opts.LabelSelector = (*s).String()
 		}
-		listV1, err := c.Services(ns).List(opts)
+		listV1, err := c.CoreV1().Services(ns).List(opts)
 		if err != nil {
 			return nil, err
 		}
@@ -177,7 +177,7 @@ func podListFunc(c *kubernetes.Clientset, ns string, s *labels.Selector) func(me
 		if s != nil {
 			opts.LabelSelector = (*s).String()
 		}
-		listV1, err := c.Pods(ns).List(opts)
+		listV1, err := c.CoreV1().Pods(ns).List(opts)
 		if err != nil {
 			return nil, err
 		}
@@ -190,7 +190,7 @@ func serviceWatchFunc(c *kubernetes.Clientset, ns string, s *labels.Selector) fu
 		if s != nil {
 			options.LabelSelector = (*s).String()
 		}
-		w, err := c.Services(ns).Watch(options)
+		w, err := c.CoreV1().Services(ns).Watch(options)
 		if err != nil {
 			return nil, err
 		}
@@ -203,7 +203,7 @@ func podWatchFunc(c *kubernetes.Clientset, ns string, s *labels.Selector) func(o
 		if s != nil {
 			options.LabelSelector = (*s).String()
 		}
-		w, err := c.Pods(ns).Watch(options)
+		w, err := c.CoreV1().Pods(ns).Watch(options)
 		if err != nil {
 			return nil, err
 		}
@@ -216,7 +216,7 @@ func endpointsListFunc(c *kubernetes.Clientset, ns string, s *labels.Selector) f
 		if s != nil {
 			opts.LabelSelector = (*s).String()
 		}
-		listV1, err := c.Endpoints(ns).List(opts)
+		listV1, err := c.CoreV1().Endpoints(ns).List(opts)
 		if err != nil {
 			return nil, err
 		}
@@ -229,7 +229,7 @@ func endpointsWatchFunc(c *kubernetes.Clientset, ns string, s *labels.Selector)
 		if s != nil {
 			options.LabelSelector = (*s).String()
 		}
-		w, err := c.Endpoints(ns).Watch(options)
+		w, err := c.CoreV1().Endpoints(ns).Watch(options)
 		if err != nil {
 			return nil, err
 		}
@@ -393,7 +393,7 @@ func (dns *dnsControl) EndpointsList() (eps []*api.Endpoints) {
 // returned. This query causes a roundtrip to the k8s API server, so use
 // sparingly. Currently this is only used for Federation.
 func (dns *dnsControl) GetNodeByName(name string) (*api.Node, error) {
-	v1node, err := dns.client.Nodes().Get(name, meta.GetOptions{})
+	v1node, err := dns.client.CoreV1().Nodes().Get(name, meta.GetOptions{})
 	if err != nil {
 		return &api.Node{}, err
 	}
@@ -404,7 +404,7 @@ func (dns *dnsControl) GetNodeByName(name string) (*api.Node, error) {
 // error is returned. This query causes a roundtrip to the k8s API server, so
 // use sparingly.
 func (dns *dnsControl) GetNamespaceByName(name string) (*api.Namespace, error) {
-	v1ns, err := dns.client.Namespaces().Get(name, meta.GetOptions{})
+	v1ns, err := dns.client.CoreV1().Namespaces().Get(name, meta.GetOptions{})
 	if err != nil {
 		return &api.Namespace{}, err
 	}
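All six list/watch helpers above change the same way: the Clientset no longer exposes the typed core client directly, so each closure goes through the CoreV1() accessor. A condensed sketch of one of them under that pattern; the full signature is truncated in the hunk header above, so the closure's return type here is inferred from what cache.ListWatch expects in client-go of this era:

import (
	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/apimachinery/pkg/labels"
	"k8s.io/apimachinery/pkg/runtime"
	"k8s.io/client-go/kubernetes"
)

// serviceListFunc builds the List closure handed to cache.ListWatch,
// applying an optional label selector before querying the API server.
func serviceListFunc(c *kubernetes.Clientset, ns string, s *labels.Selector) func(meta.ListOptions) (runtime.Object, error) {
	return func(opts meta.ListOptions) (runtime.Object, error) {
		if s != nil {
			opts.LabelSelector = (*s).String()
		}
		// The typed core/v1 client is now reached through CoreV1().
		listV1, err := c.CoreV1().Services(ns).List(opts)
		if err != nil {
			return nil, err
		}
		return listV1, nil
	}
}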
@@ -8,8 +8,8 @@ import (
 
 	"github.com/miekg/dns"
 	"golang.org/x/net/context"
+	api "k8s.io/api/core/v1"
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
-	api "k8s.io/client-go/pkg/api/v1"
 )
 
 var dnsTestCases = []test.Case{
@@ -17,10 +17,10 @@ import (
 	"github.com/coredns/coredns/request"
 
 	"github.com/miekg/dns"
+	api "k8s.io/api/core/v1"
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
 	"k8s.io/apimachinery/pkg/labels"
 	"k8s.io/client-go/kubernetes"
-	api "k8s.io/client-go/pkg/api/v1"
 	"k8s.io/client-go/rest"
 	"k8s.io/client-go/tools/clientcmd"
 	clientcmdapi "k8s.io/client-go/tools/clientcmd/api"
@@ -7,8 +7,8 @@ import (
 	"github.com/coredns/coredns/request"
 
 	"github.com/miekg/dns"
+	api "k8s.io/api/core/v1"
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
-	api "k8s.io/client-go/pkg/api/v1"
 )
 
 func TestWildcard(t *testing.T) {
@@ -5,7 +5,7 @@ import (
 	"strings"
 
 	"github.com/miekg/dns"
-	api "k8s.io/client-go/pkg/api/v1"
+	api "k8s.io/api/core/v1"
 )
 
 func isDefaultNS(name, zone string) bool {
@@ -3,8 +3,8 @@ package kubernetes
 import (
 	"testing"
 
+	api "k8s.io/api/core/v1"
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
-	api "k8s.io/client-go/pkg/api/v1"
 )
 
 type APIConnTest struct{}
@@ -8,8 +8,8 @@ import (
 
 	"github.com/miekg/dns"
 	"golang.org/x/net/context"
+	api "k8s.io/api/core/v1"
 	meta "k8s.io/apimachinery/pkg/apis/meta/v1"
-	api "k8s.io/client-go/pkg/api/v1"
 )
 
 type APIConnReverseTest struct{}
vendor/github.com/asaskevich/govalidator/.travis.yml (generated, vendored): 14 lines deleted
@@ -1,14 +0,0 @@
-language: go
-
-go:
-  - 1.1
-  - 1.2
-  - 1.3
-  - 1.4
-  - 1.5
-  - 1.6
-  - tip
-
-notifications:
-  email:
-    - bwatas@gmail.com
vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md (generated, vendored): 26 lines deleted
@@ -1,26 +0,0 @@
-#### Support
-If you do have a contribution to the package, feel free to create a Pull Request or an Issue.
-
-#### What to contribute
-If you don't know what to do, there are some features and functions that need to be done
-
-- [ ] Refactor code
-- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check
-- [ ] Create actual list of contributors and projects that currently using this package
-- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues)
-- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions)
-- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new
-- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc
-- [ ] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224)
-- [ ] Implement fuzzing testing
-- [ ] Implement some struct/map/array utilities
-- [ ] Implement map/array validation
-- [ ] Implement benchmarking
-- [ ] Implement batch of examples
-- [ ] Look at forks for new features and fixes
-
-#### Advice
-Feel free to create what you want, but keep in mind when you implement new features:
-- Code must be clear and readable, names of variables/constants clearly describes what they are doing
-- Public functions must be documented and described in source file and added to README.md to the list of available functions
-- There are must be unit-tests for any new functions and improvements
vendor/github.com/asaskevich/govalidator/README.md (generated, vendored): 449 lines deleted
@@ -1,449 +0,0 @@
(449 deleted lines: the vendored govalidator README in full, covering badges, installation and import instructions, SetFieldsRequiredByDefault notes, the list of validator functions, struct-validation and custom-validator examples, and contributor credits.)
58
vendor/github.com/asaskevich/govalidator/arrays.go
generated
vendored
@@ -1,58 +0,0 @@
package govalidator

// Iterator is the function that accepts element of slice/array and its index
type Iterator func(interface{}, int)

// ResultIterator is the function that accepts element of slice/array and its index and returns any result
type ResultIterator func(interface{}, int) interface{}

// ConditionIterator is the function that accepts element of slice/array and its index and returns boolean
type ConditionIterator func(interface{}, int) bool

// Each iterates over the slice and apply Iterator to every item
func Each(array []interface{}, iterator Iterator) {
    for index, data := range array {
        iterator(data, index)
    }
}

// Map iterates over the slice and apply ResultIterator to every item. Returns new slice as a result.
func Map(array []interface{}, iterator ResultIterator) []interface{} {
    var result = make([]interface{}, len(array))
    for index, data := range array {
        result[index] = iterator(data, index)
    }
    return result
}

// Find iterates over the slice and apply ConditionIterator to every item. Returns first item that meet ConditionIterator or nil otherwise.
func Find(array []interface{}, iterator ConditionIterator) interface{} {
    for index, data := range array {
        if iterator(data, index) {
            return data
        }
    }
    return nil
}

// Filter iterates over the slice and apply ConditionIterator to every item. Returns new slice.
func Filter(array []interface{}, iterator ConditionIterator) []interface{} {
    var result = make([]interface{}, 0)
    for index, data := range array {
        if iterator(data, index) {
            result = append(result, data)
        }
    }
    return result
}

// Count iterates over the slice and apply ConditionIterator to every item. Returns count of items that meets ConditionIterator.
func Count(array []interface{}, iterator ConditionIterator) int {
    count := 0
    for index, data := range array {
        if iterator(data, index) {
            count = count + 1
        }
    }
    return count
}
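A brief hedged sketch of how these iterator helpers compose (the tests in the next deleted file serve the same purpose). It assumes a direct import of the upstream package, since this vendored copy is being removed; the `[]interface{}` element type means callers type-assert inside the callbacks:

```go
package main

import (
    "fmt"

    "github.com/asaskevich/govalidator"
)

func main() {
    data := []interface{}{1, 2, 3, 4, 5}

    // Map triples every element, Filter keeps the even results,
    // and Count confirms how many elements satisfy the same predicate.
    tripled := govalidator.Map(data, func(v interface{}, _ int) interface{} {
        return v.(int) * 3
    })
    evens := govalidator.Filter(tripled, func(v interface{}, _ int) bool {
        return v.(int)%2 == 0
    })
    n := govalidator.Count(tripled, func(v interface{}, _ int) bool {
        return v.(int)%2 == 0
    })
    fmt.Println(evens, n) // [6 12] 2
}
```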
116
vendor/github.com/asaskevich/govalidator/arrays_test.go
generated
vendored
@@ -1,116 +0,0 @@
package govalidator

import "testing"

func TestEach(t *testing.T) {
    // TODO Maybe refactor?
    t.Parallel()
    acc := 0
    data := []interface{}{1, 2, 3, 4, 5}
    var fn Iterator = func(value interface{}, index int) {
        acc = acc + value.(int)
    }
    Each(data, fn)
    if acc != 15 {
        t.Errorf("Expected Each(..) to be %v, got %v", 15, acc)
    }
}

func ExampleEach() {
    data := []interface{}{1, 2, 3, 4, 5}
    var fn Iterator = func(value interface{}, index int) {
        println(value.(int))
    }
    Each(data, fn)
}

func TestMap(t *testing.T) {
    // TODO Maybe refactor?
    t.Parallel()
    data := []interface{}{1, 2, 3, 4, 5}
    var fn ResultIterator = func(value interface{}, index int) interface{} {
        return value.(int) * 3
    }
    result := Map(data, fn)
    for i, d := range result {
        if d != fn(data[i], i) {
            t.Errorf("Expected Map(..) to be %v, got %v", fn(data[i], i), d)
        }
    }
}

func ExampleMap() {
    data := []interface{}{1, 2, 3, 4, 5}
    var fn ResultIterator = func(value interface{}, index int) interface{} {
        return value.(int) * 3
    }
    _ = Map(data, fn) // result = []interface{}{3, 6, 9, 12, 15}
}

func TestFind(t *testing.T) {
    // TODO Maybe refactor?
    t.Parallel()
    findElement := 96
    data := []interface{}{1, 2, 3, 4, findElement, 5}
    var fn1 ConditionIterator = func(value interface{}, index int) bool {
        return value.(int) == findElement
    }
    var fn2 ConditionIterator = func(value interface{}, index int) bool {
        value, _ = value.(string)
        return value == "govalidator"
    }
    val1 := Find(data, fn1)
    val2 := Find(data, fn2)
    if val1 != findElement {
        t.Errorf("Expected Find(..) to be %v, got %v", findElement, val1)
    }
    if val2 != nil {
        t.Errorf("Expected Find(..) to be %v, got %v", nil, val2)
    }
}

func TestFilter(t *testing.T) {
    // TODO Maybe refactor?
    t.Parallel()
    data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
    answer := []interface{}{2, 4, 6, 8, 10}
    var fn ConditionIterator = func(value interface{}, index int) bool {
        return value.(int)%2 == 0
    }
    result := Filter(data, fn)
    for i := range result {
        if result[i] != answer[i] {
            t.Errorf("Expected Filter(..) to be %v, got %v", answer[i], result[i])
        }
    }
}

func ExampleFilter() {
    data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
    var fn ConditionIterator = func(value interface{}, index int) bool {
        return value.(int)%2 == 0
    }
    _ = Filter(data, fn) // result = []interface{}{2, 4, 6, 8, 10}
}

func TestCount(t *testing.T) {
    // TODO Maybe refactor?
    t.Parallel()
    data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
    count := 5
    var fn ConditionIterator = func(value interface{}, index int) bool {
        return value.(int)%2 == 0
    }
    result := Count(data, fn)
    if result != count {
        t.Errorf("Expected Count(..) to be %v, got %v", count, result)
    }
}

func ExampleCount() {
    data := []interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}
    var fn ConditionIterator = func(value interface{}, index int) bool {
        return value.(int)%2 == 0
    }
    _ = Count(data, fn) // result = 5
}
45
vendor/github.com/asaskevich/govalidator/converter.go
generated
vendored
@@ -1,45 +0,0 @@
package govalidator

import (
    "encoding/json"
    "fmt"
    "strconv"
)

// ToString convert the input to a string.
func ToString(obj interface{}) string {
    res := fmt.Sprintf("%v", obj)
    return string(res)
}

// ToJSON convert the input to a valid JSON string
func ToJSON(obj interface{}) (string, error) {
    res, err := json.Marshal(obj)
    if err != nil {
        res = []byte("")
    }
    return string(res), err
}

// ToFloat convert the input string to a float, or 0.0 if the input is not a float.
func ToFloat(str string) (float64, error) {
    res, err := strconv.ParseFloat(str, 64)
    if err != nil {
        res = 0.0
    }
    return res, err
}

// ToInt convert the input string to an integer, or 0 if the input is not an integer.
func ToInt(str string) (int64, error) {
    res, err := strconv.ParseInt(str, 0, 64)
    if err != nil {
        res = 0
    }
    return res, err
}

// ToBoolean convert the input string to a boolean.
func ToBoolean(str string) (bool, error) {
    return strconv.ParseBool(str)
}
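A short hedged usage sketch for these converters, assuming a direct import of the upstream package: on failure the numeric converters return the zero value together with the underlying `strconv` error, so callers can ignore the error when a zero default is acceptable.

```go
package main

import (
    "fmt"

    "github.com/asaskevich/govalidator"
)

func main() {
    // ParseInt is called with base 0, so prefixed forms like hex are accepted.
    n, err := govalidator.ToInt("0x2A")
    fmt.Println(n, err) // 42 <nil>

    // A non-numeric string yields 0 plus the strconv parse error.
    f, err := govalidator.ToFloat("not-a-number")
    fmt.Println(f, err)

    j, _ := govalidator.ToJSON(map[string]int{"a": 1})
    fmt.Println(j) // {"a":1}
}
```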
78
vendor/github.com/asaskevich/govalidator/converter_test.go
generated
vendored
@@ -1,78 +0,0 @@
package govalidator

import (
    "fmt"
    "testing"
)

func TestToInt(t *testing.T) {
    tests := []string{"1000", "-123", "abcdef", "100000000000000000000000000000000000000000000"}
    expected := []int64{1000, -123, 0, 0}
    for i := 0; i < len(tests); i++ {
        result, _ := ToInt(tests[i])
        if result != expected[i] {
            t.Log("Case ", i, ": expected ", expected[i], " when result is ", result)
            t.FailNow()
        }
    }
}

func TestToBoolean(t *testing.T) {
    tests := []string{"true", "1", "True", "false", "0", "abcdef"}
    expected := []bool{true, true, true, false, false, false}
    for i := 0; i < len(tests); i++ {
        res, _ := ToBoolean(tests[i])
        if res != expected[i] {
            t.Log("Case ", i, ": expected ", expected[i], " when result is ", res)
            t.FailNow()
        }
    }
}

func toString(t *testing.T, test interface{}, expected string) {
    res := ToString(test)
    if res != expected {
        t.Log("Case ToString: expected ", expected, " when result is ", res)
        t.FailNow()
    }
}

func TestToString(t *testing.T) {
    toString(t, "str123", "str123")
    toString(t, 123, "123")
    toString(t, 12.3, "12.3")
    toString(t, true, "true")
    toString(t, 1.5+10i, "(1.5+10i)")
    // Sprintf function not guarantee that maps with equal keys always will be equal in string representation
    //toString(t, struct{ Keys map[int]int }{Keys: map[int]int{1: 2, 3: 4}}, "{map[1:2 3:4]}")
}

func TestToFloat(t *testing.T) {
    tests := []string{"", "123", "-.01", "10.", "string", "1.23e3", ".23e10"}
    expected := []float64{0, 123, -0.01, 10.0, 0, 1230, 0.23e10}
    for i := 0; i < len(tests); i++ {
        res, _ := ToFloat(tests[i])
        if res != expected[i] {
            t.Log("Case ", i, ": expected ", expected[i], " when result is ", res)
            t.FailNow()
        }
    }
}

func TestToJSON(t *testing.T) {
    tests := []interface{}{"test", map[string]string{"a": "b", "b": "c"}, func() error { return fmt.Errorf("Error") }}
    expected := [][]string{
        {"\"test\"", "<nil>"},
        {"{\"a\":\"b\",\"b\":\"c\"}", "<nil>"},
        {"", "json: unsupported type: func() error"},
    }
    for i, test := range tests {
        actual, err := ToJSON(test)
        if actual != expected[i][0] {
            t.Errorf("Expected toJSON(%v) to return '%v', got '%v'", test, expected[i][0], actual)
        }
        if fmt.Sprintf("%v", err) != expected[i][1] {
            t.Errorf("Expected error returned from toJSON(%v) to return '%v', got '%v'", test, expected[i][1], fmt.Sprintf("%v", err))
        }
    }
}
36
vendor/github.com/asaskevich/govalidator/error.go
generated
vendored
@@ -1,36 +0,0 @@
package govalidator

import "strings"

// Errors is an array of multiple errors and conforms to the error interface.
type Errors []error

// Errors returns itself.
func (es Errors) Errors() []error {
    return es
}

func (es Errors) Error() string {
    var errs []string
    for _, e := range es {
        errs = append(errs, e.Error())
    }
    return strings.Join(errs, ";")
}

// Error encapsulates a name, an error and whether there's a custom error message or not.
type Error struct {
    Name                     string
    Err                      error
    CustomErrorMessageExists bool

    // Validator indicates the name of the validator that failed
    Validator string
}

func (e Error) Error() string {
    if e.CustomErrorMessageExists {
        return e.Err.Error()
    }
    return e.Name + ": " + e.Err.Error()
}
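A hedged sketch of how the `Errors` aggregate behaves, based solely on the implementation above: individual messages are joined with a semicolon, and an `Error` with `CustomErrorMessageExists` set suppresses its name prefix.

```go
package main

import (
    "errors"
    "fmt"

    "github.com/asaskevich/govalidator"
)

func main() {
    errs := govalidator.Errors{
        // Default formatting prefixes the field name.
        govalidator.Error{Name: "Email", Err: errors.New("invalid address")},
        // A custom message is emitted verbatim, without the "Age: " prefix.
        govalidator.Error{Name: "Age", Err: errors.New("out of range"), CustomErrorMessageExists: true},
    }
    fmt.Println(errs.Error()) // Email: invalid address;out of range
}
```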
29
vendor/github.com/asaskevich/govalidator/error_test.go
generated
vendored
@@ -1,29 +0,0 @@
package govalidator

import (
    "fmt"
    "testing"
)

func TestErrorsToString(t *testing.T) {
    t.Parallel()
    customErr := &Error{Name: "Custom Error Name", Err: fmt.Errorf("stdlib error")}
    customErrWithCustomErrorMessage := &Error{Name: "Custom Error Name 2", Err: fmt.Errorf("Bad stuff happened"), CustomErrorMessageExists: true}

    var tests = []struct {
        param1   Errors
        expected string
    }{
        {Errors{}, ""},
        {Errors{fmt.Errorf("Error 1")}, "Error 1"},
        {Errors{fmt.Errorf("Error 1"), fmt.Errorf("Error 2")}, "Error 1;Error 2"},
        {Errors{customErr, fmt.Errorf("Error 2")}, "Custom Error Name: stdlib error;Error 2"},
        {Errors{fmt.Errorf("Error 123"), customErrWithCustomErrorMessage}, "Error 123;Bad stuff happened"},
    }
    for _, test := range tests {
        actual := test.param1.Error()
        if actual != test.expected {
            t.Errorf("Expected Error() to return '%v', got '%v'", test.expected, actual)
        }
    }
}
94
vendor/github.com/asaskevich/govalidator/numerics.go
generated
vendored
@@ -1,94 +0,0 @@
package govalidator

import (
    "math"
    "reflect"
)

// Abs returns absolute value of number
func Abs(value float64) float64 {
    return math.Abs(value)
}

// Sign returns signum of number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise
func Sign(value float64) float64 {
    if value > 0 {
        return 1
    } else if value < 0 {
        return -1
    } else {
        return 0
    }
}

// IsNegative returns true if value < 0
func IsNegative(value float64) bool {
    return value < 0
}

// IsPositive returns true if value > 0
func IsPositive(value float64) bool {
    return value > 0
}

// IsNonNegative returns true if value >= 0
func IsNonNegative(value float64) bool {
    return value >= 0
}

// IsNonPositive returns true if value <= 0
func IsNonPositive(value float64) bool {
    return value <= 0
}

// InRangeInt returns true if value lies between left and right border
func InRangeInt(value, left, right int) bool {
    if left > right {
        left, right = right, left
    }
    return value >= left && value <= right
}

// InRangeFloat32 returns true if value lies between left and right border
func InRangeFloat32(value, left, right float32) bool {
    if left > right {
        left, right = right, left
    }
    return value >= left && value <= right
}

// InRangeFloat64 returns true if value lies between left and right border
func InRangeFloat64(value, left, right float64) bool {
    if left > right {
        left, right = right, left
    }
    return value >= left && value <= right
}

// InRange returns true if value lies between left and right border; generic helper for int, float32 or float64, all arguments must be the same type
func InRange(value interface{}, left interface{}, right interface{}) bool {

    reflectValue := reflect.TypeOf(value).Kind()
    reflectLeft := reflect.TypeOf(left).Kind()
    reflectRight := reflect.TypeOf(right).Kind()

    if reflectValue == reflect.Int && reflectLeft == reflect.Int && reflectRight == reflect.Int {
        return InRangeInt(value.(int), left.(int), right.(int))
    } else if reflectValue == reflect.Float32 && reflectLeft == reflect.Float32 && reflectRight == reflect.Float32 {
        return InRangeFloat32(value.(float32), left.(float32), right.(float32))
    } else if reflectValue == reflect.Float64 && reflectLeft == reflect.Float64 && reflectRight == reflect.Float64 {
        return InRangeFloat64(value.(float64), left.(float64), right.(float64))
    } else {
        return false
    }
}

// IsWhole returns true if value is whole number
func IsWhole(value float64) bool {
    return math.Remainder(value, 1) == 0
}

// IsNatural returns true if value is natural number (positive and whole)
func IsNatural(value float64) bool {
    return IsWhole(value) && IsPositive(value)
}
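A small sketch highlighting the contract documented above, assuming a direct import of the upstream package: `InRange` only compares when all three arguments share the same concrete type, otherwise it returns false; the typed variants also swap reversed borders.

```go
package main

import (
    "fmt"

    "github.com/asaskevich/govalidator"
)

func main() {
    fmt.Println(govalidator.InRange(5, 1, 10))       // true: all int
    fmt.Println(govalidator.InRange(5.0, 1.0, 10.0)) // true: all float64
    fmt.Println(govalidator.InRange(5, 1.0, 10.0))   // false: mixed int/float64
    fmt.Println(govalidator.InRangeInt(5, 10, 1))    // true: reversed borders are swapped
}
```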
349
vendor/github.com/asaskevich/govalidator/numerics_test.go
generated
vendored
@@ -1,349 +0,0 @@
package govalidator

import "testing"

func TestAbs(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected float64
    }{
        {0, 0},
        {-1, 1},
        {10, 10},
        {3.14, 3.14},
        {-96, 96},
        {-10e-12, 10e-12},
    }
    for _, test := range tests {
        actual := Abs(test.param)
        if actual != test.expected {
            t.Errorf("Expected Abs(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestSign(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected float64
    }{
        {0, 0},
        {-1, -1},
        {10, 1},
        {3.14, 1},
        {-96, -1},
        {-10e-12, -1},
    }
    for _, test := range tests {
        actual := Sign(test.param)
        if actual != test.expected {
            t.Errorf("Expected Sign(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestIsNegative(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected bool
    }{
        {0, false},
        {-1, true},
        {10, false},
        {3.14, false},
        {-96, true},
        {-10e-12, true},
    }
    for _, test := range tests {
        actual := IsNegative(test.param)
        if actual != test.expected {
            t.Errorf("Expected IsNegative(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestIsNonNegative(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected bool
    }{
        {0, true},
        {-1, false},
        {10, true},
        {3.14, true},
        {-96, false},
        {-10e-12, false},
    }
    for _, test := range tests {
        actual := IsNonNegative(test.param)
        if actual != test.expected {
            t.Errorf("Expected IsNonNegative(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestIsPositive(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected bool
    }{
        {0, false},
        {-1, false},
        {10, true},
        {3.14, true},
        {-96, false},
        {-10e-12, false},
    }
    for _, test := range tests {
        actual := IsPositive(test.param)
        if actual != test.expected {
            t.Errorf("Expected IsPositive(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestIsNonPositive(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected bool
    }{
        {0, true},
        {-1, true},
        {10, false},
        {3.14, false},
        {-96, true},
        {-10e-12, true},
    }
    for _, test := range tests {
        actual := IsNonPositive(test.param)
        if actual != test.expected {
            t.Errorf("Expected IsNonPositive(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestIsWhole(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected bool
    }{
        {0, true},
        {-1, true},
        {10, true},
        {3.14, false},
        {-96, true},
        {-10e-12, false},
    }
    for _, test := range tests {
        actual := IsWhole(test.param)
        if actual != test.expected {
            t.Errorf("Expected IsWhole(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestIsNatural(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        expected bool
    }{
        {0, false},
        {-1, false},
        {10, true},
        {3.14, false},
        {96, true},
        {-10e-12, false},
    }
    for _, test := range tests {
        actual := IsNatural(test.param)
        if actual != test.expected {
            t.Errorf("Expected IsNatural(%v) to be %v, got %v", test.param, test.expected, actual)
        }
    }
}

func TestInRangeInt(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    int
        left     int
        right    int
        expected bool
    }{
        {0, 0, 0, true},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, true},
        {0, 0, 1, true},
        {0, -1, 0, true},
        {0, 0, -1, true},
        {0, 10, 5, false},
    }
    for _, test := range tests {
        actual := InRangeInt(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRangeInt(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }
}

func TestInRangeFloat32(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float32
        left     float32
        right    float32
        expected bool
    }{
        {0, 0, 0, true},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, true},
        {0, 0, 1, true},
        {0, -1, 0, true},
        {0, 0, -1, true},
        {0, 10, 5, false},
    }
    for _, test := range tests {
        actual := InRangeFloat32(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRangeFloat32(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }
}

func TestInRangeFloat64(t *testing.T) {
    t.Parallel()

    var tests = []struct {
        param    float64
        left     float64
        right    float64
        expected bool
    }{
        {0, 0, 0, true},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, true},
        {0, 0, 1, true},
        {0, -1, 0, true},
        {0, 0, -1, true},
        {0, 10, 5, false},
    }
    for _, test := range tests {
        actual := InRangeFloat64(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRangeFloat64(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }
}

func TestInRange(t *testing.T) {
    t.Parallel()

    var testsInt = []struct {
        param    int
        left     int
        right    int
        expected bool
    }{
        {0, 0, 0, true},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, true},
        {0, 0, 1, true},
        {0, -1, 0, true},
        {0, 0, -1, true},
        {0, 10, 5, false},
    }
    for _, test := range testsInt {
        actual := InRange(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRange(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }

    var testsFloat32 = []struct {
        param    float32
        left     float32
        right    float32
        expected bool
    }{
        {0, 0, 0, true},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, true},
        {0, 0, 1, true},
        {0, -1, 0, true},
        {0, 0, -1, true},
        {0, 10, 5, false},
    }
    for _, test := range testsFloat32 {
        actual := InRange(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRange(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }

    var testsFloat64 = []struct {
        param    float64
        left     float64
        right    float64
        expected bool
    }{
        {0, 0, 0, true},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, true},
        {0, 0, 1, true},
        {0, -1, 0, true},
        {0, 0, -1, true},
        {0, 10, 5, false},
    }
    for _, test := range testsFloat64 {
        actual := InRange(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRange(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }

    var testsTypeMix = []struct {
        param    int
        left     float64
        right    float64
        expected bool
    }{
        {0, 0, 0, false},
        {1, 0, 0, false},
        {-1, 0, 0, false},
        {0, -1, 1, false},
        {0, 0, 1, false},
        {0, -1, 0, false},
        {0, 0, -1, false},
        {0, 10, 5, false},
    }
    for _, test := range testsTypeMix {
        actual := InRange(test.param, test.left, test.right)
        if actual != test.expected {
            t.Errorf("Expected InRange(%v, %v, %v) to be %v, got %v", test.param, test.left, test.right, test.expected, actual)
        }
    }
}
90
vendor/github.com/asaskevich/govalidator/patterns.go
generated
vendored
@@ -1,90 +0,0 @@
package govalidator

import "regexp"

// Basic regular expressions for validating strings
const (
    Email          string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$"
    CreditCard     string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$"
    ISBN10         string = "^(?:[0-9]{9}X|[0-9]{10})$"
    ISBN13         string = "^(?:[0-9]{13})$"
    UUID3          string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$"
    UUID4          string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
    UUID5          string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$"
    UUID           string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$"
    Alpha          string = "^[a-zA-Z]+$"
    Alphanumeric   string = "^[a-zA-Z0-9]+$"
    Numeric        string = "^[0-9]+$"
    Int            string = "^(?:[-+]?(?:0|[1-9][0-9]*))$"
    Float          string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$"
    Hexadecimal    string = "^[0-9a-fA-F]+$"
    Hexcolor       string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$"
    RGBcolor       string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$"
    ASCII          string = "^[\x00-\x7F]+$"
    Multibyte      string = "[^\x00-\x7F]"
    FullWidth      string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
    HalfWidth      string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]"
    Base64         string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$"
    PrintableASCII string = "^[\x20-\x7E]+$"
    DataURI        string = "^data:.+\\/(.+);base64$"
    Latitude       string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$"
    Longitude      string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$"
    DNSName        string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$`
    IP             string = `(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))`
    URLSchema      string = `((ftp|tcp|udp|wss?|https?):\/\/)`
    URLUsername    string = `(\S+(:\S*)?@)`
    URLPath        string = `((\/|\?|#)[^\s]*)`
    URLPort        string = `(:(\d{1,5}))`
    URLIP          string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))`
    URLSubdomain   string = `((www\.)|([a-zA-Z0-9]([-\.][-\._a-zA-Z0-9]+)*))`
    URL            string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$`
    SSN            string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$`
    WinPath        string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$`
    UnixPath       string = `^(/[^/\x00]*)+/?$`
    Semver         string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$"
    tagName        string = "valid"
)

// Used by IsFilePath func
const (
    // Unknown is unresolved OS type
    Unknown = iota
    // Win is Windows type
    Win
    // Unix is *nix OS types
    Unix
)

var (
    rxEmail          = regexp.MustCompile(Email)
    rxCreditCard     = regexp.MustCompile(CreditCard)
    rxISBN10         = regexp.MustCompile(ISBN10)
    rxISBN13         = regexp.MustCompile(ISBN13)
    rxUUID3          = regexp.MustCompile(UUID3)
    rxUUID4          = regexp.MustCompile(UUID4)
    rxUUID5          = regexp.MustCompile(UUID5)
    rxUUID           = regexp.MustCompile(UUID)
    rxAlpha          = regexp.MustCompile(Alpha)
    rxAlphanumeric   = regexp.MustCompile(Alphanumeric)
    rxNumeric        = regexp.MustCompile(Numeric)
    rxInt            = regexp.MustCompile(Int)
    rxFloat          = regexp.MustCompile(Float)
    rxHexadecimal    = regexp.MustCompile(Hexadecimal)
    rxHexcolor       = regexp.MustCompile(Hexcolor)
    rxRGBcolor       = regexp.MustCompile(RGBcolor)
    rxASCII          = regexp.MustCompile(ASCII)
    rxPrintableASCII = regexp.MustCompile(PrintableASCII)
    rxMultibyte      = regexp.MustCompile(Multibyte)
    rxFullWidth      = regexp.MustCompile(FullWidth)
    rxHalfWidth      = regexp.MustCompile(HalfWidth)
    rxBase64         = regexp.MustCompile(Base64)
    rxDataURI        = regexp.MustCompile(DataURI)
    rxLatitude       = regexp.MustCompile(Latitude)
    rxLongitude      = regexp.MustCompile(Longitude)
    rxDNSName        = regexp.MustCompile(DNSName)
    rxURL            = regexp.MustCompile(URL)
    rxSSN            = regexp.MustCompile(SSN)
    rxWinPath        = regexp.MustCompile(WinPath)
    rxUnixPath       = regexp.MustCompile(UnixPath)
    rxSemver         = regexp.MustCompile(Semver)
)
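Since the pattern constants above are exported, they can be compiled independently of the package's unexported `rx*` variables. A hedged sketch using only the constants shown above:

```go
package main

import (
    "fmt"
    "regexp"

    "github.com/asaskevich/govalidator"
)

func main() {
    // Reuse the exported Semver pattern outside the package.
    rxSemver := regexp.MustCompile(govalidator.Semver)
    fmt.Println(rxSemver.MatchString("v1.2.3-beta.1")) // true
    fmt.Println(rxSemver.MatchString("1.2"))           // false: patch component missing
}
```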
616
vendor/github.com/asaskevich/govalidator/types.go
generated
vendored
@@ -1,616 +0,0 @@
package govalidator

import (
    "reflect"
    "regexp"
    "sync"
)

// Validator is a wrapper for a validator function that returns bool and accepts string.
type Validator func(str string) bool

// CustomTypeValidator is a wrapper for validator functions that returns bool and accepts any type.
// The second parameter should be the context (in the case of validating a struct: the whole object being validated).
type CustomTypeValidator func(i interface{}, o interface{}) bool

// ParamValidator is a wrapper for validator functions that accepts additional parameters.
type ParamValidator func(str string, params ...string) bool
type tagOptionsMap map[string]string

// UnsupportedTypeError is a wrapper for reflect.Type
type UnsupportedTypeError struct {
    Type reflect.Type
}

// stringValues is a slice of reflect.Value holding *reflect.StringValue.
// It implements the methods to sort by string.
type stringValues []reflect.Value

// ParamTagMap is a map of functions accept variants parameters
var ParamTagMap = map[string]ParamValidator{
    "length":       ByteLength,
    "range":        Range,
    "runelength":   RuneLength,
    "stringlength": StringLength,
    "matches":      StringMatches,
    "in":           isInRaw,
    "rsapub":       IsRsaPub,
}

// ParamTagRegexMap maps param tags to their respective regexes.
var ParamTagRegexMap = map[string]*regexp.Regexp{
    "range":        regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"),
    "length":       regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"),
    "runelength":   regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"),
    "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"),
    "in":           regexp.MustCompile(`^in\((.*)\)`),
    "matches":      regexp.MustCompile(`^matches\((.+)\)$`),
    "rsapub":       regexp.MustCompile("^rsapub\\((\\d+)\\)$"),
}

type customTypeTagMap struct {
    validators map[string]CustomTypeValidator

    sync.RWMutex
}

func (tm *customTypeTagMap) Get(name string) (CustomTypeValidator, bool) {
    tm.RLock()
    defer tm.RUnlock()
    v, ok := tm.validators[name]
    return v, ok
}

func (tm *customTypeTagMap) Set(name string, ctv CustomTypeValidator) {
    tm.Lock()
    defer tm.Unlock()
    tm.validators[name] = ctv
}

// CustomTypeTagMap is a map of functions that can be used as tags for ValidateStruct function.
// Use this to validate compound or custom types that need to be handled as a whole, e.g.
// `type UUID [16]byte` (this would be handled as an array of bytes).
var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeValidator)}

// TagMap is a map of functions, that can be used as tags for ValidateStruct function.
var TagMap = map[string]Validator{
    "email":              IsEmail,
    "url":                IsURL,
    "dialstring":         IsDialString,
    "requrl":             IsRequestURL,
    "requri":             IsRequestURI,
    "alpha":              IsAlpha,
    "utfletter":          IsUTFLetter,
    "alphanum":           IsAlphanumeric,
    "utfletternum":       IsUTFLetterNumeric,
    "numeric":            IsNumeric,
    "utfnumeric":         IsUTFNumeric,
    "utfdigit":           IsUTFDigit,
    "hexadecimal":        IsHexadecimal,
    "hexcolor":           IsHexcolor,
    "rgbcolor":           IsRGBcolor,
    "lowercase":          IsLowerCase,
    "uppercase":          IsUpperCase,
    "int":                IsInt,
    "float":              IsFloat,
    "null":               IsNull,
    "uuid":               IsUUID,
    "uuidv3":             IsUUIDv3,
    "uuidv4":             IsUUIDv4,
    "uuidv5":             IsUUIDv5,
    "creditcard":         IsCreditCard,
    "isbn10":             IsISBN10,
    "isbn13":             IsISBN13,
    "json":               IsJSON,
    "multibyte":          IsMultibyte,
    "ascii":              IsASCII,
    "printableascii":     IsPrintableASCII,
    "fullwidth":          IsFullWidth,
    "halfwidth":          IsHalfWidth,
    "variablewidth":      IsVariableWidth,
    "base64":             IsBase64,
    "datauri":            IsDataURI,
    "ip":                 IsIP,
    "port":               IsPort,
    "ipv4":               IsIPv4,
    "ipv6":               IsIPv6,
    "dns":                IsDNSName,
    "host":               IsHost,
    "mac":                IsMAC,
    "latitude":           IsLatitude,
    "longitude":          IsLongitude,
    "ssn":                IsSSN,
    "semver":             IsSemver,
    "rfc3339":            IsRFC3339,
    "rfc3339WithoutZone": IsRFC3339WithoutZone,
    "ISO3166Alpha2":      IsISO3166Alpha2,
    "ISO3166Alpha3":      IsISO3166Alpha3,
    "ISO4217":            IsISO4217,
}

// ISO3166Entry stores country codes
type ISO3166Entry struct {
    EnglishShortName string
    FrenchShortName  string
    Alpha2Code       string
    Alpha3Code       string
    Numeric          string
}

// ISO3166List based on https://www.iso.org/obp/ui/#search/code/ Code Type "Officially Assigned Codes"
var ISO3166List = []ISO3166Entry{
    {"Afghanistan", "Afghanistan (l')", "AF", "AFG", "004"},
    {"Albania", "Albanie (l')", "AL", "ALB", "008"},
    {"Antarctica", "Antarctique (l')", "AQ", "ATA", "010"},
    {"Algeria", "Algérie (l')", "DZ", "DZA", "012"},
    {"American Samoa", "Samoa américaines (les)", "AS", "ASM", "016"},
    {"Andorra", "Andorre (l')", "AD", "AND", "020"},
    {"Angola", "Angola (l')", "AO", "AGO", "024"},
    {"Antigua and Barbuda", "Antigua-et-Barbuda", "AG", "ATG", "028"},
    {"Azerbaijan", "Azerbaïdjan (l')", "AZ", "AZE", "031"},
    {"Argentina", "Argentine (l')", "AR", "ARG", "032"},
    {"Australia", "Australie (l')", "AU", "AUS", "036"},
    {"Austria", "Autriche (l')", "AT", "AUT", "040"},
    {"Bahamas (the)", "Bahamas (les)", "BS", "BHS", "044"},
    {"Bahrain", "Bahreïn", "BH", "BHR", "048"},
    {"Bangladesh", "Bangladesh (le)", "BD", "BGD", "050"},
    {"Armenia", "Arménie (l')", "AM", "ARM", "051"},
    {"Barbados", "Barbade (la)", "BB", "BRB", "052"},
    {"Belgium", "Belgique (la)", "BE", "BEL", "056"},
    {"Bermuda", "Bermudes (les)", "BM", "BMU", "060"},
    {"Bhutan", "Bhoutan (le)", "BT", "BTN", "064"},
    {"Bolivia (Plurinational State of)", "Bolivie (État plurinational de)", "BO", "BOL", "068"},
    {"Bosnia and Herzegovina", "Bosnie-Herzégovine (la)", "BA", "BIH", "070"},
    {"Botswana", "Botswana (le)", "BW", "BWA", "072"},
    {"Bouvet Island", "Bouvet (l'Île)", "BV", "BVT", "074"},
    {"Brazil", "Brésil (le)", "BR", "BRA", "076"},
    {"Belize", "Belize (le)", "BZ", "BLZ", "084"},
    {"British Indian Ocean Territory (the)", "Indien (le Territoire britannique de l'océan)", "IO", "IOT", "086"},
    {"Solomon Islands", "Salomon (Îles)", "SB", "SLB", "090"},
    {"Virgin Islands (British)", "Vierges britanniques (les Îles)", "VG", "VGB", "092"},
    {"Brunei Darussalam", "Brunéi Darussalam (le)", "BN", "BRN", "096"},
    {"Bulgaria", "Bulgarie (la)", "BG", "BGR", "100"},
    {"Myanmar", "Myanmar (le)", "MM", "MMR", "104"},
    {"Burundi", "Burundi (le)", "BI", "BDI", "108"},
    {"Belarus", "Bélarus (le)", "BY", "BLR", "112"},
    {"Cambodia", "Cambodge (le)", "KH", "KHM", "116"},
    {"Cameroon", "Cameroun (le)", "CM", "CMR", "120"},
    {"Canada", "Canada (le)", "CA", "CAN", "124"},
    {"Cabo Verde", "Cabo Verde", "CV", "CPV", "132"},
    {"Cayman Islands (the)", "Caïmans (les Îles)", "KY", "CYM", "136"},
    {"Central African Republic (the)", "République centrafricaine (la)", "CF", "CAF", "140"},
    {"Sri Lanka", "Sri Lanka", "LK", "LKA", "144"},
    {"Chad", "Tchad (le)", "TD", "TCD", "148"},
    {"Chile", "Chili (le)", "CL", "CHL", "152"},
    {"China", "Chine (la)", "CN", "CHN", "156"},
    {"Taiwan (Province of China)", "Taïwan (Province de Chine)", "TW", "TWN", "158"},
    {"Christmas Island", "Christmas (l'Île)", "CX", "CXR", "162"},
    {"Cocos (Keeling) Islands (the)", "Cocos (les Îles)/ Keeling (les Îles)", "CC", "CCK", "166"},
    {"Colombia", "Colombie (la)", "CO", "COL", "170"},
    {"Comoros (the)", "Comores (les)", "KM", "COM", "174"},
    {"Mayotte", "Mayotte", "YT", "MYT", "175"},
    {"Congo (the)", "Congo (le)", "CG", "COG", "178"},
    {"Congo (the Democratic Republic of the)", "Congo (la République démocratique du)", "CD", "COD", "180"},
    {"Cook Islands (the)", "Cook (les Îles)", "CK", "COK", "184"},
    {"Costa Rica", "Costa Rica (le)", "CR", "CRI", "188"},
    {"Croatia", "Croatie (la)", "HR", "HRV", "191"},
    {"Cuba", "Cuba", "CU", "CUB", "192"},
    {"Cyprus", "Chypre", "CY", "CYP", "196"},
    {"Czech Republic (the)", "tchèque (la République)", "CZ", "CZE", "203"},
    {"Benin", "Bénin (le)", "BJ", "BEN", "204"},
    {"Denmark", "Danemark (le)", "DK", "DNK", "208"},
    {"Dominica", "Dominique (la)", "DM", "DMA", "212"},
    {"Dominican Republic (the)", "dominicaine (la République)", "DO", "DOM", "214"},
    {"Ecuador", "Équateur (l')", "EC", "ECU", "218"},
    {"El Salvador", "El Salvador", "SV", "SLV", "222"},
    {"Equatorial Guinea", "Guinée équatoriale (la)", "GQ", "GNQ", "226"},
    {"Ethiopia", "Éthiopie (l')", "ET", "ETH", "231"},
    {"Eritrea", "Érythrée (l')", "ER", "ERI", "232"},
    {"Estonia", "Estonie (l')", "EE", "EST", "233"},
    {"Faroe Islands (the)", "Féroé (les Îles)", "FO", "FRO", "234"},
    {"Falkland Islands (the) [Malvinas]", "Falkland (les Îles)/Malouines (les Îles)", "FK", "FLK", "238"},
    {"South Georgia and the South Sandwich Islands", "Géorgie du Sud-et-les Îles Sandwich du Sud (la)", "GS", "SGS", "239"},
    {"Fiji", "Fidji (les)", "FJ", "FJI", "242"},
    {"Finland", "Finlande (la)", "FI", "FIN", "246"},
    {"Åland Islands", "Åland(les Îles)", "AX", "ALA", "248"},
    {"France", "France (la)", "FR", "FRA", "250"},
    {"French Guiana", "Guyane française (la )", "GF", "GUF", "254"},
    {"French Polynesia", "Polynésie française (la)", "PF", "PYF", "258"},
    {"French Southern Territories (the)", "Terres australes françaises (les)", "TF", "ATF", "260"},
    {"Djibouti", "Djibouti", "DJ", "DJI", "262"},
    {"Gabon", "Gabon (le)", "GA", "GAB", "266"},
    {"Georgia", "Géorgie (la)", "GE", "GEO", "268"},
    {"Gambia (the)", "Gambie (la)", "GM", "GMB", "270"},
    {"Palestine, State of", "Palestine, État de", "PS", "PSE", "275"},
    {"Germany", "Allemagne (l')", "DE", "DEU", "276"},
    {"Ghana", "Ghana (le)", "GH", "GHA", "288"},
    {"Gibraltar", "Gibraltar", "GI", "GIB", "292"},
    {"Kiribati", "Kiribati", "KI", "KIR", "296"},
    {"Greece", "Grèce (la)", "GR", "GRC", "300"},
    {"Greenland", "Groenland (le)", "GL", "GRL", "304"},
    {"Grenada", "Grenade (la)", "GD", "GRD", "308"},
    {"Guadeloupe", "Guadeloupe (la)", "GP", "GLP", "312"},
    {"Guam", "Guam", "GU", "GUM", "316"},
    {"Guatemala", "Guatemala (le)", "GT", "GTM", "320"},
    {"Guinea", "Guinée (la)", "GN", "GIN", "324"},
    {"Guyana", "Guyana (le)", "GY", "GUY", "328"},
    {"Haiti", "Haïti", "HT", "HTI", "332"},
    {"Heard Island and McDonald Islands", "Heard-et-Îles MacDonald (l'Île)", "HM", "HMD", "334"},
    {"Holy See (the)", "Saint-Siège (le)", "VA", "VAT", "336"},
    {"Honduras", "Honduras (le)", "HN", "HND", "340"},
    {"Hong Kong", "Hong Kong", "HK", "HKG", "344"},
    {"Hungary", "Hongrie (la)", "HU", "HUN", "348"},
    {"Iceland", "Islande (l')", "IS", "ISL", "352"},
    {"India", "Inde (l')", "IN", "IND", "356"},
    {"Indonesia", "Indonésie (l')", "ID", "IDN", "360"},
    {"Iran (Islamic Republic of)", "Iran (République Islamique d')", "IR", "IRN", "364"},
    {"Iraq", "Iraq (l')", "IQ", "IRQ", "368"},
    {"Ireland", "Irlande (l')", "IE", "IRL", "372"},
    {"Israel", "Israël", "IL", "ISR", "376"},
    {"Italy", "Italie (l')", "IT", "ITA", "380"},
    {"Côte d'Ivoire", "Côte d'Ivoire (la)", "CI", "CIV", "384"},
    {"Jamaica", "Jamaïque (la)", "JM", "JAM", "388"},
    {"Japan", "Japon (le)", "JP", "JPN", "392"},
    {"Kazakhstan", "Kazakhstan (le)", "KZ", "KAZ", "398"},
    {"Jordan", "Jordanie (la)", "JO", "JOR", "400"},
    {"Kenya", "Kenya (le)", "KE", "KEN", "404"},
    {"Korea (the Democratic People's Republic of)", "Corée (la République populaire démocratique de)", "KP", "PRK", "408"},
    {"Korea (the Republic of)", "Corée (la République de)", "KR", "KOR", "410"},
    {"Kuwait", "Koweït (le)", "KW", "KWT", "414"},
    {"Kyrgyzstan", "Kirghizistan (le)", "KG", "KGZ", "417"},
    {"Lao People's Democratic Republic (the)", "Lao, République démocratique populaire", "LA", "LAO", "418"},
    {"Lebanon", "Liban (le)", "LB", "LBN", "422"},
    {"Lesotho", "Lesotho (le)", "LS", "LSO", "426"},
    {"Latvia", "Lettonie (la)", "LV", "LVA", "428"},
    {"Liberia", "Libéria (le)", "LR", "LBR", "430"},
    {"Libya", "Libye (la)", "LY", "LBY", "434"},
    {"Liechtenstein", "Liechtenstein (le)", "LI", "LIE", "438"},
    {"Lithuania", "Lituanie (la)", "LT", "LTU", "440"},
    {"Luxembourg", "Luxembourg (le)", "LU", "LUX", "442"},
    {"Macao", "Macao", "MO", "MAC", "446"},
    {"Madagascar", "Madagascar", "MG", "MDG", "450"},
    {"Malawi", "Malawi (le)", "MW", "MWI", "454"},
    {"Malaysia", "Malaisie (la)", "MY", "MYS", "458"},
    {"Maldives", "Maldives (les)", "MV", "MDV", "462"},
    {"Mali", "Mali (le)", "ML", "MLI", "466"},
    {"Malta", "Malte", "MT", "MLT", "470"},
    {"Martinique", "Martinique (la)", "MQ", "MTQ", "474"},
    {"Mauritania", "Mauritanie (la)", "MR", "MRT", "478"},
    {"Mauritius", "Maurice", "MU", "MUS", "480"},
    {"Mexico", "Mexique (le)", "MX", "MEX", "484"},
    {"Monaco", "Monaco", "MC", "MCO", "492"},
    {"Mongolia", "Mongolie (la)", "MN", "MNG", "496"},
    {"Moldova (the Republic of)", "Moldova , République de", "MD", "MDA", "498"},
    {"Montenegro", "Monténégro (le)", "ME", "MNE", "499"},
    {"Montserrat", "Montserrat", "MS", "MSR", "500"},
    {"Morocco", "Maroc (le)", "MA", "MAR", "504"},
    {"Mozambique", "Mozambique (le)", "MZ", "MOZ", "508"},
    {"Oman", "Oman", "OM", "OMN", "512"},
    {"Namibia", "Namibie (la)", "NA", "NAM", "516"},
    {"Nauru", "Nauru", "NR", "NRU", "520"},
    {"Nepal", "Népal (le)", "NP", "NPL", "524"},
    {"Netherlands (the)", "Pays-Bas (les)", "NL", "NLD", "528"},
    {"Curaçao", "Curaçao", "CW", "CUW", "531"},
    {"Aruba", "Aruba", "AW", "ABW", "533"},
    {"Sint Maarten (Dutch part)", "Saint-Martin (partie néerlandaise)", "SX", "SXM", "534"},
    {"Bonaire, Sint Eustatius and Saba", "Bonaire, Saint-Eustache et Saba", "BQ", "BES", "535"},
    {"New Caledonia", "Nouvelle-Calédonie (la)", "NC", "NCL", "540"},
    {"Vanuatu", "Vanuatu (le)", "VU", "VUT", "548"},
    {"New Zealand", "Nouvelle-Zélande (la)", "NZ", "NZL", "554"},
    {"Nicaragua", "Nicaragua (le)", "NI", "NIC", "558"},
    {"Niger (the)", "Niger (le)", "NE", "NER", "562"},
    {"Nigeria", "Nigéria (le)", "NG", "NGA", "566"},
    {"Niue", "Niue", "NU", "NIU", "570"},
    {"Norfolk Island", "Norfolk (l'Île)", "NF", "NFK", "574"},
    {"Norway", "Norvège (la)", "NO", "NOR", "578"},
    {"Northern Mariana Islands (the)", "Mariannes du Nord (les Îles)", "MP", "MNP", "580"},
    {"United States Minor Outlying Islands (the)", "Îles mineures éloignées des États-Unis (les)", "UM", "UMI", "581"},
    {"Micronesia (Federated States of)", "Micronésie (États fédérés de)", "FM", "FSM", "583"},
    {"Marshall Islands (the)", "Marshall (Îles)", "MH", "MHL", "584"},
    {"Palau", "Palaos (les)", "PW", "PLW", "585"},
    {"Pakistan", "Pakistan (le)", "PK", "PAK", "586"},
    {"Panama", "Panama (le)", "PA", "PAN", "591"},
    {"Papua New Guinea", "Papouasie-Nouvelle-Guinée (la)", "PG", "PNG", "598"},
    {"Paraguay", "Paraguay (le)", "PY", "PRY", "600"},
    {"Peru", "Pérou (le)", "PE", "PER", "604"},
    {"Philippines (the)", "Philippines (les)", "PH", "PHL", "608"},
    {"Pitcairn", "Pitcairn", "PN", "PCN", "612"},
    {"Poland", "Pologne (la)", "PL", "POL", "616"},
    {"Portugal", "Portugal (le)", "PT", "PRT", "620"},
    {"Guinea-Bissau", "Guinée-Bissau (la)", "GW", "GNB", "624"},
    {"Timor-Leste", "Timor-Leste (le)", "TL", "TLS", "626"},
    {"Puerto Rico", "Porto Rico", "PR", "PRI", "630"},
    {"Qatar", "Qatar (le)", "QA", "QAT", "634"},
    {"Réunion", "Réunion (La)", "RE", "REU", "638"},
    {"Romania", "Roumanie (la)", "RO", "ROU", "642"},
    {"Russian Federation (the)", "Russie (la Fédération de)", "RU", "RUS", "643"},
    {"Rwanda", "Rwanda (le)", "RW", "RWA", "646"},
    {"Saint Barthélemy", "Saint-Barthélemy", "BL", "BLM", "652"},
    {"Saint Helena, Ascension and Tristan da Cunha", "Sainte-Hélène, Ascension et Tristan da Cunha", "SH", "SHN", "654"},
    {"Saint Kitts and Nevis", "Saint-Kitts-et-Nevis", "KN", "KNA", "659"},
    {"Anguilla", "Anguilla", "AI", "AIA", "660"},
    {"Saint Lucia", "Sainte-Lucie", "LC", "LCA", "662"},
    {"Saint Martin (French part)", "Saint-Martin (partie française)", "MF", "MAF", "663"},
    {"Saint Pierre and Miquelon", "Saint-Pierre-et-Miquelon", "PM", "SPM", "666"},
    {"Saint Vincent and the Grenadines", "Saint-Vincent-et-les Grenadines", "VC", "VCT", "670"},
    {"San Marino", "Saint-Marin", "SM", "SMR", "674"},
    {"Sao Tome and Principe", "Sao Tomé-et-Principe", "ST", "STP", "678"},
    {"Saudi Arabia", "Arabie saoudite (l')", "SA", "SAU", "682"},
    {"Senegal", "Sénégal (le)", "SN", "SEN", "686"},
    {"Serbia", "Serbie (la)", "RS", "SRB", "688"},
    {"Seychelles", "Seychelles (les)", "SC", "SYC", "690"},
    {"Sierra Leone", "Sierra Leone (la)", "SL", "SLE", "694"},
    {"Singapore", "Singapour", "SG", "SGP", "702"},
    {"Slovakia", "Slovaquie (la)", "SK", "SVK", "703"},
    {"Viet Nam", "Viet Nam (le)", "VN", "VNM", "704"},
    {"Slovenia", "Slovénie (la)", "SI", "SVN", "705"},
    {"Somalia", "Somalie (la)", "SO", "SOM", "706"},
    {"South Africa", "Afrique du Sud (l')", "ZA", "ZAF", "710"},
    {"Zimbabwe", "Zimbabwe (le)", "ZW", "ZWE", "716"},
    {"Spain", "Espagne (l')", "ES", "ESP", "724"},
    {"South Sudan", "Soudan du Sud (le)", "SS", "SSD", "728"},
    {"Sudan (the)", "Soudan (le)", "SD", "SDN", "729"},
    {"Western Sahara*", "Sahara occidental (le)*", "EH", "ESH", "732"},
    {"Suriname", "Suriname (le)", "SR", "SUR", "740"},
    {"Svalbard and Jan Mayen", "Svalbard et l'Île Jan Mayen (le)", "SJ", "SJM", "744"},
    {"Swaziland", "Swaziland (le)", "SZ", "SWZ", "748"},
    {"Sweden", "Suède (la)", "SE", "SWE", "752"},
    {"Switzerland", "Suisse (la)", "CH", "CHE", "756"},
    {"Syrian Arab Republic", "République arabe syrienne (la)", "SY", "SYR", "760"},
    {"Tajikistan", "Tadjikistan (le)", "TJ", "TJK", "762"},
    {"Thailand", "Thaïlande (la)", "TH", "THA", "764"},
    {"Togo", "Togo (le)", "TG", "TGO", "768"},
    {"Tokelau", "Tokelau (les)", "TK", "TKL", "772"},
    {"Tonga", "Tonga (les)", "TO", "TON", "776"},
    {"Trinidad and Tobago", "Trinité-et-Tobago (la)", "TT", "TTO", "780"},
    {"United Arab Emirates (the)", "Émirats arabes unis (les)", "AE", "ARE", "784"},
    {"Tunisia", "Tunisie (la)", "TN", "TUN", "788"},
    {"Turkey", "Turquie (la)", "TR", "TUR", "792"},
    {"Turkmenistan", "Turkménistan (le)", "TM", "TKM", "795"},
    {"Turks and Caicos Islands (the)", "Turks-et-Caïcos (les Îles)", "TC", "TCA", "796"},
    {"Tuvalu", "Tuvalu (les)", "TV", "TUV", "798"},
    {"Uganda", "Ouganda (l')", "UG", "UGA", "800"},
    {"Ukraine", "Ukraine (l')", "UA", "UKR", "804"},
    {"Macedonia (the former Yugoslav Republic of)", "Macédoine (l'ex‑République yougoslave de)", "MK", "MKD", "807"},
    {"Egypt", "Égypte (l')", "EG", "EGY", "818"},
    {"United Kingdom of Great Britain and Northern Ireland (the)", "Royaume-Uni de Grande-Bretagne et d'Irlande du Nord (le)", "GB", "GBR", "826"},
    {"Guernsey", "Guernesey", "GG", "GGY", "831"},
    {"Jersey", "Jersey", "JE", "JEY", "832"},
    {"Isle of Man", "Île de Man", "IM", "IMN", "833"},
    {"Tanzania, United Republic of", "Tanzanie, République-Unie de", "TZ", "TZA", "834"},
    {"United States of America (the)", "États-Unis d'Amérique (les)", "US", "USA", "840"},
    {"Virgin Islands (U.S.)", "Vierges des États-Unis (les Îles)", "VI", "VIR", "850"},
    {"Burkina Faso", "Burkina Faso (le)", "BF", "BFA", "854"},
    {"Uruguay", "Uruguay (l')", "UY", "URY", "858"},
    {"Uzbekistan", "Ouzbékistan (l')", "UZ", "UZB", "860"},
    {"Venezuela (Bolivarian Republic of)", "Venezuela (République bolivarienne du)", "VE", "VEN", "862"},
    {"Wallis and Futuna", "Wallis-et-Futuna", "WF", "WLF", "876"},
    {"Samoa", "Samoa (le)", "WS", "WSM", "882"},
    {"Yemen", "Yémen (le)", "YE", "YEM", "887"},
    {"Zambia", "Zambie (la)", "ZM", "ZMB", "894"},
}

// ISO4217List is the list of ISO currency codes
var ISO4217List = []string{
    "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN",
    "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD",
    "CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK",
    "DJF", "DKK", "DOP", "DZD",
    "EGP", "ERN", "ETB", "EUR",
    "FJD", "FKP",
    "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD",
    "HKD", "HNL", "HRK", "HTG", "HUF",
    "IDR", "ILS", "INR", "IQD", "IRR", "ISK",
    "JMD", "JOD", "JPY",
    "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT",
    "LAK", "LBP", "LKR", "LRD", "LSL", "LYD",
    "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN",
    "NAD", "NGN", "NIO", "NOK", "NPR", "NZD",
    "OMR",
    "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG",
    "QAR",
    "RON", "RSD", "RUB", "RWF",
    "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "SVC", "SYP", "SZL",
    "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS",
|
|
||||||
"UAH", "UGX", "USD", "USN", "UYI", "UYU", "UZS",
|
|
||||||
"VEF", "VND", "VUV",
|
|
||||||
"WST",
|
|
||||||
"XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX",
|
|
||||||
"YER",
|
|
||||||
"ZAR", "ZMW", "ZWL",
|
|
||||||
}
|
|
||||||
|
|
||||||
// ISO693Entry stores ISO language codes
|
|
||||||
type ISO693Entry struct {
|
|
||||||
Alpha3bCode string
|
|
||||||
Alpha2Code string
|
|
||||||
English string
|
|
||||||
}
|
|
||||||
|
|
||||||
//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json
|
|
||||||
var ISO693List = []ISO693Entry{
|
|
||||||
{Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"},
|
|
||||||
{Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"},
|
|
||||||
{Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"},
|
|
||||||
{Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"},
|
|
||||||
{Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"},
|
|
||||||
{Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"},
|
|
||||||
{Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"},
|
|
||||||
{Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"},
|
|
||||||
{Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"},
|
|
||||||
{Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"},
|
|
||||||
{Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"},
|
|
||||||
{Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"},
|
|
||||||
{Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"},
|
|
||||||
{Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"},
|
|
||||||
{Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"},
|
|
||||||
{Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"},
|
|
||||||
{Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"},
|
|
||||||
{Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"},
|
|
||||||
{Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"},
|
|
||||||
{Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"},
|
|
||||||
{Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"},
|
|
||||||
{Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"},
|
|
||||||
{Alpha3bCode: "bre", Alpha2Code: "br", English: "Breton"},
|
|
||||||
{Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"},
|
|
||||||
{Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"},
|
|
||||||
{Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"},
|
|
||||||
{Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"},
|
|
||||||
{Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"},
|
|
||||||
{Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"},
|
|
||||||
{Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"},
|
|
||||||
{Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"},
|
|
||||||
{Alpha3bCode: "cor", Alpha2Code: "kw", English: "Cornish"},
|
|
||||||
{Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"},
|
|
||||||
{Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"},
|
|
||||||
{Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"},
|
|
||||||
{Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"},
|
|
||||||
{Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"},
|
|
||||||
{Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"},
|
|
||||||
{Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"},
|
|
||||||
{Alpha3bCode: "eng", Alpha2Code: "en", English: "English"},
|
|
||||||
{Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"},
|
|
||||||
{Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"},
|
|
||||||
{Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"},
|
|
||||||
{Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"},
|
|
||||||
{Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"},
|
|
||||||
{Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"},
|
|
||||||
{Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"},
|
|
||||||
{Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"},
|
|
||||||
{Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"},
|
|
||||||
{Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"},
|
|
||||||
{Alpha3bCode: "ger", Alpha2Code: "de", English: "German"},
|
|
||||||
{Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"},
|
|
||||||
{Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"},
|
|
||||||
{Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"},
|
|
||||||
{Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"},
|
|
||||||
{Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"},
|
|
||||||
{Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"},
|
|
||||||
{Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"},
|
|
||||||
{Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"},
|
|
||||||
{Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"},
|
|
||||||
{Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"},
|
|
||||||
{Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"},
|
|
||||||
{Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"},
|
|
||||||
{Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"},
|
|
||||||
{Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"},
|
|
||||||
{Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"},
|
|
||||||
{Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"},
|
|
||||||
{Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"},
|
|
||||||
{Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"},
|
|
||||||
{Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"},
|
|
||||||
{Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"},
|
|
||||||
{Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"},
|
|
||||||
{Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"},
|
|
||||||
{Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"},
|
|
||||||
{Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"},
|
|
||||||
{Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"},
|
|
||||||
{Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"},
|
|
||||||
{Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"},
|
|
||||||
{Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"},
|
|
||||||
{Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"},
|
|
||||||
{Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"},
|
|
||||||
{Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"},
|
|
||||||
{Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"},
|
|
||||||
{Alpha3bCode: "khm", Alpha2Code: "km", English: "Central Khmer"},
|
|
||||||
{Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"},
|
|
||||||
{Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"},
|
|
||||||
{Alpha3bCode: "kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"},
|
|
||||||
{Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"},
|
|
||||||
{Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"},
|
|
||||||
{Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"},
|
|
||||||
{Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"},
|
|
||||||
{Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"},
|
|
||||||
{Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"},
|
|
||||||
{Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"},
|
|
||||||
{Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"},
|
|
||||||
{Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"},
|
|
||||||
{Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"},
|
|
||||||
{Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"},
|
|
||||||
{Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"},
|
|
||||||
{Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"},
|
|
||||||
{Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"},
|
|
||||||
{Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"},
|
|
||||||
{Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"},
|
|
||||||
{Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"},
|
|
||||||
{Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"},
|
|
||||||
{Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"},
|
|
||||||
{Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"},
|
|
||||||
{Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"},
|
|
||||||
{Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"},
|
|
||||||
{Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"},
|
|
||||||
{Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"},
|
|
||||||
{Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"},
|
|
||||||
{Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"},
|
|
||||||
{Alpha3bCode: "nde", Alpha2Code: "nd", English: "Ndebele, North; North Ndebele"},
|
|
||||||
{Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"},
|
|
||||||
{Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"},
|
|
||||||
{Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"},
|
|
||||||
{Alpha3bCode: "nob", Alpha2Code: "nb", English: "Bokmål, Norwegian; Norwegian Bokmål"},
|
|
||||||
{Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"},
|
|
||||||
{Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"},
|
|
||||||
{Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"},
|
|
||||||
{Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"},
|
|
||||||
{Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"},
|
|
||||||
{Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"},
|
|
||||||
{Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"},
|
|
||||||
{Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"},
|
|
||||||
{Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"},
|
|
||||||
{Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"},
|
|
||||||
{Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"},
|
|
||||||
{Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"},
|
|
||||||
{Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"},
|
|
||||||
{Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"},
|
|
||||||
{Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"},
|
|
||||||
{Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"},
|
|
||||||
{Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"},
|
|
||||||
{Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"},
|
|
||||||
{Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"},
|
|
||||||
{Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"},
|
|
||||||
{Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"},
|
|
||||||
{Alpha3bCode: "slo", Alpha2Code: "sk", English: "Slovak"},
|
|
||||||
{Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"},
|
|
||||||
{Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"},
|
|
||||||
{Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"},
|
|
||||||
{Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"},
|
|
||||||
{Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"},
|
|
||||||
{Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"},
|
|
||||||
{Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"},
|
|
||||||
{Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"},
|
|
||||||
{Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"},
|
|
||||||
{Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"},
|
|
||||||
{Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"},
|
|
||||||
{Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"},
|
|
||||||
{Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"},
|
|
||||||
{Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"},
|
|
||||||
{Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"},
|
|
||||||
{Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"},
|
|
||||||
{Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"},
|
|
||||||
{Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"},
|
|
||||||
{Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"},
|
|
||||||
{Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"},
|
|
||||||
{Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"},
|
|
||||||
{Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"},
|
|
||||||
{Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"},
|
|
||||||
{Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"},
|
|
||||||
{Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"},
|
|
||||||
{Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"},
|
|
||||||
{Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"},
|
|
||||||
{Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"},
|
|
||||||
{Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"},
|
|
||||||
{Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"},
|
|
||||||
{Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"},
|
|
||||||
{Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"},
|
|
||||||
{Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"},
|
|
||||||
{Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"},
|
|
||||||
{Alpha3bCode: "vie", Alpha2Code: "vi", English: "Vietnamese"},
|
|
||||||
{Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"},
|
|
||||||
{Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"},
|
|
||||||
{Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"},
|
|
||||||
{Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"},
|
|
||||||
{Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"},
|
|
||||||
{Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"},
|
|
||||||
{Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"},
|
|
||||||
{Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"},
|
|
||||||
{Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"},
|
|
||||||
}
|
|
262
vendor/github.com/asaskevich/govalidator/utils.go
generated
vendored
262
vendor/github.com/asaskevich/govalidator/utils.go
generated
vendored
|
@ -1,262 +0,0 @@
|
||||||
package govalidator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"html"
|
|
||||||
"math"
|
|
||||||
"path"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Contains check if the string contains the substring.
|
|
||||||
func Contains(str, substring string) bool {
|
|
||||||
return strings.Contains(str, substring)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Matches check if string matches the pattern (pattern is regular expression)
|
|
||||||
// In case of error return false
|
|
||||||
func Matches(str, pattern string) bool {
|
|
||||||
match, _ := regexp.MatchString(pattern, str)
|
|
||||||
return match
|
|
||||||
}
|
|
||||||
|
|
||||||
// LeftTrim trim characters from the left-side of the input.
|
|
||||||
// If second argument is empty, it's will be remove leading spaces.
|
|
||||||
func LeftTrim(str, chars string) string {
|
|
||||||
if chars == "" {
|
|
||||||
return strings.TrimLeftFunc(str, unicode.IsSpace)
|
|
||||||
}
|
|
||||||
r, _ := regexp.Compile("^[" + chars + "]+")
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// RightTrim trim characters from the right-side of the input.
|
|
||||||
// If second argument is empty, it's will be remove spaces.
|
|
||||||
func RightTrim(str, chars string) string {
|
|
||||||
if chars == "" {
|
|
||||||
return strings.TrimRightFunc(str, unicode.IsSpace)
|
|
||||||
}
|
|
||||||
r, _ := regexp.Compile("[" + chars + "]+$")
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// Trim trim characters from both sides of the input.
|
|
||||||
// If second argument is empty, it's will be remove spaces.
|
|
||||||
func Trim(str, chars string) string {
|
|
||||||
return LeftTrim(RightTrim(str, chars), chars)
|
|
||||||
}
|
|
||||||
|
|
||||||
// WhiteList remove characters that do not appear in the whitelist.
|
|
||||||
func WhiteList(str, chars string) string {
|
|
||||||
pattern := "[^" + chars + "]+"
|
|
||||||
r, _ := regexp.Compile(pattern)
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// BlackList remove characters that appear in the blacklist.
|
|
||||||
func BlackList(str, chars string) string {
|
|
||||||
pattern := "[" + chars + "]+"
|
|
||||||
r, _ := regexp.Compile(pattern)
|
|
||||||
return r.ReplaceAllString(str, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// StripLow remove characters with a numerical value < 32 and 127, mostly control characters.
|
|
||||||
// If keep_new_lines is true, newline characters are preserved (\n and \r, hex 0xA and 0xD).
|
|
||||||
func StripLow(str string, keepNewLines bool) string {
|
|
||||||
chars := ""
|
|
||||||
if keepNewLines {
|
|
||||||
chars = "\x00-\x09\x0B\x0C\x0E-\x1F\x7F"
|
|
||||||
} else {
|
|
||||||
chars = "\x00-\x1F\x7F"
|
|
||||||
}
|
|
||||||
return BlackList(str, chars)
|
|
||||||
}
|
|
||||||
|
|
||||||
// ReplacePattern replace regular expression pattern in string
|
|
||||||
func ReplacePattern(str, pattern, replace string) string {
|
|
||||||
r, _ := regexp.Compile(pattern)
|
|
||||||
return r.ReplaceAllString(str, replace)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Escape replace <, >, & and " with HTML entities.
|
|
||||||
var Escape = html.EscapeString
|
|
||||||
|
|
||||||
func addSegment(inrune, segment []rune) []rune {
|
|
||||||
if len(segment) == 0 {
|
|
||||||
return inrune
|
|
||||||
}
|
|
||||||
if len(inrune) != 0 {
|
|
||||||
inrune = append(inrune, '_')
|
|
||||||
}
|
|
||||||
inrune = append(inrune, segment...)
|
|
||||||
return inrune
|
|
||||||
}
|
|
||||||
|
|
||||||
// UnderscoreToCamelCase converts from underscore separated form to camel case form.
|
|
||||||
// Ex.: my_func => MyFunc
|
|
||||||
func UnderscoreToCamelCase(s string) string {
|
|
||||||
return strings.Replace(strings.Title(strings.Replace(strings.ToLower(s), "_", " ", -1)), " ", "", -1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// CamelCaseToUnderscore converts from camel case form to underscore separated form.
|
|
||||||
// Ex.: MyFunc => my_func
|
|
||||||
func CamelCaseToUnderscore(str string) string {
|
|
||||||
var output []rune
|
|
||||||
var segment []rune
|
|
||||||
for _, r := range str {
|
|
||||||
if !unicode.IsLower(r) && string(r) != "_" {
|
|
||||||
output = addSegment(output, segment)
|
|
||||||
segment = nil
|
|
||||||
}
|
|
||||||
segment = append(segment, unicode.ToLower(r))
|
|
||||||
}
|
|
||||||
output = addSegment(output, segment)
|
|
||||||
return string(output)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Reverse return reversed string
|
|
||||||
func Reverse(s string) string {
|
|
||||||
r := []rune(s)
|
|
||||||
for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 {
|
|
||||||
r[i], r[j] = r[j], r[i]
|
|
||||||
}
|
|
||||||
return string(r)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetLines split string by "\n" and return array of lines
|
|
||||||
func GetLines(s string) []string {
|
|
||||||
return strings.Split(s, "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetLine return specified line of multiline string
|
|
||||||
func GetLine(s string, index int) (string, error) {
|
|
||||||
lines := GetLines(s)
|
|
||||||
if index < 0 || index >= len(lines) {
|
|
||||||
return "", errors.New("line index out of bounds")
|
|
||||||
}
|
|
||||||
return lines[index], nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// RemoveTags remove all tags from HTML string
|
|
||||||
func RemoveTags(s string) string {
|
|
||||||
return ReplacePattern(s, "<[^>]*>", "")
|
|
||||||
}
|
|
||||||
|
|
||||||
// SafeFileName return safe string that can be used in file names
|
|
||||||
func SafeFileName(str string) string {
|
|
||||||
name := strings.ToLower(str)
|
|
||||||
name = path.Clean(path.Base(name))
|
|
||||||
name = strings.Trim(name, " ")
|
|
||||||
separators, err := regexp.Compile(`[ &_=+:]`)
|
|
||||||
if err == nil {
|
|
||||||
name = separators.ReplaceAllString(name, "-")
|
|
||||||
}
|
|
||||||
legal, err := regexp.Compile(`[^[:alnum:]-.]`)
|
|
||||||
if err == nil {
|
|
||||||
name = legal.ReplaceAllString(name, "")
|
|
||||||
}
|
|
||||||
for strings.Contains(name, "--") {
|
|
||||||
name = strings.Replace(name, "--", "-", -1)
|
|
||||||
}
|
|
||||||
return name
|
|
||||||
}
|
|
||||||
|
|
||||||
// NormalizeEmail canonicalize an email address.
|
|
||||||
// The local part of the email address is lowercased for all domains; the hostname is always lowercased and
|
|
||||||
// the local part of the email address is always lowercased for hosts that are known to be case-insensitive (currently only GMail).
|
|
||||||
// Normalization follows special rules for known providers: currently, GMail addresses have dots removed in the local part and
|
|
||||||
// are stripped of tags (e.g. some.one+tag@gmail.com becomes someone@gmail.com) and all @googlemail.com addresses are
|
|
||||||
// normalized to @gmail.com.
|
|
||||||
func NormalizeEmail(str string) (string, error) {
|
|
||||||
if !IsEmail(str) {
|
|
||||||
return "", fmt.Errorf("%s is not an email", str)
|
|
||||||
}
|
|
||||||
parts := strings.Split(str, "@")
|
|
||||||
parts[0] = strings.ToLower(parts[0])
|
|
||||||
parts[1] = strings.ToLower(parts[1])
|
|
||||||
if parts[1] == "gmail.com" || parts[1] == "googlemail.com" {
|
|
||||||
parts[1] = "gmail.com"
|
|
||||||
parts[0] = strings.Split(ReplacePattern(parts[0], `\.`, ""), "+")[0]
|
|
||||||
}
|
|
||||||
return strings.Join(parts, "@"), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Truncate a string to the closest length without breaking words.
|
|
||||||
func Truncate(str string, length int, ending string) string {
|
|
||||||
var aftstr, befstr string
|
|
||||||
if len(str) > length {
|
|
||||||
words := strings.Fields(str)
|
|
||||||
before, present := 0, 0
|
|
||||||
for i := range words {
|
|
||||||
befstr = aftstr
|
|
||||||
before = present
|
|
||||||
aftstr = aftstr + words[i] + " "
|
|
||||||
present = len(aftstr)
|
|
||||||
if present > length && i != 0 {
|
|
||||||
if (length - before) < (present - length) {
|
|
||||||
return Trim(befstr, " /\\.,\"'#!?&@+-") + ending
|
|
||||||
}
|
|
||||||
return Trim(aftstr, " /\\.,\"'#!?&@+-") + ending
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadLeft pad left side of string if size of string is less then indicated pad length
|
|
||||||
func PadLeft(str string, padStr string, padLen int) string {
|
|
||||||
return buildPadStr(str, padStr, padLen, true, false)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadRight pad right side of string if size of string is less then indicated pad length
|
|
||||||
func PadRight(str string, padStr string, padLen int) string {
|
|
||||||
return buildPadStr(str, padStr, padLen, false, true)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadBoth pad sides of string if size of string is less then indicated pad length
|
|
||||||
func PadBoth(str string, padStr string, padLen int) string {
|
|
||||||
return buildPadStr(str, padStr, padLen, true, true)
|
|
||||||
}
|
|
||||||
|
|
||||||
// PadString either left, right or both sides, not the padding string can be unicode and more then one
|
|
||||||
// character
|
|
||||||
func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string {
|
|
||||||
|
|
||||||
// When padded length is less then the current string size
|
|
||||||
if padLen < utf8.RuneCountInString(str) {
|
|
||||||
return str
|
|
||||||
}
|
|
||||||
|
|
||||||
padLen -= utf8.RuneCountInString(str)
|
|
||||||
|
|
||||||
targetLen := padLen
|
|
||||||
|
|
||||||
targetLenLeft := targetLen
|
|
||||||
targetLenRight := targetLen
|
|
||||||
if padLeft && padRight {
|
|
||||||
targetLenLeft = padLen / 2
|
|
||||||
targetLenRight = padLen - targetLenLeft
|
|
||||||
}
|
|
||||||
|
|
||||||
strToRepeatLen := utf8.RuneCountInString(padStr)
|
|
||||||
|
|
||||||
repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen)))
|
|
||||||
repeatedString := strings.Repeat(padStr, repeatTimes)
|
|
||||||
|
|
||||||
leftSide := ""
|
|
||||||
if padLeft {
|
|
||||||
leftSide = repeatedString[0:targetLenLeft]
|
|
||||||
}
|
|
||||||
|
|
||||||
rightSide := ""
|
|
||||||
if padRight {
|
|
||||||
rightSide = repeatedString[0:targetLenRight]
|
|
||||||
}
|
|
||||||
|
|
||||||
return leftSide + str + rightSide
|
|
||||||
}
|
|
501
vendor/github.com/asaskevich/govalidator/utils_test.go
generated
vendored
501
vendor/github.com/asaskevich/govalidator/utils_test.go
generated
vendored
|
@ -1,501 +0,0 @@
|
||||||
package govalidator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestContains(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected bool
|
|
||||||
}{
|
|
||||||
{"abacada", "", true},
|
|
||||||
{"abacada", "ritir", false},
|
|
||||||
{"abacada", "a", true},
|
|
||||||
{"abacada", "aca", true},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := Contains(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected Contains(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMatches(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected bool
|
|
||||||
}{
|
|
||||||
{"123456789", "[0-9]+", true},
|
|
||||||
{"abacada", "cab$", false},
|
|
||||||
{"111222333", "((111|222|333)+)+", true},
|
|
||||||
{"abacaba", "((123+]", false},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := Matches(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected Matches(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestLeftTrim(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{" \r\n\tfoo \r\n\t ", "", "foo \r\n\t "},
|
|
||||||
{"010100201000", "01", "201000"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := LeftTrim(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected LeftTrim(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRightTrim(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{" \r\n\tfoo \r\n\t ", "", " \r\n\tfoo"},
|
|
||||||
{"010100201000", "01", "0101002"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := RightTrim(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected RightTrim(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTrim(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{" \r\n\tfoo \r\n\t ", "", "foo"},
|
|
||||||
{"010100201000", "01", "2"},
|
|
||||||
{"1234567890987654321", "1-8", "909"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := Trim(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected Trim(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This small example illustrate how to work with Trim function.
|
|
||||||
func ExampleTrim() {
|
|
||||||
// Remove from left and right spaces and "\r", "\n", "\t" characters
|
|
||||||
println(Trim(" \r\r\ntext\r \t\n", "") == "text")
|
|
||||||
// Remove from left and right characters that are between "1" and "8".
|
|
||||||
// "1-8" is like full list "12345678".
|
|
||||||
println(Trim("1234567890987654321", "1-8") == "909")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestWhiteList(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"abcdef", "abc", "abc"},
|
|
||||||
{"aaaaaaaaaabbbbbbbbbb", "abc", "aaaaaaaaaabbbbbbbbbb"},
|
|
||||||
{"a1b2c3", "abc", "abc"},
|
|
||||||
{" ", "abc", ""},
|
|
||||||
{"a3a43a5a4a3a2a23a4a5a4a3a4", "a-z", "aaaaaaaaaaaa"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := WhiteList(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected WhiteList(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This small example illustrate how to work with WhiteList function.
|
|
||||||
func ExampleWhiteList() {
|
|
||||||
// Remove all characters from string ignoring characters between "a" and "z"
|
|
||||||
println(WhiteList("a3a43a5a4a3a2a23a4a5a4a3a4", "a-z") == "aaaaaaaaaaaa")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBlackList(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"abcdef", "abc", "def"},
|
|
||||||
{"aaaaaaaaaabbbbbbbbbb", "abc", ""},
|
|
||||||
{"a1b2c3", "abc", "123"},
|
|
||||||
{" ", "abc", " "},
|
|
||||||
{"a3a43a5a4a3a2a23a4a5a4a3a4", "a-z", "34354322345434"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := BlackList(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected BlackList(%q,%q) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestStripLow(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 bool
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"foo\x00", false, "foo"},
|
|
||||||
{"\x7Ffoo\x02", false, "foo"},
|
|
||||||
{"\x01\x09", false, ""},
|
|
||||||
{"foo\x0A\x0D", false, "foo"},
|
|
||||||
{"perch\u00e9", false, "perch\u00e9"},
|
|
||||||
{"\u20ac", false, "\u20ac"},
|
|
||||||
{"\u2206\x0A", false, "\u2206"},
|
|
||||||
{"foo\x0A\x0D", true, "foo\x0A\x0D"},
|
|
||||||
{"\x03foo\x0A\x0D", true, "foo\x0A\x0D"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := StripLow(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected StripLow(%q,%t) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestReplacePattern(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
param3 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"ab123ba", "[0-9]+", "aca", "abacaba"},
|
|
||||||
{"abacaba", "[0-9]+", "aca", "abacaba"},
|
|
||||||
{"httpftp://github.comio", "(ftp|io)", "", "http://github.com"},
|
|
||||||
{"aaaaaaaaaa", "a", "", ""},
|
|
||||||
{"http123123ftp://git534543hub.comio", "(ftp|io|[0-9]+)", "", "http://github.com"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := ReplacePattern(test.param1, test.param2, test.param3)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected ReplacePattern(%q,%q,%q) to be %v, got %v", test.param1, test.param2, test.param3, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// This small example illustrate how to work with ReplacePattern function.
|
|
||||||
func ExampleReplacePattern() {
|
|
||||||
// Replace in "http123123ftp://git534543hub.comio" following (pattern "(ftp|io|[0-9]+)"):
|
|
||||||
// - Sequence "ftp".
|
|
||||||
// - Sequence "io".
|
|
||||||
// - Sequence of digits.
|
|
||||||
// with empty string.
|
|
||||||
println(ReplacePattern("http123123ftp://git534543hub.comio", "(ftp|io|[0-9]+)", "") == "http://github.com")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestEscape(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{`<img alt="foo&bar">`, "<img alt="foo&bar">"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := Escape(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected Escape(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUnderscoreToCamelCase(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"a_b_c", "ABC"},
|
|
||||||
{"my_func", "MyFunc"},
|
|
||||||
{"1ab_cd", "1abCd"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := UnderscoreToCamelCase(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected UnderscoreToCamelCase(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCamelCaseToUnderscore(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"MyFunc", "my_func"},
|
|
||||||
{"ABC", "a_b_c"},
|
|
||||||
{"1B", "1_b"},
|
|
||||||
{"foo_bar", "foo_bar"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := CamelCaseToUnderscore(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected CamelCaseToUnderscore(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestReverse(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"abc", "cba"},
|
|
||||||
{"カタカナ", "ナカタカ"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := Reverse(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected Reverse(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetLines(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected []string
|
|
||||||
}{
|
|
||||||
{"abc", []string{"abc"}},
|
|
||||||
{"a\nb\nc", []string{"a", "b", "c"}},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := GetLines(test.param)
|
|
||||||
if !reflect.DeepEqual(actual, test.expected) {
|
|
||||||
t.Errorf("Expected GetLines(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestGetLine(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 int
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"abc", 0, "abc"},
|
|
||||||
{"a\nb\nc", 0, "a"},
|
|
||||||
{"abc", -1, ""},
|
|
||||||
{"abacaba\n", 1, ""},
|
|
||||||
{"abc", 3, ""},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual, _ := GetLine(test.param1, test.param2)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected GetLine(%q, %d) to be %v, got %v", test.param1, test.param2, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRemoveTags(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"abc", "abc"},
|
|
||||||
{"<!-- Test -->", ""},
|
|
||||||
{"<div><div><p><a>Text</a></p></div></div>", "Text"},
|
|
||||||
{`<a href="#">Link</a>`, "Link"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := RemoveTags(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected RemoveTags(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSafeFileName(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"abc", "abc"},
|
|
||||||
{"123456789 '_-?ASDF@£$%£%^é.html", "123456789-asdf.html"},
|
|
||||||
{"ReadMe.md", "readme.md"},
|
|
||||||
{"file:///c:/test.go", "test.go"},
|
|
||||||
{"../../../Hello World!.txt", "hello-world.txt"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := SafeFileName(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected SafeFileName(%q) to be %v, got %v", test.param, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestNormalizeEmail(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{`test@me.com`, `test@me.com`},
|
|
||||||
{`some.name@gmail.com`, `somename@gmail.com`},
|
|
||||||
{`some.name@googlemail.com`, `somename@gmail.com`},
|
|
||||||
{`some.name+extension@gmail.com`, `somename@gmail.com`},
|
|
||||||
{`some.name+extension@googlemail.com`, `somename@gmail.com`},
|
|
||||||
{`some.name.middlename+extension@gmail.com`, `somenamemiddlename@gmail.com`},
|
|
||||||
{`some.name.middlename+extension@googlemail.com`, `somenamemiddlename@gmail.com`},
|
|
||||||
{`some.name.midd.lena.me.+extension@gmail.com`, `somenamemiddlename@gmail.com`},
|
|
||||||
{`some.name.midd.lena.me.+extension@googlemail.com`, `somenamemiddlename@gmail.com`},
|
|
||||||
{`some.name+extension@unknown.com`, `some.name+extension@unknown.com`},
|
|
||||||
{`hans@m端ller.com`, `hans@m端ller.com`},
|
|
||||||
{`hans`, ``},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual, err := NormalizeEmail(test.param)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected NormalizeEmail(%q) to be %v, got %v, err %v", test.param, test.expected, actual, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTruncate(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 int
|
|
||||||
param3 string
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{`Lorem ipsum dolor sit amet, consectetur adipiscing elit.`, 25, `...`, `Lorem ipsum dolor sit amet...`},
|
|
||||||
{`Measuring programming progress by lines of code is like measuring aircraft building progress by weight.`, 35, ` new born babies!`, `Measuring programming progress by new born babies!`},
|
|
||||||
{`Testestestestestestestestestest testestestestestestestestest`, 7, `...`, `Testestestestestestestestestest...`},
|
|
||||||
{`Testing`, 7, `...`, `Testing`},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := Truncate(test.param1, test.param2, test.param3)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected Truncate(%q, %d, %q) to be %v, got %v", test.param1, test.param2, test.param3, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPadLeft(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
param3 int
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"こんにちは", "xyz", 12, "xyzxyzxこんにちは"},
|
|
||||||
{"こんにちは", "xyz", 11, "xyzxyzこんにちは"},
|
|
||||||
{"abc", "x", 5, "xxabc"},
|
|
||||||
{"abc", "xyz", 5, "xyabc"},
|
|
||||||
{"abcde", "xyz", 5, "abcde"},
|
|
||||||
{"abcde", "xyz", 4, "abcde"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := PadLeft(test.param1, test.param2, test.param3)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected PadLeft(%q,%q,%q) to be %v, got %v", test.param1, test.param2, test.param3, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPadRight(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
param3 int
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"こんにちは", "xyz", 12, "こんにちはxyzxyzx"},
|
|
||||||
{"こんにちは", "xyz", 11, "こんにちはxyzxyz"},
|
|
||||||
{"abc", "x", 5, "abcxx"},
|
|
||||||
{"abc", "xyz", 5, "abcxy"},
|
|
||||||
{"abcde", "xyz", 5, "abcde"},
|
|
||||||
{"abcde", "xyz", 4, "abcde"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := PadRight(test.param1, test.param2, test.param3)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected PadRight(%q,%q,%q) to be %v, got %v", test.param1, test.param2, test.param3, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPadBoth(t *testing.T) {
|
|
||||||
t.Parallel()
|
|
||||||
|
|
||||||
var tests = []struct {
|
|
||||||
param1 string
|
|
||||||
param2 string
|
|
||||||
param3 int
|
|
||||||
expected string
|
|
||||||
}{
|
|
||||||
{"こんにちは", "xyz", 12, "xyzこんにちはxyzx"},
|
|
||||||
{"こんにちは", "xyz", 11, "xyzこんにちはxyz"},
|
|
||||||
{"abc", "x", 5, "xabcx"},
|
|
||||||
{"abc", "xyz", 5, "xabcx"},
|
|
||||||
{"abcde", "xyz", 5, "abcde"},
|
|
||||||
{"abcde", "xyz", 4, "abcde"},
|
|
||||||
}
|
|
||||||
for _, test := range tests {
|
|
||||||
actual := PadBoth(test.param1, test.param2, test.param3)
|
|
||||||
if actual != test.expected {
|
|
||||||
t.Errorf("Expected PadBoth(%q,%q,%q) to be %v, got %v", test.param1, test.param2, test.param3, test.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
1167
vendor/github.com/asaskevich/govalidator/validator.go
generated
vendored
1167
vendor/github.com/asaskevich/govalidator/validator.go
generated
vendored
File diff suppressed because it is too large
Load diff
3040
vendor/github.com/asaskevich/govalidator/validator_test.go
generated
vendored
3040
vendor/github.com/asaskevich/govalidator/validator_test.go
generated
vendored
File diff suppressed because it is too large
Load diff
15
vendor/github.com/asaskevich/govalidator/wercker.yml
generated
vendored
15
vendor/github.com/asaskevich/govalidator/wercker.yml
generated
vendored
|
@ -1,15 +0,0 @@
|
||||||
box: golang
|
|
||||||
build:
|
|
||||||
steps:
|
|
||||||
- setup-go-workspace
|
|
||||||
|
|
||||||
- script:
|
|
||||||
name: go get
|
|
||||||
code: |
|
|
||||||
go version
|
|
||||||
go get -t ./...
|
|
||||||
|
|
||||||
- script:
|
|
||||||
name: go test
|
|
||||||
code: |
|
|
||||||
go test -race ./...
|
|
6
vendor/github.com/coreos/etcd/.gitignore
generated
vendored
6
vendor/github.com/coreos/etcd/.gitignore
generated
vendored
|
@ -1,16 +1,22 @@
|
||||||
/agent-*
|
/agent-*
|
||||||
/coverage
|
/coverage
|
||||||
|
/covdir
|
||||||
/gopath
|
/gopath
|
||||||
/gopath.proto
|
/gopath.proto
|
||||||
/go-bindata
|
/go-bindata
|
||||||
|
/release
|
||||||
/machine*
|
/machine*
|
||||||
/bin
|
/bin
|
||||||
|
.Dockerfile-test
|
||||||
.vagrant
|
.vagrant
|
||||||
*.etcd
|
*.etcd
|
||||||
|
*.log
|
||||||
/etcd
|
/etcd
|
||||||
*.swp
|
*.swp
|
||||||
/hack/insta-discovery/.env
|
/hack/insta-discovery/.env
|
||||||
*.test
|
*.test
|
||||||
tools/functional-tester/docker/bin
|
tools/functional-tester/docker/bin
|
||||||
|
hack/scripts-dev/docker-dns/.Dockerfile
|
||||||
|
hack/scripts-dev/docker-dns-srv/.Dockerfile
|
||||||
hack/tls-setup/certs
|
hack/tls-setup/certs
|
||||||
.idea
|
.idea
|
||||||
|
|
4
vendor/github.com/coreos/etcd/.semaphore.sh
generated
vendored
4
vendor/github.com/coreos/etcd/.semaphore.sh
generated
vendored
|
@ -2,7 +2,7 @@
|
||||||
|
|
||||||
TEST_SUFFIX=$(date +%s | base64 | head -c 15)
|
TEST_SUFFIX=$(date +%s | base64 | head -c 15)
|
||||||
|
|
||||||
TEST_OPTS="RELEASE_TEST=y INTEGRATION=y PASSES='build unit release integration_e2e functional' MANUAL_VER=v3.2.9"
|
TEST_OPTS="RELEASE_TEST=y INTEGRATION=y PASSES='build unit release integration_e2e functional' MANUAL_VER=v3.2.11"
|
||||||
if [ "$TEST_ARCH" == "386" ]; then
|
if [ "$TEST_ARCH" == "386" ]; then
|
||||||
TEST_OPTS="GOARCH=386 PASSES='build unit integration_e2e'"
|
TEST_OPTS="GOARCH=386 PASSES='build unit integration_e2e'"
|
||||||
fi
|
fi
|
||||||
|
@ -13,4 +13,4 @@ docker run \
|
||||||
gcr.io/etcd-development/etcd-test:go1.8.5 \
|
gcr.io/etcd-development/etcd-test:go1.8.5 \
|
||||||
/bin/bash -c "${TEST_OPTS} ./test 2>&1 | tee test-${TEST_SUFFIX}.log"
|
/bin/bash -c "${TEST_OPTS} ./test 2>&1 | tee test-${TEST_SUFFIX}.log"
|
||||||
|
|
||||||
! egrep "(--- FAIL:|panic: test timed out|appears to have leaked|Too many goroutines)" -B50 -A10 test-${TEST_SUFFIX}.log
|
! egrep "(--- FAIL:|panic: test timed out|appears to have leaked)" -B50 -A10 test-${TEST_SUFFIX}.log
|
||||||
|
|
6
vendor/github.com/coreos/etcd/glide.lock
generated
vendored
6
vendor/github.com/coreos/etcd/glide.lock
generated
vendored
|
@ -1,5 +1,5 @@
|
||||||
hash: 94008ff434d6e211d8dbd412d81f123c5f43bb41b5267a50612ff34b8879b796
|
hash: 57308341a6ff76ce7960119ca6f589d2f5476c056f1f38f9a32552d9e68509d8
|
||||||
updated: 2017-12-04T14:22:51.678346-08:00
|
updated: 2017-12-19T13:02:46.509863-08:00
|
||||||
imports:
|
imports:
|
||||||
- name: github.com/beorn7/perks
|
- name: github.com/beorn7/perks
|
||||||
version: 4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9
|
version: 4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9
|
||||||
|
@ -149,7 +149,7 @@ imports:
|
||||||
- googleapis/api/annotations
|
- googleapis/api/annotations
|
||||||
- googleapis/rpc/status
|
- googleapis/rpc/status
|
||||||
- name: google.golang.org/grpc
|
- name: google.golang.org/grpc
|
||||||
version: 9a2334748bab9638f1480ad4f0ac6ac0c6c3a486
|
version: 5b3c4e850e90a4cf6a20ebd46c8b32a0a3afcb9e
|
||||||
subpackages:
|
subpackages:
|
||||||
- balancer
|
- balancer
|
||||||
- codes
|
- codes
|
||||||
|
|
4
vendor/github.com/coreos/etcd/glide.yaml
generated
vendored
4
vendor/github.com/coreos/etcd/glide.yaml
generated
vendored
|
@ -40,7 +40,7 @@ import:
|
||||||
- package: github.com/google/btree
|
- package: github.com/google/btree
|
||||||
version: 925471ac9e2131377a91e1595defec898166fe49
|
version: 925471ac9e2131377a91e1595defec898166fe49
|
||||||
- package: github.com/grpc-ecosystem/grpc-gateway
|
- package: github.com/grpc-ecosystem/grpc-gateway
|
||||||
version: v1.3
|
version: v1.3.0
|
||||||
subpackages:
|
subpackages:
|
||||||
- runtime
|
- runtime
|
||||||
- runtime/internal
|
- runtime/internal
|
||||||
|
@ -97,7 +97,7 @@ import:
|
||||||
subpackages:
|
subpackages:
|
||||||
- rate
|
- rate
|
||||||
- package: google.golang.org/grpc
|
- package: google.golang.org/grpc
|
||||||
version: v1.7.4
|
version: v1.7.5
|
||||||
subpackages:
|
subpackages:
|
||||||
- codes
|
- codes
|
||||||
- credentials
|
- credentials
|
||||||
|
|
2
vendor/github.com/coreos/etcd/test
generated
vendored
2
vendor/github.com/coreos/etcd/test
generated
vendored
|
@ -148,7 +148,7 @@ function e2e_pass {
|
||||||
|
|
||||||
function integration_extra {
|
function integration_extra {
|
||||||
go test -timeout 15m -v ${RACE} -cpu 1,2,4 "$@" "${REPO_PATH}/client/integration"
|
go test -timeout 15m -v ${RACE} -cpu 1,2,4 "$@" "${REPO_PATH}/client/integration"
|
||||||
go test -timeout 15m -v ${RACE} -cpu 1,2,4 "$@" "${REPO_PATH}/clientv3/integration"
|
go test -timeout 20m -v ${RACE} -cpu 1,2,4 "$@" "${REPO_PATH}/clientv3/integration"
|
||||||
}
|
}
|
||||||
|
|
||||||
function integration_e2e_pass {
|
function integration_e2e_pass {
|
||||||
|
|
2
vendor/github.com/coreos/etcd/version/version.go
generated
vendored
2
vendor/github.com/coreos/etcd/version/version.go
generated
vendored
|
@ -26,7 +26,7 @@ import (
|
||||||
var (
|
var (
|
||||||
// MinClusterVersion is the min cluster version this etcd binary is compatible with.
|
// MinClusterVersion is the min cluster version this etcd binary is compatible with.
|
||||||
MinClusterVersion = "3.0.0"
|
MinClusterVersion = "3.0.0"
|
||||||
Version = "3.2.11"
|
Version = "3.2.13"
|
||||||
APIVersion = "unknown"
|
APIVersion = "unknown"
|
||||||
|
|
||||||
// Git SHA Value will be set during build
|
// Git SHA Value will be set during build
|
||||||
|
|
37
vendor/github.com/docker/distribution/.gitignore
generated
vendored
37
vendor/github.com/docker/distribution/.gitignore
generated
vendored
|
@ -1,37 +0,0 @@
|
||||||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
|
||||||
*.o
|
|
||||||
*.a
|
|
||||||
*.so
|
|
||||||
|
|
||||||
# Folders
|
|
||||||
_obj
|
|
||||||
_test
|
|
||||||
|
|
||||||
# Architecture specific extensions/prefixes
|
|
||||||
*.[568vq]
|
|
||||||
[568vq].out
|
|
||||||
|
|
||||||
*.cgo1.go
|
|
||||||
*.cgo2.c
|
|
||||||
_cgo_defun.c
|
|
||||||
_cgo_gotypes.go
|
|
||||||
_cgo_export.*
|
|
||||||
|
|
||||||
_testmain.go
|
|
||||||
|
|
||||||
*.exe
|
|
||||||
*.test
|
|
||||||
*.prof
|
|
||||||
|
|
||||||
# never checkin from the bin file (for now)
|
|
||||||
bin/*
|
|
||||||
|
|
||||||
# Test key files
|
|
||||||
*.pem
|
|
||||||
|
|
||||||
# Cover profiles
|
|
||||||
*.out
|
|
||||||
|
|
||||||
# Editor/IDE specific files.
|
|
||||||
*.sublime-project
|
|
||||||
*.sublime-workspace
|
|
19
vendor/github.com/docker/distribution/.mailmap
generated
vendored
19
vendor/github.com/docker/distribution/.mailmap
generated
vendored
|
@ -1,19 +0,0 @@
|
||||||
Stephen J Day <stephen.day@docker.com> Stephen Day <stevvooe@users.noreply.github.com>
|
|
||||||
Stephen J Day <stephen.day@docker.com> Stephen Day <stevvooe@gmail.com>
|
|
||||||
Olivier Gambier <olivier@docker.com> Olivier Gambier <dmp42@users.noreply.github.com>
|
|
||||||
Brian Bland <brian.bland@docker.com> Brian Bland <r4nd0m1n4t0r@gmail.com>
|
|
||||||
Brian Bland <brian.bland@docker.com> Brian Bland <brian.t.bland@gmail.com>
|
|
||||||
Josh Hawn <josh.hawn@docker.com> Josh Hawn <jlhawn@berkeley.edu>
|
|
||||||
Richard Scothern <richard.scothern@docker.com> Richard <richard.scothern@gmail.com>
|
|
||||||
Richard Scothern <richard.scothern@docker.com> Richard Scothern <richard.scothern@gmail.com>
|
|
||||||
Andrew Meredith <andymeredith@gmail.com> Andrew Meredith <kendru@users.noreply.github.com>
|
|
||||||
harche <p.harshal@gmail.com> harche <harche@users.noreply.github.com>
|
|
||||||
Jessie Frazelle <jessie@docker.com> <jfrazelle@users.noreply.github.com>
|
|
||||||
Sharif Nassar <sharif@mrwacky.com> Sharif Nassar <mrwacky42@users.noreply.github.com>
|
|
||||||
Sven Dowideit <SvenDowideit@home.org.au> Sven Dowideit <SvenDowideit@users.noreply.github.com>
|
|
||||||
Vincent Giersch <vincent.giersch@ovh.net> Vincent Giersch <vincent@giersch.fr>
|
|
||||||
davidli <wenquan.li@hp.com> davidli <wenquan.li@hpe.com>
|
|
||||||
Omer Cohen <git@omer.io> Omer Cohen <git@omerc.net>
|
|
||||||
Eric Yang <windfarer@gmail.com> Eric Yang <Windfarer@users.noreply.github.com>
|
|
||||||
Nikita Tarasov <nikita@mygento.ru> Nikita <luckyraul@users.noreply.github.com>
|
|
||||||
Misty Stanley-Jones <misty@docker.com> Misty Stanley-Jones <misty@apache.org>
|
|
182 vendor/github.com/docker/distribution/AUTHORS generated vendored
@@ -1,182 +0,0 @@
Aaron Lehmann <aaron.lehmann@docker.com>
Aaron Schlesinger <aschlesinger@deis.com>
Aaron Vinson <avinson.public@gmail.com>
Adam Duke <adam.v.duke@gmail.com>
Adam Enger <adamenger@gmail.com>
Adrian Mouat <adrian.mouat@gmail.com>
Ahmet Alp Balkan <ahmetalpbalkan@gmail.com>
Alex Chan <alex.chan@metaswitch.com>
Alex Elman <aelman@indeed.com>
Alexey Gladkov <gladkov.alexey@gmail.com>
allencloud <allen.sun@daocloud.io>
amitshukla <ashukla73@hotmail.com>
Amy Lindburg <amy.lindburg@docker.com>
Andrew Hsu <andrewhsu@acm.org>
Andrew Meredith <andymeredith@gmail.com>
Andrew T Nguyen <andrew.nguyen@docker.com>
Andrey Kostov <kostov.andrey@gmail.com>
Andy Goldstein <agoldste@redhat.com>
Anis Elleuch <vadmeste@gmail.com>
Antonio Mercado <amercado@thinknode.com>
Antonio Murdaca <runcom@redhat.com>
Anton Tiurin <noxiouz@yandex.ru>
Anusha Ragunathan <anusha@docker.com>
a-palchikov <deemok@gmail.com>
Arien Holthuizen <aholthuizen@schubergphilis.com>
Arnaud Porterie <arnaud.porterie@docker.com>
Arthur Baars <arthur@semmle.com>
Asuka Suzuki <hello@tanksuzuki.com>
Avi Miller <avi.miller@oracle.com>
Ayose Cazorla <ayosec@gmail.com>
BadZen <dave.trombley@gmail.com>
Ben Bodenmiller <bbodenmiller@hotmail.com>
Ben Firshman <ben@firshman.co.uk>
bin liu <liubin0329@gmail.com>
Brian Bland <brian.bland@docker.com>
burnettk <burnettk@gmail.com>
Carson A <ca@carsonoid.net>
Cezar Sa Espinola <cezarsa@gmail.com>
Charles Smith <charles.smith@docker.com>
Chris Dillon <squarism@gmail.com>
cuiwei13 <cuiwei13@pku.edu.cn>
cyli <cyli@twistedmatrix.com>
Daisuke Fujita <dtanshi45@gmail.com>
Daniel Huhn <daniel@danielhuhn.de>
Darren Shepherd <darren@rancher.com>
Dave Trombley <dave.trombley@gmail.com>
Dave Tucker <dt@docker.com>
David Lawrence <david.lawrence@docker.com>
davidli <wenquan.li@hp.com>
David Verhasselt <david@crowdway.com>
David Xia <dxia@spotify.com>
Dejan Golja <dejan@golja.org>
Derek McGowan <derek@mcgstyle.net>
Diogo Mónica <diogo.monica@gmail.com>
DJ Enriquez <dj.enriquez@infospace.com>
Donald Huang <don.hcd@gmail.com>
Doug Davis <dug@us.ibm.com>
Edgar Lee <edgar.lee@docker.com>
Eric Yang <windfarer@gmail.com>
Fabio Berchtold <jamesclonk@jamesclonk.ch>
Fabio Huser <fabio@fh1.ch>
farmerworking <farmerworking@gmail.com>
Felix Yan <felixonmars@archlinux.org>
Florentin Raud <florentin.raud@gmail.com>
Frank Chen <frankchn@gmail.com>
Frederick F. Kautz IV <fkautz@alumni.cmu.edu>
gabriell nascimento <gabriell@bluesoft.com.br>
Gleb Schukin <gschukin@ptsecurity.com>
harche <p.harshal@gmail.com>
Henri Gomez <henri.gomez@gmail.com>
Hua Wang <wanghua.humble@gmail.com>
Hu Keping <hukeping@huawei.com>
HuKeping <hukeping@huawei.com>
Ian Babrou <ibobrik@gmail.com>
igayoso <igayoso@gmail.com>
Jack Griffin <jackpg14@gmail.com>
James Findley <jfindley@fastmail.com>
Jason Freidman <jason.freidman@gmail.com>
Jason Heiss <jheiss@aput.net>
Jeff Nickoloff <jeff@allingeek.com>
Jess Frazelle <acidburn@google.com>
Jessie Frazelle <jessie@docker.com>
jhaohai <jhaohai@foxmail.com>
Jianqing Wang <tsing@jianqing.org>
Jihoon Chung <jihoon@gmail.com>
Joao Fernandes <joao.fernandes@docker.com>
John Mulhausen <john@docker.com>
John Starks <jostarks@microsoft.com>
Jonathan Boulle <jonathanboulle@gmail.com>
Jon Johnson <jonjohnson@google.com>
Jon Poler <jonathan.poler@apcera.com>
Jordan Liggitt <jliggitt@redhat.com>
Josh Chorlton <josh.chorlton@docker.com>
Josh Hawn <josh.hawn@docker.com>
Julien Fernandez <julien.fernandez@gmail.com>
Keerthan Mala <kmala@engineyard.com>
Kelsey Hightower <kelsey.hightower@gmail.com>
Kenneth Lim <kennethlimcp@gmail.com>
Kenny Leung <kleung@google.com>
Ke Xu <leonhartx.k@gmail.com>
liuchang0812 <liuchang0812@gmail.com>
Liu Hua <sdu.liu@huawei.com>
Li Yi <denverdino@gmail.com>
Lloyd Ramey <lnr0626@gmail.com>
Louis Kottmann <louis.kottmann@gmail.com>
Luke Carpenter <x@rubynerd.net>
Marcus Martins <marcus@docker.com>
Mary Anthony <mary@docker.com>
Matt Bentley <mbentley@mbentley.net>
Matt Duch <matt@learnmetrics.com>
Matthew Green <greenmr@live.co.uk>
Matt Moore <mattmoor@google.com>
Matt Robenolt <matt@ydekproductions.com>
Michael Prokop <mika@grml.org>
Michal Minar <miminar@redhat.com>
Michal Minář <miminar@redhat.com>
Mike Brown <brownwm@us.ibm.com>
Miquel Sabaté <msabate@suse.com>
Misty Stanley-Jones <misty@docker.com>
Morgan Bauer <mbauer@us.ibm.com>
moxiegirl <mary@docker.com>
Nathan Sullivan <nathan@nightsys.net>
nevermosby <robolwq@qq.com>
Nghia Tran <tcnghia@gmail.com>
Nikita Tarasov <nikita@mygento.ru>
Noah Treuhaft <noah.treuhaft@docker.com>
Nuutti Kotivuori <nuutti.kotivuori@poplatek.fi>
Oilbeater <liumengxinfly@gmail.com>
Olivier Gambier <olivier@docker.com>
Olivier Jacques <olivier.jacques@hp.com>
Omer Cohen <git@omer.io>
Patrick Devine <patrick.devine@docker.com>
Phil Estes <estesp@linux.vnet.ibm.com>
Philip Misiowiec <philip@atlashealth.com>
Pierre-Yves Ritschard <pyr@spootnik.org>
Qiao Anran <qiaoanran@gmail.com>
Randy Barlow <randy@electronsweatshop.com>
Richard Scothern <richard.scothern@docker.com>
Rodolfo Carvalho <rhcarvalho@gmail.com>
Rusty Conover <rusty@luckydinosaur.com>
Sean Boran <Boran@users.noreply.github.com>
Sebastiaan van Stijn <github@gone.nl>
Sebastien Coavoux <s.coavoux@free.fr>
Serge Dubrouski <sergeyfd@gmail.com>
Sharif Nassar <sharif@mrwacky.com>
Shawn Falkner-Horine <dreadpirateshawn@gmail.com>
Shreyas Karnik <karnik.shreyas@gmail.com>
Simon Thulbourn <simon+github@thulbourn.com>
spacexnice <yaoyao.xyy@alibaba-inc.com>
Spencer Rinehart <anubis@overthemonkey.com>
Stan Hu <stanhu@gmail.com>
Stefan Majewsky <stefan.majewsky@sap.com>
Stefan Weil <sw@weilnetz.de>
Stephen J Day <stephen.day@docker.com>
Sungho Moon <sungho.moon@navercorp.com>
Sven Dowideit <SvenDowideit@home.org.au>
Sylvain Baubeau <sbaubeau@redhat.com>
Ted Reed <ted.reed@gmail.com>
tgic <farmer1992@gmail.com>
Thomas Sjögren <konstruktoid@users.noreply.github.com>
Tianon Gravi <admwiggin@gmail.com>
Tibor Vass <teabee89@gmail.com>
Tonis Tiigi <tonistiigi@gmail.com>
Tony Holdstock-Brown <tony@docker.com>
Trevor Pounds <trevor.pounds@gmail.com>
Troels Thomsen <troels@thomsen.io>
Victoria Bialas <victoria.bialas@docker.com>
Victor Vieux <vieux@docker.com>
Vincent Batts <vbatts@redhat.com>
Vincent Demeester <vincent@sbr.pm>
Vincent Giersch <vincent.giersch@ovh.net>
weiyuan.yl <weiyuan.yl@alibaba-inc.com>
W. Trevor King <wking@tremily.us>
xg.song <xg.song@venusource.com>
xiekeyang <xiekeyang@huawei.com>
Yann ROBERT <yann.robert@anantaplex.fr>
yaoyao.xyy <yaoyao.xyy@alibaba-inc.com>
yixi zhang <yixi@memsql.com>
yuexiao-wang <wang.yuexiao@zte.com.cn>
yuzou <zouyu7@huawei.com>
zhouhaibing089 <zhouhaibing089@gmail.com>
姜继忠 <jizhong.jiangjz@alibaba-inc.com>
119 vendor/github.com/docker/distribution/BUILDING.md generated vendored
@@ -1,119 +0,0 @@
# Building the registry source

## Use-case

This is useful if you intend to actively work on the registry.

### Alternatives

Most people should use the [official Registry docker image](https://hub.docker.com/r/library/registry/).

People looking for advanced operational use cases might consider rolling their own image with a custom Dockerfile inheriting `FROM registry:2`.

OS X users who want to run natively can do so following [the instructions here](https://github.com/docker/docker.github.io/blob/master/registry/recipes/osx-setup-guide.md).

### Gotchas

You are expected to know your way around with go & git.

If you are a casual user with no development experience, and no preliminary knowledge of go, building from source is probably not a good solution for you.

## Build the development environment

The first prerequisite of properly building distribution targets is to have a Go development environment set up. Please follow [How to Write Go Code](https://golang.org/doc/code.html) for proper setup. If done correctly, you should have a GOROOT and GOPATH set in the environment.

If a Go development environment is set up, one can use `go get` to install the `registry` command from the current latest:

    go get github.com/docker/distribution/cmd/registry

The above will install the source repository into the `GOPATH`.

Now create the directory for the registry data (this might require you to set permissions properly):

    mkdir -p /var/lib/registry

... or alternatively `export REGISTRY_STORAGE_FILESYSTEM_ROOTDIRECTORY=/somewhere` if you want to store data into another location.

The `registry` binary can then be run with the following:

    $ $GOPATH/bin/registry --version
    $GOPATH/bin/registry github.com/docker/distribution v2.0.0-alpha.1+unknown

> __NOTE:__ While you do not need to use `go get` to checkout the distribution
> project, for these build instructions to work, the project must be checked
> out in the correct location in the `GOPATH`. This should almost always be
> `$GOPATH/src/github.com/docker/distribution`.

The registry can be run with the default config using the following incantation:

    $ $GOPATH/bin/registry serve $GOPATH/src/github.com/docker/distribution/cmd/registry/config-example.yml
    INFO[0000] endpoint local-5003 disabled, skipping app.id=34bbec38-a91a-494a-9a3f-b72f9010081f version=v2.0.0-alpha.1+unknown
    INFO[0000] endpoint local-8083 disabled, skipping app.id=34bbec38-a91a-494a-9a3f-b72f9010081f version=v2.0.0-alpha.1+unknown
    INFO[0000] listening on :5000 app.id=34bbec38-a91a-494a-9a3f-b72f9010081f version=v2.0.0-alpha.1+unknown
    INFO[0000] debug server listening localhost:5001

If it is working, one should see the above log messages.

### Repeatable Builds

For the full development experience, one should `cd` into `$GOPATH/src/github.com/docker/distribution`. From there, the regular `go` commands, such as `go test`, should work per package (please see [Developing](#developing) if they don't work).

A `Makefile` has been provided as a convenience to support repeatable builds. Please install the following into `GOPATH` for it to work:

    go get github.com/tools/godep github.com/golang/lint/golint

**TODO(stevvooe):** Add a `make setup` command to Makefile to run this. Have to think about how to interact with Godeps properly.

Once these commands are available in the `GOPATH`, run `make` to get a full build:

    $ make
    + clean
    + fmt
    + vet
    + lint
    + build
    github.com/docker/docker/vendor/src/code.google.com/p/go/src/pkg/archive/tar
    github.com/Sirupsen/logrus
    github.com/docker/libtrust
    ...
    github.com/yvasiyarov/gorelic
    github.com/docker/distribution/registry/handlers
    github.com/docker/distribution/cmd/registry
    + test
    ...
    ok github.com/docker/distribution/digest 7.875s
    ok github.com/docker/distribution/manifest 0.028s
    ok github.com/docker/distribution/notifications 17.322s
    ? github.com/docker/distribution/registry [no test files]
    ok github.com/docker/distribution/registry/api/v2 0.101s
    ? github.com/docker/distribution/registry/auth [no test files]
    ok github.com/docker/distribution/registry/auth/silly 0.011s
    ...
    + /Users/sday/go/src/github.com/docker/distribution/bin/registry
    + /Users/sday/go/src/github.com/docker/distribution/bin/registry-api-descriptor-template
    + binaries

The above provides a repeatable build using the contents of the vendored Godeps directory. This includes formatting, vetting, linting, building, testing and generating tagged binaries. We can verify this worked by running the registry binary generated in the "./bin" directory:

    $ ./bin/registry -version
    ./bin/registry github.com/docker/distribution v2.0.0-alpha.2-80-g16d8b2c.m

### Optional build tags

Optional [build tags](http://golang.org/pkg/go/build/) can be provided using the environment variable `DOCKER_BUILDTAGS`.
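To make the build-tag mechanism concrete, here is a minimal, hypothetical sketch of how a tag gates an optional driver into the binary. The `include_gcs` tag name is the one this project's Dockerfile sets via `DOCKER_BUILDTAGS`; the file itself is illustrative, not part of the repository:

```
// +build include_gcs

// storage_gcs.go: an illustrative sketch, compiled only when built with
// `go build -tags include_gcs`, which the Makefile wires up through the
// DOCKER_BUILDTAGS environment variable.
package storage

// Drivers lists the optional storage backends compiled into this build.
var Drivers = []string{"gcs"}
```

Without the tag, the file is simply excluded from the build, so the binary carries no trace of the optional backend.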
114 vendor/github.com/docker/distribution/CHANGELOG.md generated vendored
@@ -1,114 +0,0 @@
# Changelog

## 2.6.1 (2017-04-05)

#### Registry
- Fix `Forwarded` header handling, revert use of `X-Forwarded-Port`
- Use driver `Stat` for registry health check

## 2.6.0 (2017-01-18)

#### Storage
- S3: fixed bug in delete due to read-after-write inconsistency
- S3: allow EC2 IAM roles to be used when authorizing region endpoints
- S3: add Object ACL Support
- S3: fix delete method's notion of subpaths
- S3: use multipart upload API in `Move` method for performance
- S3: add v2 signature signing for legacy S3 clones
- Swift: add simple heuristic to detect incomplete DLOs during read ops
- Swift: support different user and tenant domains
- Swift: bulk deletes in chunks
- Aliyun OSS: fix delete method's notion of subpaths
- Aliyun OSS: optimize data copy after upload finishes
- Azure: close leaking response body
- Fix storage drivers dropping non-EOF errors when listing repositories
- Compare path properly when listing repositories in catalog
- Add a foreign layer URL host whitelist
- Improve catalog enumerate runtime

#### Registry
- Export `storage.CreateOptions` in top-level package
- Enable notifications to endpoints that use self-signed certificates
- Properly validate multi-URL foreign layers
- Add control over validation of URLs in pushed manifests
- Proxy mode: fix socket leak when pull is cancelled
- Tag service: properly handle error responses on HEAD request
- Support for custom authentication URL in proxying registry
- Add configuration option to disable access logging
- Add notification filtering by target media type
- Manifest: `References()` returns all children
- Honor `X-Forwarded-Port` and Forwarded headers
- Reference: Preserve tag and digest in With* functions
- Add policy configuration for enforcing repository classes

#### Client
- Changes the client Tags `All()` method to follow links
- Allow registry clients to connect via HTTP2
- Better handling of OAuth errors in client

#### Spec
- Manifest: clarify relationship between urls and foreign layers
- Authorization: add support for repository classes

#### Manifest
- Override media type returned from `Stat()` for existing manifests
- Add plugin mediatype to distribution manifest

#### Docs
- Document `TOOMANYREQUESTS` error code
- Document required Let's Encrypt port
- Improve documentation around implementation of OAuth2
- Improve documentation for configuration

#### Auth
- Add support for registry type in scope
- Add support for using v2 ping challenges for v1
- Add leeway to JWT `nbf` and `exp` checking
- htpasswd: dynamically parse htpasswd file
- Fix missing auth headers with PATCH HTTP request when pushing to default port

#### Dockerfile
- Update to go1.7
- Reorder Dockerfile steps for better layer caching

#### Notes

Documentation has moved to the documentation repository at `github.com/docker/docker.github.io/tree/master/registry`

The registry is Go 1.7 compliant, and passes the newer, more restrictive `lint` and `vet` checks.

## 2.5.0 (2016-06-14)

#### Storage
- Ensure uploads directory is cleaned after upload is committed
- Add ability to cap concurrent operations in filesystem driver
- S3: Add 'us-gov-west-1' to the valid region list
- Swift: Handle ceph not returning Last-Modified header for HEAD requests
- Add redirect middleware

#### Registry
- Add support for blobAccessController middleware
- Add support for layers from foreign sources
- Remove signature store
- Add support for Let's Encrypt
- Correct yaml key names in configuration

#### Client
- Add option to get content digest from manifest get

#### Spec
- Update the auth spec scope grammar to reflect the fact that hostnames are optionally supported
- Clarify API documentation around catalog fetch behavior

#### API
- Support returning HTTP 429 (Too Many Requests)

#### Documentation
- Update auth documentation examples to show "expires in" as int

#### Docker Image
- Use Alpine Linux as base image
140 vendor/github.com/docker/distribution/CONTRIBUTING.md generated vendored
@@ -1,140 +0,0 @@
# Contributing to the registry

## Before reporting an issue...

### If your problem is with...

- automated builds
- your account on the [Docker Hub](https://hub.docker.com/)
- any other [Docker Hub](https://hub.docker.com/) issue

Then please do not report your issue here - you should instead report it to [https://support.docker.com](https://support.docker.com)

### If you...

- need help setting up your registry
- can't figure out something
- are not sure what's going on or what your problem is

Then please do not open an issue here yet - you should first try one of the following support forums:

- irc: #docker-distribution on freenode
- mailing-list: <distribution@dockerproject.org> or https://groups.google.com/a/dockerproject.org/forum/#!forum/distribution

## Reporting an issue properly

By following these simple rules you will get better and faster feedback on your issue.

- search the bugtracker for an already reported issue

### If you found an issue that describes your problem:

- please read other user comments first, and confirm this is the same issue: a given error condition might be indicative of different problems - you may also find a workaround in the comments
- please refrain from adding "same thing here" or "+1" comments
- you don't need to comment on an issue to get notified of updates: just hit the "subscribe" button
- comment if you have some new, technical and relevant information to add to the case
- __DO NOT__ comment on closed issues or merged PRs. If you think you have a related problem, open up a new issue and reference the PR or issue.

### If you have not found an existing issue that describes your problem:

1. create a new issue, with a succinct title that describes your issue:
   - bad title: "It doesn't work with my docker"
   - good title: "Private registry push fail: 400 error with E_INVALID_DIGEST"
2. copy the output of:
   - `docker version`
   - `docker info`
   - `docker exec <registry-container> registry -version`
3. copy the command line you used to launch your Registry
4. restart your docker daemon in debug mode (add `-D` to the daemon launch arguments)
5. reproduce your problem and get your docker daemon logs showing the error
6. if relevant, copy your registry logs that show the error
7. provide any relevant detail about your specific Registry configuration (e.g., storage backend used)
8. indicate if you are using an enterprise proxy, Nginx, or anything else between you and your Registry

## Contributing a patch for a known bug, or a small correction

You should follow the basic GitHub workflow:

1. fork
2. commit a change
3. make sure the tests pass
4. PR

Additionally, you must [sign your commits](https://github.com/docker/docker/blob/master/CONTRIBUTING.md#sign-your-work). It's very simple:

- configure your name with git: `git config user.name "Real Name" && git config user.email mail@example.com`
- sign your commits using `-s`: `git commit -s -m "My commit"`

Some simple rules to ensure quick merge:

- clearly point to the issue(s) you want to fix in your PR comment (e.g., `closes #12345`)
- prefer multiple (smaller) PRs addressing individual issues over a big one trying to address multiple issues at once
- if you need to amend your PR following comments, please squash instead of adding more commits

## Contributing new features

You are heavily encouraged to first discuss what you want to do. You can do so on the irc channel, or by opening an issue that clearly describes the use case you want to fulfill, or the problem you are trying to solve.

If this is a major new feature, you should then submit a proposal that describes your technical solution and reasoning. If you did discuss it first, this will likely be greenlighted very fast. It's advisable to address all feedback on this proposal before starting actual work.

Then you should submit your implementation, clearly linking to the issue (and possible proposal).

Your PR will be reviewed by the community, then ultimately by the project maintainers, before being merged.

It's mandatory to:

- interact respectfully with other community members and maintainers - more generally, you are expected to abide by the [Docker community rules](https://github.com/docker/docker/blob/master/CONTRIBUTING.md#docker-community-guidelines)
- address maintainers' comments and modify your submission accordingly
- write tests for any new code

Complying with these simple rules will greatly accelerate the review process, and will ensure you have a pleasant experience in contributing code to the Registry.

Have a look at a great, successful contribution: the [Swift driver PR](https://github.com/docker/distribution/pull/493)

## Coding Style

Unless explicitly stated, we follow all coding guidelines from the Go community. While some of these standards may seem arbitrary, they somehow seem to result in a solid, consistent codebase.

It is possible that the code base does not currently comply with these guidelines. We are not looking for a massive PR that fixes this, since that goes against the spirit of the guidelines. All new contributions should make a best effort to clean up and make the code base better than they left it. Obviously, apply your best judgement. Remember, the goal here is to make the code base easier for humans to navigate and understand. Always keep that in mind when nudging others to comply.

The rules (see the sketch after this list for a quick illustration):

1. All code should be formatted with `gofmt -s`.
2. All code should pass the default levels of [`golint`](https://github.com/golang/lint).
3. All code should follow the guidelines covered in [Effective Go](http://golang.org/doc/effective_go.html) and [Go Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments).
4. Comment the code. Tell us the why, the history and the context.
5. Document _all_ declarations and methods, even private ones. Declare expectations, caveats and anything else that may be important. If a type gets exported, having the comments already there will ensure it's ready.
6. Variable name length should be proportional to its context and no longer. `noCommaALongVariableNameLikeThisIsNotMoreClearWhenASimpleCommentWouldDo`. In practice, short methods will have short variable names and globals will have longer names.
7. No underscores in package names. If you need a compound name, step back, and re-examine why you need a compound name. If you still think you need a compound name, lose the underscore.
8. No utils or helpers packages. If a function is not general enough to warrant its own package, it has not been written generally enough to be a part of a util package. Just leave it unexported and well-documented.
9. All tests should run with `go test` and outside tooling should not be required. No, we don't need another unit testing framework. Assertion packages are acceptable if they provide _real_ incremental value.
10. Even though we call these "rules" above, they are actually just guidelines. Since you've read all the rules, you now know that.

If you are having trouble getting into the mood of idiomatic Go, we recommend reading through [Effective Go](http://golang.org/doc/effective_go.html). The [Go Blog](http://blog.golang.org/) is also a great resource. Drinking the kool-aid is a lot easier than going thirsty.
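As a quick, hypothetical illustration of rules 1 through 6 (this is a sketch of the expected shape, not code from the repository):

```
// Package auth is a toy example: gofmt -s formatted, golint-clean,
// and every declaration documented, even the private ones.
package auth

// MaxAttempts bounds how many times a login may be retried.
const MaxAttempts = 3

// retryable reports whether err warrants another attempt. A short
// function gets short variable names (rule 6).
func retryable(err error) bool {
	return err != nil
}
```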
18 vendor/github.com/docker/distribution/Dockerfile generated vendored
@@ -1,18 +0,0 @@
FROM golang:1.7-alpine

ENV DISTRIBUTION_DIR /go/src/github.com/docker/distribution
ENV DOCKER_BUILDTAGS include_oss include_gcs

RUN set -ex \
    && apk add --no-cache make git

WORKDIR $DISTRIBUTION_DIR
COPY . $DISTRIBUTION_DIR
COPY cmd/registry/config-dev.yml /etc/docker/registry/config.yml

RUN make PREFIX=/go clean binaries

VOLUME ["/var/lib/registry"]
EXPOSE 5000
ENTRYPOINT ["registry"]
CMD ["serve", "/etc/docker/registry/config.yml"]
202 vendor/github.com/docker/distribution/LICENSE generated vendored
@@ -1,202 +0,0 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "{}"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright {yyyy} {name of copyright owner}

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
58 vendor/github.com/docker/distribution/MAINTAINERS generated vendored
@@ -1,58 +0,0 @@
# Distribution maintainers file
#
# This file describes who runs the docker/distribution project and how.
# This is a living document - if you see something out of date or missing, speak up!
#
# It is structured to be consumable by both humans and programs.
# To extract its contents programmatically, use any TOML-compliant parser.
#
# This file is compiled into the MAINTAINERS file in docker/opensource.
#
[Org]
	[Org."Core maintainers"]
		people = [
			"aaronlehmann",
			"dmcgowan",
			"dmp42",
			"richardscothern",
			"shykes",
			"stevvooe",
		]

[people]

# A reference list of all people associated with the project.
# All other sections should refer to people by their canonical key
# in the people section.

	# ADD YOURSELF HERE IN ALPHABETICAL ORDER

	[people.aaronlehmann]
	Name = "Aaron Lehmann"
	Email = "aaron.lehmann@docker.com"
	GitHub = "aaronlehmann"

	[people.dmcgowan]
	Name = "Derek McGowan"
	Email = "derek@mcgstyle.net"
	GitHub = "dmcgowan"

	[people.dmp42]
	Name = "Olivier Gambier"
	Email = "olivier@docker.com"
	GitHub = "dmp42"

	[people.richardscothern]
	Name = "Richard Scothern"
	Email = "richard.scothern@gmail.com"
	GitHub = "richardscothern"

	[people.shykes]
	Name = "Solomon Hykes"
	Email = "solomon@docker.com"
	GitHub = "shykes"

	[people.stevvooe]
	Name = "Stephen Day"
	Email = "stephen.day@docker.com"
	GitHub = "stevvooe"
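Since the file advertises itself as TOML, programmatic extraction is one decode away. A minimal sketch, assuming the third-party github.com/BurntSushi/toml package (any TOML-compliant parser works, as the file's header says):

```
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// person mirrors one [people.<key>] table in the MAINTAINERS file.
type person struct {
	Name   string
	Email  string
	GitHub string
}

func main() {
	var m struct {
		People map[string]person `toml:"people"`
	}
	// DecodeFile silently skips tables we did not model, such as [Org].
	if _, err := toml.DecodeFile("MAINTAINERS", &m); err != nil {
		panic(err)
	}
	for key, p := range m.People {
		fmt.Printf("%s: %s <%s>\n", key, p.Name, p.Email)
	}
}
```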
109 vendor/github.com/docker/distribution/Makefile generated vendored
@@ -1,109 +0,0 @@
# Set an output prefix, which is the local directory if not specified
PREFIX?=$(shell pwd)


# Used to populate version variable in main package.
VERSION=$(shell git describe --match 'v[0-9]*' --dirty='.m' --always)

# Allow turning off function inlining and variable registerization
ifeq (${DISABLE_OPTIMIZATION},true)
	GO_GCFLAGS=-gcflags "-N -l"
	VERSION:="$(VERSION)-noopt"
endif

GO_LDFLAGS=-ldflags "-X `go list ./version`.Version=$(VERSION)"

.PHONY: all build binaries clean dep-restore dep-save dep-validate fmt lint test test-full vet
.DEFAULT: all
all: fmt vet lint build test binaries

AUTHORS: .mailmap .git/HEAD
	git log --format='%aN <%aE>' | sort -fu > $@

# This only needs to be generated by hand when cutting full releases.
version/version.go:
	./version/version.sh > $@

# Required for go 1.5 to build
GO15VENDOREXPERIMENT := 1

# Go files
GOFILES=$(shell find . -type f -name '*.go')

# Package list
PKGS=$(shell go list -tags "${DOCKER_BUILDTAGS}" ./... | grep -v ^github.com/docker/distribution/vendor/)

# Resolving binary dependencies for specific targets
GOLINT=$(shell which golint || echo '')
GODEP=$(shell which godep || echo '')

${PREFIX}/bin/registry: $(GOFILES)
	@echo "+ $@"
	@go build -tags "${DOCKER_BUILDTAGS}" -o $@ ${GO_LDFLAGS} ${GO_GCFLAGS} ./cmd/registry

${PREFIX}/bin/digest: $(GOFILES)
	@echo "+ $@"
	@go build -tags "${DOCKER_BUILDTAGS}" -o $@ ${GO_LDFLAGS} ${GO_GCFLAGS} ./cmd/digest

${PREFIX}/bin/registry-api-descriptor-template: $(GOFILES)
	@echo "+ $@"
	@go build -o $@ ${GO_LDFLAGS} ${GO_GCFLAGS} ./cmd/registry-api-descriptor-template

docs/spec/api.md: docs/spec/api.md.tmpl ${PREFIX}/bin/registry-api-descriptor-template
	./bin/registry-api-descriptor-template $< > $@

vet:
	@echo "+ $@"
	@go vet -tags "${DOCKER_BUILDTAGS}" $(PKGS)

fmt:
	@echo "+ $@"
	@test -z "$$(gofmt -s -l . 2>&1 | grep -v ^vendor/ | tee /dev/stderr)" || \
		(echo >&2 "+ please format Go code with 'gofmt -s'" && false)

lint:
	@echo "+ $@"
	$(if $(GOLINT), , \
		$(error Please install golint: `go get -u github.com/golang/lint/golint`))
	@test -z "$$($(GOLINT) ./... 2>&1 | grep -v ^vendor/ | tee /dev/stderr)"

build:
	@echo "+ $@"
	@go build -tags "${DOCKER_BUILDTAGS}" -v ${GO_LDFLAGS} $(PKGS)

test:
	@echo "+ $@"
	@go test -test.short -tags "${DOCKER_BUILDTAGS}" $(PKGS)

test-full:
	@echo "+ $@"
	@go test -tags "${DOCKER_BUILDTAGS}" $(PKGS)

binaries: ${PREFIX}/bin/registry ${PREFIX}/bin/digest ${PREFIX}/bin/registry-api-descriptor-template
	@echo "+ $@"

clean:
	@echo "+ $@"
	@rm -rf "${PREFIX}/bin/registry" "${PREFIX}/bin/digest" "${PREFIX}/bin/registry-api-descriptor-template"

dep-save:
	@echo "+ $@"
	$(if $(GODEP), , \
		$(error Please install godep: go get github.com/tools/godep))
	@$(GODEP) save $(PKGS)

dep-restore:
	@echo "+ $@"
	$(if $(GODEP), , \
		$(error Please install godep: go get github.com/tools/godep))
	@$(GODEP) restore -v

dep-validate: dep-restore
	@echo "+ $@"
	@rm -Rf .vendor.bak
	@mv vendor .vendor.bak
	@rm -Rf Godeps
	@$(GODEP) save ./...
	@test -z "$$(diff -r vendor .vendor.bak 2>&1 | tee /dev/stderr)" || \
		(echo >&2 "+ borked dependencies! what you have in Godeps/Godeps.json does not match with what you have in vendor" && false)
	@rm -Rf .vendor.bak
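The `GO_LDFLAGS` line above works by overwriting a package-level string at link time. A minimal sketch of the receiving side (the repository keeps this in `version/version.go`; the default value shown here is an assumption for illustration):

```
// Package version holds the string the Makefile overwrites at link
// time via: -ldflags "-X <import/path>/version.Version=v2.x.y"
package version

// Version is a placeholder the linker replaces; binaries built without
// GO_LDFLAGS report this default instead of a real tag.
var Version = "v2.0.0+unknown"
```

This is why `registry --version` in BUILDING.md prints a `git describe`-derived string for Makefile builds and `+unknown` for plain `go get` builds.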
131 vendor/github.com/docker/distribution/README.md generated vendored
@@ -1,131 +0,0 @@
# Distribution

The Docker toolset to pack, ship, store, and deliver content.

This repository's main product is the Docker Registry 2.0 implementation for storing and distributing Docker images. It supersedes the [docker/docker-registry](https://github.com/docker/docker-registry) project with a new API design, focused around security and performance.

<img src="https://www.docker.com/sites/default/files/oyster-registry-3.png" width=200px/>

[Circle CI](https://circleci.com/gh/docker/distribution/tree/master)
[GoDoc](https://godoc.org/github.com/docker/distribution)

This repository contains the following components:

|**Component** |Description |
|--------------------|------------|
| **registry** | An implementation of the [Docker Registry HTTP API V2](docs/spec/api.md) for use with docker 1.6+. |
| **libraries** | A rich set of libraries for interacting with distribution components. Please see [godoc](https://godoc.org/github.com/docker/distribution) for details. **Note**: These libraries are **unstable**. |
| **specifications** | _Distribution_ related specifications are available in [docs/spec](docs/spec) |
| **documentation** | Docker's full documentation set is available at [docs.docker.com](https://docs.docker.com). This repository [contains the subset](docs/) related just to the registry. |

### How does this integrate with Docker engine?

This project should provide an implementation to a V2 API for use in the [Docker core project](https://github.com/docker/docker). The API should be embeddable and simplify the process of securely pulling and pushing content from `docker` daemons.

### What are the long term goals of the Distribution project?

The _Distribution_ project has the further long term goal of providing a secure tool chain for distributing content. The specifications, APIs and tools should be as useful with Docker as they are without.

Our goal is to design a professional grade and extensible content distribution system that allows users to:

* Enjoy an efficient, secured and reliable way to store, manage, package and exchange content
* Hack/roll their own on top of healthy open-source components
* Implement their own home made solution through good specs, and solid extensions mechanism.

## More about Registry 2.0

The new registry implementation provides the following benefits:

- faster push and pull
- new, more efficient implementation
- simplified deployment
- pluggable storage backend
- webhook notifications

For information on upcoming functionality, please see [ROADMAP.md](ROADMAP.md).

### Who needs to deploy a registry?

By default, Docker users pull images from Docker's public registry instance. [Installing Docker](https://docs.docker.com/engine/installation/) gives users this ability. Users can also push images to a repository on Docker's public registry, if they have a [Docker Hub](https://hub.docker.com/) account.

For some users and even companies, this default behavior is sufficient. For others, it is not.

For example, users with their own software products may want to maintain a registry for private, company images. Also, you may wish to deploy your own image repository for images used to test or in continuous integration. For these use cases and others, [deploying your own registry instance](https://github.com/docker/docker.github.io/blob/master/registry/deploying.md) may be the better choice.

### Migration to Registry 2.0

For those who have previously deployed their own registry based on the Registry 1.0 implementation and wish to deploy a Registry 2.0 while retaining images, data migration is required. A tool to assist with migration efforts has been created. For more information see [docker/migrator](https://github.com/docker/migrator).

## Contribute

Please see [CONTRIBUTING.md](CONTRIBUTING.md) for details on how to contribute issues, fixes, and patches to this project. If you are contributing code, see the instructions for [building a development environment](BUILDING.md).

## Support

If any issues are encountered while using the _Distribution_ project, several avenues are available for support:

<table>
	<tr>
		<th align="left">IRC</th>
		<td>#docker-distribution on FreeNode</td>
	</tr>
	<tr>
		<th align="left">Issue Tracker</th>
		<td>github.com/docker/distribution/issues</td>
	</tr>
	<tr>
		<th align="left">Google Groups</th>
		<td>https://groups.google.com/a/dockerproject.org/forum/#!forum/distribution</td>
	</tr>
	<tr>
		<th align="left">Mailing List</th>
		<td>docker@dockerproject.org</td>
	</tr>
</table>

## License

This project is distributed under [Apache License, Version 2.0](LICENSE).
36 vendor/github.com/docker/distribution/RELEASE-CHECKLIST.md generated vendored
@@ -1,36 +0,0 @@
## Registry Release Checklist

10. Compile release notes detailing features and bugfixes since the last release. Update the `CHANGELOG.md` file.

20. Update the version file: `https://github.com/docker/distribution/blob/master/version/version.go`

30. Update the `MAINTAINERS` (if necessary), `AUTHORS` and `.mailmap` files.

```
make AUTHORS
```

40. Create a signed tag.

    Distribution uses semantic versioning. Tags are of the format `vx.y.z[-rcn]`. You will need PGP installed and a PGP key which has been added to your Github account. The comment for the tag should include the release notes.

50. Push the signed tag.

60. Create a new [release](https://github.com/docker/distribution/releases). In the case of a release candidate, tick the `pre-release` checkbox.

70. Update the registry binary in the [distribution library image repo](https://github.com/docker/distribution-library-image) by running the update script and opening a pull request.

80. Update the official image. Add the new version in the [official images repo](https://github.com/docker-library/official-images) by appending a new version to the `registry/registry` file with the git hash pointed to by the signed tag. Update the major version to point to the latest version and the minor version to point to the new patch release if necessary. E.g. to release `2.3.1`:

    `2.3.1 (new)`

    `2.3.0` -> `2.3.0` can be removed

    `2` -> `2.3.1`

    `2.3` -> `2.3.1`

90. Build a new distribution/registry image on [Docker hub](https://hub.docker.com/u/distribution/dashboard) by adding a new automated build with the new tag and re-building the images.
267 vendor/github.com/docker/distribution/ROADMAP.md generated vendored
@@ -1,267 +0,0 @@
# Roadmap

The Distribution Project consists of several components, some of which are still being defined. This document defines the high-level goals of the project, identifies the current components, and defines the release relationship to the Docker Platform.

* [Distribution Goals](#distribution-goals)
* [Distribution Components](#distribution-components)
* [Project Planning](#project-planning): release relationship to the Docker Platform.

This road map is a living document, providing an overview of the goals and considerations made in respect of the future of the project.

## Distribution Goals

- Replace the existing [docker registry](https://github.com/docker/docker-registry) implementation as the primary implementation.
- Replace the existing push and pull code in the docker engine with the distribution package.
- Define a strong data model for distributing docker images
- Provide a flexible distribution tool kit for use in the docker platform
- Unlock new distribution models

## Distribution Components

Components of the Distribution Project are managed via github [milestones](https://github.com/docker/distribution/milestones). Upcoming features and bugfixes for a component will be added to the relevant milestone. If a feature or bugfix is not part of a milestone, it is currently unscheduled for implementation.

* [Registry](#registry)
* [Distribution Package](#distribution-package)

***

### Registry

The new Docker registry is the main portion of the distribution repository. Registry 2.0 is the first release of the next-generation registry. This was primarily focused on implementing the [new registry API](https://github.com/docker/distribution/blob/master/docs/spec/api.md), with a focus on security and performance.

Following from the Distribution project goals above, we have a set of goals for registry v2 that we would like to follow in the design. New features should be compared against these goals.

#### Data Storage and Distribution First

The registry's first goal is to provide a reliable, consistent storage location for Docker images. The registry should only provide the minimal amount of indexing required to fetch image data and no more.

This means we should be selective in new features and API additions, including those that may require expensive, ever growing indexes. Requests should be servable in "constant time".

#### Content Addressability

All data objects used in the registry API should be content addressable. Content identifiers should be secure and verifiable. This provides a secure, reliable base from which to build more advanced content distribution systems.
||||||
#### Content Agnostic
|
|
||||||
|
|
||||||
In the past, changes to the image format would require large changes in Docker
|
|
||||||
and the Registry. By decoupling the distribution and image format, we can
|
|
||||||
allow the formats to progress without having to coordinate between the two.
|
|
||||||
This means that we should be focused on decoupling Docker from the registry
|
|
||||||
just as much as decoupling the registry from Docker. Such an approach will
|
|
||||||
allow us to unlock new distribution models that haven't been possible before.
|
|
||||||
|
|
||||||
We can take this further by saying that the new registry should be content
|
|
||||||
agnostic. The registry provides a model of names, tags, manifests and content
|
|
||||||
addresses and that model can be used to work with content.
|
|
||||||
|
|
||||||
#### Simplicity
|
|
||||||
|
|
||||||
The new registry should be closer to a microservice component than its
|
|
||||||
predecessor. This means it should have a narrower API and a low number of
|
|
||||||
service dependencies. It should be easy to deploy.
|
|
||||||
|
|
||||||
This means that other solutions should be explored before changing the API or
|
|
||||||
adding extra dependencies. If functionality is required, can it be added as an
|
|
||||||
extension or companion service.
|
|
||||||
|
|
||||||
#### Extensibility
|
|
||||||
|
|
||||||
The registry should provide extension points to add functionality. By keeping
|
|
||||||
the scope narrow, but providing the ability to add functionality.
|
|
||||||
|
|
||||||
Features like search, indexing, synchronization and registry explorers fall
|
|
||||||
into this category. No such feature should be added unless we've found it
|
|
||||||
impossible to do through an extension.
|
|
||||||
|
|
||||||
#### Active Feature Discussions
|
|
||||||
|
|
||||||
The following are feature discussions that are currently active.
|
|
||||||
|
|
||||||
If you don't see your favorite, unimplemented feature, feel free to contact us
|
|
||||||
via IRC or the mailing list and we can talk about adding it. The goal here is
|
|
||||||
to make sure that new features go through a rigid design process before
|
|
||||||
landing in the registry.
|
|
||||||
|
|
||||||
##### Proxying to other Registries
|
|
||||||
|
|
||||||
A _pull-through caching_ mode exists for the registry, but is restricted from
|
|
||||||
within the docker client to only mirror the official Docker Hub. This functionality
|
|
||||||
can be expanded when image provenance has been specified and implemented in the
|
|
||||||
distribution project.
|
|
||||||
|
|
||||||
##### Metadata storage
|
|
||||||
|
|
||||||
Metadata for the registry is currently stored with the manifest and layer data on
|
|
||||||
the storage backend. While this is a big win for simplicity and reliably maintaining
|
|
||||||
state, it comes with the cost of consistency and high latency. The mutable registry
|
|
||||||
metadata operations should be abstracted behind an API which will allow ACID compliant
|
|
||||||
storage systems to handle metadata.
|
|
||||||
|
|
||||||
##### Peer to Peer transfer
|
|
||||||
|
|
||||||
Discussion has started here: https://docs.google.com/document/d/1rYDpSpJiQWmCQy8Cuiaa3NH-Co33oK_SC9HeXYo87QA/edit
|
|
||||||
|
|
||||||
##### Indexing, Search and Discovery
|
|
||||||
|
|
||||||
The original registry provided some implementation of search for use with
|
|
||||||
private registries. Support has been elided from V2 since we'd like to both
|
|
||||||
decouple search functionality from the registry. The makes the registry
|
|
||||||
simpler to deploy, especially in use cases where search is not needed, and
|
|
||||||
let's us decouple the image format from the registry.
|
|
||||||
|
|
||||||
There are explorations into using the catalog API and notification system to
|
|
||||||
build external indexes. The current line of thought is that we will define a
|
|
||||||
common search API to index and query docker images. Such a system could be run
|
|
||||||
as a companion to a registry or set of registries to power discovery.
|
|
||||||
|
|
||||||
The main issue with search and discovery is that there are so many ways to
|
|
||||||
accomplish it. There are two aspects to this project. The first is deciding on
|
|
||||||
how it will be done, including an API definition that can work with changing
|
|
||||||
data formats. The second is the process of integrating with `docker search`.
|
|
||||||
We expect that someone attempts to address the problem with the existing tools
|
|
||||||
and propose it as a standard search API or uses it to inform a standardization
|
|
||||||
process. Once this has been explored, we integrate with the docker client.
|
|
||||||
|
|
||||||
Please see the following for more detail:
|
|
||||||
|
|
||||||
- https://github.com/docker/distribution/issues/206
|
|
||||||
|
|
||||||
##### Deletes

> __NOTE:__ Deletes are a much asked for feature. Before requesting this feature or participating in discussion, we ask that you read this section in full and understand the problems behind deletes.

While, at first glance, implementing deletes seems simple, there are a number of mitigating factors that make many solutions not ideal or even pathological in the context of a registry. The following paragraphs discuss the background and approaches that could be applied to arrive at a solution.

The goal of deletes in any system is to remove unused or unneeded data. Only data requested for deletion should be removed and no other data. Removing unintended data is worse than _not_ removing data that was requested for removal but ideally, both are supported. Generally, according to this rule, we err on holding data longer than needed, ensuring that it is only removed when we can be certain that it can be removed. With the current behavior, we opt to hold onto the data forever, ensuring that data cannot be incorrectly removed.

To understand the problems with implementing deletes, one must understand the data model. All registry data is stored in a filesystem layout, implemented on a "storage driver", effectively a _virtual file system_ (VFS). The storage system must assume that this VFS layer will be eventually consistent and has poor read-after-write consistency, since this is the lowest common denominator among the storage drivers. This is mitigated by writing values in reverse-dependent order, but makes wider transactional operations unsafe.

Layered on the VFS model is a content-addressable _directed, acyclic graph_ (DAG) made up of blobs. Manifests reference layers. Tags reference manifests. Since the same data can be referenced by multiple manifests, we only store data once, even if it is in different repositories. Thus, we have a set of blobs, referenced by tags and manifests. If we want to delete a blob we need to be certain that it is no longer referenced by another manifest or tag. When we delete a manifest, we also can try to delete the referenced blobs. Deciding whether or not a blob has an active reference is the crux of the problem.

Conceptually, deleting a manifest and its resources is quite simple. Just find all the manifests, enumerate the referenced blobs and delete the blobs not in that set. An astute observer will recognize this as a garbage collection problem. As with garbage collection in programming languages, this is very simple when one always has a consistent view. When one adds parallelism and an inconsistent view of data, it becomes very challenging.

A simple example can demonstrate this. Let's say we are deleting a manifest _A_ in one process. We scan the manifest and decide that all the blobs are ready for deletion. Concurrently, we have another process accepting a new manifest _B_ referencing one or more blobs from the manifest _A_. Manifest _B_ is accepted and all the blobs are considered present, so the operation proceeds. The original process then deletes the referenced blobs, assuming they were unreferenced. The manifest _B_, which we thought had all of its data present, can no longer be served by the registry, since the dependent data has been deleted.

Deleting data from the registry safely requires some way to coordinate this operation. The following approaches are being considered:

- _Reference Counting_ - Maintain a count of references to each blob. This is challenging for a number of reasons: 1. maintaining a consistent consensus of reference counts across a set of Registries and 2. building the initial list of reference counts for an existing registry. These challenges can be met with a consensus protocol like Paxos or Raft in the first case and a necessary but simple scan in the second.
- _Lock the World GC_ - Halt all writes to the data store. Walk the data store and find all blob references. Delete all unreferenced blobs. This approach is very simple but requires disabling writes for a period of time while the service reads all data. This is slow and expensive but very accurate and effective.
- _Generational GC_ - Do something similar to above but instead of blocking writes, writes are sent to another storage backend while reads are broadcast to the new and old backends. GC is then performed on the read-only portion. Because writes land in the new backend, the data in the read-only section can be safely deleted. The main drawbacks of this approach are complexity and coordination.
- _Centralized Oracle_ - Using a centralized, transactional database, we can know exactly which data is referenced at any given time. This avoids the coordination problem by managing this data in a single location. We trade off metadata scalability for simplicity and performance. This is a very good option for most registry deployments. This would create a bottleneck for registry metadata. However, metadata is generally not the main bottleneck when serving images.

Please let us know if other solutions exist that we have yet to enumerate. Note that for any approach, implementation is a massive consideration. For example, a mark-sweep based solution may seem simple but the amount of work in coordination may offset the extra work it might take to build a _Centralized Oracle_. We'll accept proposals for any solution but please coordinate with us before dropping code.

At this time, we have traded off simplicity and ease of deployment for disk space. Simplicity and ease of deployment tend to reduce developer involvement, which is currently the most expensive resource in software engineering. Taking on any solution for deletes will greatly affect these factors, trading off very cheap disk space for a complex deployment and operational story.

Please see the following issues for more detail:

- https://github.com/docker/distribution/issues/422
- https://github.com/docker/distribution/issues/461
- https://github.com/docker/distribution/issues/462
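To make the garbage-collection framing above concrete, the following is a minimal mark-sweep sketch over the `BlobEnumerator` and `BlobDeleter` interfaces removed later in this diff. It is purely illustrative: it assumes the "Lock the World" approach (no concurrent writes between mark and sweep), and the `referenced` predicate and `markSweep` helper are hypothetical stand-ins for a real manifest walk.

```go
package gcsketch

import (
	"github.com/docker/distribution"
	"github.com/docker/distribution/context"
	"github.com/docker/distribution/digest"
)

// markSweep deletes every blob that no manifest or tag references. It is
// only safe under the "Lock the World" assumption discussed above: nothing
// may write to the store between the mark and sweep phases.
func markSweep(ctx context.Context, enum distribution.BlobEnumerator, del distribution.BlobDeleter, referenced func(digest.Digest) bool) error {
	var garbage []digest.Digest

	// Mark: collect every blob that is not reachable from a manifest or tag.
	if err := enum.Enumerate(ctx, func(dgst digest.Digest) error {
		if !referenced(dgst) {
			garbage = append(garbage, dgst)
		}
		return nil
	}); err != nil {
		return err
	}

	// Sweep: delete the unreferenced blobs.
	for _, dgst := range garbage {
		if err := del.Delete(ctx, dgst); err != nil {
			return err
		}
	}
	return nil
}
```

The manifest-_A_/manifest-_B_ race described above is exactly what breaks this sketch once writes are re-enabled: a blob judged garbage during the mark phase can gain a new reference before the sweep deletes it.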
### Distribution Package

At its core, the Distribution Project is a set of Go packages that make up Distribution Components. At this time, most of these packages make up the Registry implementation.

The package itself is considered unstable. If you're using it, please take care to vendor the dependent version.

For feature additions, please see the Registry section. In the future, we may break out a separate Roadmap for distribution-specific features that apply to more than just the registry.

***

### Project Planning

An [Open-Source Planning Process](https://github.com/docker/distribution/wiki/Open-Source-Planning-Process) is used to define the Roadmap. [Project Pages](https://github.com/docker/distribution/wiki) define the goals for each Milestone and identify current progress.
257  vendor/github.com/docker/distribution/blobs.go  generated  vendored
@@ -1,257 +0,0 @@
package distribution

import (
	"errors"
	"fmt"
	"io"
	"net/http"
	"time"

	"github.com/docker/distribution/context"
	"github.com/docker/distribution/digest"
	"github.com/docker/distribution/reference"
)

var (
	// ErrBlobExists returned when blob already exists
	ErrBlobExists = errors.New("blob exists")

	// ErrBlobDigestUnsupported when blob digest is an unsupported version.
	ErrBlobDigestUnsupported = errors.New("unsupported blob digest")

	// ErrBlobUnknown when blob is not found.
	ErrBlobUnknown = errors.New("unknown blob")

	// ErrBlobUploadUnknown returned when upload is not found.
	ErrBlobUploadUnknown = errors.New("blob upload unknown")

	// ErrBlobInvalidLength returned when the blob has an expected length on
	// commit, meaning mismatched with the descriptor or an invalid value.
	ErrBlobInvalidLength = errors.New("blob invalid length")
)

// ErrBlobInvalidDigest returned when digest check fails.
type ErrBlobInvalidDigest struct {
	Digest digest.Digest
	Reason error
}

func (err ErrBlobInvalidDigest) Error() string {
	return fmt.Sprintf("invalid digest for referenced layer: %v, %v",
		err.Digest, err.Reason)
}

// ErrBlobMounted returned when a blob is mounted from another repository
// instead of initiating an upload session.
type ErrBlobMounted struct {
	From       reference.Canonical
	Descriptor Descriptor
}

func (err ErrBlobMounted) Error() string {
	return fmt.Sprintf("blob mounted from: %v to: %v",
		err.From, err.Descriptor)
}

// Descriptor describes targeted content. Used in conjunction with a blob
// store, a descriptor can be used to fetch, store and target any kind of
// blob. The struct also describes the wire protocol format. Fields should
// only be added but never changed.
type Descriptor struct {
	// MediaType describes the type of the content. All text based formats are
	// encoded as utf-8.
	MediaType string `json:"mediaType,omitempty"`

	// Size in bytes of content.
	Size int64 `json:"size,omitempty"`

	// Digest uniquely identifies the content. A byte stream can be verified
	// against this digest.
	Digest digest.Digest `json:"digest,omitempty"`

	// URLs contains the source URLs of this content.
	URLs []string `json:"urls,omitempty"`

	// NOTE: Before adding a field here, please ensure that all
	// other options have been exhausted. Much of the type relationships
	// depend on the simplicity of this type.
}

// Descriptor returns the descriptor, to make it satisfy the Describable
// interface. Note that implementations of Describable are generally objects
// which can be described, not simply descriptors; this exception is in place
// to make it more convenient to pass actual descriptors to functions that
// expect Describable objects.
func (d Descriptor) Descriptor() Descriptor {
	return d
}

// BlobStatter makes blob descriptors available by digest. The service may
// provide a descriptor of a different digest if the provided digest is not
// canonical.
type BlobStatter interface {
	// Stat provides metadata about a blob identified by the digest. If the
	// blob is unknown to the describer, ErrBlobUnknown will be returned.
	Stat(ctx context.Context, dgst digest.Digest) (Descriptor, error)
}

// BlobDeleter enables deleting blobs from storage.
type BlobDeleter interface {
	Delete(ctx context.Context, dgst digest.Digest) error
}

// BlobEnumerator enables iterating over blobs from storage
type BlobEnumerator interface {
	Enumerate(ctx context.Context, ingester func(dgst digest.Digest) error) error
}

// BlobDescriptorService manages metadata about a blob by digest. Most
// implementations will not expose such an interface explicitly. Such mappings
// should be maintained by interacting with the BlobIngester. Hence, this is
// left off of BlobService and BlobStore.
type BlobDescriptorService interface {
	BlobStatter

	// SetDescriptor assigns the descriptor to the digest. The provided digest and
	// the digest in the descriptor must map to identical content but they may
	// differ on their algorithm. The descriptor must have the canonical
	// digest of the content and the digest algorithm must match the
	// annotator's canonical algorithm.
	//
	// Such a facility can be used to map blobs between digest domains, with
	// the restriction that the algorithm of the descriptor must match the
	// canonical algorithm (ie sha256) of the annotator.
	SetDescriptor(ctx context.Context, dgst digest.Digest, desc Descriptor) error

	// Clear enables descriptors to be unlinked
	Clear(ctx context.Context, dgst digest.Digest) error
}

// BlobDescriptorServiceFactory creates middleware for BlobDescriptorService.
type BlobDescriptorServiceFactory interface {
	BlobAccessController(svc BlobDescriptorService) BlobDescriptorService
}

// ReadSeekCloser is the primary reader type for blob data, combining
// io.ReadSeeker with io.Closer.
type ReadSeekCloser interface {
	io.ReadSeeker
	io.Closer
}

// BlobProvider describes operations for getting blob data.
type BlobProvider interface {
	// Get returns the entire blob identified by digest along with the descriptor.
	Get(ctx context.Context, dgst digest.Digest) ([]byte, error)

	// Open provides a ReadSeekCloser to the blob identified by the provided
	// descriptor. If the blob is not known to the service, an error will be
	// returned.
	Open(ctx context.Context, dgst digest.Digest) (ReadSeekCloser, error)
}

// BlobServer can serve blobs via http.
type BlobServer interface {
	// ServeBlob attempts to serve the blob, identified by dgst, via http. The
	// service may decide to redirect the client elsewhere or serve the data
	// directly.
	//
	// This handler only issues successful responses, such as 2xx or 3xx,
	// meaning it serves data or issues a redirect. If the blob is not
	// available, an error will be returned and the caller may still issue a
	// response.
	//
	// The implementation may serve the same blob from a different digest
	// domain. The appropriate headers will be set for the blob, unless they
	// have already been set by the caller.
	ServeBlob(ctx context.Context, w http.ResponseWriter, r *http.Request, dgst digest.Digest) error
}

// BlobIngester ingests blob data.
type BlobIngester interface {
	// Put inserts the content p into the blob service, returning a descriptor
	// or an error.
	Put(ctx context.Context, mediaType string, p []byte) (Descriptor, error)

	// Create allocates a new blob writer to add a blob to this service. The
	// returned handle can be written to and later resumed using an opaque
	// identifier. With this approach, one can Close and Resume a BlobWriter
	// multiple times until the BlobWriter is committed or cancelled.
	Create(ctx context.Context, options ...BlobCreateOption) (BlobWriter, error)

	// Resume attempts to resume a write to a blob, identified by an id.
	Resume(ctx context.Context, id string) (BlobWriter, error)
}

// BlobCreateOption is a general extensible function argument for blob creation
// methods. A BlobIngester may choose to honor any or none of the given
// BlobCreateOptions, which can be specific to the implementation of the
// BlobIngester receiving them.
// TODO (brianbland): unify this with ManifestServiceOption in the future
type BlobCreateOption interface {
	Apply(interface{}) error
}

// CreateOptions is a collection of blob creation modifiers relevant to general
// blob storage intended to be configured by the BlobCreateOption.Apply method.
type CreateOptions struct {
	Mount struct {
		ShouldMount bool
		From        reference.Canonical
		// Stat allows a precalculated descriptor to be passed, linked and
		// returned. The blob access check will be skipped if set.
		Stat *Descriptor
	}
}

// BlobWriter provides a handle for inserting data into a blob store.
// Instances should be obtained from BlobWriteService.Writer and
// BlobWriteService.Resume. If supported by the store, a writer can be
// recovered with the id.
type BlobWriter interface {
	io.WriteCloser
	io.ReaderFrom

	// Size returns the number of bytes written to this blob.
	Size() int64

	// ID returns the identifier for this writer. The ID can be used with the
	// Blob service to later resume the write.
	ID() string

	// StartedAt returns the time this blob write was started.
	StartedAt() time.Time

	// Commit completes the blob writer process. The content is verified
	// against the provided provisional descriptor, which may result in an
	// error. Depending on the implementation, written data may be validated
	// against the provisional descriptor fields. If MediaType is not present,
	// the implementation may reject the commit or assign "application/octet-
	// stream" to the blob. The returned descriptor may have a different
	// digest depending on the blob store, referred to as the canonical
	// descriptor.
	Commit(ctx context.Context, provisional Descriptor) (canonical Descriptor, err error)

	// Cancel ends the blob write without storing any data and frees any
	// associated resources. Any data written thus far will be lost. Cancel
	// implementations should allow multiple calls, even after a commit, that
	// result in a no-op. This allows use of Cancel in a defer statement,
	// increasing the assurance that it is correctly called.
	Cancel(ctx context.Context) error
}

// BlobService combines the operations to access, read and write blobs. This
// can be used to describe remote blob services.
type BlobService interface {
	BlobStatter
	BlobProvider
	BlobIngester
}

// BlobStore represent the entire suite of blob related operations. Such an
// implementation can access, read, write, delete and serve blobs.
type BlobStore interface {
	BlobService
	BlobServer
	BlobDeleter
}
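The interfaces above define the upload lifecycle; a minimal sketch of the intended Create / write / Commit flow, with `Cancel` deferred as the comments recommend, might look like the following. This is an illustration only: the `upload` helper is hypothetical, and the `BlobIngester` and payload are assumed to be supplied by the caller.

```go
package blobsketch

import (
	_ "crypto/sha256" // registers the hash backing the canonical digest

	"github.com/docker/distribution"
	"github.com/docker/distribution/context"
	"github.com/docker/distribution/digest"
)

// upload streams p into the blob store and returns the canonical
// descriptor. Cancel is deferred so an early error path releases the
// upload session; after a successful Commit it is a no-op.
func upload(ctx context.Context, ingester distribution.BlobIngester, mediaType string, p []byte) (distribution.Descriptor, error) {
	bw, err := ingester.Create(ctx)
	if err != nil {
		return distribution.Descriptor{}, err
	}
	defer bw.Cancel(ctx)

	if _, err := bw.Write(p); err != nil {
		return distribution.Descriptor{}, err
	}

	// The provisional descriptor carries the expected digest and size;
	// the store verifies the written content against it on Commit.
	provisional := distribution.Descriptor{
		MediaType: mediaType,
		Size:      int64(len(p)),
		Digest:    digest.FromBytes(p),
	}
	return bw.Commit(ctx, provisional)
}
```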
93  vendor/github.com/docker/distribution/circle.yml  generated  vendored
@@ -1,93 +0,0 @@
# Pony-up!
machine:
  pre:
    # Install gvm
    - bash < <(curl -s -S -L https://raw.githubusercontent.com/moovweb/gvm/1.0.22/binscripts/gvm-installer)
    # Install codecov for coverage
    - pip install --user codecov

  post:
    # go
    - gvm install go1.7 --prefer-binary --name=stable

  environment:
    # Convenient shortcuts to "common" locations
    CHECKOUT: /home/ubuntu/$CIRCLE_PROJECT_REPONAME
    BASE_DIR: src/github.com/$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME
    # Trick circle brainflat "no absolute path" behavior
    BASE_STABLE: ../../../$HOME/.gvm/pkgsets/stable/global/$BASE_DIR
    DOCKER_BUILDTAGS: "include_oss include_gcs"
    # Workaround Circle parsing dumb bugs and/or YAML wonkyness
    CIRCLE_PAIN: "mode: set"

  hosts:
    # Not used yet
    fancy: 127.0.0.1

dependencies:
  pre:
    # Copy the code to the gopath of all go versions
    - >
      gvm use stable &&
      mkdir -p "$(dirname $BASE_STABLE)" &&
      cp -R "$CHECKOUT" "$BASE_STABLE"

  override:
    # Install dependencies for every copied clone/go version
    - gvm use stable && go get github.com/tools/godep:
        pwd: $BASE_STABLE

  post:
    # For the stable go version, additionally install linting tools
    - >
      gvm use stable &&
      go get github.com/axw/gocov/gocov github.com/golang/lint/golint

test:
  pre:
    # Output the go versions we are going to test
    # - gvm use old && go version
    - gvm use stable && go version

    # todo(richard): replace with a more robust vendoring solution. Removed due to a fundamental disagreement in godep philosophies.
    # Ensure validation of dependencies
    # - gvm use stable && if test -n "`git diff --stat=1000 master | grep -Ei \"vendor|godeps\"`"; then make dep-validate; fi:
    #     pwd: $BASE_STABLE

    # First thing: build everything. This will catch compile errors, and it's
    # also necessary for go vet to work properly (see #807).
    - gvm use stable && godep go install $(go list ./... | grep -v "/vendor/"):
        pwd: $BASE_STABLE

    # FMT
    - gvm use stable && make fmt:
        pwd: $BASE_STABLE

    # VET
    - gvm use stable && make vet:
        pwd: $BASE_STABLE

    # LINT
    - gvm use stable && make lint:
        pwd: $BASE_STABLE

  override:
    # Test stable, and report
    - gvm use stable; export ROOT_PACKAGE=$(go list .); go list -tags "$DOCKER_BUILDTAGS" ./... | grep -v "/vendor/" | xargs -L 1 -I{} bash -c 'export PACKAGE={}; godep go test -tags "$DOCKER_BUILDTAGS" -test.short -coverprofile=$GOPATH/src/$PACKAGE/coverage.out -coverpkg=$(./coverpkg.sh $PACKAGE $ROOT_PACKAGE) $PACKAGE':
        timeout: 1000
        pwd: $BASE_STABLE

    # Test stable with race
    - gvm use stable; export ROOT_PACKAGE=$(go list .); go list -tags "$DOCKER_BUILDTAGS" ./... | grep -v "/vendor/" | grep -v "registry/handlers" | grep -v "registry/storage/driver" | xargs -L 1 -I{} bash -c 'export PACKAGE={}; godep go test -race -tags "$DOCKER_BUILDTAGS" -test.short $PACKAGE':
        timeout: 1000
        pwd: $BASE_STABLE

  post:
    # Report to codecov
    - bash <(curl -s https://codecov.io/bash):
        pwd: $BASE_STABLE

## Notes
# Do we want these as well?
# - go get code.google.com/p/go.tools/cmd/goimports
# - test -z "$(goimports -l -w ./... | tee /dev/stderr)"
# http://labix.org/gocheck
7  vendor/github.com/docker/distribution/coverpkg.sh  generated  vendored
@@ -1,7 +0,0 @@
#!/usr/bin/env bash
# Given a subpackage and the containing package, figures out which packages
# need to be passed to `go test -coverpkg`: this includes all of the
# subpackage's dependencies within the containing package, as well as the
# subpackage itself.
DEPENDENCIES="$(go list -f $'{{range $f := .Deps}}{{$f}}\n{{end}}' ${1} | grep ${2} | grep -v github.com/docker/distribution/vendor)"
echo "${1} ${DEPENDENCIES}" | xargs echo -n | tr ' ' ','
139  vendor/github.com/docker/distribution/digest/digest.go  generated  vendored
@@ -1,139 +0,0 @@
package digest

import (
	"fmt"
	"hash"
	"io"
	"regexp"
	"strings"
)

const (
	// DigestSha256EmptyTar is the canonical sha256 digest of empty data
	DigestSha256EmptyTar = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
)

// Digest allows simple protection of hex formatted digest strings, prefixed
// by their algorithm. Strings of type Digest have some guarantee of being in
// the correct format and it provides quick access to the components of a
// digest string.
//
// The following is an example of the contents of Digest types:
//
//	sha256:7173b809ca12ec5dee4506cd86be934c4596dd234ee82c0662eac04a8c2c71dc
//
// This allows the digest to be abstracted behind this type and worked with
// only in those terms.
type Digest string

// NewDigest returns a Digest from alg and a hash.Hash object.
func NewDigest(alg Algorithm, h hash.Hash) Digest {
	return NewDigestFromBytes(alg, h.Sum(nil))
}

// NewDigestFromBytes returns a new digest from the byte contents of p.
// Typically, this can come from hash.Hash.Sum(...) or xxx.SumXXX(...)
// functions. This is also useful for rebuilding digests from binary
// serializations.
func NewDigestFromBytes(alg Algorithm, p []byte) Digest {
	return Digest(fmt.Sprintf("%s:%x", alg, p))
}

// NewDigestFromHex returns a Digest from alg and the hex encoded digest.
func NewDigestFromHex(alg, hex string) Digest {
	return Digest(fmt.Sprintf("%s:%s", alg, hex))
}

// DigestRegexp matches valid digest types.
var DigestRegexp = regexp.MustCompile(`[a-zA-Z0-9-_+.]+:[a-fA-F0-9]+`)

// DigestRegexpAnchored matches valid digest types, anchored to the start and end of the match.
var DigestRegexpAnchored = regexp.MustCompile(`^` + DigestRegexp.String() + `$`)

var (
	// ErrDigestInvalidFormat returned when digest format invalid.
	ErrDigestInvalidFormat = fmt.Errorf("invalid checksum digest format")

	// ErrDigestInvalidLength returned when digest has invalid length.
	ErrDigestInvalidLength = fmt.Errorf("invalid checksum digest length")

	// ErrDigestUnsupported returned when the digest algorithm is unsupported.
	ErrDigestUnsupported = fmt.Errorf("unsupported digest algorithm")
)

// ParseDigest parses s and returns the validated digest object. An error will
// be returned if the format is invalid.
func ParseDigest(s string) (Digest, error) {
	d := Digest(s)

	return d, d.Validate()
}

// FromReader returns the most valid digest for the underlying content using
// the canonical digest algorithm.
func FromReader(rd io.Reader) (Digest, error) {
	return Canonical.FromReader(rd)
}

// FromBytes digests the input and returns a Digest.
func FromBytes(p []byte) Digest {
	return Canonical.FromBytes(p)
}

// Validate checks that the contents of d is a valid digest, returning an
// error if not.
func (d Digest) Validate() error {
	s := string(d)

	if !DigestRegexpAnchored.MatchString(s) {
		return ErrDigestInvalidFormat
	}

	i := strings.Index(s, ":")
	if i < 0 {
		return ErrDigestInvalidFormat
	}

	// case: "sha256:" with no hex.
	if i+1 == len(s) {
		return ErrDigestInvalidFormat
	}

	switch algorithm := Algorithm(s[:i]); algorithm {
	case SHA256, SHA384, SHA512:
		if algorithm.Size()*2 != len(s[i+1:]) {
			return ErrDigestInvalidLength
		}
		break
	default:
		return ErrDigestUnsupported
	}

	return nil
}

// Algorithm returns the algorithm portion of the digest. This will panic if
// the underlying digest is not in a valid format.
func (d Digest) Algorithm() Algorithm {
	return Algorithm(d[:d.sepIndex()])
}

// Hex returns the hex digest portion of the digest. This will panic if the
// underlying digest is not in a valid format.
func (d Digest) Hex() string {
	return string(d[d.sepIndex()+1:])
}

func (d Digest) String() string {
	return string(d)
}

func (d Digest) sepIndex() int {
	i := strings.Index(string(d), ":")

	if i < 0 {
		panic("could not find ':' in digest: " + d)
	}

	return i
}
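A short usage sketch of the API above: parsing a digest string and pulling out its components. The digest value here is the well-known sha256 of empty data quoted in the file itself; the `Example` function name is arbitrary.

```go
package digestsketch

import (
	"fmt"

	"github.com/docker/distribution/digest"
)

func Example() {
	// Validate a digest string and split it into algorithm and hex parts.
	d, err := digest.ParseDigest("sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
	if err != nil {
		panic(err) // malformed input yields ErrDigestInvalidFormat et al.
	}
	fmt.Println(d.Algorithm()) // sha256
	fmt.Println(d.Hex()[:12])  // first bytes of the hex portion
}
```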
82  vendor/github.com/docker/distribution/digest/digest_test.go  generated  vendored
@@ -1,82 +0,0 @@
package digest

import (
	"testing"
)

func TestParseDigest(t *testing.T) {
	for _, testcase := range []struct {
		input     string
		err       error
		algorithm Algorithm
		hex       string
	}{
		{
			input:     "sha256:e58fcf7418d4390dec8e8fb69d88c06ec07039d651fedd3aa72af9972e7d046b",
			algorithm: "sha256",
			hex:       "e58fcf7418d4390dec8e8fb69d88c06ec07039d651fedd3aa72af9972e7d046b",
		},
		{
			input:     "sha384:d3fc7881460b7e22e3d172954463dddd7866d17597e7248453c48b3e9d26d9596bf9c4a9cf8072c9d5bad76e19af801d",
			algorithm: "sha384",
			hex:       "d3fc7881460b7e22e3d172954463dddd7866d17597e7248453c48b3e9d26d9596bf9c4a9cf8072c9d5bad76e19af801d",
		},
		{
			// empty hex
			input: "sha256:",
			err:   ErrDigestInvalidFormat,
		},
		{
			// just hex
			input: "d41d8cd98f00b204e9800998ecf8427e",
			err:   ErrDigestInvalidFormat,
		},
		{
			// not hex
			input: "sha256:d41d8cd98f00b204e9800m98ecf8427e",
			err:   ErrDigestInvalidFormat,
		},
		{
			// too short
			input: "sha256:abcdef0123456789",
			err:   ErrDigestInvalidLength,
		},
		{
			// too short (from different algorithm)
			input: "sha512:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789",
			err:   ErrDigestInvalidLength,
		},
		{
			input: "foo:d41d8cd98f00b204e9800998ecf8427e",
			err:   ErrDigestUnsupported,
		},
	} {
		digest, err := ParseDigest(testcase.input)
		if err != testcase.err {
			t.Fatalf("error differed from expected while parsing %q: %v != %v", testcase.input, err, testcase.err)
		}

		if testcase.err != nil {
			continue
		}

		if digest.Algorithm() != testcase.algorithm {
			t.Fatalf("incorrect algorithm for parsed digest: %q != %q", digest.Algorithm(), testcase.algorithm)
		}

		if digest.Hex() != testcase.hex {
			t.Fatalf("incorrect hex for parsed digest: %q != %q", digest.Hex(), testcase.hex)
		}

		// Parse string return value and check equality
		newParsed, err := ParseDigest(digest.String())

		if err != nil {
			t.Fatalf("unexpected error parsing input %q: %v", testcase.input, err)
		}

		if newParsed != digest {
			t.Fatalf("expected equal: %q != %q", newParsed, digest)
		}
	}
}
155  vendor/github.com/docker/distribution/digest/digester.go  generated  vendored
@@ -1,155 +0,0 @@
package digest

import (
	"crypto"
	"fmt"
	"hash"
	"io"
)

// Algorithm identifies an implementation of a digester by an identifier.
// Note that this defines both the hash algorithm used and the string
// encoding.
type Algorithm string

// supported digest types
const (
	SHA256 Algorithm = "sha256" // sha256 with hex encoding
	SHA384 Algorithm = "sha384" // sha384 with hex encoding
	SHA512 Algorithm = "sha512" // sha512 with hex encoding

	// Canonical is the primary digest algorithm used with the distribution
	// project. Other digests may be used but this one is the primary storage
	// digest.
	Canonical = SHA256
)

var (
	// TODO(stevvooe): Follow the pattern of the standard crypto package for
	// registration of digests. Effectively, we are a registerable set and
	// common symbol access.

	// algorithms maps values to hash.Hash implementations. Other algorithms
	// may be available but they cannot be calculated by the digest package.
	algorithms = map[Algorithm]crypto.Hash{
		SHA256: crypto.SHA256,
		SHA384: crypto.SHA384,
		SHA512: crypto.SHA512,
	}
)

// Available returns true if the digest type is available for use. If this
// returns false, New and Hash will return nil.
func (a Algorithm) Available() bool {
	h, ok := algorithms[a]
	if !ok {
		return false
	}

	// check availability of the hash, as well
	return h.Available()
}

func (a Algorithm) String() string {
	return string(a)
}

// Size returns the number of bytes returned by the hash.
func (a Algorithm) Size() int {
	h, ok := algorithms[a]
	if !ok {
		return 0
	}
	return h.Size()
}

// Set implemented to allow use of Algorithm as a command line flag.
func (a *Algorithm) Set(value string) error {
	if value == "" {
		*a = Canonical
	} else {
		// just do a type conversion, support is queried with Available.
		*a = Algorithm(value)
	}

	return nil
}

// New returns a new digester for the specified algorithm. If the algorithm
// does not have a digester implementation, nil will be returned. This can be
// checked by calling Available before calling New.
func (a Algorithm) New() Digester {
	return &digester{
		alg:  a,
		hash: a.Hash(),
	}
}

// Hash returns a new hash as used by the algorithm. If not available, the
// method will panic. Check Algorithm.Available() before calling.
func (a Algorithm) Hash() hash.Hash {
	if !a.Available() {
		// NOTE(stevvooe): A missing hash is usually a programming error that
		// must be resolved at compile time. We don't import in the digest
		// package to allow users to choose their hash implementation (such as
		// when using stevvooe/resumable or a hardware accelerated package).
		//
		// Applications that may want to resolve the hash at runtime should
		// call Algorithm.Available before calling Algorithm.Hash().
		panic(fmt.Sprintf("%v not available (make sure it is imported)", a))
	}

	return algorithms[a].New()
}

// FromReader returns the digest of the reader using the algorithm.
func (a Algorithm) FromReader(rd io.Reader) (Digest, error) {
	digester := a.New()

	if _, err := io.Copy(digester.Hash(), rd); err != nil {
		return "", err
	}

	return digester.Digest(), nil
}

// FromBytes digests the input and returns a Digest.
func (a Algorithm) FromBytes(p []byte) Digest {
	digester := a.New()

	if _, err := digester.Hash().Write(p); err != nil {
		// Writes to a Hash should never fail. None of the existing
		// hash implementations in the stdlib or hashes vendored
		// here can return errors from Write. Having a panic in this
		// condition instead of having FromBytes return an error value
		// avoids unnecessary error handling paths in all callers.
		panic("write to hash function returned error: " + err.Error())
	}

	return digester.Digest()
}

// TODO(stevvooe): Allow resolution of verifiers using the digest type and
// this registration system.

// Digester calculates the digest of written data. Writes should go directly
// to the return value of Hash, while calling Digest will return the current
// value of the digest.
type Digester interface {
	Hash() hash.Hash // provides direct access to underlying hash instance.
	Digest() Digest
}

// digester provides a simple digester definition that embeds a hasher.
type digester struct {
	alg  Algorithm
	hash hash.Hash
}

func (d *digester) Hash() hash.Hash {
	return d.hash
}

func (d *digester) Digest() Digest {
	return NewDigest(d.alg, d.hash)
}
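The Digester pattern above (write to `Hash()`, then snapshot with `Digest()`) supports incremental hashing of streamed content. A small illustrative sketch, assuming the `crypto/sha256` implementation is linked in (as the `Hash` panic message above requires); the `StreamingDigest` helper is hypothetical:

```go
package digestersketch

import (
	_ "crypto/sha256" // registers the hash backing digest.SHA256

	"github.com/docker/distribution/digest"
)

// StreamingDigest feeds chunks into a Digester and reports the digest of
// everything written so far.
func StreamingDigest(chunks [][]byte) digest.Digest {
	d := digest.Canonical.New()
	for _, c := range chunks {
		d.Hash().Write(c) // writes go directly to the underlying hash
	}
	return d.Digest()
}
```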
21  vendor/github.com/docker/distribution/digest/digester_resumable_test.go  generated  vendored
@@ -1,21 +0,0 @@
// +build !noresumabledigest

package digest

import (
	"testing"

	"github.com/stevvooe/resumable"
	_ "github.com/stevvooe/resumable/sha256"
)

// TestResumableDetection just ensures that the resumable capability of a hash
// is exposed through the digester type, which is just a hash plus a Digest
// method.
func TestResumableDetection(t *testing.T) {
	d := Canonical.New()

	if _, ok := d.Hash().(resumable.Hash); !ok {
		t.Fatalf("expected digester to implement resumable.Hash: %#v, %v", d, d.Hash())
	}
}
42  vendor/github.com/docker/distribution/digest/doc.go  generated  vendored
@@ -1,42 +0,0 @@
// Package digest provides a generalized type to opaquely represent message
// digests and their operations within the registry. The Digest type is
// designed to serve as a flexible identifier in a content-addressable system.
// More importantly, it provides tools and wrappers to work with
// hash.Hash-based digests with little effort.
//
// Basics
//
// The format of a digest is simply a string with two parts, dubbed the
// "algorithm" and the "digest", separated by a colon:
//
//	<algorithm>:<digest>
//
// An example of a sha256 digest representation follows:
//
//	sha256:7173b809ca12ec5dee4506cd86be934c4596dd234ee82c0662eac04a8c2c71dc
//
// In this case, the string "sha256" is the algorithm and the hex bytes are
// the "digest".
//
// Because the Digest type is simply a string, once a valid Digest is
// obtained, comparisons are cheap, quick and simple to express with the
// standard equality operator.
//
// Verification
//
// The main benefit of using the Digest type is simple verification against a
// given digest. The Verifier interface, modeled after the stdlib hash.Hash
// interface, provides a common write sink for digest verification. After
// writing is complete, calling the Verifier.Verified method will indicate
// whether or not the stream of bytes matches the target digest.
//
// Missing Features
//
// In addition to the above, we intend to add the following features to this
// package:
//
// 1. A Digester type that supports write sink digest calculation.
//
// 2. Suspend and resume of ongoing digest calculations to support efficient digest verification in the registry.
package digest
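Because a Digest is just a string, content-identity checks reduce to string equality, as the package documentation notes. A minimal illustration (the `sameContent` helper is hypothetical):

```go
package docsketch

import (
	_ "crypto/sha256" // needed so the canonical algorithm is available

	"github.com/docker/distribution/digest"
)

// sameContent reports whether two payloads are identical, compared only
// through their canonical digests via the standard == operator.
func sameContent(a, b []byte) bool {
	return digest.FromBytes(a) == digest.FromBytes(b)
}
```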
245  vendor/github.com/docker/distribution/digest/set.go  generated  vendored
@@ -1,245 +0,0 @@
package digest
|
|
||||||
|
|
||||||
import (
|
|
||||||
"errors"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
"sync"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
// ErrDigestNotFound is used when a matching digest
|
|
||||||
// could not be found in a set.
|
|
||||||
ErrDigestNotFound = errors.New("digest not found")
|
|
||||||
|
|
||||||
// ErrDigestAmbiguous is used when multiple digests
|
|
||||||
// are found in a set. None of the matching digests
|
|
||||||
// should be considered valid matches.
|
|
||||||
ErrDigestAmbiguous = errors.New("ambiguous digest string")
|
|
||||||
)
|
|
||||||
|
|
||||||
// Set is used to hold a unique set of digests which
|
|
||||||
// may be easily referenced by easily referenced by a string
|
|
||||||
// representation of the digest as well as short representation.
|
|
||||||
// The uniqueness of the short representation is based on other
|
|
||||||
// digests in the set. If digests are omitted from this set,
|
|
||||||
// collisions in a larger set may not be detected, therefore it
|
|
||||||
// is important to always do short representation lookups on
|
|
||||||
// the complete set of digests. To mitigate collisions, an
|
|
||||||
// appropriately long short code should be used.
|
|
||||||
type Set struct {
|
|
||||||
mutex sync.RWMutex
|
|
||||||
entries digestEntries
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewSet creates an empty set of digests
|
|
||||||
// which may have digests added.
|
|
||||||
func NewSet() *Set {
|
|
||||||
return &Set{
|
|
||||||
entries: digestEntries{},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkShortMatch checks whether two digests match as either whole
|
|
||||||
// values or short values. This function does not test equality,
|
|
||||||
// rather whether the second value could match against the first
|
|
||||||
// value.
|
|
||||||
func checkShortMatch(alg Algorithm, hex, shortAlg, shortHex string) bool {
|
|
||||||
if len(hex) == len(shortHex) {
|
|
||||||
if hex != shortHex {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
		if len(shortAlg) > 0 && string(alg) != shortAlg {
			return false
		}
	} else if !strings.HasPrefix(hex, shortHex) {
		return false
	} else if len(shortAlg) > 0 && string(alg) != shortAlg {
		return false
	}
	return true
}

// Lookup looks for a digest matching the given string representation.
// If no digests could be found ErrDigestNotFound will be returned
// with an empty digest value. If multiple matches are found
// ErrDigestAmbiguous will be returned with an empty digest value.
func (dst *Set) Lookup(d string) (Digest, error) {
	dst.mutex.RLock()
	defer dst.mutex.RUnlock()
	if len(dst.entries) == 0 {
		return "", ErrDigestNotFound
	}
	var (
		searchFunc func(int) bool
		alg        Algorithm
		hex        string
	)
	dgst, err := ParseDigest(d)
	if err == ErrDigestInvalidFormat {
		hex = d
		searchFunc = func(i int) bool {
			return dst.entries[i].val >= d
		}
	} else {
		hex = dgst.Hex()
		alg = dgst.Algorithm()
		searchFunc = func(i int) bool {
			if dst.entries[i].val == hex {
				return dst.entries[i].alg >= alg
			}
			return dst.entries[i].val >= hex
		}
	}
	idx := sort.Search(len(dst.entries), searchFunc)
	if idx == len(dst.entries) || !checkShortMatch(dst.entries[idx].alg, dst.entries[idx].val, string(alg), hex) {
		return "", ErrDigestNotFound
	}
	if dst.entries[idx].alg == alg && dst.entries[idx].val == hex {
		return dst.entries[idx].digest, nil
	}
	if idx+1 < len(dst.entries) && checkShortMatch(dst.entries[idx+1].alg, dst.entries[idx+1].val, string(alg), hex) {
		return "", ErrDigestAmbiguous
	}

	return dst.entries[idx].digest, nil
}

// Add adds the given digest to the set. An error will be returned
// if the given digest is invalid. If the digest already exists in the
// set, this operation will be a no-op.
func (dst *Set) Add(d Digest) error {
	if err := d.Validate(); err != nil {
		return err
	}
	dst.mutex.Lock()
	defer dst.mutex.Unlock()
	entry := &digestEntry{alg: d.Algorithm(), val: d.Hex(), digest: d}
	searchFunc := func(i int) bool {
		if dst.entries[i].val == entry.val {
			return dst.entries[i].alg >= entry.alg
		}
		return dst.entries[i].val >= entry.val
	}
	idx := sort.Search(len(dst.entries), searchFunc)
	if idx == len(dst.entries) {
		dst.entries = append(dst.entries, entry)
		return nil
	} else if dst.entries[idx].digest == d {
		return nil
	}

	entries := append(dst.entries, nil)
	copy(entries[idx+1:], entries[idx:len(entries)-1])
	entries[idx] = entry
	dst.entries = entries
	return nil
}

// Remove removes the given digest from the set. An error will be
// returned if the given digest is invalid. If the digest does
// not exist in the set, this operation will be a no-op.
func (dst *Set) Remove(d Digest) error {
	if err := d.Validate(); err != nil {
		return err
	}
	dst.mutex.Lock()
	defer dst.mutex.Unlock()
	entry := &digestEntry{alg: d.Algorithm(), val: d.Hex(), digest: d}
	searchFunc := func(i int) bool {
		if dst.entries[i].val == entry.val {
			return dst.entries[i].alg >= entry.alg
		}
		return dst.entries[i].val >= entry.val
	}
	idx := sort.Search(len(dst.entries), searchFunc)
	// Not found if idx is after or value at idx is not digest
	if idx == len(dst.entries) || dst.entries[idx].digest != d {
		return nil
	}

	entries := dst.entries
	copy(entries[idx:], entries[idx+1:])
	entries = entries[:len(entries)-1]
	dst.entries = entries

	return nil
}

// All returns all the digests in the set
func (dst *Set) All() []Digest {
	dst.mutex.RLock()
	defer dst.mutex.RUnlock()
	retValues := make([]Digest, len(dst.entries))
	for i := range dst.entries {
		retValues[i] = dst.entries[i].digest
	}

	return retValues
}

// ShortCodeTable returns a map of Digest to unique short codes. The
// length represents the minimum value, the maximum length may be the
// entire value of digest if uniqueness cannot be achieved without the
// full value. This function will attempt to make short codes as short
// as possible to be unique.
func ShortCodeTable(dst *Set, length int) map[Digest]string {
	dst.mutex.RLock()
	defer dst.mutex.RUnlock()
	m := make(map[Digest]string, len(dst.entries))
	l := length
	resetIdx := 0
	for i := 0; i < len(dst.entries); i++ {
		var short string
		extended := true
		for extended {
			extended = false
			if len(dst.entries[i].val) <= l {
				short = dst.entries[i].digest.String()
			} else {
				short = dst.entries[i].val[:l]
				for j := i + 1; j < len(dst.entries); j++ {
					if checkShortMatch(dst.entries[j].alg, dst.entries[j].val, "", short) {
						if j > resetIdx {
							resetIdx = j
						}
						extended = true
					} else {
						break
					}
				}
				if extended {
					l++
				}
			}
		}
		m[dst.entries[i].digest] = short
		if i >= resetIdx {
			l = length
		}
	}
	return m
}

type digestEntry struct {
	alg    Algorithm
	val    string
	digest Digest
}

type digestEntries []*digestEntry

func (d digestEntries) Len() int {
	return len(d)
}

func (d digestEntries) Less(i, j int) bool {
	if d[i].val != d[j].val {
		return d[i].val < d[j].val
	}
	return d[i].alg < d[j].alg
}

func (d digestEntries) Swap(i, j int) {
	d[i], d[j] = d[j], d[i]
}
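For orientation, a minimal sketch (not part of the vendored diff, and assuming the pre-move import path github.com/docker/distribution/digest) of how the Set API above is used; the digest literals are arbitrary test values:

package main

import (
	"fmt"

	"github.com/docker/distribution/digest"
)

func main() {
	dset := digest.NewSet()
	// Add validates each digest and keeps entries sorted by hex value,
	// which is what lets Lookup binary-search on prefixes.
	for _, d := range []digest.Digest{
		"sha256:1234511111111111111111111111111111111111111111111111111111111111",
		"sha256:5432111111111111111111111111111111111111111111111111111111111111",
	} {
		if err := dset.Add(d); err != nil {
			panic(err)
		}
	}
	// An unambiguous hex prefix resolves to the full digest.
	d, err := dset.Lookup("54")
	fmt.Println(d, err)
	// ShortCodeTable computes the shortest unique prefix (>= 2 here) per digest.
	fmt.Println(digest.ShortCodeTable(dset, 2))
}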
368
vendor/github.com/docker/distribution/digest/set_test.go
generated
vendored
@@ -1,368 +0,0 @@
package digest

import (
	"crypto/sha256"
	"encoding/binary"
	"math/rand"
	"testing"
)

func assertEqualDigests(t *testing.T, d1, d2 Digest) {
	if d1 != d2 {
		t.Fatalf("Digests do not match:\n\tActual: %s\n\tExpected: %s", d1, d2)
	}
}

func TestLookup(t *testing.T) {
	digests := []Digest{
		"sha256:1234511111111111111111111111111111111111111111111111111111111111",
		"sha256:1234111111111111111111111111111111111111111111111111111111111111",
		"sha256:1234611111111111111111111111111111111111111111111111111111111111",
		"sha256:5432111111111111111111111111111111111111111111111111111111111111",
		"sha256:6543111111111111111111111111111111111111111111111111111111111111",
		"sha256:6432111111111111111111111111111111111111111111111111111111111111",
		"sha256:6542111111111111111111111111111111111111111111111111111111111111",
		"sha256:6532111111111111111111111111111111111111111111111111111111111111",
	}

	dset := NewSet()
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			t.Fatal(err)
		}
	}

	dgst, err := dset.Lookup("54")
	if err != nil {
		t.Fatal(err)
	}
	assertEqualDigests(t, dgst, digests[3])

	dgst, err = dset.Lookup("1234")
	if err == nil {
		t.Fatal("Expected ambiguous error looking up: 1234")
	}
	if err != ErrDigestAmbiguous {
		t.Fatal(err)
	}

	dgst, err = dset.Lookup("9876")
	if err == nil {
		t.Fatal("Expected not found error looking up: 9876")
	}
	if err != ErrDigestNotFound {
		t.Fatal(err)
	}

	dgst, err = dset.Lookup("sha256:1234")
	if err == nil {
		t.Fatal("Expected ambiguous error looking up: sha256:1234")
	}
	if err != ErrDigestAmbiguous {
		t.Fatal(err)
	}

	dgst, err = dset.Lookup("sha256:12345")
	if err != nil {
		t.Fatal(err)
	}
	assertEqualDigests(t, dgst, digests[0])

	dgst, err = dset.Lookup("sha256:12346")
	if err != nil {
		t.Fatal(err)
	}
	assertEqualDigests(t, dgst, digests[2])

	dgst, err = dset.Lookup("12346")
	if err != nil {
		t.Fatal(err)
	}
	assertEqualDigests(t, dgst, digests[2])

	dgst, err = dset.Lookup("12345")
	if err != nil {
		t.Fatal(err)
	}
	assertEqualDigests(t, dgst, digests[0])
}

func TestAddDuplication(t *testing.T) {
	digests := []Digest{
		"sha256:1234111111111111111111111111111111111111111111111111111111111111",
		"sha256:1234511111111111111111111111111111111111111111111111111111111111",
		"sha256:1234611111111111111111111111111111111111111111111111111111111111",
		"sha256:5432111111111111111111111111111111111111111111111111111111111111",
		"sha256:6543111111111111111111111111111111111111111111111111111111111111",
		"sha512:65431111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111",
		"sha512:65421111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111",
		"sha512:65321111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111",
	}

	dset := NewSet()
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			t.Fatal(err)
		}
	}

	if len(dset.entries) != 8 {
		t.Fatal("Invalid dset size")
	}

	if err := dset.Add(Digest("sha256:1234511111111111111111111111111111111111111111111111111111111111")); err != nil {
		t.Fatal(err)
	}

	if len(dset.entries) != 8 {
		t.Fatal("Duplicate digest insert allowed")
	}

	if err := dset.Add(Digest("sha384:123451111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111")); err != nil {
		t.Fatal(err)
	}

	if len(dset.entries) != 9 {
		t.Fatal("Insert with different algorithm not allowed")
	}
}

func TestRemove(t *testing.T) {
	digests, err := createDigests(10)
	if err != nil {
		t.Fatal(err)
	}

	dset := NewSet()
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			t.Fatal(err)
		}
	}

	dgst, err := dset.Lookup(digests[0].String())
	if err != nil {
		t.Fatal(err)
	}
	if dgst != digests[0] {
		t.Fatalf("Unexpected digest value:\n\tExpected: %s\n\tActual: %s", digests[0], dgst)
	}

	if err := dset.Remove(digests[0]); err != nil {
		t.Fatal(err)
	}

	if _, err := dset.Lookup(digests[0].String()); err != ErrDigestNotFound {
		t.Fatalf("Expected error %v when looking up removed digest, got %v", ErrDigestNotFound, err)
	}
}

func TestAll(t *testing.T) {
	digests, err := createDigests(100)
	if err != nil {
		t.Fatal(err)
	}

	dset := NewSet()
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			t.Fatal(err)
		}
	}

	all := map[Digest]struct{}{}
	for _, dgst := range dset.All() {
		all[dgst] = struct{}{}
	}

	if len(all) != len(digests) {
		t.Fatalf("Unexpected number of unique digests found:\n\tExpected: %d\n\tActual: %d", len(digests), len(all))
	}

	for i, dgst := range digests {
		if _, ok := all[dgst]; !ok {
			t.Fatalf("Missing element at position %d: %s", i, dgst)
		}
	}

}

func assertEqualShort(t *testing.T, actual, expected string) {
	if actual != expected {
		t.Fatalf("Unexpected short value:\n\tExpected: %s\n\tActual: %s", expected, actual)
	}
}

func TestShortCodeTable(t *testing.T) {
	digests := []Digest{
		"sha256:1234111111111111111111111111111111111111111111111111111111111111",
		"sha256:1234511111111111111111111111111111111111111111111111111111111111",
		"sha256:1234611111111111111111111111111111111111111111111111111111111111",
		"sha256:5432111111111111111111111111111111111111111111111111111111111111",
		"sha256:6543111111111111111111111111111111111111111111111111111111111111",
		"sha256:6432111111111111111111111111111111111111111111111111111111111111",
		"sha256:6542111111111111111111111111111111111111111111111111111111111111",
		"sha256:6532111111111111111111111111111111111111111111111111111111111111",
	}

	dset := NewSet()
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			t.Fatal(err)
		}
	}

	dump := ShortCodeTable(dset, 2)

	if len(dump) < len(digests) {
		t.Fatalf("Error unexpected size: %d, expecting %d", len(dump), len(digests))
	}
	assertEqualShort(t, dump[digests[0]], "12341")
	assertEqualShort(t, dump[digests[1]], "12345")
	assertEqualShort(t, dump[digests[2]], "12346")
	assertEqualShort(t, dump[digests[3]], "54")
	assertEqualShort(t, dump[digests[4]], "6543")
	assertEqualShort(t, dump[digests[5]], "64")
	assertEqualShort(t, dump[digests[6]], "6542")
	assertEqualShort(t, dump[digests[7]], "653")
}

func createDigests(count int) ([]Digest, error) {
	r := rand.New(rand.NewSource(25823))
	digests := make([]Digest, count)
	for i := range digests {
		h := sha256.New()
		if err := binary.Write(h, binary.BigEndian, r.Int63()); err != nil {
			return nil, err
		}
		digests[i] = NewDigest("sha256", h)
	}
	return digests, nil
}

func benchAddNTable(b *testing.B, n int) {
	digests, err := createDigests(n)
	if err != nil {
		b.Fatal(err)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		dset := &Set{entries: digestEntries(make([]*digestEntry, 0, n))}
		for j := range digests {
			if err = dset.Add(digests[j]); err != nil {
				b.Fatal(err)
			}
		}
	}
}

func benchLookupNTable(b *testing.B, n int, shortLen int) {
	digests, err := createDigests(n)
	if err != nil {
		b.Fatal(err)
	}
	dset := &Set{entries: digestEntries(make([]*digestEntry, 0, n))}
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			b.Fatal(err)
		}
	}
	shorts := make([]string, 0, n)
	for _, short := range ShortCodeTable(dset, shortLen) {
		shorts = append(shorts, short)
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		if _, err = dset.Lookup(shorts[i%n]); err != nil {
			b.Fatal(err)
		}
	}
}

func benchRemoveNTable(b *testing.B, n int) {
	digests, err := createDigests(n)
	if err != nil {
		b.Fatal(err)
	}
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		dset := &Set{entries: digestEntries(make([]*digestEntry, 0, n))}
		b.StopTimer()
		for j := range digests {
			if err = dset.Add(digests[j]); err != nil {
				b.Fatal(err)
			}
		}
		b.StartTimer()
		for j := range digests {
			if err = dset.Remove(digests[j]); err != nil {
				b.Fatal(err)
			}
		}
	}
}

func benchShortCodeNTable(b *testing.B, n int, shortLen int) {
	digests, err := createDigests(n)
	if err != nil {
		b.Fatal(err)
	}
	dset := &Set{entries: digestEntries(make([]*digestEntry, 0, n))}
	for i := range digests {
		if err := dset.Add(digests[i]); err != nil {
			b.Fatal(err)
		}
	}

	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		ShortCodeTable(dset, shortLen)
	}
}

func BenchmarkAdd10(b *testing.B) {
	benchAddNTable(b, 10)
}

func BenchmarkAdd100(b *testing.B) {
	benchAddNTable(b, 100)
}

func BenchmarkAdd1000(b *testing.B) {
	benchAddNTable(b, 1000)
}

func BenchmarkRemove10(b *testing.B) {
	benchRemoveNTable(b, 10)
}

func BenchmarkRemove100(b *testing.B) {
	benchRemoveNTable(b, 100)
}

func BenchmarkRemove1000(b *testing.B) {
	benchRemoveNTable(b, 1000)
}

func BenchmarkLookup10(b *testing.B) {
	benchLookupNTable(b, 10, 12)
}

func BenchmarkLookup100(b *testing.B) {
	benchLookupNTable(b, 100, 12)
}

func BenchmarkLookup1000(b *testing.B) {
	benchLookupNTable(b, 1000, 12)
}

func BenchmarkShortCode10(b *testing.B) {
	benchShortCodeNTable(b, 10, 12)
}
func BenchmarkShortCode100(b *testing.B) {
	benchShortCodeNTable(b, 100, 12)
}
func BenchmarkShortCode1000(b *testing.B) {
	benchShortCodeNTable(b, 1000, 12)
}
44
vendor/github.com/docker/distribution/digest/verifiers.go
generated
vendored
@@ -1,44 +0,0 @@
package digest

import (
	"hash"
	"io"
)

// Verifier presents a general verification interface to be used with message
// digests and other byte stream verifications. Users instantiate a Verifier
// from one of the various methods, write the data under test to it then check
// the result with the Verified method.
type Verifier interface {
	io.Writer

	// Verified will return true if the content written to Verifier matches
	// the digest.
	Verified() bool
}

// NewDigestVerifier returns a verifier that compares the written bytes
// against a passed in digest.
func NewDigestVerifier(d Digest) (Verifier, error) {
	if err := d.Validate(); err != nil {
		return nil, err
	}

	return hashVerifier{
		hash:   d.Algorithm().Hash(),
		digest: d,
	}, nil
}

type hashVerifier struct {
	digest Digest
	hash   hash.Hash
}

func (hv hashVerifier) Write(p []byte) (n int, err error) {
	return hv.hash.Write(p)
}

func (hv hashVerifier) Verified() bool {
	return hv.digest == NewDigest(hv.digest.Algorithm(), hv.hash)
}
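A hedged sketch of the write-then-check flow the Verifier doc comment describes (not part of the diff; assumes the same pre-move import path, and the helper name verifyContent is illustrative):

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/docker/distribution/digest"
)

// verifyContent streams content through a Verifier and checks it against d.
func verifyContent(d digest.Digest, r io.Reader) error {
	v, err := digest.NewDigestVerifier(d)
	if err != nil {
		return err // e.g. ErrDigestUnsupported for an unknown algorithm
	}
	if _, err := io.Copy(v, r); err != nil {
		return err
	}
	if !v.Verified() {
		return fmt.Errorf("content does not match digest %s", d)
	}
	return nil
}

func main() {
	p := []byte("some blob content")
	fmt.Println(verifyContent(digest.FromBytes(p), bytes.NewReader(p)))
}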
49
vendor/github.com/docker/distribution/digest/verifiers_test.go
generated
vendored
@@ -1,49 +0,0 @@
package digest

import (
	"bytes"
	"crypto/rand"
	"io"
	"testing"
)

func TestDigestVerifier(t *testing.T) {
	p := make([]byte, 1<<20)
	rand.Read(p)
	digest := FromBytes(p)

	verifier, err := NewDigestVerifier(digest)
	if err != nil {
		t.Fatalf("unexpected error getting digest verifier: %s", err)
	}

	io.Copy(verifier, bytes.NewReader(p))

	if !verifier.Verified() {
		t.Fatalf("bytes not verified")
	}
}

// TestVerifierUnsupportedDigest ensures that unsupported digest validation is
// flowing through verifier creation.
func TestVerifierUnsupportedDigest(t *testing.T) {
	unsupported := Digest("bean:0123456789abcdef")

	_, err := NewDigestVerifier(unsupported)
	if err == nil {
		t.Fatalf("expected error when creating verifier")
	}

	if err != ErrDigestUnsupported {
		t.Fatalf("incorrect error for unsupported digest: %v", err)
	}
}

// TODO(stevvooe): Add benchmarks to measure bytes/second throughput for
// DigestVerifier.
//
// The relevant benchmark for comparison can be run with the following
// commands:
//
// 	go test -bench . crypto/sha1
//
7
vendor/github.com/docker/distribution/doc.go
generated
vendored
@@ -1,7 +0,0 @@
// Package distribution will define the interfaces for the components of
// docker distribution. The goal is to allow users to reliably package, ship
// and store content related to docker images.
//
// This is currently a work in progress. More details are available in the
// README.md.
package distribution
115
vendor/github.com/docker/distribution/errors.go
generated
vendored
@@ -1,115 +0,0 @@
package distribution

import (
	"errors"
	"fmt"
	"strings"

	"github.com/docker/distribution/digest"
)

// ErrAccessDenied is returned when an access to a requested resource is
// denied.
var ErrAccessDenied = errors.New("access denied")

// ErrManifestNotModified is returned when a conditional manifest GetByTag
// returns nil due to the client indicating it has the latest version.
var ErrManifestNotModified = errors.New("manifest not modified")

// ErrUnsupported is returned when an unimplemented or unsupported action is
// performed.
var ErrUnsupported = errors.New("operation unsupported")

// ErrTagUnknown is returned if the given tag is not known by the tag service.
type ErrTagUnknown struct {
	Tag string
}

func (err ErrTagUnknown) Error() string {
	return fmt.Sprintf("unknown tag=%s", err.Tag)
}

// ErrRepositoryUnknown is returned if the named repository is not known by
// the registry.
type ErrRepositoryUnknown struct {
	Name string
}

func (err ErrRepositoryUnknown) Error() string {
	return fmt.Sprintf("unknown repository name=%s", err.Name)
}

// ErrRepositoryNameInvalid should be used to denote an invalid repository
// name. Reason may be set, indicating the cause of invalidity.
type ErrRepositoryNameInvalid struct {
	Name   string
	Reason error
}

func (err ErrRepositoryNameInvalid) Error() string {
	return fmt.Sprintf("repository name %q invalid: %v", err.Name, err.Reason)
}

// ErrManifestUnknown is returned if the manifest is not known by the
// registry.
type ErrManifestUnknown struct {
	Name string
	Tag  string
}

func (err ErrManifestUnknown) Error() string {
	return fmt.Sprintf("unknown manifest name=%s tag=%s", err.Name, err.Tag)
}

// ErrManifestUnknownRevision is returned when a manifest cannot be found by
// revision within a repository.
type ErrManifestUnknownRevision struct {
	Name     string
	Revision digest.Digest
}

func (err ErrManifestUnknownRevision) Error() string {
	return fmt.Sprintf("unknown manifest name=%s revision=%s", err.Name, err.Revision)
}

// ErrManifestUnverified is returned when the registry is unable to verify
// the manifest.
type ErrManifestUnverified struct{}

func (ErrManifestUnverified) Error() string {
	return fmt.Sprintf("unverified manifest")
}

// ErrManifestVerification provides a type to collect errors encountered
// during manifest verification. Currently, it accepts errors of all types,
// but it may be narrowed to those involving manifest verification.
type ErrManifestVerification []error

func (errs ErrManifestVerification) Error() string {
	var parts []string
	for _, err := range errs {
		parts = append(parts, err.Error())
	}

	return fmt.Sprintf("errors verifying manifest: %v", strings.Join(parts, ","))
}

// ErrManifestBlobUnknown is returned when a referenced blob cannot be found.
type ErrManifestBlobUnknown struct {
	Digest digest.Digest
}

func (err ErrManifestBlobUnknown) Error() string {
	return fmt.Sprintf("unknown blob %v on manifest", err.Digest)
}

// ErrManifestNameInvalid should be used to denote an invalid manifest
// name. Reason may be set, indicating the cause of invalidity.
type ErrManifestNameInvalid struct {
	Name   string
	Reason error
}

func (err ErrManifestNameInvalid) Error() string {
	return fmt.Sprintf("manifest name %q invalid: %v", err.Name, err.Reason)
}
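Because the registry reports failures through these concrete error types rather than sentinel strings, callers typically dispatch with a type switch. A minimal sketch (not part of the diff; the describe helper is illustrative):

package main

import (
	"fmt"

	"github.com/docker/distribution"
)

// describe maps the registry's typed errors to user-facing messages.
func describe(err error) string {
	switch e := err.(type) {
	case distribution.ErrRepositoryUnknown:
		return fmt.Sprintf("repository %s does not exist", e.Name)
	case distribution.ErrManifestUnknown:
		return fmt.Sprintf("no manifest %s:%s", e.Name, e.Tag)
	default:
		return err.Error()
	}
}

func main() {
	err := distribution.ErrManifestUnknown{Name: "library/busybox", Tag: "latest"}
	fmt.Println(describe(err))
}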
125
vendor/github.com/docker/distribution/manifests.go
generated
vendored
@@ -1,125 +0,0 @@
package distribution

import (
	"fmt"
	"mime"

	"github.com/docker/distribution/context"
	"github.com/docker/distribution/digest"
)

// Manifest represents a registry object specifying a set of
// references and an optional target.
type Manifest interface {
	// References returns a list of objects which make up this manifest.
	// A reference is anything which can be represented by a
	// distribution.Descriptor. These can consist of layers, resources or other
	// manifests.
	//
	// While no particular order is required, implementations should return
	// them from highest to lowest priority. For example, one might want to
	// return the base layer before the top layer.
	References() []Descriptor

	// Payload provides the serialized format of the manifest, in addition to
	// the mediatype.
	Payload() (mediatype string, payload []byte, err error)
}

// ManifestBuilder creates a manifest allowing one to include dependencies.
// Instances can be obtained from a version-specific manifest package. Manifest
// specific data is passed into the function which creates the builder.
type ManifestBuilder interface {
	// Build creates the manifest from this builder.
	Build(ctx context.Context) (Manifest, error)

	// References returns a list of objects which have been added to this
	// builder. The dependencies are returned in the order they were added,
	// which should be from base to head.
	References() []Descriptor

	// AppendReference includes the given object in the manifest after any
	// existing dependencies. If the add fails, such as when adding an
	// unsupported dependency, an error may be returned.
	//
	// The destination of the reference is dependent on the manifest type and
	// the dependency type.
	AppendReference(dependency Describable) error
}

// ManifestService describes operations on image manifests.
type ManifestService interface {
	// Exists returns true if the manifest exists.
	Exists(ctx context.Context, dgst digest.Digest) (bool, error)

	// Get retrieves the manifest specified by the given digest.
	Get(ctx context.Context, dgst digest.Digest, options ...ManifestServiceOption) (Manifest, error)

	// Put creates or updates the given manifest returning the manifest digest.
	Put(ctx context.Context, manifest Manifest, options ...ManifestServiceOption) (digest.Digest, error)

	// Delete removes the manifest specified by the given digest. Deleting
	// a manifest that doesn't exist will return ErrManifestNotFound.
	Delete(ctx context.Context, dgst digest.Digest) error
}

// ManifestEnumerator enables iterating over manifests.
type ManifestEnumerator interface {
	// Enumerate calls ingester for each manifest.
	Enumerate(ctx context.Context, ingester func(digest.Digest) error) error
}

// Describable is an interface for descriptors.
type Describable interface {
	Descriptor() Descriptor
}

// ManifestMediaTypes returns the supported media types for manifests.
func ManifestMediaTypes() (mediaTypes []string) {
	for t := range mappings {
		if t != "" {
			mediaTypes = append(mediaTypes, t)
		}
	}
	return
}

// UnmarshalFunc implements manifest unmarshalling for a given MediaType.
type UnmarshalFunc func([]byte) (Manifest, Descriptor, error)

var mappings = make(map[string]UnmarshalFunc, 0)

// UnmarshalManifest looks up manifest unmarshal functions based on
// MediaType.
func UnmarshalManifest(ctHeader string, p []byte) (Manifest, Descriptor, error) {
	// Need to look up by the actual media type, not the raw contents of
	// the header. Strip semicolons and anything following them.
	var mediatype string
	if ctHeader != "" {
		var err error
		mediatype, _, err = mime.ParseMediaType(ctHeader)
		if err != nil {
			return nil, Descriptor{}, err
		}
	}

	unmarshalFunc, ok := mappings[mediatype]
	if !ok {
		unmarshalFunc, ok = mappings[""]
		if !ok {
			return nil, Descriptor{}, fmt.Errorf("unsupported manifest mediatype and no default available: %s", mediatype)
		}
	}

	return unmarshalFunc(p)
}

// RegisterManifestSchema registers an UnmarshalFunc for a given schema type. This
// should be called from specific manifest packages.
func RegisterManifestSchema(mediatype string, u UnmarshalFunc) error {
	if _, ok := mappings[mediatype]; ok {
		return fmt.Errorf("manifest mediatype registration would overwrite existing: %s", mediatype)
	}
	mappings[mediatype] = u
	return nil
}
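A hedged sketch of the registration-and-dispatch pattern above: version-specific packages register an UnmarshalFunc at init, and UnmarshalManifest routes on the (parameter-stripped) Content-Type header. Not part of the diff; stubManifest and its media type are hypothetical, used only so the example is self-contained:

package main

import (
	"fmt"

	"github.com/docker/distribution"
)

// stubManifest is a hypothetical minimal Manifest used only for illustration.
type stubManifest struct{ payload []byte }

func (m stubManifest) References() []distribution.Descriptor { return nil }
func (m stubManifest) Payload() (string, []byte, error) {
	return "application/vnd.example+json", m.payload, nil
}

func init() {
	// Version-specific packages register their parser once at startup.
	err := distribution.RegisterManifestSchema("application/vnd.example+json",
		func(p []byte) (distribution.Manifest, distribution.Descriptor, error) {
			return stubManifest{p}, distribution.Descriptor{}, nil
		})
	if err != nil {
		panic(err)
	}
}

func main() {
	// UnmarshalManifest strips parameters from the Content-Type header
	// before looking up the registered UnmarshalFunc.
	m, _, err := distribution.UnmarshalManifest("application/vnd.example+json; charset=utf-8", []byte(`{}`))
	fmt.Println(m, err)
}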
370
vendor/github.com/docker/distribution/reference/reference.go
generated
vendored
@@ -1,370 +0,0 @@
// Package reference provides a general type to represent any way of referencing images within the registry.
// Its main purpose is to abstract tags and digests (content-addressable hash).
//
// Grammar
//
// 	reference                       := name [ ":" tag ] [ "@" digest ]
//	name                            := [hostname '/'] component ['/' component]*
//	hostname                        := hostcomponent ['.' hostcomponent]* [':' port-number]
//	hostcomponent                   := /([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])/
//	port-number                     := /[0-9]+/
//	component                       := alpha-numeric [separator alpha-numeric]*
// 	alpha-numeric                   := /[a-z0-9]+/
//	separator                       := /[_.]|__|[-]*/
//
//	tag                             := /[\w][\w.-]{0,127}/
//
//	digest                          := digest-algorithm ":" digest-hex
//	digest-algorithm                := digest-algorithm-component [ digest-algorithm-separator digest-algorithm-component ]
//	digest-algorithm-separator      := /[+.-_]/
//	digest-algorithm-component      := /[A-Za-z][A-Za-z0-9]*/
//	digest-hex                      := /[0-9a-fA-F]{32,}/ ; At least 128 bit digest value
package reference

import (
	"errors"
	"fmt"
	"path"
	"strings"

	"github.com/docker/distribution/digest"
)

const (
	// NameTotalLengthMax is the maximum total number of characters in a repository name.
	NameTotalLengthMax = 255
)

var (
	// ErrReferenceInvalidFormat represents an error while trying to parse a string as a reference.
	ErrReferenceInvalidFormat = errors.New("invalid reference format")

	// ErrTagInvalidFormat represents an error while trying to parse a string as a tag.
	ErrTagInvalidFormat = errors.New("invalid tag format")

	// ErrDigestInvalidFormat represents an error while trying to parse a string as a digest.
	ErrDigestInvalidFormat = errors.New("invalid digest format")

	// ErrNameContainsUppercase is returned for invalid repository names that contain uppercase characters.
	ErrNameContainsUppercase = errors.New("repository name must be lowercase")

	// ErrNameEmpty is returned for empty, invalid repository names.
	ErrNameEmpty = errors.New("repository name must have at least one component")

	// ErrNameTooLong is returned when a repository name is longer than NameTotalLengthMax.
	ErrNameTooLong = fmt.Errorf("repository name must not be more than %v characters", NameTotalLengthMax)
)

// Reference is an opaque object reference identifier that may include
// modifiers such as a hostname, name, tag, and digest.
type Reference interface {
	// String returns the full reference
	String() string
}

// Field provides a wrapper type for resolving correct reference types when
// working with encoding.
type Field struct {
	reference Reference
}

// AsField wraps a reference in a Field for encoding.
func AsField(reference Reference) Field {
	return Field{reference}
}

// Reference unwraps the reference type from the field to
// return the Reference object. This object should be
// of the appropriate type to further check for different
// reference types.
func (f Field) Reference() Reference {
	return f.reference
}

// MarshalText serializes the field to byte text which
// is the string of the reference.
func (f Field) MarshalText() (p []byte, err error) {
	return []byte(f.reference.String()), nil
}

// UnmarshalText parses text bytes by invoking the
// reference parser to ensure the appropriately
// typed reference object is wrapped by field.
func (f *Field) UnmarshalText(p []byte) error {
	r, err := Parse(string(p))
	if err != nil {
		return err
	}

	f.reference = r
	return nil
}

// Named is an object with a full name
type Named interface {
	Reference
	Name() string
}

// Tagged is an object which has a tag
type Tagged interface {
	Reference
	Tag() string
}

// NamedTagged is an object including a name and tag.
type NamedTagged interface {
	Named
	Tag() string
}

// Digested is an object which has a digest
// in which it can be referenced by
type Digested interface {
	Reference
	Digest() digest.Digest
}

// Canonical reference is an object with a fully unique
// name including a name with hostname and digest
type Canonical interface {
	Named
	Digest() digest.Digest
}

// SplitHostname splits a named reference into a
// hostname and name string. If no valid hostname is
// found, the hostname is empty and the full value
// is returned as name
func SplitHostname(named Named) (string, string) {
	name := named.Name()
	match := anchoredNameRegexp.FindStringSubmatch(name)
	if len(match) != 3 {
		return "", name
	}
	return match[1], match[2]
}

// Parse parses s and returns a syntactically valid Reference.
// If an error was encountered it is returned, along with a nil Reference.
// NOTE: Parse will not handle short digests.
func Parse(s string) (Reference, error) {
	matches := ReferenceRegexp.FindStringSubmatch(s)
	if matches == nil {
		if s == "" {
			return nil, ErrNameEmpty
		}
		if ReferenceRegexp.FindStringSubmatch(strings.ToLower(s)) != nil {
			return nil, ErrNameContainsUppercase
		}
		return nil, ErrReferenceInvalidFormat
	}

	if len(matches[1]) > NameTotalLengthMax {
		return nil, ErrNameTooLong
	}

	ref := reference{
		name: matches[1],
		tag:  matches[2],
	}
	if matches[3] != "" {
		var err error
		ref.digest, err = digest.ParseDigest(matches[3])
		if err != nil {
			return nil, err
		}
	}

	r := getBestReferenceType(ref)
	if r == nil {
		return nil, ErrNameEmpty
	}

	return r, nil
}

// ParseNamed parses s and returns a syntactically valid reference implementing
// the Named interface. The reference must have a name, otherwise an error is
// returned.
// If an error was encountered it is returned, along with a nil Reference.
// NOTE: ParseNamed will not handle short digests.
func ParseNamed(s string) (Named, error) {
	ref, err := Parse(s)
	if err != nil {
		return nil, err
	}
	named, isNamed := ref.(Named)
	if !isNamed {
		return nil, fmt.Errorf("reference %s has no name", ref.String())
	}
	return named, nil
}

// WithName returns a named object representing the given string. If the input
// is invalid ErrReferenceInvalidFormat will be returned.
func WithName(name string) (Named, error) {
	if len(name) > NameTotalLengthMax {
		return nil, ErrNameTooLong
	}
	if !anchoredNameRegexp.MatchString(name) {
		return nil, ErrReferenceInvalidFormat
	}
	return repository(name), nil
}

// WithTag combines the name from "name" and the tag from "tag" to form a
// reference incorporating both the name and the tag.
func WithTag(name Named, tag string) (NamedTagged, error) {
	if !anchoredTagRegexp.MatchString(tag) {
		return nil, ErrTagInvalidFormat
	}
	if canonical, ok := name.(Canonical); ok {
		return reference{
			name:   name.Name(),
			tag:    tag,
			digest: canonical.Digest(),
		}, nil
	}
	return taggedReference{
		name: name.Name(),
		tag:  tag,
	}, nil
}

// WithDigest combines the name from "name" and the digest from "digest" to form
// a reference incorporating both the name and the digest.
func WithDigest(name Named, digest digest.Digest) (Canonical, error) {
	if !anchoredDigestRegexp.MatchString(digest.String()) {
		return nil, ErrDigestInvalidFormat
	}
	if tagged, ok := name.(Tagged); ok {
		return reference{
			name:   name.Name(),
			tag:    tagged.Tag(),
			digest: digest,
		}, nil
	}
	return canonicalReference{
		name:   name.Name(),
		digest: digest,
	}, nil
}

// Match reports whether ref matches the specified pattern.
// See https://godoc.org/path#Match for supported patterns.
func Match(pattern string, ref Reference) (bool, error) {
	matched, err := path.Match(pattern, ref.String())
	if namedRef, isNamed := ref.(Named); isNamed && !matched {
		matched, _ = path.Match(pattern, namedRef.Name())
	}
	return matched, err
}

// TrimNamed removes any tag or digest from the named reference.
func TrimNamed(ref Named) Named {
	return repository(ref.Name())
}

func getBestReferenceType(ref reference) Reference {
	if ref.name == "" {
		// Allow digest only references
		if ref.digest != "" {
			return digestReference(ref.digest)
		}
		return nil
	}
	if ref.tag == "" {
		if ref.digest != "" {
			return canonicalReference{
				name:   ref.name,
				digest: ref.digest,
			}
		}
		return repository(ref.name)
	}
	if ref.digest == "" {
		return taggedReference{
			name: ref.name,
			tag:  ref.tag,
		}
	}

	return ref
}

type reference struct {
	name   string
	tag    string
	digest digest.Digest
}

func (r reference) String() string {
	return r.name + ":" + r.tag + "@" + r.digest.String()
}

func (r reference) Name() string {
	return r.name
}

func (r reference) Tag() string {
	return r.tag
}

func (r reference) Digest() digest.Digest {
	return r.digest
}

type repository string

func (r repository) String() string {
	return string(r)
}

func (r repository) Name() string {
	return string(r)
}

type digestReference digest.Digest

func (d digestReference) String() string {
	// Note: returning d.String() here would recurse infinitely; convert to
	// the underlying string instead.
	return string(d)
}

func (d digestReference) Digest() digest.Digest {
	return digest.Digest(d)
}

type taggedReference struct {
	name string
	tag  string
}

func (t taggedReference) String() string {
	return t.name + ":" + t.tag
}

func (t taggedReference) Name() string {
	return t.name
}

func (t taggedReference) Tag() string {
	return t.tag
}

type canonicalReference struct {
	name   string
	digest digest.Digest
}

func (c canonicalReference) String() string {
	return c.name + "@" + c.digest.String()
}

func (c canonicalReference) Name() string {
	return c.name
}

func (c canonicalReference) Digest() digest.Digest {
	return c.digest
}
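A minimal sketch of the Parse/SplitHostname flow above (not part of the diff; the input string mirrors a case from the test file that follows):

package main

import (
	"fmt"

	"github.com/docker/distribution/reference"
)

func main() {
	// Parse returns the most specific type: this input yields a reference
	// that is both Named and Tagged.
	ref, err := reference.Parse("test:5000/repo:tag")
	if err != nil {
		panic(err)
	}
	if named, ok := ref.(reference.Named); ok {
		hostname, name := reference.SplitHostname(named)
		fmt.Println(hostname, name) // test:5000 repo
	}
	if tagged, ok := ref.(reference.Tagged); ok {
		fmt.Println(tagged.Tag()) // tag
	}
}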
661
vendor/github.com/docker/distribution/reference/reference_test.go
generated
vendored
@@ -1,661 +0,0 @@
package reference

import (
	"encoding/json"
	"strconv"
	"strings"
	"testing"

	"github.com/docker/distribution/digest"
)

func TestReferenceParse(t *testing.T) {
	// referenceTestcases is a unified set of testcases for
	// testing the parsing of references
	referenceTestcases := []struct {
		// input is the repository name or name component testcase
		input string
		// err is the error expected from Parse, or nil
		err error
		// repository is the string representation for the reference
		repository string
		// hostname is the hostname expected in the reference
		hostname string
		// tag is the tag for the reference
		tag string
		// digest is the digest for the reference (enforces digest reference)
		digest string
	}{
		{
			input: "test_com",
			repository: "test_com",
		},
		{
			input: "test.com:tag",
			repository: "test.com",
			tag: "tag",
		},
		{
			input: "test.com:5000",
			repository: "test.com",
			tag: "5000",
		},
		{
			input: "test.com/repo:tag",
			hostname: "test.com",
			repository: "test.com/repo",
			tag: "tag",
		},
		{
			input: "test:5000/repo",
			hostname: "test:5000",
			repository: "test:5000/repo",
		},
		{
			input: "test:5000/repo:tag",
			hostname: "test:5000",
			repository: "test:5000/repo",
			tag: "tag",
		},
		{
			input: "test:5000/repo@sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
			hostname: "test:5000",
			repository: "test:5000/repo",
			digest: "sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
		},
		{
			input: "test:5000/repo:tag@sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
			hostname: "test:5000",
			repository: "test:5000/repo",
			tag: "tag",
			digest: "sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
		},
		{
			input: "test:5000/repo",
			hostname: "test:5000",
			repository: "test:5000/repo",
		},
		{
			input: "",
			err: ErrNameEmpty,
		},
		{
			input: ":justtag",
			err: ErrReferenceInvalidFormat,
		},
		{
			input: "@sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
			err: ErrReferenceInvalidFormat,
		},
		{
			input: "repo@sha256:ffffffffffffffffffffffffffffffffff",
			err: digest.ErrDigestInvalidLength,
		},
		{
			input: "validname@invaliddigest:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
			err: digest.ErrDigestUnsupported,
		},
		{
			input: "Uppercase:tag",
			err: ErrNameContainsUppercase,
		},
		// FIXME "Uppercase" is incorrectly handled as a domain-name here, therefore passes.
		// See https://github.com/docker/distribution/pull/1778, and https://github.com/docker/docker/pull/20175
		//{
		//	input: "Uppercase/lowercase:tag",
		//	err: ErrNameContainsUppercase,
		//},
		{
			input: "test:5000/Uppercase/lowercase:tag",
			err: ErrNameContainsUppercase,
		},
		{
			input: "lowercase:Uppercase",
			repository: "lowercase",
			tag: "Uppercase",
		},
		{
			input: strings.Repeat("a/", 128) + "a:tag",
			err: ErrNameTooLong,
		},
		{
			input: strings.Repeat("a/", 127) + "a:tag-puts-this-over-max",
			hostname: "a",
			repository: strings.Repeat("a/", 127) + "a",
			tag: "tag-puts-this-over-max",
		},
		{
			input: "aa/asdf$$^/aa",
			err: ErrReferenceInvalidFormat,
		},
		{
			input: "sub-dom1.foo.com/bar/baz/quux",
			hostname: "sub-dom1.foo.com",
			repository: "sub-dom1.foo.com/bar/baz/quux",
		},
		{
			input: "sub-dom1.foo.com/bar/baz/quux:some-long-tag",
			hostname: "sub-dom1.foo.com",
			repository: "sub-dom1.foo.com/bar/baz/quux",
			tag: "some-long-tag",
		},
		{
			input: "b.gcr.io/test.example.com/my-app:test.example.com",
			hostname: "b.gcr.io",
			repository: "b.gcr.io/test.example.com/my-app",
			tag: "test.example.com",
		},
		{
			input: "xn--n3h.com/myimage:xn--n3h.com", // ☃.com in punycode
			hostname: "xn--n3h.com",
			repository: "xn--n3h.com/myimage",
			tag: "xn--n3h.com",
		},
		{
			input: "xn--7o8h.com/myimage:xn--7o8h.com@sha512:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", // 🐳.com in punycode
			hostname: "xn--7o8h.com",
			repository: "xn--7o8h.com/myimage",
			tag: "xn--7o8h.com",
			digest: "sha512:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
		},
		{
			input: "foo_bar.com:8080",
			repository: "foo_bar.com",
			tag: "8080",
		},
		{
			input: "foo/foo_bar.com:8080",
			hostname: "foo",
			repository: "foo/foo_bar.com",
			tag: "8080",
		},
	}
	for _, testcase := range referenceTestcases {
		failf := func(format string, v ...interface{}) {
			t.Logf(strconv.Quote(testcase.input)+": "+format, v...)
			t.Fail()
		}

		repo, err := Parse(testcase.input)
		if testcase.err != nil {
			if err == nil {
				failf("missing expected error: %v", testcase.err)
			} else if testcase.err != err {
				failf("mismatched error: got %v, expected %v", err, testcase.err)
			}
			continue
		} else if err != nil {
			failf("unexpected parse error: %v", err)
			continue
		}
		if repo.String() != testcase.input {
			failf("mismatched repo: got %q, expected %q", repo.String(), testcase.input)
		}

		if named, ok := repo.(Named); ok {
			if named.Name() != testcase.repository {
				failf("unexpected repository: got %q, expected %q", named.Name(), testcase.repository)
			}
			hostname, _ := SplitHostname(named)
			if hostname != testcase.hostname {
				failf("unexpected hostname: got %q, expected %q", hostname, testcase.hostname)
			}
		} else if testcase.repository != "" || testcase.hostname != "" {
			failf("expected named type, got %T", repo)
		}

		tagged, ok := repo.(Tagged)
		if testcase.tag != "" {
			if ok {
				if tagged.Tag() != testcase.tag {
					failf("unexpected tag: got %q, expected %q", tagged.Tag(), testcase.tag)
				}
			} else {
				failf("expected tagged type, got %T", repo)
			}
		} else if ok {
			failf("unexpected tagged type")
		}

		digested, ok := repo.(Digested)
		if testcase.digest != "" {
			if ok {
				if digested.Digest().String() != testcase.digest {
					failf("unexpected digest: got %q, expected %q", digested.Digest().String(), testcase.digest)
				}
			} else {
				failf("expected digested type, got %T", repo)
			}
		} else if ok {
			failf("unexpected digested type")
		}

	}
}

// TestWithNameFailure tests cases where WithName should fail. Cases where it
// should succeed are covered by TestSplitHostname, below.
func TestWithNameFailure(t *testing.T) {
	testcases := []struct {
		input string
		err error
	}{
		{
			input: "",
			err: ErrNameEmpty,
		},
		{
			input: ":justtag",
			err: ErrReferenceInvalidFormat,
		},
		{
			input: "@sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
			err: ErrReferenceInvalidFormat,
		},
		{
			input: "validname@invaliddigest:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
			err: ErrReferenceInvalidFormat,
		},
		{
			input: strings.Repeat("a/", 128) + "a:tag",
			err: ErrNameTooLong,
		},
		{
			input: "aa/asdf$$^/aa",
			err: ErrReferenceInvalidFormat,
		},
	}
	for _, testcase := range testcases {
		failf := func(format string, v ...interface{}) {
			t.Logf(strconv.Quote(testcase.input)+": "+format, v...)
			t.Fail()
		}

		_, err := WithName(testcase.input)
		if err == nil {
			failf("no error parsing name. expected: %s", testcase.err)
		}
	}
}

func TestSplitHostname(t *testing.T) {
	testcases := []struct {
		input string
		hostname string
		name string
	}{
		{
			input: "test.com/foo",
			hostname: "test.com",
			name: "foo",
		},
		{
			input: "test_com/foo",
			hostname: "",
			name: "test_com/foo",
		},
		{
			input: "test:8080/foo",
			hostname: "test:8080",
			name: "foo",
		},
		{
			input: "test.com:8080/foo",
			hostname: "test.com:8080",
			name: "foo",
		},
		{
			input: "test-com:8080/foo",
			hostname: "test-com:8080",
			name: "foo",
		},
		{
			input: "xn--n3h.com:18080/foo",
			hostname: "xn--n3h.com:18080",
			name: "foo",
		},
	}
	for _, testcase := range testcases {
		failf := func(format string, v ...interface{}) {
			t.Logf(strconv.Quote(testcase.input)+": "+format, v...)
			t.Fail()
		}

		named, err := WithName(testcase.input)
		if err != nil {
			failf("error parsing name: %s", err)
		}
		hostname, name := SplitHostname(named)
		if hostname != testcase.hostname {
			failf("unexpected hostname: got %q, expected %q", hostname, testcase.hostname)
		}
		if name != testcase.name {
			failf("unexpected name: got %q, expected %q", name, testcase.name)
		}
	}
}

type serializationType struct {
	Description string
	Field Field
}

func TestSerialization(t *testing.T) {
	testcases := []struct {
		description string
		input string
		name string
		tag string
		digest string
		err error
	}{
		{
			description: "empty value",
			err: ErrNameEmpty,
		},
		{
			description: "just a name",
			input: "example.com:8000/named",
			name: "example.com:8000/named",
		},
		{
			description: "name with a tag",
			input: "example.com:8000/named:tagged",
			name: "example.com:8000/named",
			tag: "tagged",
		},
		{
			description: "name with digest",
			input: "other.com/named@sha256:1234567890098765432112345667890098765432112345667890098765432112",
			name: "other.com/named",
			digest: "sha256:1234567890098765432112345667890098765432112345667890098765432112",
		},
	}
	for _, testcase := range testcases {
		failf := func(format string, v ...interface{}) {
			t.Logf(strconv.Quote(testcase.input)+": "+format, v...)
			t.Fail()
		}

		m := map[string]string{
			"Description": testcase.description,
			"Field": testcase.input,
		}
		b, err := json.Marshal(m)
		if err != nil {
			failf("error marshalling: %v", err)
		}
		t := serializationType{}

		if err := json.Unmarshal(b, &t); err != nil {
			if testcase.err == nil {
				failf("error unmarshalling: %v", err)
			}
			if err != testcase.err {
				failf("wrong error, expected %v, got %v", testcase.err, err)
			}

			continue
		} else if testcase.err != nil {
			failf("expected error unmarshalling: %v", testcase.err)
		}

		if t.Description != testcase.description {
			failf("wrong description, expected %q, got %q", testcase.description, t.Description)
		}

		ref := t.Field.Reference()

		if named, ok := ref.(Named); ok {
			if named.Name() != testcase.name {
				failf("unexpected repository: got %q, expected %q", named.Name(), testcase.name)
			}
		} else if testcase.name != "" {
			failf("expected named type, got %T", ref)
		}

		tagged, ok := ref.(Tagged)
		if testcase.tag != "" {
			if ok {
				if tagged.Tag() != testcase.tag {
					failf("unexpected tag: got %q, expected %q", tagged.Tag(), testcase.tag)
				}
			} else {
				failf("expected tagged type, got %T", ref)
			}
		} else if ok {
			failf("unexpected tagged type")
		}

		digested, ok := ref.(Digested)
		if testcase.digest != "" {
			if ok {
				if digested.Digest().String() != testcase.digest {
					failf("unexpected digest: got %q, expected %q", digested.Digest().String(), testcase.digest)
				}
			} else {
				failf("expected digested type, got %T", ref)
			}
		} else if ok {
			failf("unexpected digested type")
		}

		t = serializationType{
			Description: testcase.description,
			Field: AsField(ref),
		}

		b2, err := json.Marshal(t)
		if err != nil {
			failf("error marshalling serialization type: %v", err)
		}

		if string(b) != string(b2) {
			failf("unexpected serialized value: expected %q, got %q", string(b), string(b2))
		}

		// Ensure t.Field is not implementing "Reference" directly, getting
		// around the Reference type system
		var fieldInterface interface{} = t.Field
		if _, ok := fieldInterface.(Reference); ok {
			failf("field should not implement Reference interface")
		}

	}
}

func TestWithTag(t *testing.T) {
	testcases := []struct {
		name string
		digest digest.Digest
		tag string
		combined string
	}{
		{
			name: "test.com/foo",
			tag: "tag",
			combined: "test.com/foo:tag",
		},
		{
			name: "foo",
			tag: "tag2",
			combined: "foo:tag2",
		},
		{
			name: "test.com:8000/foo",
|
|
||||||
tag: "tag4",
|
|
||||||
combined: "test.com:8000/foo:tag4",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "test.com:8000/foo",
|
|
||||||
tag: "TAG5",
|
|
||||||
combined: "test.com:8000/foo:TAG5",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "test.com:8000/foo",
|
|
||||||
digest: "sha256:1234567890098765432112345667890098765",
|
|
||||||
tag: "TAG5",
|
|
||||||
combined: "test.com:8000/foo:TAG5@sha256:1234567890098765432112345667890098765",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, testcase := range testcases {
|
|
||||||
failf := func(format string, v ...interface{}) {
|
|
||||||
t.Logf(strconv.Quote(testcase.name)+": "+format, v...)
|
|
||||||
t.Fail()
|
|
||||||
}
|
|
||||||
|
|
||||||
named, err := WithName(testcase.name)
|
|
||||||
if err != nil {
|
|
||||||
failf("error parsing name: %s", err)
|
|
||||||
}
|
|
||||||
if testcase.digest != "" {
|
|
||||||
canonical, err := WithDigest(named, testcase.digest)
|
|
||||||
if err != nil {
|
|
||||||
failf("error adding digest")
|
|
||||||
}
|
|
||||||
named = canonical
|
|
||||||
}
|
|
||||||
|
|
||||||
tagged, err := WithTag(named, testcase.tag)
|
|
||||||
if err != nil {
|
|
||||||
failf("WithTag failed: %s", err)
|
|
||||||
}
|
|
||||||
if tagged.String() != testcase.combined {
|
|
||||||
failf("unexpected: got %q, expected %q", tagged.String(), testcase.combined)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestWithDigest(t *testing.T) {
|
|
||||||
testcases := []struct {
|
|
||||||
name string
|
|
||||||
digest digest.Digest
|
|
||||||
tag string
|
|
||||||
combined string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "test.com/foo",
|
|
||||||
digest: "sha256:1234567890098765432112345667890098765",
|
|
||||||
combined: "test.com/foo@sha256:1234567890098765432112345667890098765",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "foo",
|
|
||||||
digest: "sha256:1234567890098765432112345667890098765",
|
|
||||||
combined: "foo@sha256:1234567890098765432112345667890098765",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "test.com:8000/foo",
|
|
||||||
digest: "sha256:1234567890098765432112345667890098765",
|
|
||||||
combined: "test.com:8000/foo@sha256:1234567890098765432112345667890098765",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "test.com:8000/foo",
|
|
||||||
digest: "sha256:1234567890098765432112345667890098765",
|
|
||||||
tag: "latest",
|
|
||||||
combined: "test.com:8000/foo:latest@sha256:1234567890098765432112345667890098765",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, testcase := range testcases {
|
|
||||||
failf := func(format string, v ...interface{}) {
|
|
||||||
t.Logf(strconv.Quote(testcase.name)+": "+format, v...)
|
|
||||||
t.Fail()
|
|
||||||
}
|
|
||||||
|
|
||||||
named, err := WithName(testcase.name)
|
|
||||||
if err != nil {
|
|
||||||
failf("error parsing name: %s", err)
|
|
||||||
}
|
|
||||||
if testcase.tag != "" {
|
|
||||||
tagged, err := WithTag(named, testcase.tag)
|
|
||||||
if err != nil {
|
|
||||||
failf("error adding tag")
|
|
||||||
}
|
|
||||||
named = tagged
|
|
||||||
}
|
|
||||||
digested, err := WithDigest(named, testcase.digest)
|
|
||||||
if err != nil {
|
|
||||||
failf("WithDigest failed: %s", err)
|
|
||||||
}
|
|
||||||
if digested.String() != testcase.combined {
|
|
||||||
failf("unexpected: got %q, expected %q", digested.String(), testcase.combined)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMatchError(t *testing.T) {
|
|
||||||
named, err := Parse("foo")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
_, err = Match("[-x]", named)
|
|
||||||
if err == nil {
|
|
||||||
t.Fatalf("expected an error, got nothing")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMatch(t *testing.T) {
|
|
||||||
matchCases := []struct {
|
|
||||||
reference string
|
|
||||||
pattern string
|
|
||||||
expected bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
reference: "foo",
|
|
||||||
pattern: "foo/**/ba[rz]",
|
|
||||||
expected: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "foo/any/bat",
|
|
||||||
pattern: "foo/**/ba[rz]",
|
|
||||||
expected: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "foo/a/bar",
|
|
||||||
pattern: "foo/**/ba[rz]",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "foo/b/baz",
|
|
||||||
pattern: "foo/**/ba[rz]",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "foo/c/baz:tag",
|
|
||||||
pattern: "foo/**/ba[rz]",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "foo/c/baz:tag",
|
|
||||||
pattern: "foo/*/baz:tag",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "foo/c/baz:tag",
|
|
||||||
pattern: "foo/c/baz:tag",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "example.com/foo/c/baz:tag",
|
|
||||||
pattern: "*/foo/c/baz",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
reference: "example.com/foo/c/baz:tag",
|
|
||||||
pattern: "example.com/foo/c/baz",
|
|
||||||
expected: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, c := range matchCases {
|
|
||||||
named, err := Parse(c.reference)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
actual, err := Match(c.pattern, named)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
if actual != c.expected {
|
|
||||||
t.Fatalf("expected %s match %s to be %v, was %v", c.reference, c.pattern, c.expected, actual)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
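The tests above drive the reference package's parsing helpers through table-driven cases. As a rough usage sketch of the same API (assuming the import path `github.com/docker/distribution/reference` and the `WithName`/`SplitHostname` functions exercised above), splitting a reference into its registry and repository parts looks roughly like this:

	package main

	import (
		"fmt"

		"github.com/docker/distribution/reference"
	)

	func main() {
		named, err := reference.WithName("test.com:8080/foo")
		if err != nil {
			panic(err)
		}
		// SplitHostname returns ("test.com:8080", "foo") for this input,
		// mirroring the TestSplitHostname cases above.
		hostname, name := reference.SplitHostname(named)
		fmt.Println(hostname, name)
	}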
124 vendor/github.com/docker/distribution/reference/regexp.go generated vendored
@@ -1,124 +0,0 @@
package reference

import "regexp"

var (
	// alphaNumericRegexp defines the alpha numeric atom, typically a
	// component of names. This only allows lower case characters and digits.
	alphaNumericRegexp = match(`[a-z0-9]+`)

	// separatorRegexp defines the separators allowed to be embedded in name
	// components. This allows one period, one or two underscores and multiple
	// dashes.
	separatorRegexp = match(`(?:[._]|__|[-]*)`)

	// nameComponentRegexp restricts registry path component names to start
	// with at least one letter or number, with following parts able to be
	// separated by one period, one or two underscores and multiple dashes.
	nameComponentRegexp = expression(
		alphaNumericRegexp,
		optional(repeated(separatorRegexp, alphaNumericRegexp)))

	// hostnameComponentRegexp restricts the registry hostname component of a
	// repository name to start with a component as defined by hostnameRegexp
	// and followed by an optional port.
	hostnameComponentRegexp = match(`(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9-]*[a-zA-Z0-9])`)

	// hostnameRegexp defines the structure of potential hostname components
	// that may be part of image names. This is purposely a subset of what is
	// allowed by DNS to ensure backwards compatibility with Docker image
	// names.
	hostnameRegexp = expression(
		hostnameComponentRegexp,
		optional(repeated(literal(`.`), hostnameComponentRegexp)),
		optional(literal(`:`), match(`[0-9]+`)))

	// TagRegexp matches valid tag names. From docker/docker:graph/tags.go.
	TagRegexp = match(`[\w][\w.-]{0,127}`)

	// anchoredTagRegexp matches valid tag names, anchored at the start and
	// end of the matched string.
	anchoredTagRegexp = anchored(TagRegexp)

	// DigestRegexp matches valid digests.
	DigestRegexp = match(`[A-Za-z][A-Za-z0-9]*(?:[-_+.][A-Za-z][A-Za-z0-9]*)*[:][[:xdigit:]]{32,}`)

	// anchoredDigestRegexp matches valid digests, anchored at the start and
	// end of the matched string.
	anchoredDigestRegexp = anchored(DigestRegexp)

	// NameRegexp is the format for the name component of references. The
	// regexp has capturing groups for the hostname and name part omitting
	// the separating forward slash from either.
	NameRegexp = expression(
		optional(hostnameRegexp, literal(`/`)),
		nameComponentRegexp,
		optional(repeated(literal(`/`), nameComponentRegexp)))

	// anchoredNameRegexp is used to parse a name value, capturing the
	// hostname and trailing components.
	anchoredNameRegexp = anchored(
		optional(capture(hostnameRegexp), literal(`/`)),
		capture(nameComponentRegexp,
			optional(repeated(literal(`/`), nameComponentRegexp))))

	// ReferenceRegexp is the full supported format of a reference. The regexp
	// is anchored and has capturing groups for name, tag, and digest
	// components.
	ReferenceRegexp = anchored(capture(NameRegexp),
		optional(literal(":"), capture(TagRegexp)),
		optional(literal("@"), capture(DigestRegexp)))
)

// match compiles the string to a regular expression.
var match = regexp.MustCompile

// literal compiles s into a literal regular expression, escaping any regexp
// reserved characters.
func literal(s string) *regexp.Regexp {
	re := match(regexp.QuoteMeta(s))

	if _, complete := re.LiteralPrefix(); !complete {
		panic("must be a literal")
	}

	return re
}

// expression defines a full expression, where each regular expression must
// follow the previous.
func expression(res ...*regexp.Regexp) *regexp.Regexp {
	var s string
	for _, re := range res {
		s += re.String()
	}

	return match(s)
}

// optional wraps the expression in a non-capturing group and makes the
// production optional.
func optional(res ...*regexp.Regexp) *regexp.Regexp {
	return match(group(expression(res...)).String() + `?`)
}

// repeated wraps the regexp in a non-capturing group to get one or more
// matches.
func repeated(res ...*regexp.Regexp) *regexp.Regexp {
	return match(group(expression(res...)).String() + `+`)
}

// group wraps the regexp in a non-capturing group.
func group(res ...*regexp.Regexp) *regexp.Regexp {
	return match(`(?:` + expression(res...).String() + `)`)
}

// capture wraps the expression in a capturing group.
func capture(res ...*regexp.Regexp) *regexp.Regexp {
	return match(`(` + expression(res...).String() + `)`)
}

// anchored anchors the regular expression by adding start and end delimiters.
func anchored(res ...*regexp.Regexp) *regexp.Regexp {
	return match(`^` + expression(res...).String() + `$`)
}
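The helpers above build larger patterns by concatenating the source strings of compiled *regexp.Regexp values. A minimal standalone sketch of that combinator idea, independent of this package and using only the standard regexp library (`group`, `anchored`, and the dotted-name pattern here are illustrative, not the package's real definitions):

	package main

	import (
		"fmt"
		"regexp"
	)

	// group wraps a pattern in a non-capturing group, like the helper above.
	func group(re *regexp.Regexp) *regexp.Regexp {
		return regexp.MustCompile(`(?:` + re.String() + `)`)
	}

	// anchored pins a pattern to the start and end of the string.
	func anchored(re *regexp.Regexp) *regexp.Regexp {
		return regexp.MustCompile(`^` + re.String() + `$`)
	}

	func main() {
		word := regexp.MustCompile(`[a-z0-9]+`)
		// One or more lowercase/digit runs separated by dots, whole-string match.
		dotted := anchored(regexp.MustCompile(group(word).String() + `(?:\.` + group(word).String() + `)*`))
		fmt.Println(dotted.MatchString("test.com")) // true
		fmt.Println(dotted.MatchString("Test.com")) // false: uppercase is not allowed
	}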
489 vendor/github.com/docker/distribution/reference/regexp_test.go generated vendored
@@ -1,489 +0,0 @@
package reference

import (
	"regexp"
	"strings"
	"testing"
)

type regexpMatch struct {
	input string
	match bool
	subs  []string
}

func checkRegexp(t *testing.T, r *regexp.Regexp, m regexpMatch) {
	matches := r.FindStringSubmatch(m.input)
	if m.match && matches != nil {
		if len(matches) != (r.NumSubexp()+1) || matches[0] != m.input {
			t.Fatalf("Bad match result %#v for %q", matches, m.input)
		}
		if len(matches) < (len(m.subs) + 1) {
			t.Errorf("Expected %d sub matches, only have %d for %q", len(m.subs), len(matches)-1, m.input)
		}
		for i := range m.subs {
			if m.subs[i] != matches[i+1] {
				t.Errorf("Unexpected submatch %d: %q, expected %q for %q", i+1, matches[i+1], m.subs[i], m.input)
			}
		}
	} else if m.match {
		t.Errorf("Expected match for %q", m.input)
	} else if matches != nil {
		t.Errorf("Unexpected match for %q", m.input)
	}
}

func TestHostRegexp(t *testing.T) {
	hostcases := []regexpMatch{
		{input: "test.com", match: true},
		{input: "test.com:10304", match: true},
		{input: "test.com:http", match: false},
		{input: "localhost", match: true},
		{input: "localhost:8080", match: true},
		{input: "a", match: true},
		{input: "a.b", match: true},
		{input: "ab.cd.com", match: true},
		{input: "a-b.com", match: true},
		{input: "-ab.com", match: false},
		{input: "ab-.com", match: false},
		{input: "ab.c-om", match: true},
		{input: "ab.-com", match: false},
		{input: "ab.com-", match: false},
		{input: "0101.com", match: true}, // TODO(dmcgowan): valid if this should be allowed
		{input: "001a.com", match: true},
		{input: "b.gbc.io:443", match: true},
		{input: "b.gbc.io", match: true},
		{input: "xn--n3h.com", match: true}, // ☃.com in punycode
		{input: "Asdf.com", match: true},    // uppercase character
	}
	r := regexp.MustCompile(`^` + hostnameRegexp.String() + `$`)
	for i := range hostcases {
		checkRegexp(t, r, hostcases[i])
	}
}

func TestFullNameRegexp(t *testing.T) {
	if anchoredNameRegexp.NumSubexp() != 2 {
		t.Fatalf("anchored name regexp should have two submatches: %v, %v != 2",
			anchoredNameRegexp, anchoredNameRegexp.NumSubexp())
	}

	testcases := []regexpMatch{
		{input: "", match: false},
		{input: "short", match: true, subs: []string{"", "short"}},
		{input: "simple/name", match: true, subs: []string{"simple", "name"}},
		{input: "library/ubuntu", match: true, subs: []string{"library", "ubuntu"}},
		{input: "docker/stevvooe/app", match: true, subs: []string{"docker", "stevvooe/app"}},
		{input: "aa/aa/aa/aa/aa/aa/aa/aa/aa/bb/bb/bb/bb/bb/bb", match: true, subs: []string{"aa", "aa/aa/aa/aa/aa/aa/aa/aa/bb/bb/bb/bb/bb/bb"}},
		{input: "aa/aa/bb/bb/bb", match: true, subs: []string{"aa", "aa/bb/bb/bb"}},
		{input: "a/a/a/a", match: true, subs: []string{"a", "a/a/a"}},
		{input: "a/a/a/a/", match: false},
		{input: "a//a/a", match: false},
		{input: "a", match: true, subs: []string{"", "a"}},
		{input: "a/aa", match: true, subs: []string{"a", "aa"}},
		{input: "a/aa/a", match: true, subs: []string{"a", "aa/a"}},
		{input: "foo.com", match: true, subs: []string{"", "foo.com"}},
		{input: "foo.com/", match: false},
		{input: "foo.com:8080/bar", match: true, subs: []string{"foo.com:8080", "bar"}},
		{input: "foo.com:http/bar", match: false},
		{input: "foo.com/bar", match: true, subs: []string{"foo.com", "bar"}},
		{input: "foo.com/bar/baz", match: true, subs: []string{"foo.com", "bar/baz"}},
		{input: "localhost:8080/bar", match: true, subs: []string{"localhost:8080", "bar"}},
		{input: "sub-dom1.foo.com/bar/baz/quux", match: true, subs: []string{"sub-dom1.foo.com", "bar/baz/quux"}},
		{input: "blog.foo.com/bar/baz", match: true, subs: []string{"blog.foo.com", "bar/baz"}},
		{input: "a^a", match: false},
		{input: "aa/asdf$$^/aa", match: false},
		{input: "asdf$$^/aa", match: false},
		{input: "aa-a/a", match: true, subs: []string{"aa-a", "a"}},
		{input: strings.Repeat("a/", 128) + "a", match: true, subs: []string{"a", strings.Repeat("a/", 127) + "a"}},
		{input: "a-/a/a/a", match: false},
		{input: "foo.com/a-/a/a", match: false},
		{input: "-foo/bar", match: false},
		{input: "foo/bar-", match: false},
		{input: "foo-/bar", match: false},
		{input: "foo/-bar", match: false},
		{input: "_foo/bar", match: false},
		{input: "foo_bar", match: true, subs: []string{"", "foo_bar"}},
		{input: "foo_bar.com", match: true, subs: []string{"", "foo_bar.com"}},
		{input: "foo_bar.com:8080", match: false},
		{input: "foo_bar.com:8080/app", match: false},
		{input: "foo.com/foo_bar", match: true, subs: []string{"foo.com", "foo_bar"}},
		{input: "____/____", match: false},
		{input: "_docker/_docker", match: false},
		{input: "docker_/docker_", match: false},
		{input: "b.gcr.io/test.example.com/my-app", match: true, subs: []string{"b.gcr.io", "test.example.com/my-app"}},
		{input: "xn--n3h.com/myimage", match: true, subs: []string{"xn--n3h.com", "myimage"}},   // ☃.com in punycode
		{input: "xn--7o8h.com/myimage", match: true, subs: []string{"xn--7o8h.com", "myimage"}}, // 🐳.com in punycode
		{input: "example.com/xn--7o8h.com/myimage", match: true, subs: []string{"example.com", "xn--7o8h.com/myimage"}}, // 🐳.com in punycode
		{input: "example.com/some_separator__underscore/myimage", match: true, subs: []string{"example.com", "some_separator__underscore/myimage"}},
		{input: "example.com/__underscore/myimage", match: false},
		{input: "example.com/..dots/myimage", match: false},
		{input: "example.com/.dots/myimage", match: false},
		{input: "example.com/nodouble..dots/myimage", match: false},
		{input: "example.com/nodouble..dots/myimage", match: false},
		{input: "docker./docker", match: false},
		{input: ".docker/docker", match: false},
		{input: "docker-/docker", match: false},
		{input: "-docker/docker", match: false},
		{input: "do..cker/docker", match: false},
		{input: "do__cker:8080/docker", match: false},
		{input: "do__cker/docker", match: true, subs: []string{"", "do__cker/docker"}},
		{input: "b.gcr.io/test.example.com/my-app", match: true, subs: []string{"b.gcr.io", "test.example.com/my-app"}},
		{input: "registry.io/foo/project--id.module--name.ver---sion--name", match: true, subs: []string{"registry.io", "foo/project--id.module--name.ver---sion--name"}},
		{input: "Asdf.com/foo/bar", match: true}, // uppercase character in hostname
		{input: "Foo/FarB", match: false},        // uppercase characters in remote name
	}
	for i := range testcases {
		checkRegexp(t, anchoredNameRegexp, testcases[i])
	}
}

func TestReferenceRegexp(t *testing.T) {
	if ReferenceRegexp.NumSubexp() != 3 {
		t.Fatalf("anchored name regexp should have three submatches: %v, %v != 3",
			ReferenceRegexp, ReferenceRegexp.NumSubexp())
	}

	testcases := []regexpMatch{
		{input: "registry.com:8080/myapp:tag", match: true, subs: []string{"registry.com:8080/myapp", "tag", ""}},
		{input: "registry.com:8080/myapp@sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912", match: true, subs: []string{"registry.com:8080/myapp", "", "sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912"}},
		{input: "registry.com:8080/myapp:tag2@sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912", match: true, subs: []string{"registry.com:8080/myapp", "tag2", "sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912"}},
		{input: "registry.com:8080/myapp@sha256:badbadbadbad", match: false},
		{input: "registry.com:8080/myapp:invalid~tag", match: false},
		{input: "bad_hostname.com:8080/myapp:tag", match: false},
		// localhost treated as name, missing tag with 8080 as tag
		{input: "localhost:8080@sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912", match: true, subs: []string{"localhost", "8080", "sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912"}},
		{input: "localhost:8080/name@sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912", match: true, subs: []string{"localhost:8080/name", "", "sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912"}},
		{input: "localhost:http/name@sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912", match: false},
		// localhost will be treated as an image name without a host
		{input: "localhost@sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912", match: true, subs: []string{"localhost", "", "sha256:be178c0543eb17f5f3043021c9e5fcf30285e557a4fc309cce97ff9ca6182912"}},
		{input: "registry.com:8080/myapp@bad", match: false},
		{input: "registry.com:8080/myapp@2bad", match: false}, // TODO(dmcgowan): Support this as valid
	}

	for i := range testcases {
		checkRegexp(t, ReferenceRegexp, testcases[i])
	}
}
97 vendor/github.com/docker/distribution/registry.go generated vendored
@@ -1,97 +0,0 @@
package distribution

import (
	"github.com/docker/distribution/context"
	"github.com/docker/distribution/reference"
)

// Scope defines the set of items that match a namespace.
type Scope interface {
	// Contains returns true if the name belongs to the namespace.
	Contains(name string) bool
}

type fullScope struct{}

func (f fullScope) Contains(string) bool {
	return true
}

// GlobalScope represents the full namespace scope which contains
// all other scopes.
var GlobalScope = Scope(fullScope{})

// Namespace represents a collection of repositories, addressable by name.
// Generally, a namespace is backed by a set of one or more services,
// providing facilities such as registry access, trust, and indexing.
type Namespace interface {
	// Scope describes the names that can be used with this Namespace. The
	// global namespace will have a scope that matches all names. The scope
	// effectively provides an identity for the namespace.
	Scope() Scope

	// Repository should return a reference to the named repository. The
	// registry may or may not have the repository but should always return a
	// reference.
	Repository(ctx context.Context, name reference.Named) (Repository, error)

	// Repositories fills 'repos' with a lexicographically sorted catalog of repositories
	// up to the size of 'repos' and returns the value 'n' for the number of entries
	// which were filled. 'last' contains an offset in the catalog, and 'err' will be
	// set to io.EOF if there are no more entries to obtain.
	Repositories(ctx context.Context, repos []string, last string) (n int, err error)

	// Blobs returns a blob enumerator to access all blobs
	Blobs() BlobEnumerator

	// BlobStatter returns a BlobStatter to control
	BlobStatter() BlobStatter
}

// RepositoryEnumerator describes an operation to enumerate repositories
type RepositoryEnumerator interface {
	Enumerate(ctx context.Context, ingester func(string) error) error
}

// ManifestServiceOption is a function argument for Manifest Service methods
type ManifestServiceOption interface {
	Apply(ManifestService) error
}

// WithTag allows a tag to be passed into Put
func WithTag(tag string) ManifestServiceOption {
	return WithTagOption{tag}
}

// WithTagOption holds a tag
type WithTagOption struct{ Tag string }

// Apply conforms to the ManifestServiceOption interface
func (o WithTagOption) Apply(m ManifestService) error {
	// no implementation
	return nil
}

// Repository is a named collection of manifests and layers.
type Repository interface {
	// Named returns the name of the repository.
	Named() reference.Named

	// Manifests returns a reference to this repository's manifest service
	// with the supplied options applied.
	Manifests(ctx context.Context, options ...ManifestServiceOption) (ManifestService, error)

	// Blobs returns a reference to this repository's blob service.
	Blobs(ctx context.Context) BlobStore

	// TODO(stevvooe): The above BlobStore return can probably be relaxed to
	// be a BlobService for use with clients. This will allow such
	// implementations to avoid implementing ServeBlob.

	// Tags returns a reference to this repository's tag service
	Tags(ctx context.Context) TagService
}

// TODO(stevvooe): Must add close methods to all these. May want to change the
// way instances are created to better reflect internal dependency
// relationships.
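WithTag and WithTagOption above are an instance of the option-as-interface pattern: an option is a value with an Apply method that the service later invokes. A small self-contained sketch of the same pattern (Service, Option, and New here are hypothetical stand-ins, not part of the distribution API):

	package main

	import "fmt"

	// Option mirrors ManifestServiceOption above: a value that knows how to
	// apply itself to a service.
	type Option interface {
		Apply(*Service) error
	}

	// Service is a hypothetical consumer of options.
	type Service struct{ tag string }

	// WithTagOption carries the tag, like the WithTagOption above.
	type WithTagOption struct{ Tag string }

	func (o WithTagOption) Apply(s *Service) error {
		s.tag = o.Tag
		return nil
	}

	// New applies each option in order, failing on the first error.
	func New(opts ...Option) (*Service, error) {
		s := &Service{}
		for _, o := range opts {
			if err := o.Apply(s); err != nil {
				return nil, err
			}
		}
		return s, nil
	}

	func main() {
		s, _ := New(WithTagOption{Tag: "latest"})
		fmt.Println(s.tag) // latest
	}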
27 vendor/github.com/docker/distribution/tags.go generated vendored
@@ -1,27 +0,0 @@
package distribution

import (
	"github.com/docker/distribution/context"
)

// TagService provides access to information about tagged objects.
type TagService interface {
	// Get retrieves the descriptor identified by the tag. Some
	// implementations may differentiate between "trusted" tags and
	// "untrusted" tags. If a tag is "untrusted", the mapping will be returned
	// as an ErrTagUntrusted error, with the target descriptor.
	Get(ctx context.Context, tag string) (Descriptor, error)

	// Tag associates the tag with the provided descriptor, updating the
	// current association, if needed.
	Tag(ctx context.Context, tag string, desc Descriptor) error

	// Untag removes the given tag association
	Untag(ctx context.Context, tag string) error

	// All returns the set of tags managed by this tag service
	All(ctx context.Context) ([]string, error)

	// Lookup returns the set of tags referencing the given digest.
	Lookup(ctx context.Context, digest Descriptor) ([]string, error)
}
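To make the TagService contract above concrete, here is a toy, map-backed implementation sketch; the Descriptor stand-in and the error text are assumptions, only the method shapes come from the interface:

	package main

	import (
		"context"
		"fmt"
	)

	// Descriptor stands in for distribution.Descriptor; only the digest matters here.
	type Descriptor struct{ Digest string }

	// memTagService is a toy, map-backed implementation of the TagService shape above.
	type memTagService struct{ tags map[string]Descriptor }

	func (s *memTagService) Get(ctx context.Context, tag string) (Descriptor, error) {
		d, ok := s.tags[tag]
		if !ok {
			return Descriptor{}, fmt.Errorf("unknown tag %q", tag)
		}
		return d, nil
	}

	func (s *memTagService) Tag(ctx context.Context, tag string, desc Descriptor) error {
		s.tags[tag] = desc
		return nil
	}

	func (s *memTagService) Untag(ctx context.Context, tag string) error {
		delete(s.tags, tag)
		return nil
	}

	func main() {
		s := &memTagService{tags: map[string]Descriptor{}}
		s.Tag(context.Background(), "latest", Descriptor{Digest: "sha256:abc"})
		d, _ := s.Get(context.Background(), "latest")
		fmt.Println(d.Digest)
	}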
46 vendor/github.com/emicklei/go-restful-swagger12/CHANGES.md generated vendored
@@ -1,46 +0,0 @@
Change history of swagger
=
2017-01-30
- moved from go-restful/swagger to go-restful-swagger12

2015-10-16
- add type override mechanism for swagger models (MR 254, nathanejohnson)
- replace uses of wildcard in generated apidocs (issue 251)

2015-05-25
- (api break) changed the type of Properties in Model
- (api break) changed the type of Models in ApiDeclaration
- (api break) changed the parameter type of PostBuildDeclarationMapFunc

2015-04-09
- add ModelBuildable interface for customization of Model

2015-03-17
- preserve order of Routes per WebService in Swagger listing
- fix use of $ref and type in Swagger models
- add api version to listing

2014-11-14
- operation parameters are now sorted using ordering path,query,form,header,body

2014-11-12
- respect omitempty tag value for embedded structs
- expose ApiVersion of WebService to Swagger ApiDeclaration

2014-05-29
- (api add) Ability to define custom http.Handler to serve swagger-ui static files

2014-05-04
- (fix) include model for array element type of response

2014-01-03
- (fix) do not add primitive type to the Api models

2013-11-27
- (fix) make Swagger work for WebServices with root ("/" or "") paths

2013-10-29
- (api add) package variable LogInfo to customize logging function

2013-10-15
- upgraded to spec version 1.2 (https://github.com/wordnik/swagger-core/wiki/1.2-transition)
22 vendor/github.com/emicklei/go-restful-swagger12/LICENSE generated vendored
@@ -1,22 +0,0 @@
Copyright (c) 2017 Ernest Micklei

MIT License

Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:

The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
83 vendor/github.com/emicklei/go-restful-swagger12/README.md generated vendored
@@ -1,83 +0,0 @@
# go-restful-swagger12

[Build Status](https://travis-ci.org/emicklei/go-restful-swagger12)
[GoDoc](https://godoc.org/github.com/emicklei/go-restful-swagger12)

How to use Swagger UI with go-restful
=

Get the Swagger UI sources (version 1.2 only)

	git clone https://github.com/wordnik/swagger-ui.git

The project contains a "dist" folder.
Its contents have all the Swagger UI files you need.

The `index.html` has an `url` set to `http://petstore.swagger.wordnik.com/api/api-docs`.
You need to change that to match your WebService JSON endpoint, e.g. `http://localhost:8080/apidocs.json`

Now, you can install the Swagger WebService for serving the Swagger specification in JSON.

	config := swagger.Config{
		WebServices:     restful.RegisteredWebServices(),
		ApiPath:         "/apidocs.json",
		SwaggerPath:     "/apidocs/",
		SwaggerFilePath: "/Users/emicklei/Projects/swagger-ui/dist"}
	swagger.InstallSwaggerService(config)

Documenting Structs
--

Currently there are 2 ways to document your structs in the go-restful Swagger.

###### By using struct tags
- Use tag "description" to annotate a struct field with a description to show in the UI
- Use tag "modelDescription" to annotate the struct itself with a description to show in the UI. The tag can be added in a field of the struct, and in case there are multiple definitions, they will be appended with an empty line.

###### By using the SwaggerDoc method
Here is an example with an `Address` struct and the documentation for each of the fields. The `""` is a special entry for **documenting the struct itself**.

	type Address struct {
		Country  string `json:"country,omitempty"`
		PostCode int    `json:"postcode,omitempty"`
	}

	func (Address) SwaggerDoc() map[string]string {
		return map[string]string{
			"":         "Address doc",
			"country":  "Country doc",
			"postcode": "PostCode doc",
		}
	}

This example will generate a JSON like this

	{
		"Address": {
			"id": "Address",
			"description": "Address doc",
			"properties": {
				"country": {
					"type": "string",
					"description": "Country doc"
				},
				"postcode": {
					"type": "integer",
					"format": "int32",
					"description": "PostCode doc"
				}
			}
		}
	}

**Very Important Notes:**
- `SwaggerDoc()` is using a **NON-Pointer** receiver (e.g. func (Address) and not func (*Address))
- The returned map should use as key the name of the field as defined in the JSON parameter (e.g. `"postcode"` and not `"PostCode"`)

Notes
--
- The Nickname of an Operation is automatically set by finding the name of the function. You can override it using RouteBuilder.Operation(..)
- The WebServices field of swagger.Config can be used to control which service you want to expose and document; you can have multiple configs and therefore multiple endpoints.

© 2017, ernestmicklei.com. MIT License. Contributions welcome.
64 vendor/github.com/emicklei/go-restful-swagger12/api_declaration_list.go generated vendored
@@ -1,64 +0,0 @@
package swagger

// Copyright 2015 Ernest Micklei. All rights reserved.
// Use of this source code is governed by a license
// that can be found in the LICENSE file.

import (
	"bytes"
	"encoding/json"
)

// ApiDeclarationList maintains an ordered list of ApiDeclaration.
type ApiDeclarationList struct {
	List []ApiDeclaration
}

// At returns the ApiDeclaration by its path unless absent, then ok is false
func (l *ApiDeclarationList) At(path string) (a ApiDeclaration, ok bool) {
	for _, each := range l.List {
		if each.ResourcePath == path {
			return each, true
		}
	}
	return a, false
}

// Put adds or replaces an ApiDeclaration with this name
func (l *ApiDeclarationList) Put(path string, a ApiDeclaration) {
	// maybe replace existing
	for i, each := range l.List {
		if each.ResourcePath == path {
			// replace
			l.List[i] = a
			return
		}
	}
	// add
	l.List = append(l.List, a)
}

// Do enumerates all the properties, each with its assigned name
func (l *ApiDeclarationList) Do(block func(path string, decl ApiDeclaration)) {
	for _, each := range l.List {
		block(each.ResourcePath, each)
	}
}

// MarshalJSON writes the ApiDeclarationList as if it was a map[string]ApiDeclaration
func (l ApiDeclarationList) MarshalJSON() ([]byte, error) {
	var buf bytes.Buffer
	encoder := json.NewEncoder(&buf)
	buf.WriteString("{\n")
	for i, each := range l.List {
		buf.WriteString("\"")
		buf.WriteString(each.ResourcePath)
		buf.WriteString("\": ")
		encoder.Encode(each)
		if i < len(l.List)-1 {
			buf.WriteString(",\n")
		}
	}
	buf.WriteString("}")
	return buf.Bytes(), nil
}
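MarshalJSON above hand-writes a JSON object from an ordered slice because encoding/json does not preserve key order for maps. A stripped-down sketch of the same trick (entry and orderedMap are illustrative types, not part of this package):

	package main

	import (
		"bytes"
		"encoding/json"
		"fmt"
	)

	type entry struct {
		Key   string
		Value int
	}

	// orderedMap marshals as a JSON object whose keys appear in slice order,
	// the same approach ApiDeclarationList.MarshalJSON uses above.
	type orderedMap []entry

	func (m orderedMap) MarshalJSON() ([]byte, error) {
		var buf bytes.Buffer
		buf.WriteString("{")
		for i, e := range m {
			k, _ := json.Marshal(e.Key)
			v, _ := json.Marshal(e.Value)
			buf.Write(k)
			buf.WriteString(":")
			buf.Write(v)
			if i < len(m)-1 {
				buf.WriteString(",")
			}
		}
		buf.WriteString("}")
		return buf.Bytes(), nil
	}

	func main() {
		b, _ := json.Marshal(orderedMap{{"b", 2}, {"a", 1}})
		fmt.Println(string(b)) // {"b":2,"a":1}
	}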
46 vendor/github.com/emicklei/go-restful-swagger12/config.go generated vendored
@@ -1,46 +0,0 @@
package swagger

import (
	"net/http"
	"reflect"

	"github.com/emicklei/go-restful"
)

// PostBuildDeclarationMapFunc can be used to modify the api declaration map.
type PostBuildDeclarationMapFunc func(apiDeclarationMap *ApiDeclarationList)

// MapSchemaFormatFunc can be used to modify typeName at definition time.
type MapSchemaFormatFunc func(typeName string) string

// MapModelTypeNameFunc can be used to return the desired typeName for a given
// type. It will return false if the default name should be used.
type MapModelTypeNameFunc func(t reflect.Type) (string, bool)

type Config struct {
	// url where the services are available, e.g. http://localhost:8080
	// if left empty then the basePath of Swagger is taken from the actual request
	WebServicesUrl string
	// path where the JSON api is available, e.g. /apidocs
	ApiPath string
	// [optional] path where the swagger UI will be served, e.g. /swagger
	SwaggerPath string
	// [optional] location of folder containing Swagger HTML5 application index.html
	SwaggerFilePath string
	// api listing is constructed from this list of restful WebServices.
	WebServices []*restful.WebService
	// will serve all static content (scripts,pages,images)
	StaticHandler http.Handler
	// [optional] on default CORS (Cross-Origin-Resource-Sharing) is enabled.
	DisableCORS bool
	// Top-level API version. Is reflected in the resource listing.
	ApiVersion string
	// If set then call this handler after building the complete ApiDeclaration Map
	PostBuildHandler PostBuildDeclarationMapFunc
	// Swagger global info struct
	Info Info
	// [optional] If set, model builder should call this handler to get additional typename-to-swagger-format-field conversion.
	SchemaFormatHandler MapSchemaFormatFunc
	// [optional] If set, model builder should call this handler to retrieve the name for a given type.
	ModelTypeNameHandler MapModelTypeNameFunc
}
467 vendor/github.com/emicklei/go-restful-swagger12/model_builder.go generated vendored
@@ -1,467 +0,0 @@
package swagger

import (
	"encoding/json"
	"reflect"
	"strings"
)

// ModelBuildable is used for extending Structs that need more control over
// how the Model appears in the Swagger api declaration.
type ModelBuildable interface {
	PostBuildModel(m *Model) *Model
}

type modelBuilder struct {
	Models *ModelList
	Config *Config
}

type documentable interface {
	SwaggerDoc() map[string]string
}

// Check if this structure has a method with signature func (<theModel>) SwaggerDoc() map[string]string
// If it exists, retrieve the documentation and overwrite all struct tag descriptions
func getDocFromMethodSwaggerDoc2(model reflect.Type) map[string]string {
	if docable, ok := reflect.New(model).Elem().Interface().(documentable); ok {
		return docable.SwaggerDoc()
	}
	return make(map[string]string)
}

// addModelFrom creates and adds a Model to the builder and detects and calls
// the post build hook for customizations
func (b modelBuilder) addModelFrom(sample interface{}) {
	if modelOrNil := b.addModel(reflect.TypeOf(sample), ""); modelOrNil != nil {
		// allow customizations
		if buildable, ok := sample.(ModelBuildable); ok {
			modelOrNil = buildable.PostBuildModel(modelOrNil)
			b.Models.Put(modelOrNil.Id, *modelOrNil)
		}
	}
}

func (b modelBuilder) addModel(st reflect.Type, nameOverride string) *Model {
	// Turn pointers into simpler types so further checks are
	// correct.
	if st.Kind() == reflect.Ptr {
		st = st.Elem()
	}

	modelName := b.keyFrom(st)
	if nameOverride != "" {
		modelName = nameOverride
	}
	// no models needed for primitive types
	if b.isPrimitiveType(modelName) {
		return nil
	}
	// golang's encoding/json package says array and slice values encode as
	// JSON arrays, except that []byte encodes as a base64-encoded string.
	// If we see a []byte here, treat it as a primitive type (string)
	// and deal with it in buildArrayTypeProperty.
	if (st.Kind() == reflect.Slice || st.Kind() == reflect.Array) &&
		st.Elem().Kind() == reflect.Uint8 {
		return nil
	}
	// see if we already have visited this model
	if _, ok := b.Models.At(modelName); ok {
		return nil
	}
	sm := Model{
		Id:         modelName,
		Required:   []string{},
		Properties: ModelPropertyList{}}

	// reference the model before further initializing (enables recursive structs)
	b.Models.Put(modelName, sm)

	// check for slice or array
	if st.Kind() == reflect.Slice || st.Kind() == reflect.Array {
		b.addModel(st.Elem(), "")
		return &sm
	}
	// check for structure or primitive type
	if st.Kind() != reflect.Struct {
		return &sm
	}

	fullDoc := getDocFromMethodSwaggerDoc2(st)
	modelDescriptions := []string{}

	for i := 0; i < st.NumField(); i++ {
		field := st.Field(i)
		jsonName, modelDescription, prop := b.buildProperty(field, &sm, modelName)
		if len(modelDescription) > 0 {
			modelDescriptions = append(modelDescriptions, modelDescription)
		}

		// add if not omitted
		if len(jsonName) != 0 {
			// update description
			if fieldDoc, ok := fullDoc[jsonName]; ok {
				prop.Description = fieldDoc
			}
			// update Required
			if b.isPropertyRequired(field) {
				sm.Required = append(sm.Required, jsonName)
			}
			sm.Properties.Put(jsonName, prop)
		}
	}

	// We always overwrite documentation if SwaggerDoc method exists
	// "" is special for documenting the struct itself
	if modelDoc, ok := fullDoc[""]; ok {
		sm.Description = modelDoc
	} else if len(modelDescriptions) != 0 {
		sm.Description = strings.Join(modelDescriptions, "\n")
	}

	// update model builder with completed model
	b.Models.Put(modelName, sm)

	return &sm
}

func (b modelBuilder) isPropertyRequired(field reflect.StructField) bool {
	required := true
	if jsonTag := field.Tag.Get("json"); jsonTag != "" {
		s := strings.Split(jsonTag, ",")
		if len(s) > 1 && s[1] == "omitempty" {
			return false
		}
	}
	return required
}

func (b modelBuilder) buildProperty(field reflect.StructField, model *Model, modelName string) (jsonName, modelDescription string, prop ModelProperty) {
	jsonName = b.jsonNameOfField(field)
	if len(jsonName) == 0 {
		// empty name signals skip property
		return "", "", prop
	}

	if field.Name == "XMLName" && field.Type.String() == "xml.Name" {
		// property is metadata for the xml.Name attribute, can be skipped
		return "", "", prop
	}

	if tag := field.Tag.Get("modelDescription"); tag != "" {
		modelDescription = tag
	}

	prop.setPropertyMetadata(field)
	if prop.Type != nil {
		return jsonName, modelDescription, prop
	}
	fieldType := field.Type

	// check if type is doing its own marshalling
	marshalerType := reflect.TypeOf((*json.Marshaler)(nil)).Elem()
	if fieldType.Implements(marshalerType) {
		var pType = "string"
		if prop.Type == nil {
			prop.Type = &pType
		}
		if prop.Format == "" {
			prop.Format = b.jsonSchemaFormat(b.keyFrom(fieldType))
		}
		return jsonName, modelDescription, prop
	}

	// check if annotation says it is a string
	if jsonTag := field.Tag.Get("json"); jsonTag != "" {
		s := strings.Split(jsonTag, ",")
		if len(s) > 1 && s[1] == "string" {
			stringt := "string"
			prop.Type = &stringt
			return jsonName, modelDescription, prop
		}
	}

	fieldKind := fieldType.Kind()
	switch {
	case fieldKind == reflect.Struct:
		jsonName, prop := b.buildStructTypeProperty(field, jsonName, model)
		return jsonName, modelDescription, prop
	case fieldKind == reflect.Slice || fieldKind == reflect.Array:
		jsonName, prop := b.buildArrayTypeProperty(field, jsonName, modelName)
		return jsonName, modelDescription, prop
	case fieldKind == reflect.Ptr:
		jsonName, prop := b.buildPointerTypeProperty(field, jsonName, modelName)
		return jsonName, modelDescription, prop
	case fieldKind == reflect.String:
		stringt := "string"
		prop.Type = &stringt
		return jsonName, modelDescription, prop
	case fieldKind == reflect.Map:
		// if it's a map, it's unstructured, and swagger 1.2 can't handle it
		objectType := "object"
		prop.Type = &objectType
		return jsonName, modelDescription, prop
	}

	fieldTypeName := b.keyFrom(fieldType)
	if b.isPrimitiveType(fieldTypeName) {
		mapped := b.jsonSchemaType(fieldTypeName)
		prop.Type = &mapped
		prop.Format = b.jsonSchemaFormat(fieldTypeName)
		return jsonName, modelDescription, prop
	}
	modelType := b.keyFrom(fieldType)
	prop.Ref = &modelType

	if fieldType.Name() == "" { // override type of anonymous structs
		nestedTypeName := modelName + "." + jsonName
		prop.Ref = &nestedTypeName
		b.addModel(fieldType, nestedTypeName)
	}
	return jsonName, modelDescription, prop
}

func hasNamedJSONTag(field reflect.StructField) bool {
	parts := strings.Split(field.Tag.Get("json"), ",")
	if len(parts) == 0 {
		return false
	}
	for _, s := range parts[1:] {
		if s == "inline" {
			return false
		}
	}
	return len(parts[0]) > 0
}

func (b modelBuilder) buildStructTypeProperty(field reflect.StructField, jsonName string, model *Model) (nameJson string, prop ModelProperty) {
	prop.setPropertyMetadata(field)
	// Check for type override in tag
	if prop.Type != nil {
		return jsonName, prop
	}
	fieldType := field.Type
	// check for anonymous
	if len(fieldType.Name()) == 0 {
		// anonymous
		anonType := model.Id + "." + jsonName
		b.addModel(fieldType, anonType)
		prop.Ref = &anonType
		return jsonName, prop
	}

	if field.Name == fieldType.Name() && field.Anonymous && !hasNamedJSONTag(field) {
		// embedded struct
		sub := modelBuilder{new(ModelList), b.Config}
		sub.addModel(fieldType, "")
		subKey := sub.keyFrom(fieldType)
		// merge properties from sub
		subModel, _ := sub.Models.At(subKey)
		subModel.Properties.Do(func(k string, v ModelProperty) {
			model.Properties.Put(k, v)
			// if subModel says this property is required then include it
			required := false
			for _, each := range subModel.Required {
				if k == each {
					required = true
					break
				}
			}
			if required {
				model.Required = append(model.Required, k)
			}
		})
		// add all new referenced models
		sub.Models.Do(func(key string, sub Model) {
			if key != subKey {
				if _, ok := b.Models.At(key); !ok {
					b.Models.Put(key, sub)
				}
			}
		})
		// empty name signals skip property
		return "", prop
	}
	// simple struct
	b.addModel(fieldType, "")
	var pType = b.keyFrom(fieldType)
	prop.Ref = &pType
	return jsonName, prop
}

func (b modelBuilder) buildArrayTypeProperty(field reflect.StructField, jsonName, modelName string) (nameJson string, prop ModelProperty) {
	// check for type override in tags
	prop.setPropertyMetadata(field)
	if prop.Type != nil {
		return jsonName, prop
	}
	fieldType := field.Type
	if fieldType.Elem().Kind() == reflect.Uint8 {
		stringt := "string"
		prop.Type = &stringt
		return jsonName, prop
	}
	var pType = "array"
	prop.Type = &pType
	isPrimitive := b.isPrimitiveType(fieldType.Elem().Name())
	elemTypeName := b.getElementTypeName(modelName, jsonName, fieldType.Elem())
	prop.Items = new(Item)
	if isPrimitive {
		mapped := b.jsonSchemaType(elemTypeName)
		prop.Items.Type = &mapped
	} else {
		prop.Items.Ref = &elemTypeName
	}
	// add|overwrite model for element type
	if fieldType.Elem().Kind() == reflect.Ptr {
		fieldType = fieldType.Elem()
	}
	if !isPrimitive {
		b.addModel(fieldType.Elem(), elemTypeName)
	}
	return jsonName, prop
}

func (b modelBuilder) buildPointerTypeProperty(field reflect.StructField, jsonName, modelName string) (nameJson string, prop ModelProperty) {
	prop.setPropertyMetadata(field)
	// Check for type override in tags
	if prop.Type != nil {
		return jsonName, prop
	}
	fieldType := field.Type

	// override type of pointer to list-likes
	if fieldType.Elem().Kind() == reflect.Slice || fieldType.Elem().Kind() == reflect.Array {
		var pType = "array"
		prop.Type = &pType
|
|
||||||
isPrimitive := b.isPrimitiveType(fieldType.Elem().Elem().Name())
|
|
||||||
elemName := b.getElementTypeName(modelName, jsonName, fieldType.Elem().Elem())
|
|
||||||
if isPrimitive {
|
|
||||||
primName := b.jsonSchemaType(elemName)
|
|
||||||
prop.Items = &Item{Ref: &primName}
|
|
||||||
} else {
|
|
||||||
prop.Items = &Item{Ref: &elemName}
|
|
||||||
}
|
|
||||||
if !isPrimitive {
|
|
||||||
// add|overwrite model for element type
|
|
||||||
b.addModel(fieldType.Elem().Elem(), elemName)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
// non-array, pointer type
|
|
||||||
fieldTypeName := b.keyFrom(fieldType.Elem())
|
|
||||||
var pType = b.jsonSchemaType(fieldTypeName) // no star, include pkg path
|
|
||||||
if b.isPrimitiveType(fieldTypeName) {
|
|
||||||
prop.Type = &pType
|
|
||||||
prop.Format = b.jsonSchemaFormat(fieldTypeName)
|
|
||||||
return jsonName, prop
|
|
||||||
}
|
|
||||||
prop.Ref = &pType
|
|
||||||
elemName := ""
|
|
||||||
if fieldType.Elem().Name() == "" {
|
|
||||||
elemName = modelName + "." + jsonName
|
|
||||||
prop.Ref = &elemName
|
|
||||||
}
|
|
||||||
b.addModel(fieldType.Elem(), elemName)
|
|
||||||
}
|
|
||||||
return jsonName, prop
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b modelBuilder) getElementTypeName(modelName, jsonName string, t reflect.Type) string {
|
|
||||||
if t.Kind() == reflect.Ptr {
|
|
||||||
t = t.Elem()
|
|
||||||
}
|
|
||||||
if t.Name() == "" {
|
|
||||||
return modelName + "." + jsonName
|
|
||||||
}
|
|
||||||
return b.keyFrom(t)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b modelBuilder) keyFrom(st reflect.Type) string {
|
|
||||||
key := st.String()
|
|
||||||
if b.Config != nil && b.Config.ModelTypeNameHandler != nil {
|
|
||||||
if name, ok := b.Config.ModelTypeNameHandler(st); ok {
|
|
||||||
key = name
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(st.Name()) == 0 { // unnamed type
|
|
||||||
// Swagger UI has special meaning for [
|
|
||||||
key = strings.Replace(key, "[]", "||", -1)
|
|
||||||
}
|
|
||||||
return key
|
|
||||||
}
|
|
||||||
|
|
||||||
// see also https://golang.org/ref/spec#Numeric_types
|
|
||||||
func (b modelBuilder) isPrimitiveType(modelName string) bool {
|
|
||||||
if len(modelName) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return strings.Contains("uint uint8 uint16 uint32 uint64 int int8 int16 int32 int64 float32 float64 bool string byte rune time.Time", modelName)
|
|
||||||
}
|
|
||||||
|
|
||||||
// jsonNameOfField returns the name of the field as it should appear in JSON format
|
|
||||||
// An empty string indicates that this field is not part of the JSON representation
|
|
||||||
func (b modelBuilder) jsonNameOfField(field reflect.StructField) string {
|
|
||||||
if jsonTag := field.Tag.Get("json"); jsonTag != "" {
|
|
||||||
s := strings.Split(jsonTag, ",")
|
|
||||||
if s[0] == "-" {
|
|
||||||
// empty name signals skip property
|
|
||||||
return ""
|
|
||||||
} else if s[0] != "" {
|
|
||||||
return s[0]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return field.Name
|
|
||||||
}
|
|
||||||
|
|
||||||
// see also http://json-schema.org/latest/json-schema-core.html#anchor8
|
|
||||||
func (b modelBuilder) jsonSchemaType(modelName string) string {
|
|
||||||
schemaMap := map[string]string{
|
|
||||||
"uint": "integer",
|
|
||||||
"uint8": "integer",
|
|
||||||
"uint16": "integer",
|
|
||||||
"uint32": "integer",
|
|
||||||
"uint64": "integer",
|
|
||||||
|
|
||||||
"int": "integer",
|
|
||||||
"int8": "integer",
|
|
||||||
"int16": "integer",
|
|
||||||
"int32": "integer",
|
|
||||||
"int64": "integer",
|
|
||||||
|
|
||||||
"byte": "integer",
|
|
||||||
"float64": "number",
|
|
||||||
"float32": "number",
|
|
||||||
"bool": "boolean",
|
|
||||||
"time.Time": "string",
|
|
||||||
}
|
|
||||||
mapped, ok := schemaMap[modelName]
|
|
||||||
if !ok {
|
|
||||||
return modelName // use as is (custom or struct)
|
|
||||||
}
|
|
||||||
return mapped
|
|
||||||
}
|
|
||||||
|
|
||||||
func (b modelBuilder) jsonSchemaFormat(modelName string) string {
|
|
||||||
if b.Config != nil && b.Config.SchemaFormatHandler != nil {
|
|
||||||
if mapped := b.Config.SchemaFormatHandler(modelName); mapped != "" {
|
|
||||||
return mapped
|
|
||||||
}
|
|
||||||
}
|
|
||||||
schemaMap := map[string]string{
|
|
||||||
"int": "int32",
|
|
||||||
"int32": "int32",
|
|
||||||
"int64": "int64",
|
|
||||||
"byte": "byte",
|
|
||||||
"uint": "integer",
|
|
||||||
"uint8": "byte",
|
|
||||||
"float64": "double",
|
|
||||||
"float32": "float",
|
|
||||||
"time.Time": "date-time",
|
|
||||||
"*time.Time": "date-time",
|
|
||||||
}
|
|
||||||
mapped, ok := schemaMap[modelName]
|
|
||||||
if !ok {
|
|
||||||
return "" // no format
|
|
||||||
}
|
|
||||||
return mapped
|
|
||||||
}
|
|
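Aside: jsonNameOfField and hasNamedJSONTag above both lean on the standard `json` struct-tag grammar, where the first comma-separated part is the name and the remaining parts are options. A minimal self-contained sketch of that parsing, using only the standard library (the `sample` type is hypothetical, not from the vendored sources):

package main

import (
	"fmt"
	"reflect"
	"strings"
)

// sample is a hypothetical struct used only to exercise the tag rules above.
type sample struct {
	A string `json:"a,omitempty"` // named: JSON key "a"
	B string `json:"-"`           // skipped entirely
	C string `json:",string"`     // unnamed, but the "string" option forces string typing
}

func main() {
	t := reflect.TypeOf(sample{})
	for i := 0; i < t.NumField(); i++ {
		parts := strings.Split(t.Field(i).Tag.Get("json"), ",")
		fmt.Printf("field %s -> name %q, options %v\n", t.Field(i).Name, parts[0], parts[1:])
	}
}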
1283 vendor/github.com/emicklei/go-restful-swagger12/model_builder_test.go (generated, vendored)
File diff suppressed because it is too large
86 vendor/github.com/emicklei/go-restful-swagger12/model_list.go (generated, vendored)
@@ -1,86 +0,0 @@
package swagger

// Copyright 2015 Ernest Micklei. All rights reserved.
// Use of this source code is governed by a license
// that can be found in the LICENSE file.

import (
	"bytes"
	"encoding/json"
)

// NamedModel associates a name with a Model (not using its Id)
type NamedModel struct {
	Name  string
	Model Model
}

// ModelList encapsulates a list of NamedModel (association)
type ModelList struct {
	List []NamedModel
}

// Put adds or replaces a Model by its name
func (l *ModelList) Put(name string, model Model) {
	for i, each := range l.List {
		if each.Name == name {
			// replace
			l.List[i] = NamedModel{name, model}
			return
		}
	}
	// add
	l.List = append(l.List, NamedModel{name, model})
}

// At returns a Model by its name, ok is false if absent
func (l *ModelList) At(name string) (m Model, ok bool) {
	for _, each := range l.List {
		if each.Name == name {
			return each.Model, true
		}
	}
	return m, false
}

// Do enumerates all the models, each with its assigned name
func (l *ModelList) Do(block func(name string, value Model)) {
	for _, each := range l.List {
		block(each.Name, each.Model)
	}
}

// MarshalJSON writes the ModelList as if it was a map[string]Model
func (l ModelList) MarshalJSON() ([]byte, error) {
	var buf bytes.Buffer
	encoder := json.NewEncoder(&buf)
	buf.WriteString("{\n")
	for i, each := range l.List {
		buf.WriteString("\"")
		buf.WriteString(each.Name)
		buf.WriteString("\": ")
		encoder.Encode(each.Model)
		if i < len(l.List)-1 {
			buf.WriteString(",\n")
		}
	}
	buf.WriteString("}")
	return buf.Bytes(), nil
}

// UnmarshalJSON reads back a ModelList. This is an expensive operation.
func (l *ModelList) UnmarshalJSON(data []byte) error {
	raw := map[string]interface{}{}
	json.NewDecoder(bytes.NewReader(data)).Decode(&raw)
	for k, v := range raw {
		// produces JSON bytes for each value
		data, err := json.Marshal(v)
		if err != nil {
			return err
		}
		var m Model
		json.NewDecoder(bytes.NewReader(data)).Decode(&m)
		l.Put(k, m)
	}
	return nil
}
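Aside: ModelList above is a slice-backed map, so its MarshalJSON emits keys in insertion order, which a plain map[string]Model cannot guarantee. A minimal self-contained sketch of the same pattern (the `entry` and `orderedMap` names are hypothetical stand-ins for NamedModel and ModelList):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type entry struct {
	Name  string
	Value int
}

type orderedMap struct{ list []entry }

// MarshalJSON writes the entries as one JSON object, preserving insertion order.
func (m orderedMap) MarshalJSON() ([]byte, error) {
	var buf bytes.Buffer
	buf.WriteByte('{')
	for i, e := range m.list {
		if i > 0 {
			buf.WriteByte(',')
		}
		k, _ := json.Marshal(e.Name)
		v, _ := json.Marshal(e.Value)
		buf.Write(k)
		buf.WriteByte(':')
		buf.Write(v)
	}
	buf.WriteByte('}')
	return buf.Bytes(), nil
}

func main() {
	m := orderedMap{list: []entry{{"b", 2}, {"a", 1}}}
	out, _ := json.Marshal(m)
	fmt.Println(string(out)) // prints {"b":2,"a":1}: insertion order kept
}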
48 vendor/github.com/emicklei/go-restful-swagger12/model_list_test.go (generated, vendored)
@@ -1,48 +0,0 @@
package swagger

import (
	"encoding/json"
	"testing"
)

func TestModelList(t *testing.T) {
	m := Model{}
	m.Id = "m"
	l := ModelList{}
	l.Put("m", m)
	k, ok := l.At("m")
	if !ok {
		t.Error("want model back")
	}
	if got, want := k.Id, "m"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}

func TestModelList_Marshal(t *testing.T) {
	l := ModelList{}
	m := Model{Id: "myid"}
	l.Put("myid", m)
	data, err := json.Marshal(l)
	if err != nil {
		t.Error(err)
	}
	if got, want := string(data), `{"myid":{"id":"myid","properties":{}}}`; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}

func TestModelList_Unmarshal(t *testing.T) {
	data := `{"myid":{"id":"myid","properties":{}}}`
	l := ModelList{}
	if err := json.Unmarshal([]byte(data), &l); err != nil {
		t.Error(err)
	}
	m, ok := l.At("myid")
	if !ok {
		t.Error("expected myid")
	}
	if got, want := m.Id, "myid"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}
81 vendor/github.com/emicklei/go-restful-swagger12/model_property_ext.go (generated, vendored)
@@ -1,81 +0,0 @@
package swagger

import (
	"reflect"
	"strings"
)

func (prop *ModelProperty) setDescription(field reflect.StructField) {
	if tag := field.Tag.Get("description"); tag != "" {
		prop.Description = tag
	}
}

func (prop *ModelProperty) setDefaultValue(field reflect.StructField) {
	if tag := field.Tag.Get("default"); tag != "" {
		prop.DefaultValue = Special(tag)
	}
}

func (prop *ModelProperty) setEnumValues(field reflect.StructField) {
	// We use | to separate the enum values. This value is chosen
	// since it's unlikely to be useful in actual enumeration values.
	if tag := field.Tag.Get("enum"); tag != "" {
		prop.Enum = strings.Split(tag, "|")
	}
}

func (prop *ModelProperty) setMaximum(field reflect.StructField) {
	if tag := field.Tag.Get("maximum"); tag != "" {
		prop.Maximum = tag
	}
}

func (prop *ModelProperty) setType(field reflect.StructField) {
	if tag := field.Tag.Get("type"); tag != "" {
		// Check if the first two characters of the type tag are
		// intended to emulate slice/array behaviour.
		//
		// If type is intended to be a slice/array then add the
		// overridden type to the array item instead of the main property
		if len(tag) > 2 && tag[0:2] == "[]" {
			pType := "array"
			prop.Type = &pType
			prop.Items = new(Item)

			iType := tag[2:]
			prop.Items.Type = &iType
			return
		}

		prop.Type = &tag
	}
}

func (prop *ModelProperty) setMinimum(field reflect.StructField) {
	if tag := field.Tag.Get("minimum"); tag != "" {
		prop.Minimum = tag
	}
}

func (prop *ModelProperty) setUniqueItems(field reflect.StructField) {
	tag := field.Tag.Get("unique")
	switch tag {
	case "true":
		v := true
		prop.UniqueItems = &v
	case "false":
		v := false
		prop.UniqueItems = &v
	}
}

func (prop *ModelProperty) setPropertyMetadata(field reflect.StructField) {
	prop.setDescription(field)
	prop.setEnumValues(field)
	prop.setMinimum(field)
	prop.setMaximum(field)
	prop.setUniqueItems(field)
	prop.setDefaultValue(field)
	prop.setType(field)
}
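Aside: the `type:"[]T"` override parsed by setType above can be demonstrated with plain reflection. A minimal self-contained sketch under that assumption (the `widget` and `fakeTag` names are hypothetical, not from the vendored sources):

package main

import (
	"fmt"
	"reflect"
)

type fakeTag string

// widget is a hypothetical struct whose tag overrides the swagger type.
type widget struct {
	Tags []fakeTag `type:"[]string"`
}

func main() {
	f, _ := reflect.TypeOf(widget{}).FieldByName("Tags")
	tag := f.Tag.Get("type")
	if len(tag) > 2 && tag[:2] == "[]" {
		// array override: outer type becomes "array", element type comes from the tag
		fmt.Printf("type=array, items.type=%s\n", tag[2:])
	} else {
		fmt.Printf("type=%s\n", tag)
	}
}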
70 vendor/github.com/emicklei/go-restful-swagger12/model_property_ext_test.go (generated, vendored)
@@ -1,70 +0,0 @@
package swagger

import (
	"net"
	"testing"
)

// clear && go test -v -test.run TestThatExtraTagsAreReadIntoModel ...swagger
func TestThatExtraTagsAreReadIntoModel(t *testing.T) {
	type fakeint int
	type fakearray string
	type Anything struct {
		Name      string    `description:"name" modelDescription:"a test"`
		Size      int       `minimum:"0" maximum:"10"`
		Stati     string    `enum:"off|on" default:"on" modelDescription:"more description"`
		ID        string    `unique:"true"`
		FakeInt   fakeint   `type:"integer"`
		FakeArray fakearray `type:"[]string"`
		IP        net.IP    `type:"string"`
		Password  string
	}
	m := modelsFromStruct(Anything{})
	props, _ := m.At("swagger.Anything")
	p1, _ := props.Properties.At("Name")
	if got, want := p1.Description, "name"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p2, _ := props.Properties.At("Size")
	if got, want := p2.Minimum, "0"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	if got, want := p2.Maximum, "10"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p3, _ := props.Properties.At("Stati")
	if got, want := p3.Enum[0], "off"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	if got, want := p3.Enum[1], "on"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p4, _ := props.Properties.At("ID")
	if got, want := *p4.UniqueItems, true; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p5, _ := props.Properties.At("Password")
	if got, want := *p5.Type, "string"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p6, _ := props.Properties.At("FakeInt")
	if got, want := *p6.Type, "integer"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p7, _ := props.Properties.At("FakeArray")
	if got, want := *p7.Type, "array"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p7p, _ := props.Properties.At("FakeArray")
	if got, want := *p7p.Items.Type, "string"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
	p8, _ := props.Properties.At("IP")
	if got, want := *p8.Type, "string"; got != want {
		t.Errorf("got %v want %v", got, want)
	}

	if got, want := props.Description, "a test\nmore description"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}
87 vendor/github.com/emicklei/go-restful-swagger12/model_property_list.go (generated, vendored)
@@ -1,87 +0,0 @@
package swagger

// Copyright 2015 Ernest Micklei. All rights reserved.
// Use of this source code is governed by a license
// that can be found in the LICENSE file.

import (
	"bytes"
	"encoding/json"
)

// NamedModelProperty associates a name to a ModelProperty
type NamedModelProperty struct {
	Name     string
	Property ModelProperty
}

// ModelPropertyList encapsulates a list of NamedModelProperty (association)
type ModelPropertyList struct {
	List []NamedModelProperty
}

// At returns the ModelProperty by its name unless absent, then ok is false
func (l *ModelPropertyList) At(name string) (p ModelProperty, ok bool) {
	for _, each := range l.List {
		if each.Name == name {
			return each.Property, true
		}
	}
	return p, false
}

// Put adds or replaces a ModelProperty with this name
func (l *ModelPropertyList) Put(name string, prop ModelProperty) {
	// maybe replace existing
	for i, each := range l.List {
		if each.Name == name {
			// replace
			l.List[i] = NamedModelProperty{Name: name, Property: prop}
			return
		}
	}
	// add
	l.List = append(l.List, NamedModelProperty{Name: name, Property: prop})
}

// Do enumerates all the properties, each with its assigned name
func (l *ModelPropertyList) Do(block func(name string, value ModelProperty)) {
	for _, each := range l.List {
		block(each.Name, each.Property)
	}
}

// MarshalJSON writes the ModelPropertyList as if it was a map[string]ModelProperty
func (l ModelPropertyList) MarshalJSON() ([]byte, error) {
	var buf bytes.Buffer
	encoder := json.NewEncoder(&buf)
	buf.WriteString("{\n")
	for i, each := range l.List {
		buf.WriteString("\"")
		buf.WriteString(each.Name)
		buf.WriteString("\": ")
		encoder.Encode(each.Property)
		if i < len(l.List)-1 {
			buf.WriteString(",\n")
		}
	}
	buf.WriteString("}")
	return buf.Bytes(), nil
}

// UnmarshalJSON reads back a ModelPropertyList. This is an expensive operation.
func (l *ModelPropertyList) UnmarshalJSON(data []byte) error {
	raw := map[string]interface{}{}
	json.NewDecoder(bytes.NewReader(data)).Decode(&raw)
	for k, v := range raw {
		// produces JSON bytes for each value
		data, err := json.Marshal(v)
		if err != nil {
			return err
		}
		var m ModelProperty
		json.NewDecoder(bytes.NewReader(data)).Decode(&m)
		l.Put(k, m)
	}
	return nil
}
47 vendor/github.com/emicklei/go-restful-swagger12/model_property_list_test.go (generated, vendored)
@@ -1,47 +0,0 @@
package swagger

import (
	"encoding/json"
	"testing"
)

func TestModelPropertyList(t *testing.T) {
	l := ModelPropertyList{}
	p := ModelProperty{Description: "d"}
	l.Put("p", p)
	q, ok := l.At("p")
	if !ok {
		t.Error("expected p")
	}
	if got, want := q.Description, "d"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}

func TestModelPropertyList_Marshal(t *testing.T) {
	l := ModelPropertyList{}
	p := ModelProperty{Description: "d"}
	l.Put("p", p)
	data, err := json.Marshal(l)
	if err != nil {
		t.Error(err)
	}
	if got, want := string(data), `{"p":{"description":"d"}}`; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}

func TestModelPropertyList_Unmarshal(t *testing.T) {
	data := `{"p":{"description":"d"}}`
	l := ModelPropertyList{}
	if err := json.Unmarshal([]byte(data), &l); err != nil {
		t.Error(err)
	}
	m, ok := l.At("p")
	if !ok {
		t.Error("expected p")
	}
	if got, want := m.Description, "d"; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}
36 vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map.go (generated, vendored)
@@ -1,36 +0,0 @@
package swagger

// Copyright 2015 Ernest Micklei. All rights reserved.
// Use of this source code is governed by a license
// that can be found in the LICENSE file.

import "github.com/emicklei/go-restful"

type orderedRouteMap struct {
	elements map[string][]restful.Route
	keys     []string
}

func newOrderedRouteMap() *orderedRouteMap {
	return &orderedRouteMap{
		elements: map[string][]restful.Route{},
		keys:     []string{},
	}
}

func (o *orderedRouteMap) Add(key string, route restful.Route) {
	routes, ok := o.elements[key]
	if ok {
		routes = append(routes, route)
		o.elements[key] = routes
		return
	}
	o.elements[key] = []restful.Route{route}
	o.keys = append(o.keys, key)
}

func (o *orderedRouteMap) Do(block func(key string, routes []restful.Route)) {
	for _, k := range o.keys {
		block(k, o.elements[k])
	}
}
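Aside: orderedRouteMap above groups routes per key while remembering the first-insertion order of the keys, so Do visits keys deterministically (the test after this file checks exactly that). A minimal self-contained sketch of the pattern, with hypothetical names:

package main

import "fmt"

// orderedGroups is a hypothetical stand-in for orderedRouteMap.
type orderedGroups struct {
	elements map[string][]string
	keys     []string
}

func (o *orderedGroups) Add(key, value string) {
	if _, ok := o.elements[key]; !ok {
		o.keys = append(o.keys, key) // remember first-seen order of keys
	}
	o.elements[key] = append(o.elements[key], value)
}

func main() {
	g := &orderedGroups{elements: map[string][]string{}}
	g.Add("b", "/r2")
	g.Add("a", "/r1")
	g.Add("b", "/r3")
	for _, k := range g.keys { // deterministic: b, then a
		fmt.Println(k, g.elements[k])
	}
}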
29 vendor/github.com/emicklei/go-restful-swagger12/ordered_route_map_test.go (generated, vendored)
@@ -1,29 +0,0 @@
package swagger

import (
	"testing"

	"github.com/emicklei/go-restful"
)

// go test -v -test.run TestOrderedRouteMap ...swagger
func TestOrderedRouteMap(t *testing.T) {
	m := newOrderedRouteMap()
	r1 := restful.Route{Path: "/r1"}
	r2 := restful.Route{Path: "/r2"}
	m.Add("a", r1)
	m.Add("b", r2)
	m.Add("b", r1)
	m.Add("d", r2)
	m.Add("c", r2)
	order := ""
	m.Do(func(k string, routes []restful.Route) {
		order += k
		if len(routes) == 0 {
			t.Fail()
		}
	})
	if order != "abdc" {
		t.Fail()
	}
}
42 vendor/github.com/emicklei/go-restful-swagger12/postbuild_model_test.go (generated, vendored)
@@ -1,42 +0,0 @@
package swagger

import "testing"

type Boat struct {
	Length int `json:"-"` // on default, this makes the fields not required
	Weight int `json:"-"`
}

// PostBuildModel is from swagger.ModelBuildable
func (b Boat) PostBuildModel(m *Model) *Model {
	// override required
	m.Required = []string{"Length", "Weight"}

	// add model property (just to test it can be added; is this a real use case?)
	extraType := "string"
	m.Properties.Put("extra", ModelProperty{
		Description: "extra description",
		DataTypeFields: DataTypeFields{
			Type: &extraType,
		},
	})
	return m
}

func TestCustomPostModelBuilde(t *testing.T) {
	testJsonFromStruct(t, Boat{}, `{
	"swagger.Boat": {
		"id": "swagger.Boat",
		"required": [
			"Length",
			"Weight"
		],
		"properties": {
			"extra": {
				"type": "string",
				"description": "extra description"
			}
		}
	}
}`)
}
185 vendor/github.com/emicklei/go-restful-swagger12/swagger.go (generated, vendored)
@@ -1,185 +0,0 @@
// Package swagger implements the structures of the Swagger specification:
// https://github.com/wordnik/swagger-spec/blob/master/versions/1.2.md
package swagger

const swaggerVersion = "1.2"

// 4.3.3 Data Type Fields
type DataTypeFields struct {
	Type         *string  `json:"type,omitempty"` // if Ref not used
	Ref          *string  `json:"$ref,omitempty"` // if Type not used
	Format       string   `json:"format,omitempty"`
	DefaultValue Special  `json:"defaultValue,omitempty"`
	Enum         []string `json:"enum,omitempty"`
	Minimum      string   `json:"minimum,omitempty"`
	Maximum      string   `json:"maximum,omitempty"`
	Items        *Item    `json:"items,omitempty"`
	UniqueItems  *bool    `json:"uniqueItems,omitempty"`
}

type Special string

// 4.3.4 Items Object
type Item struct {
	Type   *string `json:"type,omitempty"`
	Ref    *string `json:"$ref,omitempty"`
	Format string  `json:"format,omitempty"`
}

// 5.1 Resource Listing
type ResourceListing struct {
	SwaggerVersion string          `json:"swaggerVersion"` // e.g. 1.2
	Apis           []Resource      `json:"apis"`
	ApiVersion     string          `json:"apiVersion"`
	Info           Info            `json:"info"`
	Authorizations []Authorization `json:"authorizations,omitempty"`
}

// 5.1.2 Resource Object
type Resource struct {
	Path        string `json:"path"` // relative or absolute, must start with /
	Description string `json:"description"`
}

// 5.1.3 Info Object
type Info struct {
	Title             string `json:"title"`
	Description       string `json:"description"`
	TermsOfServiceUrl string `json:"termsOfServiceUrl,omitempty"`
	Contact           string `json:"contact,omitempty"`
	License           string `json:"license,omitempty"`
	LicenseUrl        string `json:"licenseUrl,omitempty"`
}

// 5.1.5
type Authorization struct {
	Type       string      `json:"type"`
	PassAs     string      `json:"passAs"`
	Keyname    string      `json:"keyname"`
	Scopes     []Scope     `json:"scopes"`
	GrantTypes []GrantType `json:"grandTypes"`
}

// 5.1.6, 5.2.11
type Scope struct {
	// Required. The name of the scope.
	Scope string `json:"scope"`
	// Recommended. A short description of the scope.
	Description string `json:"description"`
}

// 5.1.7
type GrantType struct {
	Implicit          Implicit          `json:"implicit"`
	AuthorizationCode AuthorizationCode `json:"authorization_code"`
}

// 5.1.8 Implicit Object
type Implicit struct {
	// Required. The login endpoint definition.
	loginEndpoint LoginEndpoint `json:"loginEndpoint"`
	// An optional alternative name to standard "access_token" OAuth2 parameter.
	TokenName string `json:"tokenName"`
}

// 5.1.9 Authorization Code Object
type AuthorizationCode struct {
	TokenRequestEndpoint TokenRequestEndpoint `json:"tokenRequestEndpoint"`
	TokenEndpoint        TokenEndpoint        `json:"tokenEndpoint"`
}

// 5.1.10 Login Endpoint Object
type LoginEndpoint struct {
	// Required. The URL of the authorization endpoint for the implicit grant flow. The value SHOULD be in a URL format.
	Url string `json:"url"`
}

// 5.1.11 Token Request Endpoint Object
type TokenRequestEndpoint struct {
	// Required. The URL of the authorization endpoint for the authentication code grant flow. The value SHOULD be in a URL format.
	Url string `json:"url"`
	// An optional alternative name to standard "client_id" OAuth2 parameter.
	ClientIdName string `json:"clientIdName"`
	// An optional alternative name to the standard "client_secret" OAuth2 parameter.
	ClientSecretName string `json:"clientSecretName"`
}

// 5.1.12 Token Endpoint Object
type TokenEndpoint struct {
	// Required. The URL of the token endpoint for the authentication code grant flow. The value SHOULD be in a URL format.
	Url string `json:"url"`
	// An optional alternative name to standard "access_token" OAuth2 parameter.
	TokenName string `json:"tokenName"`
}

// 5.2 API Declaration
type ApiDeclaration struct {
	SwaggerVersion string          `json:"swaggerVersion"`
	ApiVersion     string          `json:"apiVersion"`
	BasePath       string          `json:"basePath"`
	ResourcePath   string          `json:"resourcePath"` // must start with /
	Info           Info            `json:"info"`
	Apis           []Api           `json:"apis,omitempty"`
	Models         ModelList       `json:"models,omitempty"`
	Produces       []string        `json:"produces,omitempty"`
	Consumes       []string        `json:"consumes,omitempty"`
	Authorizations []Authorization `json:"authorizations,omitempty"`
}

// 5.2.2 API Object
type Api struct {
	Path        string      `json:"path"` // relative or absolute, must start with /
	Description string      `json:"description"`
	Operations  []Operation `json:"operations,omitempty"`
}

// 5.2.3 Operation Object
type Operation struct {
	DataTypeFields
	Method           string            `json:"method"`
	Summary          string            `json:"summary,omitempty"`
	Notes            string            `json:"notes,omitempty"`
	Nickname         string            `json:"nickname"`
	Authorizations   []Authorization   `json:"authorizations,omitempty"`
	Parameters       []Parameter       `json:"parameters"`
	ResponseMessages []ResponseMessage `json:"responseMessages,omitempty"` // optional
	Produces         []string          `json:"produces,omitempty"`
	Consumes         []string          `json:"consumes,omitempty"`
	Deprecated       string            `json:"deprecated,omitempty"`
}

// 5.2.4 Parameter Object
type Parameter struct {
	DataTypeFields
	ParamType     string `json:"paramType"` // path,query,body,header,form
	Name          string `json:"name"`
	Description   string `json:"description"`
	Required      bool   `json:"required"`
	AllowMultiple bool   `json:"allowMultiple"`
}

// 5.2.5 Response Message Object
type ResponseMessage struct {
	Code          int    `json:"code"`
	Message       string `json:"message"`
	ResponseModel string `json:"responseModel,omitempty"`
}

// 5.2.6, 5.2.7 Models Object
type Model struct {
	Id            string            `json:"id"`
	Description   string            `json:"description,omitempty"`
	Required      []string          `json:"required,omitempty"`
	Properties    ModelPropertyList `json:"properties"`
	SubTypes      []string          `json:"subTypes,omitempty"`
	Discriminator string            `json:"discriminator,omitempty"`
}

// 5.2.8 Properties Object
type ModelProperty struct {
	DataTypeFields
	Description string `json:"description,omitempty"`
}

// 5.2.10
type Authorizations map[string]Authorization
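Aside: in DataTypeFields above, Type and Ref are *string with omitempty so that exactly one of "type" and "$ref" appears in the serialized output. A minimal self-contained sketch of that pattern (the `field` type is hypothetical, mirroring only the two tags shown above):

package main

import (
	"encoding/json"
	"fmt"
)

// field mirrors the Type/Ref mutual-exclusion pattern of DataTypeFields.
type field struct {
	Type *string `json:"type,omitempty"` // if Ref not used
	Ref  *string `json:"$ref,omitempty"` // if Type not used
}

func main() {
	s := "string"
	r := "swagger.Model"
	a, _ := json.Marshal(field{Type: &s})
	b, _ := json.Marshal(field{Ref: &r})
	fmt.Println(string(a)) // {"type":"string"}
	fmt.Println(string(b)) // {"$ref":"swagger.Model"}
}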
21 vendor/github.com/emicklei/go-restful-swagger12/swagger_builder.go (generated, vendored)
@@ -1,21 +0,0 @@
package swagger

type SwaggerBuilder struct {
	SwaggerService
}

func NewSwaggerBuilder(config Config) *SwaggerBuilder {
	return &SwaggerBuilder{*newSwaggerService(config)}
}

func (sb SwaggerBuilder) ProduceListing() ResourceListing {
	return sb.SwaggerService.produceListing()
}

func (sb SwaggerBuilder) ProduceAllDeclarations() map[string]ApiDeclaration {
	return sb.SwaggerService.produceAllDeclarations()
}

func (sb SwaggerBuilder) ProduceDeclarations(route string) (*ApiDeclaration, bool) {
	return sb.SwaggerService.produceDeclarations(route)
}
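Aside: before this removal, standalone use of the builder looked roughly like the sketch below. It assumes the pre-removal vendored import path github.com/emicklei/go-restful-swagger12, and the Config field values are illustrative, not taken from CoreDNS:

package main

import (
	"fmt"

	swagger "github.com/emicklei/go-restful-swagger12"
)

func main() {
	cfg := swagger.Config{
		WebServicesUrl: "http://localhost:8080", // illustrative
		ApiPath:        "/apidocs.json",         // illustrative
	}
	// NewSwaggerBuilder/ProduceListing are the exported entry points shown above.
	sb := swagger.NewSwaggerBuilder(cfg)
	listing := sb.ProduceListing()
	fmt.Printf("swaggerVersion=%s apis=%d\n", listing.SwaggerVersion, len(listing.Apis))
}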
318 vendor/github.com/emicklei/go-restful-swagger12/swagger_test.go (generated, vendored)
@@ -1,318 +0,0 @@
package swagger

import (
	"encoding/json"
	"testing"

	"github.com/emicklei/go-restful"
	"github.com/emicklei/go-restful-swagger12/test_package"
)

func TestInfoStruct_Issue231(t *testing.T) {
	config := Config{
		Info: Info{
			Title:             "Title",
			Description:       "Description",
			TermsOfServiceUrl: "http://example.com",
			Contact:           "example@example.com",
			License:           "License",
			LicenseUrl:        "http://example.com/license.txt",
		},
	}
	sws := newSwaggerService(config)
	str, err := json.MarshalIndent(sws.produceListing(), "", " ")
	if err != nil {
		t.Fatal(err)
	}
	compareJson(t, string(str), `
	{
		"apiVersion": "",
		"swaggerVersion": "1.2",
		"apis": null,
		"info": {
			"title": "Title",
			"description": "Description",
			"termsOfServiceUrl": "http://example.com",
			"contact": "example@example.com",
			"license": "License",
			"licenseUrl": "http://example.com/license.txt"
		}
	}
	`)
}

// go test -v -test.run TestThatMultiplePathsOnRootAreHandled ...swagger
func TestThatMultiplePathsOnRootAreHandled(t *testing.T) {
	ws1 := new(restful.WebService)
	ws1.Route(ws1.GET("/_ping").To(dummy))
	ws1.Route(ws1.GET("/version").To(dummy))

	cfg := Config{
		WebServicesUrl: "http://here.com",
		ApiPath:        "/apipath",
		WebServices:    []*restful.WebService{ws1},
	}
	sws := newSwaggerService(cfg)
	decl := sws.composeDeclaration(ws1, "/")
	if got, want := len(decl.Apis), 2; got != want {
		t.Errorf("got %v want %v", got, want)
	}
}

func TestWriteSamples(t *testing.T) {
	ws1 := new(restful.WebService)
	ws1.Route(ws1.GET("/object").To(dummy).Writes(test_package.TestStruct{}))
	ws1.Route(ws1.GET("/array").To(dummy).Writes([]test_package.TestStruct{}))
	ws1.Route(ws1.GET("/object_and_array").To(dummy).Writes(struct{ Abc test_package.TestStruct }{}))

	cfg := Config{
		WebServicesUrl: "http://here.com",
		ApiPath:        "/apipath",
		WebServices:    []*restful.WebService{ws1},
	}
	sws := newSwaggerService(cfg)

	decl := sws.composeDeclaration(ws1, "/")

	str, err := json.MarshalIndent(decl.Apis, "", " ")
	if err != nil {
		t.Fatal(err)
	}

	compareJson(t, string(str), `
	[
		{
			"path": "/object",
			"description": "",
			"operations": [
				{
					"type": "test_package.TestStruct",
					"method": "GET",
					"nickname": "dummy",
					"parameters": []
				}
			]
		},
		{
			"path": "/array",
			"description": "",
			"operations": [
				{
					"type": "array",
					"items": {
						"$ref": "test_package.TestStruct"
					},
					"method": "GET",
					"nickname": "dummy",
					"parameters": []
				}
			]
		},
		{
			"path": "/object_and_array",
			"description": "",
			"operations": [
				{
					"type": "struct { Abc test_package.TestStruct }",
					"method": "GET",
					"nickname": "dummy",
					"parameters": []
				}
			]
		}
	]`)

	str, err = json.MarshalIndent(decl.Models, "", " ")
	if err != nil {
		t.Fatal(err)
	}
	compareJson(t, string(str), `
	{
		"test_package.TestStruct": {
			"id": "test_package.TestStruct",
			"required": [
				"TestField"
			],
			"properties": {
				"TestField": {
					"type": "string"
				}
			}
		},
		"||test_package.TestStruct": {
			"id": "||test_package.TestStruct",
			"properties": {}
		},
		"struct { Abc test_package.TestStruct }": {
			"id": "struct { Abc test_package.TestStruct }",
			"required": [
				"Abc"
			],
			"properties": {
				"Abc": {
					"$ref": "test_package.TestStruct"
				}
			}
		}
	}`)
}

func TestRoutesWithCommonPart(t *testing.T) {
	ws1 := new(restful.WebService)
	ws1.Path("/")
	ws1.Route(ws1.GET("/foobar").To(dummy).Writes(test_package.TestStruct{}))
	ws1.Route(ws1.HEAD("/foobar").To(dummy).Writes(test_package.TestStruct{}))
	ws1.Route(ws1.GET("/foo").To(dummy).Writes([]test_package.TestStruct{}))
	ws1.Route(ws1.HEAD("/foo").To(dummy).Writes(test_package.TestStruct{}))

	cfg := Config{
		WebServicesUrl: "http://here.com",
		ApiPath:        "/apipath",
		WebServices:    []*restful.WebService{ws1},
	}
	sws := newSwaggerService(cfg)

	decl := sws.composeDeclaration(ws1, "/foo")

	str, err := json.MarshalIndent(decl.Apis, "", " ")
	if err != nil {
		t.Fatal(err)
	}

	compareJson(t, string(str), `[
		{
			"path": "/foo",
			"description": "",
			"operations": [
				{
					"type": "array",
					"items": {
						"$ref": "test_package.TestStruct"
					},
					"method": "GET",
					"nickname": "dummy",
					"parameters": []
				},
				{
					"type": "test_package.TestStruct",
					"method": "HEAD",
					"nickname": "dummy",
					"parameters": []
				}
			]
		}
	]`)
}

// go test -v -test.run TestServiceToApi ...swagger
func TestServiceToApi(t *testing.T) {
	ws := new(restful.WebService)
	ws.Path("/tests")
	ws.Consumes(restful.MIME_JSON)
	ws.Produces(restful.MIME_XML)
	ws.Route(ws.GET("/a").To(dummy).Writes(sample{}))
	ws.Route(ws.PUT("/b").To(dummy).Writes(sample{}))
	ws.Route(ws.POST("/c").To(dummy).Writes(sample{}))
	ws.Route(ws.DELETE("/d").To(dummy).Writes(sample{}))

	ws.Route(ws.GET("/d").To(dummy).Writes(sample{}))
	ws.Route(ws.PUT("/c").To(dummy).Writes(sample{}))
	ws.Route(ws.POST("/b").To(dummy).Writes(sample{}))
	ws.Route(ws.DELETE("/a").To(dummy).Writes(sample{}))
	ws.ApiVersion("1.2.3")
	cfg := Config{
		WebServicesUrl:   "http://here.com",
		ApiPath:          "/apipath",
		WebServices:      []*restful.WebService{ws},
		PostBuildHandler: func(in *ApiDeclarationList) {},
	}
	sws := newSwaggerService(cfg)
	decl := sws.composeDeclaration(ws, "/tests")
	// checks
	if decl.ApiVersion != "1.2.3" {
		t.Errorf("got %v want %v", decl.ApiVersion, "1.2.3")
	}
	if decl.BasePath != "http://here.com" {
		t.Errorf("got %v want %v", decl.BasePath, "http://here.com")
	}
	if len(decl.Apis) != 4 {
		t.Errorf("got %v want %v", len(decl.Apis), 4)
	}
	pathOrder := ""
	for _, each := range decl.Apis {
		pathOrder += each.Path
		for _, other := range each.Operations {
			pathOrder += other.Method
		}
	}

	if pathOrder != "/tests/aGETDELETE/tests/bPUTPOST/tests/cPOSTPUT/tests/dDELETEGET" {
		t.Errorf("got %v want %v", pathOrder, "see test source")
	}
}

func dummy(i *restful.Request, o *restful.Response) {}

// go test -v -test.run TestIssue78 ...swagger
type Response struct {
	Code  int
	Users *[]User
	Items *[]TestItem
}
type User struct {
	Id, Name string
}
type TestItem struct {
	Id, Name string
}

// clear && go test -v -test.run TestComposeResponseMessages ...swagger
func TestComposeResponseMessages(t *testing.T) {
	responseErrors := map[int]restful.ResponseError{}
	responseErrors[400] = restful.ResponseError{Code: 400, Message: "Bad Request", Model: TestItem{}}
	route := restful.Route{ResponseErrors: responseErrors}
	decl := new(ApiDeclaration)
	decl.Models = ModelList{}
	msgs := composeResponseMessages(route, decl, &Config{})
	if msgs[0].ResponseModel != "swagger.TestItem" {
		t.Errorf("got %s want swagger.TestItem", msgs[0].ResponseModel)
	}
}

func TestIssue78(t *testing.T) {
	sws := newSwaggerService(Config{})
	models := new(ModelList)
	sws.addModelFromSampleTo(&Operation{}, true, Response{Items: &[]TestItem{}}, models)
	model, ok := models.At("swagger.Response")
	if !ok {
		t.Fatal("missing response model")
	}
	if "swagger.Response" != model.Id {
		t.Fatal("wrong model id:" + model.Id)
	}
	code, ok := model.Properties.At("Code")
	if !ok {
		t.Fatal("missing code")
	}
	if "integer" != *code.Type {
		t.Fatal("wrong code type:" + *code.Type)
	}
	items, ok := model.Properties.At("Items")
	if !ok {
		t.Fatal("missing items")
	}
	if "array" != *items.Type {
		t.Fatal("wrong items type:" + *items.Type)
	}
	items_items := items.Items
	if items_items == nil {
		t.Fatal("missing items->items")
	}
	ref := items_items.Ref
	if ref == nil {
		t.Fatal("missing $ref")
	}
	if *ref != "swagger.TestItem" {
		t.Fatal("wrong $ref:" + *ref)
	}
}
443 vendor/github.com/emicklei/go-restful-swagger12/swagger_webservice.go (generated, vendored)
@@ -1,443 +0,0 @@
package swagger

import (
	"fmt"

	"github.com/emicklei/go-restful"
	// "github.com/emicklei/hopwatch"
	"net/http"
	"reflect"
	"sort"
	"strings"

	"github.com/emicklei/go-restful/log"
)

type SwaggerService struct {
	config            Config
	apiDeclarationMap *ApiDeclarationList
}

func newSwaggerService(config Config) *SwaggerService {
	sws := &SwaggerService{
		config:            config,
		apiDeclarationMap: new(ApiDeclarationList)}

	// Build all ApiDeclarations
	for _, each := range config.WebServices {
		rootPath := each.RootPath()
		// skip the api service itself
		if rootPath != config.ApiPath {
			if rootPath == "" || rootPath == "/" {
				// use routes
				for _, route := range each.Routes() {
					entry := staticPathFromRoute(route)
					_, exists := sws.apiDeclarationMap.At(entry)
					if !exists {
						sws.apiDeclarationMap.Put(entry, sws.composeDeclaration(each, entry))
					}
				}
			} else { // use root path
				sws.apiDeclarationMap.Put(each.RootPath(), sws.composeDeclaration(each, each.RootPath()))
			}
		}
	}

	// if specified then call the PostBuildHandler
	if config.PostBuildHandler != nil {
		config.PostBuildHandler(sws.apiDeclarationMap)
	}
	return sws
}

// LogInfo is the function that is called when this package needs to log. It defaults to log.Printf
var LogInfo = func(format string, v ...interface{}) {
	// use the restful package-wide logger
	log.Printf(format, v...)
}

// InstallSwaggerService adds the WebService that provides the API documentation of all services,
// conforming to the Swagger documentation specification. (https://github.com/wordnik/swagger-core/wiki)
func InstallSwaggerService(aSwaggerConfig Config) {
	RegisterSwaggerService(aSwaggerConfig, restful.DefaultContainer)
}

// RegisterSwaggerService adds the WebService that provides the API documentation of all services,
// conforming to the Swagger documentation specification. (https://github.com/wordnik/swagger-core/wiki)
func RegisterSwaggerService(config Config, wsContainer *restful.Container) {
	sws := newSwaggerService(config)
	ws := new(restful.WebService)
	ws.Path(config.ApiPath)
	ws.Produces(restful.MIME_JSON)
	if config.DisableCORS {
		ws.Filter(enableCORS)
	}
	ws.Route(ws.GET("/").To(sws.getListing))
	ws.Route(ws.GET("/{a}").To(sws.getDeclarations))
	ws.Route(ws.GET("/{a}/{b}").To(sws.getDeclarations))
	ws.Route(ws.GET("/{a}/{b}/{c}").To(sws.getDeclarations))
	ws.Route(ws.GET("/{a}/{b}/{c}/{d}").To(sws.getDeclarations))
	ws.Route(ws.GET("/{a}/{b}/{c}/{d}/{e}").To(sws.getDeclarations))
	ws.Route(ws.GET("/{a}/{b}/{c}/{d}/{e}/{f}").To(sws.getDeclarations))
	ws.Route(ws.GET("/{a}/{b}/{c}/{d}/{e}/{f}/{g}").To(sws.getDeclarations))
	LogInfo("[restful/swagger] listing is available at %v%v", config.WebServicesUrl, config.ApiPath)
	wsContainer.Add(ws)

	// Check paths for UI serving
	if config.StaticHandler == nil && config.SwaggerFilePath != "" && config.SwaggerPath != "" {
		swaggerPathSlash := config.SwaggerPath
		// path must end with slash /
		if "/" != config.SwaggerPath[len(config.SwaggerPath)-1:] {
			LogInfo("[restful/swagger] use corrected SwaggerPath ; must end with slash (/)")
			swaggerPathSlash += "/"
		}

		LogInfo("[restful/swagger] %v%v is mapped to folder %v", config.WebServicesUrl, swaggerPathSlash, config.SwaggerFilePath)
		wsContainer.Handle(swaggerPathSlash, http.StripPrefix(swaggerPathSlash, http.FileServer(http.Dir(config.SwaggerFilePath))))

		// if we define a custom static handler, use it
	} else if config.StaticHandler != nil && config.SwaggerPath != "" {
		swaggerPathSlash := config.SwaggerPath
		// path must end with slash /
		if "/" != config.SwaggerPath[len(config.SwaggerPath)-1:] {
			LogInfo("[restful/swagger] use corrected SwaggerFilePath ; must end with slash (/)")
			swaggerPathSlash += "/"
		}
		LogInfo("[restful/swagger] %v%v is mapped to custom Handler %T", config.WebServicesUrl, swaggerPathSlash, config.StaticHandler)
		wsContainer.Handle(swaggerPathSlash, config.StaticHandler)

	} else {
		LogInfo("[restful/swagger] Swagger(File)Path is empty ; no UI is served")
	}
}

func staticPathFromRoute(r restful.Route) string {
	static := r.Path
	bracket := strings.Index(static, "{")
	if bracket <= 1 { // result cannot be empty
		return static
	}
	if bracket != -1 {
		static = r.Path[:bracket]
	}
	if strings.HasSuffix(static, "/") {
		return static[:len(static)-1]
	} else {
		return static
	}
}

func enableCORS(req *restful.Request, resp *restful.Response, chain *restful.FilterChain) {
	if origin := req.HeaderParameter(restful.HEADER_Origin); origin != "" {
		// prevent duplicate header
		if len(resp.Header().Get(restful.HEADER_AccessControlAllowOrigin)) == 0 {
			resp.AddHeader(restful.HEADER_AccessControlAllowOrigin, origin)
		}
	}
	chain.ProcessFilter(req, resp)
}

func (sws SwaggerService) getListing(req *restful.Request, resp *restful.Response) {
	listing := sws.produceListing()
	resp.WriteAsJson(listing)
}

func (sws SwaggerService) produceListing() ResourceListing {
	listing := ResourceListing{SwaggerVersion: swaggerVersion, ApiVersion: sws.config.ApiVersion, Info: sws.config.Info}
	sws.apiDeclarationMap.Do(func(k string, v ApiDeclaration) {
		ref := Resource{Path: k}
		if len(v.Apis) > 0 { // use description of first (could still be empty)
			ref.Description = v.Apis[0].Description
		}
		listing.Apis = append(listing.Apis, ref)
	})
	return listing
}

func (sws SwaggerService) getDeclarations(req *restful.Request, resp *restful.Response) {
	decl, ok := sws.produceDeclarations(composeRootPath(req))
	if !ok {
		resp.WriteErrorString(http.StatusNotFound, "ApiDeclaration not found")
		return
	}
	// unless WebServicesUrl is given
	if len(sws.config.WebServicesUrl) == 0 {
		// update base path from the actual request
		// TODO how to detect https? assume http for now
		var host string
		// X-Forwarded-Host or Host or Request.Host
		hostvalues, ok := req.Request.Header["X-Forwarded-Host"] // apache specific?
		if !ok || len(hostvalues) == 0 {
			forwarded, ok := req.Request.Header["Host"] // without reverse-proxy
			if !ok || len(forwarded) == 0 {
				// fallback to Host field
				host = req.Request.Host
			} else {
				host = forwarded[0]
			}
		} else {
			host = hostvalues[0]
		}
		// inspect Referer for the scheme (http vs https)
		scheme := "http"
		if referer := req.Request.Header["Referer"]; len(referer) > 0 {
			if strings.HasPrefix(referer[0], "https") {
				scheme = "https"
			}
		}
		decl.BasePath = fmt.Sprintf("%s://%s", scheme, host)
	}
	resp.WriteAsJson(decl)
}

func (sws SwaggerService) produceAllDeclarations() map[string]ApiDeclaration {
	decls := map[string]ApiDeclaration{}
	sws.apiDeclarationMap.Do(func(k string, v ApiDeclaration) {
		decls[k] = v
	})
	return decls
}

func (sws SwaggerService) produceDeclarations(route string) (*ApiDeclaration, bool) {
	decl, ok := sws.apiDeclarationMap.At(route)
	if !ok {
		return nil, false
	}
	decl.BasePath = sws.config.WebServicesUrl
	return &decl, true
}

// composeDeclaration uses all routes and parameters to create a ApiDeclaration
func (sws SwaggerService) composeDeclaration(ws *restful.WebService, pathPrefix string) ApiDeclaration {
	decl := ApiDeclaration{
		SwaggerVersion: swaggerVersion,
		BasePath:       sws.config.WebServicesUrl,
		ResourcePath:   pathPrefix,
		Models:         ModelList{},
		ApiVersion:     ws.Version()}

	// collect any path parameters
	rootParams := []Parameter{}
	for _, param := range ws.PathParameters() {
		rootParams = append(rootParams, asSwaggerParameter(param.Data()))
	}
	// aggregate by path
	pathToRoutes := newOrderedRouteMap()
	for _, other := range ws.Routes() {
		if strings.HasPrefix(other.Path, pathPrefix) {
			if len(pathPrefix) > 1 && len(other.Path) > len(pathPrefix) && other.Path[len(pathPrefix)] != '/' {
				continue
			}
			pathToRoutes.Add(other.Path, other)
		}
	}
	pathToRoutes.Do(func(path string, routes []restful.Route) {
		api := Api{Path: strings.TrimSuffix(withoutWildcard(path), "/"), Description: ws.Documentation()}
		voidString := "void"
		for _, route := range routes {
			operation := Operation{
				Method:  route.Method,
				Summary: route.Doc,
				Notes:   route.Notes,
				// Type gets overwritten if there is a write sample
				DataTypeFields:   DataTypeFields{Type: &voidString},
				Parameters:       []Parameter{},
				Nickname:         route.Operation,
				ResponseMessages: composeResponseMessages(route, &decl, &sws.config)}

			operation.Consumes = route.Consumes
			operation.Produces = route.Produces

			// share root params if any
			for _, swparam := range rootParams {
				operation.Parameters = append(operation.Parameters, swparam)
			}
			// route specific params
			for _, param := range route.ParameterDocs {
				operation.Parameters = append(operation.Parameters, asSwaggerParameter(param.Data()))
			}

			sws.addModelsFromRouteTo(&operation, route, &decl)
			api.Operations = append(api.Operations, operation)
		}
		decl.Apis = append(decl.Apis, api)
	})
	return decl
}

func withoutWildcard(path string) string {
	if strings.HasSuffix(path, ":*}") {
		return path[0:len(path)-3] + "}"
	}
	return path
}

// composeResponseMessages takes the ResponseErrors (if any) and creates ResponseMessages from them.
func composeResponseMessages(route restful.Route, decl *ApiDeclaration, config *Config) (messages []ResponseMessage) {
	if route.ResponseErrors == nil {
		return messages
	}
	// sort by code
	codes := sort.IntSlice{}
	for code := range route.ResponseErrors {
		codes = append(codes, code)
	}
	codes.Sort()
	for _, code := range codes {
		each := route.ResponseErrors[code]
		message := ResponseMessage{
			Code:    code,
			Message: each.Message,
		}
		if each.Model != nil {
			st := reflect.TypeOf(each.Model)
			isCollection, st := detectCollectionType(st)
|
|
||||||
// collection cannot be in responsemodel
|
|
||||||
if !isCollection {
|
|
||||||
modelName := modelBuilder{}.keyFrom(st)
|
|
||||||
modelBuilder{Models: &decl.Models, Config: config}.addModel(st, "")
|
|
||||||
message.ResponseModel = modelName
|
|
||||||
}
|
|
||||||
}
|
|
||||||
messages = append(messages, message)
|
|
||||||
}
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
// addModelsFromRoute takes any read or write sample from the Route and creates a Swagger model from it.
|
|
||||||
func (sws SwaggerService) addModelsFromRouteTo(operation *Operation, route restful.Route, decl *ApiDeclaration) {
|
|
||||||
if route.ReadSample != nil {
|
|
||||||
sws.addModelFromSampleTo(operation, false, route.ReadSample, &decl.Models)
|
|
||||||
}
|
|
||||||
if route.WriteSample != nil {
|
|
||||||
sws.addModelFromSampleTo(operation, true, route.WriteSample, &decl.Models)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func detectCollectionType(st reflect.Type) (bool, reflect.Type) {
|
|
||||||
isCollection := false
|
|
||||||
if st.Kind() == reflect.Slice || st.Kind() == reflect.Array {
|
|
||||||
st = st.Elem()
|
|
||||||
isCollection = true
|
|
||||||
} else {
|
|
||||||
if st.Kind() == reflect.Ptr {
|
|
||||||
if st.Elem().Kind() == reflect.Slice || st.Elem().Kind() == reflect.Array {
|
|
||||||
st = st.Elem().Elem()
|
|
||||||
isCollection = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return isCollection, st
|
|
||||||
}
|
|
||||||
|
|
||||||
// addModelFromSample creates and adds (or overwrites) a Model from a sample resource
|
|
||||||
func (sws SwaggerService) addModelFromSampleTo(operation *Operation, isResponse bool, sample interface{}, models *ModelList) {
|
|
||||||
mb := modelBuilder{Models: models, Config: &sws.config}
|
|
||||||
if isResponse {
|
|
||||||
sampleType, items := asDataType(sample, &sws.config)
|
|
||||||
operation.Type = sampleType
|
|
||||||
operation.Items = items
|
|
||||||
}
|
|
||||||
mb.addModelFrom(sample)
|
|
||||||
}
|
|
||||||
|
|
||||||
func asSwaggerParameter(param restful.ParameterData) Parameter {
|
|
||||||
return Parameter{
|
|
||||||
DataTypeFields: DataTypeFields{
|
|
||||||
Type: ¶m.DataType,
|
|
||||||
Format: asFormat(param.DataType, param.DataFormat),
|
|
||||||
DefaultValue: Special(param.DefaultValue),
|
|
||||||
},
|
|
||||||
Name: param.Name,
|
|
||||||
Description: param.Description,
|
|
||||||
ParamType: asParamType(param.Kind),
|
|
||||||
|
|
||||||
Required: param.Required}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Between 1..7 path parameters is supported
|
|
||||||
func composeRootPath(req *restful.Request) string {
|
|
||||||
path := "/" + req.PathParameter("a")
|
|
||||||
b := req.PathParameter("b")
|
|
||||||
if b == "" {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
path = path + "/" + b
|
|
||||||
c := req.PathParameter("c")
|
|
||||||
if c == "" {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
path = path + "/" + c
|
|
||||||
d := req.PathParameter("d")
|
|
||||||
if d == "" {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
path = path + "/" + d
|
|
||||||
e := req.PathParameter("e")
|
|
||||||
if e == "" {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
path = path + "/" + e
|
|
||||||
f := req.PathParameter("f")
|
|
||||||
if f == "" {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
path = path + "/" + f
|
|
||||||
g := req.PathParameter("g")
|
|
||||||
if g == "" {
|
|
||||||
return path
|
|
||||||
}
|
|
||||||
return path + "/" + g
|
|
||||||
}
|
|
||||||
|
|
||||||
func asFormat(dataType string, dataFormat string) string {
|
|
||||||
if dataFormat != "" {
|
|
||||||
return dataFormat
|
|
||||||
}
|
|
||||||
return "" // TODO
|
|
||||||
}
|
|
||||||
|
|
||||||
func asParamType(kind int) string {
|
|
||||||
switch {
|
|
||||||
case kind == restful.PathParameterKind:
|
|
||||||
return "path"
|
|
||||||
case kind == restful.QueryParameterKind:
|
|
||||||
return "query"
|
|
||||||
case kind == restful.BodyParameterKind:
|
|
||||||
return "body"
|
|
||||||
case kind == restful.HeaderParameterKind:
|
|
||||||
return "header"
|
|
||||||
case kind == restful.FormParameterKind:
|
|
||||||
return "form"
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func asDataType(any interface{}, config *Config) (*string, *Item) {
|
|
||||||
// If it's not a collection, return the suggested model name
|
|
||||||
st := reflect.TypeOf(any)
|
|
||||||
isCollection, st := detectCollectionType(st)
|
|
||||||
modelName := modelBuilder{}.keyFrom(st)
|
|
||||||
// if it's not a collection we are done
|
|
||||||
if !isCollection {
|
|
||||||
return &modelName, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// XXX: This is not very elegant
|
|
||||||
// We create an Item object referring to the given model
|
|
||||||
models := ModelList{}
|
|
||||||
mb := modelBuilder{Models: &models, Config: config}
|
|
||||||
mb.addModelFrom(any)
|
|
||||||
|
|
||||||
elemTypeName := mb.getElementTypeName(modelName, "", st)
|
|
||||||
item := new(Item)
|
|
||||||
if mb.isPrimitiveType(elemTypeName) {
|
|
||||||
mapped := mb.jsonSchemaType(elemTypeName)
|
|
||||||
item.Type = &mapped
|
|
||||||
} else {
|
|
||||||
item.Ref = &elemTypeName
|
|
||||||
}
|
|
||||||
tmp := "array"
|
|
||||||
return &tmp, item
|
|
||||||
}
|
|
86 vendor/github.com/emicklei/go-restful-swagger12/utils_test.go generated vendored
@@ -1,86 +0,0 @@
package swagger

import (
	"bytes"
	"encoding/json"
	"fmt"
	"reflect"
	"strings"
	"testing"
)

func testJsonFromStructWithConfig(t *testing.T, sample interface{}, expectedJson string, config *Config) bool {
	m := modelsFromStructWithConfig(sample, config)
	data, _ := json.MarshalIndent(m, " ", " ")
	return compareJson(t, string(data), expectedJson)
}

func modelsFromStructWithConfig(sample interface{}, config *Config) *ModelList {
	models := new(ModelList)
	builder := modelBuilder{Models: models, Config: config}
	builder.addModelFrom(sample)
	return models
}

func testJsonFromStruct(t *testing.T, sample interface{}, expectedJson string) bool {
	return testJsonFromStructWithConfig(t, sample, expectedJson, &Config{})
}

func modelsFromStruct(sample interface{}) *ModelList {
	return modelsFromStructWithConfig(sample, &Config{})
}

func compareJson(t *testing.T, actualJsonAsString string, expectedJsonAsString string) bool {
	success := false
	var actualMap map[string]interface{}
	json.Unmarshal([]byte(actualJsonAsString), &actualMap)
	var expectedMap map[string]interface{}
	err := json.Unmarshal([]byte(expectedJsonAsString), &expectedMap)
	if err != nil {
		var actualArray []interface{}
		json.Unmarshal([]byte(actualJsonAsString), &actualArray)
		var expectedArray []interface{}
		err := json.Unmarshal([]byte(expectedJsonAsString), &expectedArray)
		success = reflect.DeepEqual(actualArray, expectedArray)
		if err != nil {
			t.Fatalf("Unparsable expected JSON: %s, actual: %v, expected: %v", err, actualJsonAsString, expectedJsonAsString)
		}
	} else {
		success = reflect.DeepEqual(actualMap, expectedMap)
	}
	if !success {
		t.Log("---- expected -----")
		t.Log(withLineNumbers(expectedJsonAsString))
		t.Log("---- actual -----")
		t.Log(withLineNumbers(actualJsonAsString))
		t.Log("---- raw -----")
		t.Log(actualJsonAsString)
		t.Error("there are differences")
		return false
	}
	return true
}

func indexOfNonMatchingLine(actual, expected string) int {
	a := strings.Split(actual, "\n")
	e := strings.Split(expected, "\n")
	size := len(a)
	if len(e) < len(a) {
		size = len(e)
	}
	for i := 0; i < size; i++ {
		if a[i] != e[i] {
			return i
		}
	}
	return -1
}

func withLineNumbers(content string) string {
	var buffer bytes.Buffer
	lines := strings.Split(content, "\n")
	for i, each := range lines {
		buffer.WriteString(fmt.Sprintf("%d:%s\n", i, each))
	}
	return buffer.String()
}
3 vendor/github.com/go-openapi/analysis/.gitignore generated vendored
@@ -1,3 +0,0 @@
secrets.yml
coverage.out
.idea
18 vendor/github.com/go-openapi/analysis/.travis.yml generated vendored
@@ -1,18 +0,0 @@
language: go
go:
- 1.7
install:
- go get -u github.com/stretchr/testify/assert
- go get -u gopkg.in/yaml.v2
- go get -u github.com/go-openapi/swag
- go get -u github.com/go-openapi/jsonpointer
- go get -u github.com/go-openapi/spec
- go get -u github.com/go-openapi/strfmt
- go get -u github.com/go-openapi/loads/fmts
script:
- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./...
after_success:
- bash <(curl -s https://codecov.io/bash)
notifications:
  slack:
    secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
74 vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md generated vendored
@@ -1,74 +0,0 @@
# Contributor Covenant Code of Conduct

## Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.

## Our Standards

Examples of behavior that contributes to creating a positive environment
include:

* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting

## Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

## Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

## Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
6 vendor/github.com/go-openapi/analysis/README.md generated vendored
@@ -1,6 +0,0 @@
# OpenAPI initiative analysis [](https://travis-ci.org/go-openapi/analysis) [](https://codecov.io/gh/go-openapi/analysis) [](https://slackin.goswagger.io)

[](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE) [](http://godoc.org/github.com/go-openapi/analysis)


A foundational library to analyze an OAI specification document for easier reasoning about the content.
785 vendor/github.com/go-openapi/analysis/analyzer.go generated vendored
@@ -1,785 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package analysis

import (
	"fmt"
	slashpath "path"
	"strconv"
	"strings"

	"github.com/go-openapi/jsonpointer"
	"github.com/go-openapi/spec"
	"github.com/go-openapi/swag"
)

type referenceAnalysis struct {
	schemas        map[string]spec.Ref
	responses      map[string]spec.Ref
	parameters     map[string]spec.Ref
	items          map[string]spec.Ref
	headerItems    map[string]spec.Ref
	parameterItems map[string]spec.Ref
	allRefs        map[string]spec.Ref
	pathItems      map[string]spec.Ref
}

func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
	r.allRefs["#"+key] = ref
}

func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
	r.items["#"+key] = items.Ref
	r.addRef(key, items.Ref)
	if location == "header" {
		r.headerItems["#"+key] = items.Ref
	} else {
		r.parameterItems["#"+key] = items.Ref
	}
}

func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
	r.schemas["#"+key] = ref.Schema.Ref
	r.addRef(key, ref.Schema.Ref)
}

func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
	r.responses["#"+key] = resp.Ref
	r.addRef(key, resp.Ref)
}

func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
	r.parameters["#"+key] = param.Ref
	r.addRef(key, param.Ref)
}

func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
	r.pathItems["#"+key] = pathItem.Ref
	r.addRef(key, pathItem.Ref)
}

type patternAnalysis struct {
	parameters  map[string]string
	headers     map[string]string
	items       map[string]string
	schemas     map[string]string
	allPatterns map[string]string
}

func (p *patternAnalysis) addPattern(key, pattern string) {
	p.allPatterns["#"+key] = pattern
}

func (p *patternAnalysis) addParameterPattern(key, pattern string) {
	p.parameters["#"+key] = pattern
	p.addPattern(key, pattern)
}

func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
	p.headers["#"+key] = pattern
	p.addPattern(key, pattern)
}

func (p *patternAnalysis) addItemsPattern(key, pattern string) {
	p.items["#"+key] = pattern
	p.addPattern(key, pattern)
}

func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
	p.schemas["#"+key] = pattern
	p.addPattern(key, pattern)
}

// New takes a swagger spec object and returns an analyzed spec document.
// The analyzed document contains a number of indices that make it easier to
// reason about semantics of a swagger specification for use in code generation
// or validation etc.
func New(doc *spec.Swagger) *Spec {
	a := &Spec{
		spec:        doc,
		consumes:    make(map[string]struct{}, 150),
		produces:    make(map[string]struct{}, 150),
		authSchemes: make(map[string]struct{}, 150),
		operations:  make(map[string]map[string]*spec.Operation, 150),
		allSchemas:  make(map[string]SchemaRef, 150),
		allOfs:      make(map[string]SchemaRef, 150),
		references: referenceAnalysis{
			schemas:        make(map[string]spec.Ref, 150),
			pathItems:      make(map[string]spec.Ref, 150),
			responses:      make(map[string]spec.Ref, 150),
			parameters:     make(map[string]spec.Ref, 150),
			items:          make(map[string]spec.Ref, 150),
			headerItems:    make(map[string]spec.Ref, 150),
			parameterItems: make(map[string]spec.Ref, 150),
			allRefs:        make(map[string]spec.Ref, 150),
		},
		patterns: patternAnalysis{
			parameters:  make(map[string]string, 150),
			headers:     make(map[string]string, 150),
			items:       make(map[string]string, 150),
			schemas:     make(map[string]string, 150),
			allPatterns: make(map[string]string, 150),
		},
	}
	a.initialize()
	return a
}

// Spec takes a swagger spec object and turns it into a registry
// with a bunch of utility methods to act on the information in the spec
type Spec struct {
	spec        *spec.Swagger
	consumes    map[string]struct{}
	produces    map[string]struct{}
	authSchemes map[string]struct{}
	operations  map[string]map[string]*spec.Operation
	references  referenceAnalysis
	patterns    patternAnalysis
	allSchemas  map[string]SchemaRef
	allOfs      map[string]SchemaRef
}

func (s *Spec) reset() {
	s.consumes = make(map[string]struct{}, 150)
	s.produces = make(map[string]struct{}, 150)
	s.authSchemes = make(map[string]struct{}, 150)
	s.operations = make(map[string]map[string]*spec.Operation, 150)
	s.allSchemas = make(map[string]SchemaRef, 150)
	s.allOfs = make(map[string]SchemaRef, 150)
	s.references.schemas = make(map[string]spec.Ref, 150)
	s.references.pathItems = make(map[string]spec.Ref, 150)
	s.references.responses = make(map[string]spec.Ref, 150)
	s.references.parameters = make(map[string]spec.Ref, 150)
	s.references.items = make(map[string]spec.Ref, 150)
	s.references.headerItems = make(map[string]spec.Ref, 150)
	s.references.parameterItems = make(map[string]spec.Ref, 150)
	s.references.allRefs = make(map[string]spec.Ref, 150)
	s.patterns.parameters = make(map[string]string, 150)
	s.patterns.headers = make(map[string]string, 150)
	s.patterns.items = make(map[string]string, 150)
	s.patterns.schemas = make(map[string]string, 150)
	s.patterns.allPatterns = make(map[string]string, 150)
}

func (s *Spec) reload() {
	s.reset()
	s.initialize()
}

func (s *Spec) initialize() {
	for _, c := range s.spec.Consumes {
		s.consumes[c] = struct{}{}
	}
	for _, c := range s.spec.Produces {
		s.produces[c] = struct{}{}
	}
	for _, ss := range s.spec.Security {
		for k := range ss {
			s.authSchemes[k] = struct{}{}
		}
	}
	for path, pathItem := range s.AllPaths() {
		s.analyzeOperations(path, &pathItem)
	}

	for name, parameter := range s.spec.Parameters {
		refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
		if parameter.Items != nil {
			s.analyzeItems("items", parameter.Items, refPref, "parameter")
		}
		if parameter.In == "body" && parameter.Schema != nil {
			s.analyzeSchema("schema", *parameter.Schema, refPref)
		}
		if parameter.Pattern != "" {
			s.patterns.addParameterPattern(refPref, parameter.Pattern)
		}
	}

	for name, response := range s.spec.Responses {
		refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
		for k, v := range response.Headers {
			hRefPref := slashpath.Join(refPref, "headers", k)
			if v.Items != nil {
				s.analyzeItems("items", v.Items, hRefPref, "header")
			}
			if v.Pattern != "" {
				s.patterns.addHeaderPattern(hRefPref, v.Pattern)
			}
		}
		if response.Schema != nil {
			s.analyzeSchema("schema", *response.Schema, refPref)
		}
	}

	for name, schema := range s.spec.Definitions {
		s.analyzeSchema(name, schema, "/definitions")
	}
	// TODO: after analyzing all things and flattening schemas etc
	// resolve all the collected references to their final representations
	// best put in a separate method because this could get expensive
}

func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
	// TODO: resolve refs here?
	op := pi
	if pi.Ref.String() != "" {
		key := slashpath.Join("/paths", jsonpointer.Escape(path))
		s.references.addPathItemRef(key, pi)
	}
	s.analyzeOperation("GET", path, op.Get)
	s.analyzeOperation("PUT", path, op.Put)
	s.analyzeOperation("POST", path, op.Post)
	s.analyzeOperation("PATCH", path, op.Patch)
	s.analyzeOperation("DELETE", path, op.Delete)
	s.analyzeOperation("HEAD", path, op.Head)
	s.analyzeOperation("OPTIONS", path, op.Options)
	for i, param := range op.Parameters {
		refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
		if param.Ref.String() != "" {
			s.references.addParamRef(refPref, &param)
		}
		if param.Pattern != "" {
			s.patterns.addParameterPattern(refPref, param.Pattern)
		}
		if param.Items != nil {
			s.analyzeItems("items", param.Items, refPref, "parameter")
		}
		if param.Schema != nil {
			s.analyzeSchema("schema", *param.Schema, refPref)
		}
	}
}

func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
	if items == nil {
		return
	}
	refPref := slashpath.Join(prefix, name)
	s.analyzeItems(name, items.Items, refPref, location)
	if items.Ref.String() != "" {
		s.references.addItemsRef(refPref, items, location)
	}
	if items.Pattern != "" {
		s.patterns.addItemsPattern(refPref, items.Pattern)
	}
}

func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
	if op == nil {
		return
	}

	for _, c := range op.Consumes {
		s.consumes[c] = struct{}{}
	}
	for _, c := range op.Produces {
		s.produces[c] = struct{}{}
	}
	for _, ss := range op.Security {
		for k := range ss {
			s.authSchemes[k] = struct{}{}
		}
	}
	if _, ok := s.operations[method]; !ok {
		s.operations[method] = make(map[string]*spec.Operation)
	}
	s.operations[method][path] = op
	prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
	for i, param := range op.Parameters {
		refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
		if param.Ref.String() != "" {
			s.references.addParamRef(refPref, &param)
		}
		if param.Pattern != "" {
			s.patterns.addParameterPattern(refPref, param.Pattern)
		}
		s.analyzeItems("items", param.Items, refPref, "parameter")
		if param.In == "body" && param.Schema != nil {
			s.analyzeSchema("schema", *param.Schema, refPref)
		}
	}
	if op.Responses != nil {
		if op.Responses.Default != nil {
			refPref := slashpath.Join(prefix, "responses", "default")
			if op.Responses.Default.Ref.String() != "" {
				s.references.addResponseRef(refPref, op.Responses.Default)
			}
			for k, v := range op.Responses.Default.Headers {
				hRefPref := slashpath.Join(refPref, "headers", k)
				s.analyzeItems("items", v.Items, hRefPref, "header")
				if v.Pattern != "" {
					s.patterns.addHeaderPattern(hRefPref, v.Pattern)
				}
			}
			if op.Responses.Default.Schema != nil {
				s.analyzeSchema("schema", *op.Responses.Default.Schema, refPref)
			}
		}
		for k, res := range op.Responses.StatusCodeResponses {
			refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
			if res.Ref.String() != "" {
				s.references.addResponseRef(refPref, &res)
			}
			for k, v := range res.Headers {
				hRefPref := slashpath.Join(refPref, "headers", k)
				s.analyzeItems("items", v.Items, hRefPref, "header")
				if v.Pattern != "" {
					s.patterns.addHeaderPattern(hRefPref, v.Pattern)
				}
			}
			if res.Schema != nil {
				s.analyzeSchema("schema", *res.Schema, refPref)
			}
		}
	}
}

func (s *Spec) analyzeSchema(name string, schema spec.Schema, prefix string) {
	refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
	schRef := SchemaRef{
		Name:     name,
		Schema:   &schema,
		Ref:      spec.MustCreateRef("#" + refURI),
		TopLevel: prefix == "/definitions",
	}

	s.allSchemas["#"+refURI] = schRef

	if schema.Ref.String() != "" {
		s.references.addSchemaRef(refURI, schRef)
	}
	if schema.Pattern != "" {
		s.patterns.addSchemaPattern(refURI, schema.Pattern)
	}

	for k, v := range schema.Definitions {
		s.analyzeSchema(k, v, slashpath.Join(refURI, "definitions"))
	}
	for k, v := range schema.Properties {
		s.analyzeSchema(k, v, slashpath.Join(refURI, "properties"))
	}
	for k, v := range schema.PatternProperties {
		s.analyzeSchema(k, v, slashpath.Join(refURI, "patternProperties"))
	}
	for i, v := range schema.AllOf {
		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
	}
	if len(schema.AllOf) > 0 {
		s.allOfs["#"+refURI] = schRef
	}
	for i, v := range schema.AnyOf {
		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
	}
	for i, v := range schema.OneOf {
		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
	}
	if schema.Not != nil {
		s.analyzeSchema("not", *schema.Not, refURI)
	}
	if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
		s.analyzeSchema("additionalProperties", *schema.AdditionalProperties.Schema, refURI)
	}
	if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
		s.analyzeSchema("additionalItems", *schema.AdditionalItems.Schema, refURI)
	}
	if schema.Items != nil {
		if schema.Items.Schema != nil {
			s.analyzeSchema("items", *schema.Items.Schema, refURI)
		}
		for i, sch := range schema.Items.Schemas {
			s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
		}
	}
}

// SecurityRequirement is a representation of a security requirement for an operation
type SecurityRequirement struct {
	Name   string
	Scopes []string
}

// SecurityRequirementsFor gets the security requirements for the operation
func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) []SecurityRequirement {
	if s.spec.Security == nil && operation.Security == nil {
		return nil
	}

	schemes := s.spec.Security
	if operation.Security != nil {
		schemes = operation.Security
	}

	unique := make(map[string]SecurityRequirement)
	for _, scheme := range schemes {
		for k, v := range scheme {
			if _, ok := unique[k]; !ok {
				unique[k] = SecurityRequirement{Name: k, Scopes: v}
			}
		}
	}

	var result []SecurityRequirement
	for _, v := range unique {
		result = append(result, v)
	}
	return result
}

// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
	requirements := s.SecurityRequirementsFor(operation)
	if len(requirements) == 0 {
		return nil
	}
	result := make(map[string]spec.SecurityScheme)
	for _, v := range requirements {
		if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
			if definition != nil {
				result[v.Name] = *definition
			}
		}
	}
	return result
}

// ConsumesFor gets the mediatypes for the operation
func (s *Spec) ConsumesFor(operation *spec.Operation) []string {

	if len(operation.Consumes) == 0 {
		cons := make(map[string]struct{}, len(s.spec.Consumes))
		for _, k := range s.spec.Consumes {
			cons[k] = struct{}{}
		}
		return s.structMapKeys(cons)
	}

	cons := make(map[string]struct{}, len(operation.Consumes))
	for _, c := range operation.Consumes {
		cons[c] = struct{}{}
	}
	return s.structMapKeys(cons)
}

// ProducesFor gets the mediatypes for the operation
func (s *Spec) ProducesFor(operation *spec.Operation) []string {
	if len(operation.Produces) == 0 {
		prod := make(map[string]struct{}, len(s.spec.Produces))
		for _, k := range s.spec.Produces {
			prod[k] = struct{}{}
		}
		return s.structMapKeys(prod)
	}

	prod := make(map[string]struct{}, len(operation.Produces))
	for _, c := range operation.Produces {
		prod[c] = struct{}{}
	}
	return s.structMapKeys(prod)
}

func mapKeyFromParam(param *spec.Parameter) string {
	return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
}

func fieldNameFromParam(param *spec.Parameter) string {
	if nm, ok := param.Extensions.GetString("go-name"); ok {
		return nm
	}
	return swag.ToGoName(param.Name)
}

func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter) {
	for _, param := range parameters {
		pr := param
		if pr.Ref.String() != "" {
			obj, _, err := pr.Ref.GetPointer().Get(s.spec)
			if err != nil {
				panic(err)
			}
			pr = obj.(spec.Parameter)
		}
		res[mapKeyFromParam(&pr)] = pr
	}
}

// ParametersFor the specified operation id
func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
	gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
		bag := make(map[string]spec.Parameter)
		s.paramsAsMap(pi.Parameters, bag)
		s.paramsAsMap(op.Parameters, bag)

		var res []spec.Parameter
		for _, v := range bag {
			res = append(res, v)
		}
		return res
	}
	for _, pi := range s.spec.Paths.Paths {
		if pi.Get != nil && pi.Get.ID == operationID {
			return gatherParams(&pi, pi.Get)
		}
		if pi.Head != nil && pi.Head.ID == operationID {
			return gatherParams(&pi, pi.Head)
		}
		if pi.Options != nil && pi.Options.ID == operationID {
			return gatherParams(&pi, pi.Options)
		}
		if pi.Post != nil && pi.Post.ID == operationID {
			return gatherParams(&pi, pi.Post)
		}
		if pi.Patch != nil && pi.Patch.ID == operationID {
			return gatherParams(&pi, pi.Patch)
		}
		if pi.Put != nil && pi.Put.ID == operationID {
			return gatherParams(&pi, pi.Put)
		}
		if pi.Delete != nil && pi.Delete.ID == operationID {
			return gatherParams(&pi, pi.Delete)
		}
	}
	return nil
}

// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
// apply for the method and path.
func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
	res := make(map[string]spec.Parameter)
	if pi, ok := s.spec.Paths.Paths[path]; ok {
		s.paramsAsMap(pi.Parameters, res)
		s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res)
	}
	return res
}

// OperationForName gets the operation for the given id
func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
	for method, pathItem := range s.operations {
		for path, op := range pathItem {
			if operationID == op.ID {
				return method, path, op, true
			}
		}
	}
	return "", "", nil, false
}

// OperationFor the given method and path
func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
	if mp, ok := s.operations[strings.ToUpper(method)]; ok {
		op, fn := mp[path]
		return op, fn
	}
	return nil, false
}

// Operations gathers all the operations specified in the spec document
func (s *Spec) Operations() map[string]map[string]*spec.Operation {
	return s.operations
}

func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
	if len(mp) == 0 {
		return nil
	}

	result := make([]string, 0, len(mp))
	for k := range mp {
		result = append(result, k)
	}
	return result
}

// AllPaths returns all the paths in the swagger spec
func (s *Spec) AllPaths() map[string]spec.PathItem {
	if s.spec == nil || s.spec.Paths == nil {
		return nil
	}
	return s.spec.Paths.Paths
}

// OperationIDs gets all the operation ids based on method an dpath
func (s *Spec) OperationIDs() []string {
	if len(s.operations) == 0 {
		return nil
	}
	result := make([]string, 0, len(s.operations))
	for method, v := range s.operations {
		for p, o := range v {
			if o.ID != "" {
				result = append(result, o.ID)
			} else {
				result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
			}
		}
	}
	return result
}

// OperationMethodPaths gets all the operation ids based on method an dpath
func (s *Spec) OperationMethodPaths() []string {
	if len(s.operations) == 0 {
		return nil
	}
	result := make([]string, 0, len(s.operations))
	for method, v := range s.operations {
		for p := range v {
			result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
		}
	}
	return result
}

// RequiredConsumes gets all the distinct consumes that are specified in the specification document
func (s *Spec) RequiredConsumes() []string {
	return s.structMapKeys(s.consumes)
}

// RequiredProduces gets all the distinct produces that are specified in the specification document
func (s *Spec) RequiredProduces() []string {
	return s.structMapKeys(s.produces)
}

// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
func (s *Spec) RequiredSecuritySchemes() []string {
	return s.structMapKeys(s.authSchemes)
}

// SchemaRef is a reference to a schema
type SchemaRef struct {
	Name     string
	Ref      spec.Ref
	Schema   *spec.Schema
	TopLevel bool
}

// SchemasWithAllOf returns schema references to all schemas that are defined
// with an allOf key
func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
	for _, v := range s.allOfs {
		result = append(result, v)
	}
	return
}

// AllDefinitions returns schema references for all the definitions that were discovered
func (s *Spec) AllDefinitions() (result []SchemaRef) {
	for _, v := range s.allSchemas {
		result = append(result, v)
	}
	return
}

// AllDefinitionReferences returns json refs for all the discovered schemas
func (s *Spec) AllDefinitionReferences() (result []string) {
	for _, v := range s.references.schemas {
		result = append(result, v.String())
	}
	return
}

// AllParameterReferences returns json refs for all the discovered parameters
func (s *Spec) AllParameterReferences() (result []string) {
	for _, v := range s.references.parameters {
		result = append(result, v.String())
	}
	return
}

// AllResponseReferences returns json refs for all the discovered responses
func (s *Spec) AllResponseReferences() (result []string) {
	for _, v := range s.references.responses {
		result = append(result, v.String())
	}
	return
}

// AllPathItemReferences returns the references for all the items
func (s *Spec) AllPathItemReferences() (result []string) {
	for _, v := range s.references.pathItems {
		result = append(result, v.String())
	}
	return
}

// AllItemsReferences returns the references for all the items
func (s *Spec) AllItemsReferences() (result []string) {
	for _, v := range s.references.items {
		result = append(result, v.String())
	}
	return
}

// AllReferences returns all the references found in the document
func (s *Spec) AllReferences() (result []string) {
	for _, v := range s.references.allRefs {
		result = append(result, v.String())
	}
	return
}

// AllRefs returns all the unique references found in the document
func (s *Spec) AllRefs() (result []spec.Ref) {
	set := make(map[string]struct{})
	for _, v := range s.references.allRefs {
		a := v.String()
		if a == "" {
			continue
		}
		if _, ok := set[a]; !ok {
			set[a] = struct{}{}
			result = append(result, v)
		}
	}
	return
}

func cloneStringMap(source map[string]string) map[string]string {
	res := make(map[string]string, len(source))
	for k, v := range source {
		res[k] = v
	}
	return res
}

// ParameterPatterns returns all the patterns found in parameters
// the map is cloned to avoid accidental changes
func (s *Spec) ParameterPatterns() map[string]string {
	return cloneStringMap(s.patterns.parameters)
}

// HeaderPatterns returns all the patterns found in response headers
// the map is cloned to avoid accidental changes
func (s *Spec) HeaderPatterns() map[string]string {
	return cloneStringMap(s.patterns.headers)
}

// ItemsPatterns returns all the patterns found in simple array items
// the map is cloned to avoid accidental changes
func (s *Spec) ItemsPatterns() map[string]string {
	return cloneStringMap(s.patterns.items)
}

// SchemaPatterns returns all the patterns found in schemas
// the map is cloned to avoid accidental changes
func (s *Spec) SchemaPatterns() map[string]string {
	return cloneStringMap(s.patterns.schemas)
}

// AllPatterns returns all the patterns found in the spec
// the map is cloned to avoid accidental changes
func (s *Spec) AllPatterns() map[string]string {
	return cloneStringMap(s.patterns.allPatterns)
}
284 vendor/github.com/go-openapi/analysis/analyzer_test.go generated vendored
@@ -1,284 +0,0 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package analysis

import (
	"encoding/json"
	"fmt"
	"path/filepath"
	"sort"
	"testing"

	"github.com/go-openapi/loads/fmts"
	"github.com/go-openapi/spec"
	"github.com/go-openapi/swag"
	"github.com/stretchr/testify/assert"
)

func schemeNames(schemes []SecurityRequirement) []string {
	var names []string
	for _, v := range schemes {
		names = append(names, v.Name)
	}
	sort.Sort(sort.StringSlice(names))
	return names
}

func TestAnalyzer(t *testing.T) {
	formatParam := spec.QueryParam("format").Typed("string", "")

	limitParam := spec.QueryParam("limit").Typed("integer", "int32")
	limitParam.Extensions = spec.Extensions(map[string]interface{}{})
	limitParam.Extensions.Add("go-name", "Limit")

	skipParam := spec.QueryParam("skip").Typed("integer", "int32")
	pi := spec.PathItem{}
	pi.Parameters = []spec.Parameter{*limitParam}

	op := &spec.Operation{}
	op.Consumes = []string{"application/x-yaml"}
	op.Produces = []string{"application/x-yaml"}
	op.Security = []map[string][]string{
		map[string][]string{"oauth2": []string{}},
		map[string][]string{"basic": nil},
	}
	op.ID = "someOperation"
	op.Parameters = []spec.Parameter{*skipParam}
	pi.Get = op

	pi2 := spec.PathItem{}
	pi2.Parameters = []spec.Parameter{*limitParam}
	op2 := &spec.Operation{}
	op2.ID = "anotherOperation"
	op2.Parameters = []spec.Parameter{*skipParam}
	pi2.Get = op2

	spec := &spec.Swagger{
		SwaggerProps: spec.SwaggerProps{
			Consumes: []string{"application/json"},
			Produces: []string{"application/json"},
			Security: []map[string][]string{
				map[string][]string{"apikey": nil},
			},
			SecurityDefinitions: map[string]*spec.SecurityScheme{
				"basic":  spec.BasicAuth(),
				"apiKey": spec.APIKeyAuth("api_key", "query"),
				"oauth2": spec.OAuth2AccessToken("http://authorize.com", "http://token.com"),
			},
			Parameters: map[string]spec.Parameter{"format": *formatParam},
			Paths: &spec.Paths{
				Paths: map[string]spec.PathItem{
					"/":      pi,
					"/items": pi2,
				},
			},
		},
	}
	analyzer := New(spec)

	assert.Len(t, analyzer.consumes, 2)
	assert.Len(t, analyzer.produces, 2)
	assert.Len(t, analyzer.operations, 1)
	assert.Equal(t, analyzer.operations["GET"]["/"], spec.Paths.Paths["/"].Get)

	expected := []string{"application/x-yaml"}
	sort.Sort(sort.StringSlice(expected))
	consumes := analyzer.ConsumesFor(spec.Paths.Paths["/"].Get)
	sort.Sort(sort.StringSlice(consumes))
	assert.Equal(t, expected, consumes)

	produces := analyzer.ProducesFor(spec.Paths.Paths["/"].Get)
	sort.Sort(sort.StringSlice(produces))
	assert.Equal(t, expected, produces)

	expected = []string{"application/json"}
	sort.Sort(sort.StringSlice(expected))
	consumes = analyzer.ConsumesFor(spec.Paths.Paths["/items"].Get)
	sort.Sort(sort.StringSlice(consumes))
	assert.Equal(t, expected, consumes)

	produces = analyzer.ProducesFor(spec.Paths.Paths["/items"].Get)
	sort.Sort(sort.StringSlice(produces))
	assert.Equal(t, expected, produces)

	expectedSchemes := []SecurityRequirement{SecurityRequirement{"oauth2", []string{}}, SecurityRequirement{"basic", nil}}
	schemes := analyzer.SecurityRequirementsFor(spec.Paths.Paths["/"].Get)
	assert.Equal(t, schemeNames(expectedSchemes), schemeNames(schemes))

	securityDefinitions := analyzer.SecurityDefinitionsFor(spec.Paths.Paths["/"].Get)
	assert.Equal(t, securityDefinitions["basic"], *spec.SecurityDefinitions["basic"])
	assert.Equal(t, securityDefinitions["oauth2"], *spec.SecurityDefinitions["oauth2"])

	parameters := analyzer.ParamsFor("GET", "/")
	assert.Len(t, parameters, 2)

	operations := analyzer.OperationIDs()
	assert.Len(t, operations, 2)

	producers := analyzer.RequiredProduces()
	assert.Len(t, producers, 2)
	consumers := analyzer.RequiredConsumes()
	assert.Len(t, consumers, 2)
	authSchemes := analyzer.RequiredSecuritySchemes()
	assert.Len(t, authSchemes, 3)

	ops := analyzer.Operations()
	assert.Len(t, ops, 1)
	assert.Len(t, ops["GET"], 2)

	op, ok := analyzer.OperationFor("get", "/")
	assert.True(t, ok)
	assert.NotNil(t, op)

	op, ok = analyzer.OperationFor("delete", "/")
	assert.False(t, ok)
	assert.Nil(t, op)
}

func TestDefinitionAnalysis(t *testing.T) {
	doc, err := loadSpec(filepath.Join("fixtures", "definitions.yml"))
	if assert.NoError(t, err) {
		analyzer := New(doc)
		definitions := analyzer.allSchemas
		// parameters
		assertSchemaRefExists(t, definitions, "#/parameters/someParam/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/parameters/1/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/parameters/1/schema")
		// responses
		assertSchemaRefExists(t, definitions, "#/responses/someResponse/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/default/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
		// definitions
		assertSchemaRefExists(t, definitions, "#/definitions/tag")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/id")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/value")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/id")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/value")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps/additionalProperties")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/0")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/1")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/additionalItems")
		assertSchemaRefExists(t, definitions, "#/definitions/withNot")
		assertSchemaRefExists(t, definitions, "#/definitions/withNot/not")
		assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf")
		assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/0")
		assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/1")
		assertSchemaRefExists(t, definitions, "#/definitions/withAllOf")
		assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/0")
		assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/1")
		allOfs := analyzer.allOfs
		assert.Len(t, allOfs, 1)
		assert.Contains(t, allOfs, "#/definitions/withAllOf")
	}
}

func loadSpec(path string) (*spec.Swagger, error) {
	spec.PathLoader = func(path string) (json.RawMessage, error) {
		ext := filepath.Ext(path)
		if ext == ".yml" || ext == ".yaml" {
			return fmts.YAMLDoc(path)
		}
		data, err := swag.LoadFromFileOrHTTP(path)
		if err != nil {
			return nil, err
		}
		return json.RawMessage(data), nil
	}
	data, err := fmts.YAMLDoc(path)
	if err != nil {
		return nil, err
	}

	var sw spec.Swagger
	if err := json.Unmarshal(data, &sw); err != nil {
		return nil, err
	}
	return &sw, nil
}

func TestReferenceAnalysis(t *testing.T) {
	doc, err := loadSpec(filepath.Join("fixtures", "references.yml"))
	if assert.NoError(t, err) {
		definitions := New(doc).references

		// parameters
		assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/parameters/0")
		assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0")

		// path items
		assertRefExists(t, definitions.pathItems, "#/paths/~1other~1place")

		// responses
		assertRefExists(t, definitions.responses, "#/paths/~1some~1where~1{id}/get/responses/404")

		// definitions
		assertRefExists(t, definitions.schemas, "#/responses/notFound/schema")
		assertRefExists(t, definitions.schemas, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
|
|
||||||
assertRefExists(t, definitions.schemas, "#/definitions/tag/properties/audit")
|
|
||||||
|
|
||||||
// items
|
|
||||||
assertRefExists(t, definitions.allRefs, "#/paths/~1some~1where~1{id}/get/parameters/1/items")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func assertRefExists(t testing.TB, data map[string]spec.Ref, key string) bool {
|
|
||||||
if _, ok := data[key]; !ok {
|
|
||||||
return assert.Fail(t, fmt.Sprintf("expected %q to exist in the ref bag", key))
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func assertSchemaRefExists(t testing.TB, data map[string]SchemaRef, key string) bool {
|
|
||||||
if _, ok := data[key]; !ok {
|
|
||||||
return assert.Fail(t, fmt.Sprintf("expected %q to exist in schema ref bag", key))
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPatternAnalysis(t *testing.T) {
|
|
||||||
doc, err := loadSpec(filepath.Join("fixtures", "patterns.yml"))
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
pt := New(doc).patterns
|
|
||||||
|
|
||||||
// parameters
|
|
||||||
assertPattern(t, pt.parameters, "#/parameters/idParam", "a[A-Za-Z0-9]+")
|
|
||||||
assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/parameters/1", "b[A-Za-z0-9]+")
|
|
||||||
assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0", "[abc][0-9]+")
|
|
||||||
|
|
||||||
// responses
|
|
||||||
assertPattern(t, pt.headers, "#/responses/notFound/headers/ContentLength", "[0-9]+")
|
|
||||||
assertPattern(t, pt.headers, "#/paths/~1some~1where~1{id}/get/responses/200/headers/X-Request-Id", "d[A-Za-z0-9]+")
|
|
||||||
|
|
||||||
// definitions
|
|
||||||
assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/parameters/0/schema/properties/value", "e[A-Za-z0-9]+")
|
|
||||||
assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/responses/200/schema/properties/data", "[0-9]+[abd]")
|
|
||||||
assertPattern(t, pt.schemas, "#/definitions/named", "f[A-Za-z0-9]+")
|
|
||||||
assertPattern(t, pt.schemas, "#/definitions/tag/properties/value", "g[A-Za-z0-9]+")
|
|
||||||
|
|
||||||
// items
|
|
||||||
assertPattern(t, pt.items, "#/paths/~1some~1where~1{id}/get/parameters/1/items", "c[A-Za-z0-9]+")
|
|
||||||
assertPattern(t, pt.items, "#/paths/~1other~1place/post/responses/default/headers/Via/items", "[A-Za-z]+")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func assertPattern(t testing.TB, data map[string]string, key, pattern string) bool {
|
|
||||||
if assert.Contains(t, data, key) {
|
|
||||||
return assert.Equal(t, pattern, data[key])
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
800
vendor/github.com/go-openapi/analysis/flatten.go
generated
vendored
@@ -1,800 +0,0 @@
package analysis

import (
	"fmt"
	"log"
	"net/http"
	"os"
	"path"
	"path/filepath"
	"sort"
	"strconv"
	"strings"

	"github.com/go-openapi/jsonpointer"
	swspec "github.com/go-openapi/spec"
	"github.com/go-openapi/swag"
)

// FlattenOpts configuration for flattening a swagger specification.
type FlattenOpts struct {
	// If Expand is true, we skip flattening the spec and expand it instead
	Expand   bool
	Spec     *Spec
	BasePath string

	_ struct{} // require keys
}

// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *swspec.ExpandOptions {
	return &swspec.ExpandOptions{RelativeBase: f.BasePath, SkipSchemas: skipSchemas}
}

// Swagger gets the swagger specification for this flatten operation
func (f *FlattenOpts) Swagger() *swspec.Swagger {
	return f.Spec.spec
}

// Flatten an analyzed spec.
//
// To flatten a spec means:
//
// Expand the parameters, responses, path items, parameter items and header items.
// Import external (http, file) references so they become internal to the document.
// Move every inline schema to be a definition with an auto-generated name in a depth-first fashion.
// Rewritten schemas get a vendor extension x-go-gen-location so we know in which package they need to be rendered.
func Flatten(opts FlattenOpts) error {
	// Make sure opts.BasePath is an absolute path
	if !filepath.IsAbs(opts.BasePath) {
		cwd, _ := os.Getwd()
		opts.BasePath = filepath.Join(cwd, opts.BasePath)
	}
	// recursively expand responses, parameters, path items and items
	err := swspec.ExpandSpec(opts.Swagger(), &swspec.ExpandOptions{
		RelativeBase: opts.BasePath,
		SkipSchemas:  !opts.Expand,
	})
	if err != nil {
		return err
	}
	opts.Spec.reload() // re-analyze

	// at this point there are no other references left but schemas
	if err := importExternalReferences(&opts); err != nil {
		return err
	}
	opts.Spec.reload() // re-analyze

	// rewrite the inline schemas (schemas that aren't simple types or arrays of simple types)
	if err := nameInlinedSchemas(&opts); err != nil {
		return err
	}
	opts.Spec.reload() // re-analyze

	// TODO: simplify known schema patterns to flat objects with properties?
	return nil
}

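// Editor's note: an illustrative usage sketch, not part of the original
// vendored file. A caller analyzes a loaded swagger document with New and
// then flattens it in place; loadSwaggerDoc and the base path are assumed
// placeholders here.
//
//	swaggerDoc := loadSwaggerDoc() // hypothetical loader returning *swspec.Swagger
//	an := New(swaggerDoc)          // analyze the document
//	if err := Flatten(FlattenOpts{Spec: an, BasePath: "./swagger.yml"}); err != nil {
//		log.Fatal(err)
//	}
//	// on success, swaggerDoc carries only local #/definitions references
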
func nameInlinedSchemas(opts *FlattenOpts) error {
	namer := &inlineSchemaNamer{Spec: opts.Swagger(), Operations: opRefsByRef(gatherOperations(opts.Spec, nil))}
	depthFirst := sortDepthFirst(opts.Spec.allSchemas)
	for _, key := range depthFirst {
		sch := opts.Spec.allSchemas[key]
		if sch.Schema != nil && sch.Schema.Ref.String() == "" && !sch.TopLevel { // inline schema
			asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
			if err != nil {
				return fmt.Errorf("schema analysis [%s]: %v", sch.Ref.String(), err)
			}

			if !asch.IsSimpleSchema { // complex schemas get moved
				if err := namer.Name(key, sch.Schema, asch); err != nil {
					return err
				}
			}
		}
	}
	return nil
}

var depthGroupOrder = []string{"sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition"}

func sortDepthFirst(data map[string]SchemaRef) (sorted []string) {
	// group by category (shared params, op param, statuscode response, default response, definitions)
	// sort groups internally by number of parts in the key and lexical names
	// flatten groups into a single list of keys
	grouped := make(map[string]keys, len(data))
	for k := range data {
		split := keyParts(k)
		var pk string
		if split.IsSharedOperationParam() {
			pk = "sharedOpParam"
		}
		if split.IsOperationParam() {
			pk = "opParam"
		}
		if split.IsStatusCodeResponse() {
			pk = "codeResponse"
		}
		if split.IsDefaultResponse() {
			pk = "defaultResponse"
		}
		if split.IsDefinition() {
			pk = "definition"
		}
		grouped[pk] = append(grouped[pk], key{len(split), k})
	}

	for _, pk := range depthGroupOrder {
		res := grouped[pk]
		sort.Sort(res)
		for _, v := range res {
			sorted = append(sorted, v.Key)
		}
	}

	return
}

type key struct {
	Segments int
	Key      string
}
type keys []key

func (k keys) Len() int      { return len(k) }
func (k keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
func (k keys) Less(i, j int) bool {
	return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
}

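// Editor's note, not part of the original vendored file: a worked example of
// the ordering defined above. Keys with more segments sort first, so nested
// inline schemas are named before the schemas that contain them, e.g.:
//
//	#/definitions/tags/additionalProperties/properties/id  (5 segments)
//	#/definitions/tags/additionalProperties                (3 segments)
//	#/definitions/tags                                     (2 segments)
//
// This mirrors the expected order asserted in TestDepthFirstSort in
// flatten_test.go.
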
type inlineSchemaNamer struct {
	Spec       *swspec.Swagger
	Operations map[string]opRef
}

func opRefsByRef(oprefs map[string]opRef) map[string]opRef {
	result := make(map[string]opRef, len(oprefs))
	for _, v := range oprefs {
		result[v.Ref.String()] = v
	}
	return result
}

func (isn *inlineSchemaNamer) Name(key string, schema *swspec.Schema, aschema *AnalyzedSchema) error {
	if swspec.Debug {
		log.Printf("naming inlined schema at %s", key)
	}

	parts := keyParts(key)
	for _, name := range namesFromKey(parts, aschema, isn.Operations) {
		if name != "" {
			// create unique name
			newName := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))

			// clone schema
			sch, err := cloneSchema(schema)
			if err != nil {
				return err
			}

			// replace values on schema
			if err := rewriteSchemaToRef(isn.Spec, key, swspec.MustCreateRef("#/definitions/"+newName)); err != nil {
				return fmt.Errorf("name inlined schema: %v", err)
			}

			sch.AddExtension("x-go-gen-location", genLocation(parts))
			// fmt.Printf("{\n  %q,\n  \"\",\n  spec.MustCreateRef(%q),\n  \"\",\n},\n", key, "#/definitions/"+newName)
			// save cloned schema to definitions
			saveSchema(isn.Spec, newName, sch)
		}
	}
	return nil
}

func genLocation(parts splitKey) string {
	if parts.IsOperation() {
		return "operations"
	}
	if parts.IsDefinition() {
		return "models"
	}
	return ""
}

func uniqifyName(definitions swspec.Definitions, name string) string {
	if name == "" {
		name = "oaiGen"
	}
	if len(definitions) == 0 {
		return name
	}

	unq := true
	for k := range definitions {
		if strings.ToLower(k) == strings.ToLower(name) {
			unq = false
			break
		}
	}

	if unq {
		return name
	}

	name += "OAIGen"
	var idx int
	unique := name
	_, known := definitions[unique]
	for known {
		idx++
		unique = fmt.Sprintf("%s%d", name, idx)
		_, known = definitions[unique]
	}
	return unique
}

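// Editor's note, not part of the original vendored file: the collision scheme
// above, as exercised by TestDefinitionName in flatten_test.go. With no
// clashing definition the name passes through; a case-insensitive clash first
// appends "OAIGen", then a counter:
//
//	uniqifyName(nil, "errorModel")  == "errorModel"
//	// definitions already contain "errorModel":
//	uniqifyName(defs, "errorModel") == "errorModelOAIGen"
//	// definitions contain "errorModel" and "errorModelOAIGen":
//	uniqifyName(defs, "errorModel") == "errorModelOAIGen1"
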
func namesFromKey(parts splitKey, aschema *AnalyzedSchema, operations map[string]opRef) []string {
	var baseNames [][]string
	var startIndex int
	if parts.IsOperation() {
		// params
		if parts.IsOperationParam() || parts.IsSharedOperationParam() {
			piref := parts.PathItemRef()
			if piref.String() != "" && parts.IsOperationParam() {
				if op, ok := operations[piref.String()]; ok {
					startIndex = 5
					baseNames = append(baseNames, []string{op.ID, "params", "body"})
				}
			} else if parts.IsSharedOperationParam() {
				pref := parts.PathRef()
				for k, v := range operations {
					if strings.HasPrefix(k, pref.String()) {
						startIndex = 4
						baseNames = append(baseNames, []string{v.ID, "params", "body"})
					}
				}
			}
		}
		// responses
		if parts.IsOperationResponse() {
			piref := parts.PathItemRef()
			if piref.String() != "" {
				if op, ok := operations[piref.String()]; ok {
					startIndex = 6
					baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
				}
			}
		}
	}

	// definitions
	if parts.IsDefinition() {
		nm := parts.DefinitionName()
		if nm != "" {
			startIndex = 2
			baseNames = append(baseNames, []string{parts.DefinitionName()})
		}
	}

	var result []string
	for _, segments := range baseNames {
		nm := parts.BuildName(segments, startIndex, aschema)
		if nm != "" {
			result = append(result, nm)
		}
	}
	sort.Strings(result)
	return result
}

const (
	pths        = "paths"
	responses   = "responses"
	parameters  = "parameters"
	definitions = "definitions"
)

var ignoredKeys map[string]struct{}

func init() {
	ignoredKeys = map[string]struct{}{
		"schema":     {},
		"properties": {},
		"not":        {},
		"anyOf":      {},
		"oneOf":      {},
	}
}

type splitKey []string

func (s splitKey) IsDefinition() bool {
	return len(s) > 1 && s[0] == definitions
}

func (s splitKey) DefinitionName() string {
	if !s.IsDefinition() {
		return ""
	}
	return s[1]
}

func (s splitKey) isKeyName(i int) bool {
	if i <= 0 {
		return false
	}
	count := 0
	for idx := i - 1; idx > 0; idx-- {
		if s[idx] != "properties" {
			break
		}
		count++
	}

	if count%2 != 0 {
		return true
	}
	return false
}

func (s splitKey) BuildName(segments []string, startIndex int, aschema *AnalyzedSchema) string {
	for i, part := range s[startIndex:] {
		if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
			if part == "items" || part == "additionalItems" {
				if aschema.IsTuple || aschema.IsTupleWithExtra {
					segments = append(segments, "tuple")
				} else {
					segments = append(segments, "items")
				}
				if part == "additionalItems" {
					segments = append(segments, part)
				}
				continue
			}
			segments = append(segments, part)
		}
	}
	return strings.Join(segments, " ")
}

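// Editor's note, not part of the original vendored file: isKeyName above is
// what lets BuildName keep a property that is literally named "properties".
// Mirroring TestBuildNameWithReservedKeyWord in flatten_test.go, the key
// definitions/fullview/properties/properties builds the name
// "fullview properties": the first "properties" is the container keyword and
// is dropped, the second is a real property name and is kept.
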
func (s splitKey) IsOperation() bool {
	return len(s) > 1 && s[0] == pths
}

func (s splitKey) IsSharedOperationParam() bool {
	return len(s) > 2 && s[0] == pths && s[2] == parameters
}

func (s splitKey) IsOperationParam() bool {
	return len(s) > 3 && s[0] == pths && s[3] == parameters
}

func (s splitKey) IsOperationResponse() bool {
	return len(s) > 3 && s[0] == pths && s[3] == responses
}

func (s splitKey) IsDefaultResponse() bool {
	return len(s) > 4 && s[0] == pths && s[3] == responses && s[4] == "default"
}

func (s splitKey) IsStatusCodeResponse() bool {
	isInt := func() bool {
		_, err := strconv.Atoi(s[4])
		return err == nil
	}
	return len(s) > 4 && s[0] == pths && s[3] == responses && isInt()
}

func (s splitKey) ResponseName() string {
	if s.IsStatusCodeResponse() {
		code, _ := strconv.Atoi(s[4])
		return http.StatusText(code)
	}
	if s.IsDefaultResponse() {
		return "Default"
	}
	return ""
}

var validMethods map[string]struct{}

func init() {
	validMethods = map[string]struct{}{
		"GET":     {},
		"HEAD":    {},
		"OPTIONS": {},
		"PATCH":   {},
		"POST":    {},
		"PUT":     {},
		"DELETE":  {},
	}
}

func (s splitKey) PathItemRef() swspec.Ref {
	if len(s) < 3 {
		return swspec.Ref{}
	}
	pth, method := s[1], s[2]
	if _, validMethod := validMethods[strings.ToUpper(method)]; !validMethod && !strings.HasPrefix(method, "x-") {
		return swspec.Ref{}
	}
	return swspec.MustCreateRef("#" + path.Join("/", pths, jsonpointer.Escape(pth), strings.ToUpper(method)))
}

func (s splitKey) PathRef() swspec.Ref {
	if !s.IsOperation() {
		return swspec.Ref{}
	}
	return swspec.MustCreateRef("#" + path.Join("/", pths, jsonpointer.Escape(s[1])))
}

func keyParts(key string) splitKey {
	var res []string
	for _, part := range strings.Split(key[1:], "/") {
		if part != "" {
			res = append(res, jsonpointer.Unescape(part))
		}
	}
	return res
}

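// Editor's note, not part of the original vendored file: a small example of
// the splitting above. keyParts drops the leading "#" and unescapes
// JSON-pointer tokens (~1 becomes "/"), so
//
//	keyParts("#/paths/~1some~1where~1{id}/get/responses/200/schema")
//
// yields splitKey{"paths", "/some/where/{id}", "get", "responses", "200",
// "schema"}, for which IsOperation, IsOperationResponse and
// IsStatusCodeResponse all hold, and ResponseName() returns "OK".
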
func rewriteSchemaToRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
	if swspec.Debug {
		log.Printf("rewriting schema to ref for %s with %s", key, ref.String())
	}
	pth := key[1:]
	ptr, err := jsonpointer.New(pth)
	if err != nil {
		return err
	}

	value, _, err := ptr.Get(spec)
	if err != nil {
		return err
	}

	switch refable := value.(type) {
	case *swspec.Schema:
		return rewriteParentRef(spec, key, ref)
	case *swspec.SchemaOrBool:
		if refable.Schema != nil {
			refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		}
	case *swspec.SchemaOrArray:
		if refable.Schema != nil {
			refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		}
	case swspec.Schema:
		return rewriteParentRef(spec, key, ref)
	default:
		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
	}

	return nil
}

func rewriteParentRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
	pth := key[1:]
	parent, entry := path.Dir(pth), path.Base(pth)
	if swspec.Debug {
		log.Println("getting schema holder at:", parent)
	}

	pptr, err := jsonpointer.New(parent)
	if err != nil {
		return err
	}
	pvalue, _, err := pptr.Get(spec)
	if err != nil {
		return fmt.Errorf("can't get parent for %s: %v", parent, err)
	}
	if swspec.Debug {
		log.Printf("rewriting holder for %T", pvalue)
	}

	switch container := pvalue.(type) {
	case swspec.Response:
		if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
			return err
		}

	case *swspec.Response:
		container.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case *swspec.Responses:
		statusCode, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		resp := container.StatusCodeResponses[statusCode]
		resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container.StatusCodeResponses[statusCode] = resp

	case map[string]swspec.Response:
		resp := container[entry]
		resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container[entry] = resp

	case swspec.Parameter:
		if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
			return err
		}

	case map[string]swspec.Parameter:
		param := container[entry]
		param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container[entry] = param

	case []swspec.Parameter:
		idx, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		param := container[idx]
		param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container[idx] = param

	case swspec.Definitions:
		container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case map[string]swspec.Schema:
		container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case []swspec.Schema:
		idx, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case *swspec.SchemaOrArray:
		idx, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
	default:
		return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
	}
	return nil
}

func cloneSchema(schema *swspec.Schema) (*swspec.Schema, error) {
	var sch swspec.Schema
	if err := swag.FromDynamicJSON(schema, &sch); err != nil {
		return nil, fmt.Errorf("name inlined schema: %v", err)
	}
	return &sch, nil
}

func importExternalReferences(opts *FlattenOpts) error {
	groupedRefs := reverseIndexForSchemaRefs(opts)

	for refStr, entry := range groupedRefs {
		if !entry.Ref.HasFragmentOnly {
			if swspec.Debug {
				log.Printf("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
			}
			// resolve to actual schema
			sch := new(swspec.Schema)
			sch.Ref = entry.Ref
			expandOpts := swspec.ExpandOptions{
				RelativeBase: opts.BasePath,
				SkipSchemas:  false,
			}
			err := swspec.ExpandSchemaWithBasePath(sch, nil, &expandOpts)
			if err != nil {
				return err
			}
			if sch == nil {
				return fmt.Errorf("no schema found at %s for [%s]", refStr, strings.Join(entry.Keys, ", "))
			}
			if swspec.Debug {
				log.Printf("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
			}

			// generate a unique name
			newName := uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
			if swspec.Debug {
				log.Printf("new name for [%s]: %s", strings.Join(entry.Keys, ", "), newName)
			}

			// rewrite the external refs to local ones
			for _, key := range entry.Keys {
				if err := updateRef(opts.Swagger(), key, swspec.MustCreateRef("#"+path.Join("/definitions", newName))); err != nil {
					return err
				}
			}

			// add the resolved schema to the definitions
			saveSchema(opts.Swagger(), newName, sch)
		}
	}
	return nil
}

type refRevIdx struct {
	Ref  swspec.Ref
	Keys []string
}

func reverseIndexForSchemaRefs(opts *FlattenOpts) map[string]refRevIdx {
	collected := make(map[string]refRevIdx)
	for key, schRef := range opts.Spec.references.schemas {
		if entry, ok := collected[schRef.String()]; ok {
			entry.Keys = append(entry.Keys, key)
			collected[schRef.String()] = entry
		} else {
			collected[schRef.String()] = refRevIdx{
				Ref:  schRef,
				Keys: []string{key},
			}
		}
	}
	return collected
}

func nameFromRef(ref swspec.Ref) string {
	u := ref.GetURL()
	if u.Fragment != "" {
		return swag.ToJSONName(path.Base(u.Fragment))
	}
	if u.Path != "" {
		bn := path.Base(u.Path)
		if bn != "" && bn != "/" {
			ext := path.Ext(bn)
			if ext != "" {
				return swag.ToJSONName(bn[:len(bn)-len(ext)])
			}
			return swag.ToJSONName(bn)
		}
	}
	return swag.ToJSONName(strings.Replace(u.Host, ".", " ", -1))
}

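// Editor's note, not part of the original vendored file: examples of the name
// derivation above, mirroring TestNameFromRef in flatten_test.go (refs built
// with swspec.MustCreateRef):
//
//	#/definitions/errorModel                         -> "errorModel"
//	http://somewhere.com/definitions/errorModel.json -> "errorModel"
//	http://somewhere.com                             -> "somewhereCom"
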
func saveSchema(spec *swspec.Swagger, name string, schema *swspec.Schema) {
	if schema == nil {
		return
	}
	if spec.Definitions == nil {
		spec.Definitions = make(map[string]swspec.Schema, 150)
	}
	spec.Definitions[name] = *schema
}

func updateRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
	if swspec.Debug {
		log.Printf("updating ref for %s with %s", key, ref.String())
	}
	pth := key[1:]
	ptr, err := jsonpointer.New(pth)
	if err != nil {
		return err
	}

	value, _, err := ptr.Get(spec)
	if err != nil {
		return err
	}

	switch refable := value.(type) {
	case *swspec.Schema:
		refable.Ref = ref
	case *swspec.SchemaOrBool:
		if refable.Schema != nil {
			refable.Schema.Ref = ref
		}
	case *swspec.SchemaOrArray:
		if refable.Schema != nil {
			refable.Schema.Ref = ref
		}
	case swspec.Schema:
		parent, entry := path.Dir(pth), path.Base(pth)
		if swspec.Debug {
			log.Println("getting schema holder at:", parent)
		}

		pptr, err := jsonpointer.New(parent)
		if err != nil {
			return err
		}
		pvalue, _, err := pptr.Get(spec)
		if err != nil {
			return fmt.Errorf("can't get parent for %s: %v", parent, err)
		}

		switch container := pvalue.(type) {
		case swspec.Definitions:
			container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		case map[string]swspec.Schema:
			container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		case []swspec.Schema:
			idx, err := strconv.Atoi(entry)
			if err != nil {
				return fmt.Errorf("%s not a number: %v", pth, err)
			}
			container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		case *swspec.SchemaOrArray:
			idx, err := strconv.Atoi(entry)
			if err != nil {
				return fmt.Errorf("%s not a number: %v", pth, err)
			}
			container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		}

	default:
		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
	}

	return nil
}

func containsString(names []string, name string) bool {
	for _, nm := range names {
		if nm == name {
			return true
		}
	}
	return false
}

type opRef struct {
	Method string
	Path   string
	Key    string
	ID     string
	Op     *swspec.Operation
	Ref    swspec.Ref
}

type opRefs []opRef

func (o opRefs) Len() int           { return len(o) }
func (o opRefs) Swap(i, j int)      { o[i], o[j] = o[j], o[i] }
func (o opRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }

func gatherOperations(specDoc *Spec, operationIDs []string) map[string]opRef {
	var oprefs opRefs

	for method, pathItem := range specDoc.Operations() {
		for pth, operation := range pathItem {
			vv := *operation
			oprefs = append(oprefs, opRef{
				Key:    swag.ToGoName(strings.ToLower(method) + " " + pth),
				Method: method,
				Path:   pth,
				ID:     vv.ID,
				Op:     &vv,
				Ref:    swspec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
			})
		}
	}

	sort.Sort(oprefs)

	operations := make(map[string]opRef)
	for _, opr := range oprefs {
		nm := opr.ID
		if nm == "" {
			nm = opr.Key
		}

		oo, found := operations[nm]
		if found && oo.Method != opr.Method && oo.Path != opr.Path {
			nm = opr.Key
		}
		if len(operationIDs) == 0 || containsString(operationIDs, opr.ID) || containsString(operationIDs, nm) {
			opr.ID = nm
			opr.Op.ID = nm
			operations[nm] = opr
		}
	}

	return operations
}
826
vendor/github.com/go-openapi/analysis/flatten_test.go
generated
vendored
@@ -1,826 +0,0 @@
package analysis
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/go-openapi/jsonpointer"
|
|
||||||
"github.com/go-openapi/spec"
|
|
||||||
"github.com/stretchr/testify/assert"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestSaveDefinition(t *testing.T) {
|
|
||||||
sp := &spec.Swagger{}
|
|
||||||
saveSchema(sp, "theName", spec.StringProperty())
|
|
||||||
assert.Contains(t, sp.Definitions, "theName")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestNameFromRef(t *testing.T) {
|
|
||||||
values := []struct{ Source, Expected string }{
|
|
||||||
{"#/definitions/errorModel", "errorModel"},
|
|
||||||
{"http://somewhere.com/definitions/errorModel", "errorModel"},
|
|
||||||
{"http://somewhere.com/definitions/errorModel.json", "errorModel"},
|
|
||||||
{"/definitions/errorModel", "errorModel"},
|
|
||||||
{"/definitions/errorModel.json", "errorModel"},
|
|
||||||
{"http://somewhere.com", "somewhereCom"},
|
|
||||||
{"#", ""},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, v := range values {
|
|
||||||
assert.Equal(t, v.Expected, nameFromRef(spec.MustCreateRef(v.Source)))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDefinitionName(t *testing.T) {
|
|
||||||
values := []struct {
|
|
||||||
Source, Expected string
|
|
||||||
Definitions spec.Definitions
|
|
||||||
}{
|
|
||||||
{"#/definitions/errorModel", "errorModel", map[string]spec.Schema(nil)},
|
|
||||||
{"http://somewhere.com/definitions/errorModel", "errorModel", map[string]spec.Schema(nil)},
|
|
||||||
{"#/definitions/errorModel", "errorModel", map[string]spec.Schema{"apples": *spec.StringProperty()}},
|
|
||||||
{"#/definitions/errorModel", "errorModelOAIGen", map[string]spec.Schema{"errorModel": *spec.StringProperty()}},
|
|
||||||
{"#/definitions/errorModel", "errorModelOAIGen1", map[string]spec.Schema{"errorModel": *spec.StringProperty(), "errorModelOAIGen": *spec.StringProperty()}},
|
|
||||||
{"#", "oaiGen", nil},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, v := range values {
|
|
||||||
assert.Equal(t, v.Expected, uniqifyName(v.Definitions, nameFromRef(spec.MustCreateRef(v.Source))))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestUpdateRef(t *testing.T) {
|
|
||||||
bp := filepath.Join("fixtures", "external_definitions.yml")
|
|
||||||
sp, err := loadSpec(bp)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
|
|
||||||
values := []struct {
|
|
||||||
Key string
|
|
||||||
Ref spec.Ref
|
|
||||||
}{
|
|
||||||
{"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
|
|
||||||
{"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, v := range values {
|
|
||||||
err := updateRef(sp, v.Key, v.Ref)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
ptr, err := jsonpointer.New(v.Key[1:])
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
vv, _, err := ptr.Get(sp)
|
|
||||||
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
switch tv := vv.(type) {
|
|
||||||
case *spec.Schema:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Ref.String())
|
|
||||||
case spec.Schema:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Ref.String())
|
|
||||||
case *spec.SchemaOrBool:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String())
|
|
||||||
case *spec.SchemaOrArray:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String())
|
|
||||||
default:
|
|
||||||
assert.Fail(t, "unknown type", "got %T", vv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestImportExternalReferences(t *testing.T) {
|
|
||||||
bp := filepath.Join(".", "fixtures", "external_definitions.yml")
|
|
||||||
sp, err := loadSpec(bp)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
|
|
||||||
values := []struct {
|
|
||||||
Key string
|
|
||||||
Ref spec.Ref
|
|
||||||
}{
|
|
||||||
{"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
|
|
||||||
{"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
|
|
||||||
}
|
|
||||||
for _, v := range values {
|
|
||||||
// technically not necessary to run for each value, but if things go right
|
|
||||||
// this is idempotent, so having it repeat shouldn't matter
|
|
||||||
// this validates that behavior
|
|
||||||
err := importExternalReferences(&FlattenOpts{
|
|
||||||
Spec: New(sp),
|
|
||||||
BasePath: bp,
|
|
||||||
})
|
|
||||||
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
|
|
||||||
ptr, err := jsonpointer.New(v.Key[1:])
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
vv, _, err := ptr.Get(sp)
|
|
||||||
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
switch tv := vv.(type) {
|
|
||||||
case *spec.Schema:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Ref.String(), "for %s", v.Key)
|
|
||||||
case spec.Schema:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Ref.String(), "for %s", v.Key)
|
|
||||||
case *spec.SchemaOrBool:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "for %s", v.Key)
|
|
||||||
case *spec.SchemaOrArray:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "for %s", v.Key)
|
|
||||||
default:
|
|
||||||
assert.Fail(t, "unknown type", "got %T", vv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
assert.Len(t, sp.Definitions, 11)
|
|
||||||
assert.Contains(t, sp.Definitions, "tag")
|
|
||||||
assert.Contains(t, sp.Definitions, "named")
|
|
||||||
assert.Contains(t, sp.Definitions, "record")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRewriteSchemaRef(t *testing.T) {
|
|
||||||
bp := filepath.Join("fixtures", "inline_schemas.yml")
|
|
||||||
sp, err := loadSpec(bp)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
|
|
||||||
values := []struct {
|
|
||||||
Key string
|
|
||||||
Ref spec.Ref
|
|
||||||
}{
|
|
||||||
{"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
|
|
||||||
{"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
|
|
||||||
{"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
|
|
||||||
{"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, v := range values {
|
|
||||||
err := rewriteSchemaToRef(sp, v.Key, v.Ref)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
ptr, err := jsonpointer.New(v.Key[1:])
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
vv, _, err := ptr.Get(sp)
|
|
||||||
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
switch tv := vv.(type) {
|
|
||||||
case *spec.Schema:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
|
|
||||||
case spec.Schema:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
|
|
||||||
case *spec.SchemaOrBool:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "at %d for %s", i, v.Key)
|
|
||||||
case *spec.SchemaOrArray:
|
|
||||||
assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "at %d for %s", i, v.Key)
|
|
||||||
default:
|
|
||||||
assert.Fail(t, "unknown type", "got %T", vv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSplitKey(t *testing.T) {
|
|
||||||
|
|
||||||
type KeyFlag uint64
|
|
||||||
|
|
||||||
const (
|
|
||||||
isOperation KeyFlag = 1 << iota
|
|
||||||
isDefinition
|
|
||||||
isSharedOperationParam
|
|
||||||
isOperationParam
|
|
||||||
isOperationResponse
|
|
||||||
isDefaultResponse
|
|
||||||
isStatusCodeResponse
|
|
||||||
)
|
|
||||||
|
|
||||||
values := []struct {
|
|
||||||
Key string
|
|
||||||
Flags KeyFlag
|
|
||||||
PathItemRef spec.Ref
|
|
||||||
PathRef spec.Ref
|
|
||||||
Name string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
"#/paths/~1some~1where~1{id}/parameters/1/schema",
|
|
||||||
isOperation | isSharedOperationParam,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
|
|
||||||
"",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/paths/~1some~1where~1{id}/get/parameters/2/schema",
|
|
||||||
isOperation | isOperationParam,
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
|
|
||||||
"",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/default/schema",
|
|
||||||
isOperation | isOperationResponse | isDefaultResponse,
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
|
|
||||||
"Default",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/200/schema",
|
|
||||||
isOperation | isOperationResponse | isStatusCodeResponse,
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
|
|
||||||
spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
|
|
||||||
"OK",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/namedAgain",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"namedAgain",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/datedRecords/items/1",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"datedRecords",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/datedRecords/items/1",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"datedRecords",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/datedTaggedRecords/items/1",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"datedTaggedRecords",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/datedTaggedRecords/additionalItems",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"datedTaggedRecords",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/otherRecords/items",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"otherRecords",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/tags/additionalProperties",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"tags",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"#/definitions/namedThing/properties/name",
|
|
||||||
isDefinition,
|
|
||||||
spec.Ref{},
|
|
||||||
spec.Ref{},
|
|
||||||
"namedThing",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, v := range values {
|
|
||||||
parts := keyParts(v.Key)
|
|
||||||
pref := parts.PathRef()
|
|
||||||
piref := parts.PathItemRef()
|
|
||||||
assert.Equal(t, v.PathRef.String(), pref.String(), "pathRef: %s at %d", v.Key, i)
|
|
||||||
assert.Equal(t, v.PathItemRef.String(), piref.String(), "pathItemRef: %s at %d", v.Key, i)
|
|
||||||
|
|
||||||
if v.Flags&isOperation != 0 {
|
|
||||||
assert.True(t, parts.IsOperation(), "isOperation: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsOperation(), "isOperation: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
if v.Flags&isDefinition != 0 {
|
|
||||||
assert.True(t, parts.IsDefinition(), "isDefinition: %s at %d", v.Key, i)
|
|
||||||
assert.Equal(t, v.Name, parts.DefinitionName(), "definition name: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsDefinition(), "isDefinition: %s at %d", v.Key, i)
|
|
||||||
if v.Name != "" {
|
|
||||||
assert.Equal(t, v.Name, parts.ResponseName(), "response name: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if v.Flags&isOperationParam != 0 {
|
|
||||||
assert.True(t, parts.IsOperationParam(), "isOperationParam: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsOperationParam(), "isOperationParam: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
if v.Flags&isSharedOperationParam != 0 {
|
|
||||||
assert.True(t, parts.IsSharedOperationParam(), "isSharedOperationParam: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsSharedOperationParam(), "isSharedOperationParam: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
if v.Flags&isOperationResponse != 0 {
|
|
||||||
assert.True(t, parts.IsOperationResponse(), "isOperationResponse: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsOperationResponse(), "isOperationResponse: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
if v.Flags&isDefaultResponse != 0 {
|
|
||||||
assert.True(t, parts.IsDefaultResponse(), "isDefaultResponse: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsDefaultResponse(), "isDefaultResponse: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
if v.Flags&isStatusCodeResponse != 0 {
|
|
||||||
assert.True(t, parts.IsStatusCodeResponse(), "isStatusCodeResponse: %s at %d", v.Key, i)
|
|
||||||
} else {
|
|
||||||
assert.False(t, parts.IsStatusCodeResponse(), "isStatusCodeResponse: %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func definitionPtr(key string) string {
|
|
||||||
if !strings.HasPrefix(key, "#/definitions") {
|
|
||||||
return key
|
|
||||||
}
|
|
||||||
return strings.Join(strings.Split(key, "/")[:3], "/")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestNamesFromKey(t *testing.T) {
|
|
||||||
bp := filepath.Join("fixtures", "inline_schemas.yml")
|
|
||||||
sp, err := loadSpec(bp)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
|
|
||||||
values := []struct {
|
|
||||||
Key string
|
|
||||||
Names []string
|
|
||||||
}{
|
|
||||||
{"#/paths/~1some~1where~1{id}/parameters/1/schema", []string{"GetSomeWhereID params body", "PostSomeWhereID params body"}},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", []string{"GetSomeWhereID params body"}},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/default/schema", []string{"GetSomeWhereID Default body"}},
|
|
||||||
{"#/paths/~1some~1where~1{id}/get/responses/200/schema", []string{"GetSomeWhereID OK body"}},
|
|
||||||
{"#/definitions/namedAgain", []string{"namedAgain"}},
|
|
||||||
{"#/definitions/datedTag/allOf/1", []string{"datedTag allOf 1"}},
|
|
||||||
{"#/definitions/datedRecords/items/1", []string{"datedRecords tuple 1"}},
|
|
||||||
{"#/definitions/datedTaggedRecords/items/1", []string{"datedTaggedRecords tuple 1"}},
|
|
||||||
{"#/definitions/datedTaggedRecords/additionalItems", []string{"datedTaggedRecords tuple additionalItems"}},
|
|
||||||
{"#/definitions/otherRecords/items", []string{"otherRecords items"}},
|
|
||||||
{"#/definitions/tags/additionalProperties", []string{"tags additionalProperties"}},
|
|
||||||
{"#/definitions/namedThing/properties/name", []string{"namedThing name"}},
|
|
||||||
}
|
|
||||||
|
|
||||||
for i, v := range values {
|
|
||||||
ptr, err := jsonpointer.New(definitionPtr(v.Key)[1:])
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
vv, _, err := ptr.Get(sp)
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
switch tv := vv.(type) {
|
|
||||||
case *spec.Schema:
|
|
||||||
aschema, err := Schema(SchemaOpts{Schema: tv, Root: sp, BasePath: bp})
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
names := namesFromKey(keyParts(v.Key), aschema, opRefsByRef(gatherOperations(New(sp), nil)))
|
|
||||||
assert.Equal(t, v.Names, names, "for %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
case spec.Schema:
|
|
||||||
aschema, err := Schema(SchemaOpts{Schema: &tv, Root: sp, BasePath: bp})
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
names := namesFromKey(keyParts(v.Key), aschema, opRefsByRef(gatherOperations(New(sp), nil)))
|
|
||||||
assert.Equal(t, v.Names, names, "for %s at %d", v.Key, i)
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
assert.Fail(t, "unknown type", "got %T", vv)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDepthFirstSort(t *testing.T) {
|
|
||||||
bp := filepath.Join("fixtures", "inline_schemas.yml")
|
|
||||||
sp, err := loadSpec(bp)
|
|
||||||
values := []string{
|
|
||||||
"#/paths/~1some~1where~1{id}/parameters/1/schema/properties/createdAt",
|
|
||||||
"#/paths/~1some~1where~1{id}/parameters/1/schema",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/createdAt",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/parameters/2/schema",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/id",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/value",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/200/schema",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/404/schema",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/createdAt",
|
|
||||||
"#/paths/~1some~1where~1{id}/get/responses/default/schema",
|
|
||||||
"#/definitions/datedRecords/items/1/properties/createdAt",
|
|
||||||
"#/definitions/datedTaggedRecords/items/1/properties/createdAt",
|
|
||||||
"#/definitions/namedThing/properties/name/properties/id",
|
|
||||||
"#/definitions/records/items/0/properties/createdAt",
|
|
||||||
"#/definitions/datedTaggedRecords/additionalItems/properties/id",
|
|
||||||
"#/definitions/datedTaggedRecords/additionalItems/properties/value",
|
|
||||||
"#/definitions/otherRecords/items/properties/createdAt",
|
|
||||||
"#/definitions/tags/additionalProperties/properties/id",
|
|
||||||
"#/definitions/tags/additionalProperties/properties/value",
|
|
||||||
"#/definitions/datedRecords/items/0",
|
|
||||||
"#/definitions/datedRecords/items/1",
|
|
||||||
"#/definitions/datedTag/allOf/0",
|
|
||||||
"#/definitions/datedTag/allOf/1",
|
|
||||||
"#/definitions/datedTag/properties/id",
|
|
||||||
"#/definitions/datedTag/properties/value",
|
|
||||||
"#/definitions/datedTaggedRecords/items/0",
|
|
||||||
"#/definitions/datedTaggedRecords/items/1",
|
|
||||||
"#/definitions/namedAgain/properties/id",
|
|
||||||
"#/definitions/namedThing/properties/name",
|
|
||||||
"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism/properties/floccinaucinihilipilificationCreatedAt",
|
|
||||||
"#/definitions/records/items/0",
|
|
||||||
"#/definitions/datedTaggedRecords/additionalItems",
|
|
||||||
"#/definitions/otherRecords/items",
|
|
||||||
"#/definitions/tags/additionalProperties",
|
|
||||||
"#/definitions/datedRecords",
|
|
||||||
"#/definitions/datedTag",
|
|
||||||
"#/definitions/datedTaggedRecords",
|
|
||||||
"#/definitions/namedAgain",
|
|
||||||
"#/definitions/namedThing",
|
|
||||||
"#/definitions/otherRecords",
|
|
||||||
"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism",
|
|
||||||
"#/definitions/records",
|
|
||||||
"#/definitions/tags",
|
|
||||||
}
|
|
||||||
if assert.NoError(t, err) {
|
|
||||||
a := New(sp)
|
|
||||||
result := sortDepthFirst(a.allSchemas)
|
|
||||||
assert.Equal(t, values, result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// TestBuildNameWithReservedKeyWord checks name generation for properties that are
// themselves named "properties" (a reserved path segment).
func TestBuildNameWithReservedKeyWord(t *testing.T) {
	s := splitKey([]string{"definitions", "fullview", "properties", "properties"})
	startIdx := 2
	segments := []string{"fullview"}
	newName := s.BuildName(segments, startIdx, nil)
	assert.Equal(t, "fullview properties", newName)
	s = splitKey([]string{"definitions", "fullview", "properties", "properties", "properties", "properties", "properties", "properties"})
	newName = s.BuildName(segments, startIdx, nil)
	assert.Equal(t, "fullview properties properties properties", newName)
}

// TestNameInlinedSchemas expands a spec and checks that nameInlinedSchemas assigns
// each inline schema a location-derived definition name.
func TestNameInlinedSchemas(t *testing.T) {
	cwd, _ := os.Getwd()
	bp := filepath.Join(cwd, "fixtures", "nested_inline_schemas.yml")
	sp, err := loadSpec(bp)
	// check the load error before it is overwritten by the expansion below
	assert.NoError(t, err)
	err = spec.ExpandSpec(sp, &spec.ExpandOptions{
		RelativeBase: bp,
		SkipSchemas:  true,
	})
	assert.NoError(t, err)
	values := []struct {
		Key      string
		Location string
		Ref      spec.Ref
	}{
		{"#/paths/~1some~1where~1{id}/parameters/1/schema/items", "#/definitions/postSomeWhereIdParamsBody/items", spec.MustCreateRef("#/definitions/postSomeWhereIdParamsBodyItems")},
		{"#/paths/~1some~1where~1{id}/parameters/1/schema", "#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/postSomeWhereIdParamsBody")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdParamsBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2Name")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/1", "#/definitions/getSomeWhereIdParamsBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems1")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/2", "#/definitions/getSomeWhereIdParamsBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record", "#/definitions/getSomeWhereIdParamsBodyOAIGen/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecord")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", "#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyOAIGen")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdOKBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems2Name")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/1", "#/definitions/getSomeWhereIdOKBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems1")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/2", "#/definitions/getSomeWhereIdOKBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems2")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record", "#/definitions/getSomeWhereIdOKBody/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecord")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema", "#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBody")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdDefaultBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems2Name")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/1", "#/definitions/getSomeWhereIdDefaultBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems1")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/2", "#/definitions/getSomeWhereIdDefaultBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems2")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record", "#/definitions/getSomeWhereIdDefaultBody/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecord")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema", "#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBody")},
		{"#/definitions/nestedThing/properties/record/items/2/allOf/1/additionalProperties", "#/definitions/nestedThingRecordItems2AllOf1/additionalProperties", spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1AdditionalProperties")},
		{"#/definitions/nestedThing/properties/record/items/2/allOf/1", "#/definitions/nestedThingRecordItems2/allOf/1", spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1")},
		{"#/definitions/nestedThing/properties/record/items/2/properties/name", "#/definitions/nestedThingRecordItems2/properties/name", spec.MustCreateRef("#/definitions/nestedThingRecordItems2Name")},
		{"#/definitions/nestedThing/properties/record/items/1", "#/definitions/nestedThingRecord/items/1", spec.MustCreateRef("#/definitions/nestedThingRecordItems1")},
		{"#/definitions/nestedThing/properties/record/items/2", "#/definitions/nestedThingRecord/items/2", spec.MustCreateRef("#/definitions/nestedThingRecordItems2")},
		{"#/definitions/datedRecords/items/1", "#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/datedRecordsItems1")},
		{"#/definitions/datedTaggedRecords/items/1", "#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/datedTaggedRecordsItems1")},
		{"#/definitions/namedThing/properties/name", "#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/namedThingName")},
		{"#/definitions/nestedThing/properties/record", "#/definitions/nestedThing/properties/record", spec.MustCreateRef("#/definitions/nestedThingRecord")},
		{"#/definitions/records/items/0", "#/definitions/records/items/0", spec.MustCreateRef("#/definitions/recordsItems0")},
		{"#/definitions/datedTaggedRecords/additionalItems", "#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/datedTaggedRecordsItemsAdditionalItems")},
		{"#/definitions/otherRecords/items", "#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/otherRecordsItems")},
		{"#/definitions/tags/additionalProperties", "#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tagsAdditionalProperties")},
	}
	if assert.NoError(t, err) {
		err := nameInlinedSchemas(&FlattenOpts{
			Spec:     New(sp),
			BasePath: bp,
		})

		if assert.NoError(t, err) {
			for i, v := range values {
				ptr, err := jsonpointer.New(v.Location[1:])
				if assert.NoError(t, err, "at %d for %s", i, v.Key) {
					vv, _, err := ptr.Get(sp)

					if assert.NoError(t, err, "at %d for %s", i, v.Key) {
						switch tv := vv.(type) {
						case *spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
						case spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
						case *spec.SchemaOrBool:
							var sRef spec.Ref
							if tv != nil && tv.Schema != nil {
								sRef = tv.Schema.Ref
							}
							assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
						case *spec.SchemaOrArray:
							var sRef spec.Ref
							if tv != nil && tv.Schema != nil {
								sRef = tv.Schema.Ref
							}
							assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
						default:
							assert.Fail(t, "unknown type", "got %T", vv)
						}
					}
				}
			}
		}

		for k, rr := range New(sp).allSchemas {
			if !strings.HasPrefix(k, "#/responses") && !strings.HasPrefix(k, "#/parameters") {
				if rr.Schema != nil && rr.Schema.Ref.String() == "" && !rr.TopLevel {
					asch, err := Schema(SchemaOpts{Schema: rr.Schema, Root: sp, BasePath: bp})
					if assert.NoError(t, err, "for key: %s", k) {
						if !asch.IsSimpleSchema {
							assert.Fail(t, "not a top level schema", "for key: %s", k)
						}
					}
				}
			}
		}
	}
}

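The tail of the test above leans on the package's standalone schema analyzer, Schema(SchemaOpts{...}). A minimal sketch of that call in isolation, using a hand-built, ref-free schema; spec.ArrayProperty and spec.StringProperty are constructor helpers from github.com/go-openapi/spec, and Root/BasePath are omitted here on the assumption that nothing needs expanding:

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	// A ref-free schema: an array of strings.
	sch := spec.ArrayProperty(spec.StringProperty())

	asch, err := analysis.Schema(analysis.SchemaOpts{Schema: sch})
	if err != nil {
		log.Fatal(err)
	}
	// A primitive array with no refs should analyze as a simple schema.
	fmt.Println("IsSimpleSchema:", asch.IsSimpleSchema)
}
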
// TestFlatten verifies end-to-end flattening: inline schemas are promoted to
// /definitions under location-derived names and replaced by $refs in place.
func TestFlatten(t *testing.T) {
	cwd, _ := os.Getwd()
	bp := filepath.Join(cwd, "fixtures", "flatten.yml")
	sp, err := loadSpec(bp)
	values := []struct {
		Key      string
		Location string
		Ref      spec.Ref
		Expected interface{}
	}{
		{
			"#/responses/notFound/schema",
			"#/responses/notFound/schema",
			spec.MustCreateRef("#/definitions/error"),
			nil,
		},
		{
			"#/paths/~1some~1where~1{id}/parameters/0",
			"#/paths/~1some~1where~1{id}/parameters/0/name",
			spec.Ref{},
			"id",
		},
		{
			"#/paths/~1other~1place",
			"#/paths/~1other~1place/get/operationId",
			spec.Ref{},
			"modelOp",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/0",
			"#/paths/~1some~1where~1{id}/get/parameters/0/name",
			spec.Ref{},
			"limit",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/1",
			"#/paths/~1some~1where~1{id}/get/parameters/1/name",
			spec.Ref{},
			"some",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/2",
			"#/paths/~1some~1where~1{id}/get/parameters/2/name",
			spec.Ref{},
			"other",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/3",
			"#/paths/~1some~1where~1{id}/get/parameters/3/schema",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBody"),
			"",
		},
		{
			"#/paths/~1some~1where~1{id}/get/responses/200",
			"#/paths/~1some~1where~1{id}/get/responses/200/schema",
			spec.MustCreateRef("#/definitions/getSomeWhereIdOKBody"),
			"",
		},
		{
			"#/definitions/namedAgain",
			"",
			spec.MustCreateRef("#/definitions/named"),
			"",
		},
		{
			"#/definitions/namedThing/properties/name",
			"",
			spec.MustCreateRef("#/definitions/named"),
			"",
		},
		{
			"#/definitions/namedThing/properties/namedAgain",
			"",
			spec.MustCreateRef("#/definitions/namedAgain"),
			"",
		},
		{
			"#/definitions/datedRecords/items/1",
			"",
			spec.MustCreateRef("#/definitions/record"),
			"",
		},
		{
			"#/definitions/otherRecords/items",
			"",
			spec.MustCreateRef("#/definitions/record"),
			"",
		},
		{
			"#/definitions/tags/additionalProperties",
			"",
			spec.MustCreateRef("#/definitions/tag"),
			"",
		},
		{
			"#/definitions/datedTag/allOf/1",
			"",
			spec.MustCreateRef("#/definitions/tag"),
			"",
		},
		{
			"#/definitions/nestedThingRecordItems2/allOf/1",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1"),
			"",
		},
		{
			"#/definitions/nestedThingRecord/items/1",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecordItems1"),
			"",
		},
		{
			"#/definitions/nestedThingRecord/items/2",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecordItems2"),
			"",
		},
		{
			"#/definitions/nestedThing/properties/record",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecord"),
			"",
		},
		{
			"#/definitions/named",
			"#/definitions/named/type",
			spec.Ref{},
			spec.StringOrArray{"string"},
		},
		{
			"#/definitions/error",
			"#/definitions/error/properties/id/type",
			spec.Ref{},
			spec.StringOrArray{"integer"},
		},
		{
			"#/definitions/record",
			"#/definitions/record/properties/createdAt/format",
			spec.Ref{},
			"date-time",
		},
		{
			"#/definitions/getSomeWhereIdOKBody",
			"#/definitions/getSomeWhereIdOKBody/properties/record",
			spec.MustCreateRef("#/definitions/nestedThing"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBody",
			"#/definitions/getSomeWhereIdParamsBody/properties/record",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecord"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecord",
			"#/definitions/getSomeWhereIdParamsBodyRecord/items/1",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems1"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecord",
			"#/definitions/getSomeWhereIdParamsBodyRecord/items/2",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2",
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2/allOf/0/format",
			spec.Ref{},
			"date",
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2Name",
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2Name/properties/createdAt/format",
			spec.Ref{},
			"date-time",
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2",
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2/properties/name",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2Name"),
			"date",
		},
	}
	if assert.NoError(t, err) {
		err := Flatten(FlattenOpts{Spec: New(sp), BasePath: bp})
		//b, _ := sp.MarshalJSON()
		//panic(string(b))
		if assert.NoError(t, err) {
			for i, v := range values {
				pk := v.Key[1:]
				if v.Location != "" {
					pk = v.Location[1:]
				}
				ptr, err := jsonpointer.New(pk)
				if assert.NoError(t, err, "at %d for %s", i, v.Key) {
					d, _, err := ptr.Get(sp)
					if assert.NoError(t, err) {
						if v.Ref.String() != "" {
							switch s := d.(type) {
							case *spec.Schema:
								assert.Equal(t, v.Ref.String(), s.Ref.String(), "at %d for %s", i, v.Key)
							case spec.Schema:
								assert.Equal(t, v.Ref.String(), s.Ref.String(), "at %d for %s", i, v.Key)
							case *spec.SchemaOrArray:
								var sRef spec.Ref
								if s != nil && s.Schema != nil {
									sRef = s.Schema.Ref
								}
								assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
							case *spec.SchemaOrBool:
								var sRef spec.Ref
								if s != nil && s.Schema != nil {
									sRef = s.Schema.Ref
								}
								assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
							default:
								assert.Fail(t, "unknown type", "got %T at %d for %s", d, i, v.Key)
							}
						} else {
							assert.Equal(t, v.Expected, d)
						}
					}
				}
			}
		}
	}
}
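The expectations above pin down what flattening does: inline schemas are hoisted into /definitions under location-derived names and their original sites become $refs. A minimal sketch of driving the same entry point outside the test harness, assuming the github.com/go-openapi/loads loader and a hypothetical swagger.yml (FlattenOpts and New are the exported names this test uses):

package main

import (
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/loads"
)

func main() {
	// Hypothetical input document.
	doc, err := loads.Spec("swagger.yml")
	if err != nil {
		log.Fatal(err)
	}

	// Flatten rewrites the spec in place: inline schemas move to
	// /definitions and the places they came from become $refs.
	if err := analysis.Flatten(analysis.FlattenOpts{
		Spec:     analysis.New(doc.Spec()),
		BasePath: "swagger.yml",
	}); err != nil {
		log.Fatal(err)
	}
}
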
199
vendor/github.com/go-openapi/analysis/mixin.go
generated
vendored
@@ -1,199 +0,0 @@
package analysis

import (
	"fmt"

	"github.com/go-openapi/spec"
)

// Mixin modifies the primary swagger spec by adding the paths and
// definitions from the mixin specs. Top level parameters and
// responses from the mixins are also carried over. Operation id
// collisions are avoided by appending "Mixin<N>" but only if
// needed. No other parts of primary are modified. Consider calling
// FixEmptyResponseDescriptions() on the modified primary if you read
// the specs from storage and they are valid to start with.
//
// Entries in "paths", "definitions", "parameters" and "responses" are
// added to the primary in the order of the given mixins. If the entry
// already exists in primary it is skipped with a warning message.
//
// The count of skipped entries (from collisions) is returned so any
// deviation from the number expected can flag a warning in your build
// scripts. Carefully review the collisions before accepting them;
// consider renaming things if possible.
//
// No normalization of any keys takes place (paths, type defs,
// etc). Ensure they are canonical if your downstream tools do
// key normalization of any form.
func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
	var skipped []string
	opIds := getOpIds(primary)
	if primary.Paths == nil {
		primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
	}
	if primary.Paths.Paths == nil {
		primary.Paths.Paths = make(map[string]spec.PathItem)
	}
	if primary.Definitions == nil {
		primary.Definitions = make(spec.Definitions)
	}
	if primary.Parameters == nil {
		primary.Parameters = make(map[string]spec.Parameter)
	}
	if primary.Responses == nil {
		primary.Responses = make(map[string]spec.Response)
	}

	for i, m := range mixins {
		for k, v := range m.Definitions {
			// assume name collisions represent IDENTICAL type. careful.
			if _, exists := primary.Definitions[k]; exists {
				warn := fmt.Sprintf("definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
				skipped = append(skipped, warn)
				continue
			}
			primary.Definitions[k] = v
		}
		if m.Paths != nil {
			for k, v := range m.Paths.Paths {
				if _, exists := primary.Paths.Paths[k]; exists {
					warn := fmt.Sprintf("paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
					skipped = append(skipped, warn)
					continue
				}

				// Swagger requires that operationIds be
				// unique within a spec. If we find a
				// collision we append "Mixin0" to the
				// operationId we are adding, where 0 is the mixin
				// index. We assume that operationIds within
				// all the provided specs are already unique.
				piops := pathItemOps(v)
				for _, piop := range piops {
					if opIds[piop.ID] {
						piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", i)
					}
					opIds[piop.ID] = true
				}
				primary.Paths.Paths[k] = v
			}
		}
		for k, v := range m.Parameters {
			// could try to rename on conflict but would
			// have to fix $refs in the mixin. Complain
			// for now
			if _, exists := primary.Parameters[k]; exists {
				warn := fmt.Sprintf("top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
				skipped = append(skipped, warn)
				continue
			}
			primary.Parameters[k] = v
		}
		for k, v := range m.Responses {
			// could try to rename on conflict but would
			// have to fix $refs in the mixin. Complain
			// for now
			if _, exists := primary.Responses[k]; exists {
				warn := fmt.Sprintf("top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
				skipped = append(skipped, warn)
				continue
			}
			primary.Responses[k] = v
		}
	}
	return skipped
}

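A minimal usage sketch for Mixin as documented above, assuming the github.com/go-openapi/loads loader and hypothetical file names; Mixin and FixEmptyResponseDescriptions are the exported functions defined in this file:

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/loads"
)

func main() {
	// Hypothetical inputs; any two valid Swagger 2.0 documents work.
	primaryDoc, err := loads.Spec("primary.yml")
	if err != nil {
		log.Fatal(err)
	}
	mixinDoc, err := loads.Spec("mixin.yml")
	if err != nil {
		log.Fatal(err)
	}

	primary := primaryDoc.Spec()
	// Merge paths, definitions, parameters and responses into primary.
	// Each returned string describes one skipped (colliding) entry.
	for _, warn := range analysis.Mixin(primary, mixinDoc.Spec()) {
		fmt.Print(warn)
	}

	// Restore "(empty)" placeholders so required response descriptions
	// survive re-serialization with omitempty.
	analysis.FixEmptyResponseDescriptions(primary)
}
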
// FixEmptyResponseDescriptions replaces empty ("") response
// descriptions in the input with "(empty)" to ensure that the
// resulting Swagger stays valid. The problem appears to arise
// from reading in valid specs that have an explicit response
// description of "" (valid, response.description is required), but
// due to zero values being omitted upon re-serializing (omitempty) we
// lose them unless we stick some chars in there.
func FixEmptyResponseDescriptions(s *spec.Swagger) {
	if s.Paths != nil {
		for _, v := range s.Paths.Paths {
			if v.Get != nil {
				FixEmptyDescs(v.Get.Responses)
			}
			if v.Put != nil {
				FixEmptyDescs(v.Put.Responses)
			}
			if v.Post != nil {
				FixEmptyDescs(v.Post.Responses)
			}
			if v.Delete != nil {
				FixEmptyDescs(v.Delete.Responses)
			}
			if v.Options != nil {
				FixEmptyDescs(v.Options.Responses)
			}
			if v.Head != nil {
				FixEmptyDescs(v.Head.Responses)
			}
			if v.Patch != nil {
				FixEmptyDescs(v.Patch.Responses)
			}
		}
	}
	for k, v := range s.Responses {
		FixEmptyDesc(&v)
		s.Responses[k] = v
	}
}

// FixEmptyDescs adds "(empty)" as the description for any Response in
// the given Responses object that doesn't already have one.
func FixEmptyDescs(rs *spec.Responses) {
	FixEmptyDesc(rs.Default)
	for k, v := range rs.StatusCodeResponses {
		FixEmptyDesc(&v)
		rs.StatusCodeResponses[k] = v
	}
}

// FixEmptyDesc adds "(empty)" as the description to the given
// Response object if it doesn't already have one and isn't a
// ref. No-op on nil input.
func FixEmptyDesc(rs *spec.Response) {
	if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
		return
	}
	rs.Description = "(empty)"
}

// getOpIds extracts all the paths.<path>.operationIds from the given
// spec and returns them as the keys in a map with 'true' values.
func getOpIds(s *spec.Swagger) map[string]bool {
	rv := make(map[string]bool)
	if s.Paths == nil {
		return rv
	}
	for _, v := range s.Paths.Paths {
		piops := pathItemOps(v)
		for _, op := range piops {
			rv[op.ID] = true
		}
	}
	return rv
}

func pathItemOps(p spec.PathItem) []*spec.Operation {
	var rv []*spec.Operation
	rv = appendOp(rv, p.Get)
	rv = appendOp(rv, p.Put)
	rv = appendOp(rv, p.Post)
	rv = appendOp(rv, p.Delete)
	rv = appendOp(rv, p.Head)
	rv = appendOp(rv, p.Patch)
	return rv
}

func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
	if op == nil {
		return ops
	}
	return append(ops, op)
}
64
vendor/github.com/go-openapi/analysis/mixin_test.go
generated
vendored
@@ -1,64 +0,0 @@
package analysis

import "testing"

const (
	widgetFile     = "fixtures/widget-crud.yml"
	fooFile        = "fixtures/foo-crud.yml"
	barFile        = "fixtures/bar-crud.yml"
	noPathsFile    = "fixtures/no-paths.yml"
	emptyPathsFile = "fixtures/empty-paths.json"
)

// TestMixin merges three mixins into a primary spec and checks the collision
// count and the merged totals for paths, definitions, parameters and responses.
func TestMixin(t *testing.T) {

	primary, err := loadSpec(widgetFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", widgetFile, err)
	}
	mixin1, err := loadSpec(fooFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", fooFile, err)
	}
	mixin2, err := loadSpec(barFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", barFile, err)
	}
	mixin3, err := loadSpec(noPathsFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", noPathsFile, err)
	}

	collisions := Mixin(primary, mixin1, mixin2, mixin3)
	if len(collisions) != 16 {
		t.Errorf("TestMixin: Expected 16 collisions, got %v\n%v", len(collisions), collisions)
	}

	if len(primary.Paths.Paths) != 7 {
		t.Errorf("TestMixin: Expected 7 paths in merged, got %v\n", len(primary.Paths.Paths))
	}

	if len(primary.Definitions) != 8 {
		t.Errorf("TestMixin: Expected 8 definitions in merged, got %v\n", len(primary.Definitions))
	}

	if len(primary.Parameters) != 4 {
		t.Errorf("TestMixin: Expected 4 top level parameters in merged, got %v\n", len(primary.Parameters))
	}

	if len(primary.Responses) != 2 {
		t.Errorf("TestMixin: Expected 2 top level responses in merged, got %v\n", len(primary.Responses))
	}

	// test that adding paths to a primary with no paths works (was NPE)
	emptyPaths, err := loadSpec(emptyPathsFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", emptyPathsFile, err)
	}

	collisions = Mixin(emptyPaths, primary)
	if len(collisions) != 0 {
		t.Errorf("TestMixin: Expected 0 collisions, got %v\n%v", len(collisions), collisions)
	}

}
Some files were not shown because too many files have changed in this diff.