plugin/kubernetes: Enable protobuf, Update client api package (#1114)
* vendor * code
This commit is contained in:
parent
45b0252c1a
commit
4b3a430ff2
1511 changed files with 286873 additions and 253612 deletions
3
vendor/github.com/go-openapi/analysis/.gitignore
generated
vendored
Normal file
3
vendor/github.com/go-openapi/analysis/.gitignore
generated
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
secrets.yml
|
||||
coverage.out
|
||||
.idea
|
18
vendor/github.com/go-openapi/analysis/.travis.yml
generated
vendored
Normal file
18
vendor/github.com/go-openapi/analysis/.travis.yml
generated
vendored
Normal file
|
@ -0,0 +1,18 @@
|
|||
language: go
|
||||
go:
|
||||
- 1.7
|
||||
install:
|
||||
- go get -u github.com/stretchr/testify/assert
|
||||
- go get -u gopkg.in/yaml.v2
|
||||
- go get -u github.com/go-openapi/swag
|
||||
- go get -u github.com/go-openapi/jsonpointer
|
||||
- go get -u github.com/go-openapi/spec
|
||||
- go get -u github.com/go-openapi/strfmt
|
||||
- go get -u github.com/go-openapi/loads/fmts
|
||||
script:
|
||||
- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./...
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
||||
notifications:
|
||||
slack:
|
||||
secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
|
74
vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
generated
vendored
Normal file
74
vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
generated
vendored
Normal file
|
@ -0,0 +1,74 @@
|
|||
# Contributor Covenant Code of Conduct
|
||||
|
||||
## Our Pledge
|
||||
|
||||
In the interest of fostering an open and welcoming environment, we as
|
||||
contributors and maintainers pledge to making participation in our project and
|
||||
our community a harassment-free experience for everyone, regardless of age, body
|
||||
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||
nationality, personal appearance, race, religion, or sexual identity and
|
||||
orientation.
|
||||
|
||||
## Our Standards
|
||||
|
||||
Examples of behavior that contributes to creating a positive environment
|
||||
include:
|
||||
|
||||
* Using welcoming and inclusive language
|
||||
* Being respectful of differing viewpoints and experiences
|
||||
* Gracefully accepting constructive criticism
|
||||
* Focusing on what is best for the community
|
||||
* Showing empathy towards other community members
|
||||
|
||||
Examples of unacceptable behavior by participants include:
|
||||
|
||||
* The use of sexualized language or imagery and unwelcome sexual attention or
|
||||
advances
|
||||
* Trolling, insulting/derogatory comments, and personal or political attacks
|
||||
* Public or private harassment
|
||||
* Publishing others' private information, such as a physical or electronic
|
||||
address, without explicit permission
|
||||
* Other conduct which could reasonably be considered inappropriate in a
|
||||
professional setting
|
||||
|
||||
## Our Responsibilities
|
||||
|
||||
Project maintainers are responsible for clarifying the standards of acceptable
|
||||
behavior and are expected to take appropriate and fair corrective action in
|
||||
response to any instances of unacceptable behavior.
|
||||
|
||||
Project maintainers have the right and responsibility to remove, edit, or
|
||||
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||
permanently any contributor for other behaviors that they deem inappropriate,
|
||||
threatening, offensive, or harmful.
|
||||
|
||||
## Scope
|
||||
|
||||
This Code of Conduct applies both within project spaces and in public spaces
|
||||
when an individual is representing the project or its community. Examples of
|
||||
representing a project or community include using an official project e-mail
|
||||
address, posting via an official social media account, or acting as an appointed
|
||||
representative at an online or offline event. Representation of a project may be
|
||||
further defined and clarified by project maintainers.
|
||||
|
||||
## Enforcement
|
||||
|
||||
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
|
||||
complaints will be reviewed and investigated and will result in a response that
|
||||
is deemed necessary and appropriate to the circumstances. The project team is
|
||||
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||
Further details of specific enforcement policies may be posted separately.
|
||||
|
||||
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||
faith may face temporary or permanent repercussions as determined by other
|
||||
members of the project's leadership.
|
||||
|
||||
## Attribution
|
||||
|
||||
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||
available at [http://contributor-covenant.org/version/1/4][version]
|
||||
|
||||
[homepage]: http://contributor-covenant.org
|
||||
[version]: http://contributor-covenant.org/version/1/4/
|
202
vendor/github.com/go-openapi/analysis/LICENSE
generated
vendored
Normal file
202
vendor/github.com/go-openapi/analysis/LICENSE
generated
vendored
Normal file
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
6
vendor/github.com/go-openapi/analysis/README.md
generated
vendored
Normal file
6
vendor/github.com/go-openapi/analysis/README.md
generated
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
# OpenAPI initiative analysis [](https://travis-ci.org/go-openapi/analysis) [](https://codecov.io/gh/go-openapi/analysis) [](https://slackin.goswagger.io)
|
||||
|
||||
[](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE) [](http://godoc.org/github.com/go-openapi/analysis)
|
||||
|
||||
|
||||
A foundational library to analyze an OAI specification document for easier reasoning about the content.
|
785
vendor/github.com/go-openapi/analysis/analyzer.go
generated
vendored
Normal file
785
vendor/github.com/go-openapi/analysis/analyzer.go
generated
vendored
Normal file
|
@ -0,0 +1,785 @@
|
|||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
slashpath "path"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// referenceAnalysis collects every $ref encountered while walking a spec,
// bucketed by the kind of object the reference appears on. Keys are
// "#"-prefixed JSON pointers to the referring location (see addRef).
type referenceAnalysis struct {
	schemas        map[string]spec.Ref // refs carried by schemas
	responses      map[string]spec.Ref // refs carried by responses
	parameters     map[string]spec.Ref // refs carried by parameters
	items          map[string]spec.Ref // refs carried by items (headers and parameters)
	headerItems    map[string]spec.Ref // items refs located on headers
	parameterItems map[string]spec.Ref // items refs located on parameters
	allRefs        map[string]spec.Ref // union of all refs recorded above
	pathItems      map[string]spec.Ref // refs carried by path items
}
|
||||
|
||||
func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
|
||||
r.allRefs["#"+key] = ref
|
||||
}
|
||||
|
||||
func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
|
||||
r.items["#"+key] = items.Ref
|
||||
r.addRef(key, items.Ref)
|
||||
if location == "header" {
|
||||
r.headerItems["#"+key] = items.Ref
|
||||
} else {
|
||||
r.parameterItems["#"+key] = items.Ref
|
||||
}
|
||||
}
|
||||
|
||||
func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
|
||||
r.schemas["#"+key] = ref.Schema.Ref
|
||||
r.addRef(key, ref.Schema.Ref)
|
||||
}
|
||||
|
||||
func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
|
||||
r.responses["#"+key] = resp.Ref
|
||||
r.addRef(key, resp.Ref)
|
||||
}
|
||||
|
||||
func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
|
||||
r.parameters["#"+key] = param.Ref
|
||||
r.addRef(key, param.Ref)
|
||||
}
|
||||
|
||||
func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
|
||||
r.pathItems["#"+key] = pathItem.Ref
|
||||
r.addRef(key, pathItem.Ref)
|
||||
}
|
||||
|
||||
// patternAnalysis collects every regex pattern declared in the spec,
// bucketed by the kind of object carrying it. Keys are "#"-prefixed
// JSON pointers to the carrying location (see addPattern).
type patternAnalysis struct {
	parameters  map[string]string // patterns declared on parameters
	headers     map[string]string // patterns declared on response headers
	items       map[string]string // patterns declared on items
	schemas     map[string]string // patterns declared on schemas
	allPatterns map[string]string // union of all patterns recorded above
}
|
||||
|
||||
func (p *patternAnalysis) addPattern(key, pattern string) {
|
||||
p.allPatterns["#"+key] = pattern
|
||||
}
|
||||
|
||||
func (p *patternAnalysis) addParameterPattern(key, pattern string) {
|
||||
p.parameters["#"+key] = pattern
|
||||
p.addPattern(key, pattern)
|
||||
}
|
||||
|
||||
func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
|
||||
p.headers["#"+key] = pattern
|
||||
p.addPattern(key, pattern)
|
||||
}
|
||||
|
||||
func (p *patternAnalysis) addItemsPattern(key, pattern string) {
|
||||
p.items["#"+key] = pattern
|
||||
p.addPattern(key, pattern)
|
||||
}
|
||||
|
||||
func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
|
||||
p.schemas["#"+key] = pattern
|
||||
p.addPattern(key, pattern)
|
||||
}
|
||||
|
||||
// New takes a swagger spec object and returns an analyzed spec document.
// The analyzed document contains a number of indices that make it easier to
// reason about semantics of a swagger specification for use in code generation
// or validation etc.
func New(doc *spec.Swagger) *Spec {
	// The capacity of 150 is a pre-sizing hint to limit map growth on
	// typical specs; it does not bound the number of entries. The same
	// hint is used by reset().
	a := &Spec{
		spec:        doc,
		consumes:    make(map[string]struct{}, 150),
		produces:    make(map[string]struct{}, 150),
		authSchemes: make(map[string]struct{}, 150),
		operations:  make(map[string]map[string]*spec.Operation, 150),
		allSchemas:  make(map[string]SchemaRef, 150),
		allOfs:      make(map[string]SchemaRef, 150),
		references: referenceAnalysis{
			schemas:        make(map[string]spec.Ref, 150),
			pathItems:      make(map[string]spec.Ref, 150),
			responses:      make(map[string]spec.Ref, 150),
			parameters:     make(map[string]spec.Ref, 150),
			items:          make(map[string]spec.Ref, 150),
			headerItems:    make(map[string]spec.Ref, 150),
			parameterItems: make(map[string]spec.Ref, 150),
			allRefs:        make(map[string]spec.Ref, 150),
		},
		patterns: patternAnalysis{
			parameters:  make(map[string]string, 150),
			headers:     make(map[string]string, 150),
			items:       make(map[string]string, 150),
			schemas:     make(map[string]string, 150),
			allPatterns: make(map[string]string, 150),
		},
	}
	// Walk the document once to populate all of the indices above.
	a.initialize()
	return a
}
|
||||
|
||||
// Spec takes a swagger spec object and turns it into a registry
// with a bunch of utility methods to act on the information in the spec
type Spec struct {
	spec        *spec.Swagger                         // the document under analysis
	consumes    map[string]struct{}                   // media types consumed, globally and per operation
	produces    map[string]struct{}                   // media types produced, globally and per operation
	authSchemes map[string]struct{}                   // security scheme names referenced by security requirements
	operations  map[string]map[string]*spec.Operation // operations indexed by HTTP method, then path
	references  referenceAnalysis                     // every $ref found while walking the document
	patterns    patternAnalysis                       // every regex pattern found while walking the document
	allSchemas  map[string]SchemaRef                  // every schema, keyed by "#"-prefixed JSON pointer
	allOfs      map[string]SchemaRef                  // schemas that use allOf composition
}
|
||||
|
||||
func (s *Spec) reset() {
|
||||
s.consumes = make(map[string]struct{}, 150)
|
||||
s.produces = make(map[string]struct{}, 150)
|
||||
s.authSchemes = make(map[string]struct{}, 150)
|
||||
s.operations = make(map[string]map[string]*spec.Operation, 150)
|
||||
s.allSchemas = make(map[string]SchemaRef, 150)
|
||||
s.allOfs = make(map[string]SchemaRef, 150)
|
||||
s.references.schemas = make(map[string]spec.Ref, 150)
|
||||
s.references.pathItems = make(map[string]spec.Ref, 150)
|
||||
s.references.responses = make(map[string]spec.Ref, 150)
|
||||
s.references.parameters = make(map[string]spec.Ref, 150)
|
||||
s.references.items = make(map[string]spec.Ref, 150)
|
||||
s.references.headerItems = make(map[string]spec.Ref, 150)
|
||||
s.references.parameterItems = make(map[string]spec.Ref, 150)
|
||||
s.references.allRefs = make(map[string]spec.Ref, 150)
|
||||
s.patterns.parameters = make(map[string]string, 150)
|
||||
s.patterns.headers = make(map[string]string, 150)
|
||||
s.patterns.items = make(map[string]string, 150)
|
||||
s.patterns.schemas = make(map[string]string, 150)
|
||||
s.patterns.allPatterns = make(map[string]string, 150)
|
||||
}
|
||||
|
||||
// reload clears all indices and re-runs the analysis over s.spec.
func (s *Spec) reload() {
	s.reset()
	s.initialize()
}
|
||||
|
||||
// initialize walks the whole document once and populates every index:
// global media types and security schemes, all path operations, and the
// spec-level shared parameters, responses and definitions.
func (s *Spec) initialize() {
	// Global media types.
	for _, c := range s.spec.Consumes {
		s.consumes[c] = struct{}{}
	}
	for _, c := range s.spec.Produces {
		s.produces[c] = struct{}{}
	}
	// Scheme names referenced by the global security requirements.
	for _, ss := range s.spec.Security {
		for k := range ss {
			s.authSchemes[k] = struct{}{}
		}
	}
	// Every path item in the document.
	for path, pathItem := range s.AllPaths() {
		s.analyzeOperations(path, &pathItem)
	}

	// Spec-level (shared) parameters.
	for name, parameter := range s.spec.Parameters {
		refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
		if parameter.Items != nil {
			s.analyzeItems("items", parameter.Items, refPref, "parameter")
		}
		if parameter.In == "body" && parameter.Schema != nil {
			s.analyzeSchema("schema", *parameter.Schema, refPref)
		}
		if parameter.Pattern != "" {
			s.patterns.addParameterPattern(refPref, parameter.Pattern)
		}
	}

	// Spec-level (shared) responses, including their headers.
	for name, response := range s.spec.Responses {
		refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
		for k, v := range response.Headers {
			hRefPref := slashpath.Join(refPref, "headers", k)
			if v.Items != nil {
				s.analyzeItems("items", v.Items, hRefPref, "header")
			}
			if v.Pattern != "" {
				s.patterns.addHeaderPattern(hRefPref, v.Pattern)
			}
		}
		if response.Schema != nil {
			s.analyzeSchema("schema", *response.Schema, refPref)
		}
	}

	// Top-level definitions.
	for name, schema := range s.spec.Definitions {
		s.analyzeSchema(name, schema, "/definitions")
	}
	// TODO: after analyzing all things and flattening schemas etc
	// resolve all the collected references to their final representations
	// best put in a separate method because this could get expensive
}
|
||||
|
||||
func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
|
||||
// TODO: resolve refs here?
|
||||
op := pi
|
||||
if pi.Ref.String() != "" {
|
||||
key := slashpath.Join("/paths", jsonpointer.Escape(path))
|
||||
s.references.addPathItemRef(key, pi)
|
||||
}
|
||||
s.analyzeOperation("GET", path, op.Get)
|
||||
s.analyzeOperation("PUT", path, op.Put)
|
||||
s.analyzeOperation("POST", path, op.Post)
|
||||
s.analyzeOperation("PATCH", path, op.Patch)
|
||||
s.analyzeOperation("DELETE", path, op.Delete)
|
||||
s.analyzeOperation("HEAD", path, op.Head)
|
||||
s.analyzeOperation("OPTIONS", path, op.Options)
|
||||
for i, param := range op.Parameters {
|
||||
refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
|
||||
if param.Ref.String() != "" {
|
||||
s.references.addParamRef(refPref, ¶m)
|
||||
}
|
||||
if param.Pattern != "" {
|
||||
s.patterns.addParameterPattern(refPref, param.Pattern)
|
||||
}
|
||||
if param.Items != nil {
|
||||
s.analyzeItems("items", param.Items, refPref, "parameter")
|
||||
}
|
||||
if param.Schema != nil {
|
||||
s.analyzeSchema("schema", *param.Schema, refPref)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
|
||||
if items == nil {
|
||||
return
|
||||
}
|
||||
refPref := slashpath.Join(prefix, name)
|
||||
s.analyzeItems(name, items.Items, refPref, location)
|
||||
if items.Ref.String() != "" {
|
||||
s.references.addItemsRef(refPref, items, location)
|
||||
}
|
||||
if items.Pattern != "" {
|
||||
s.patterns.addItemsPattern(refPref, items.Pattern)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
|
||||
if op == nil {
|
||||
return
|
||||
}
|
||||
|
||||
for _, c := range op.Consumes {
|
||||
s.consumes[c] = struct{}{}
|
||||
}
|
||||
for _, c := range op.Produces {
|
||||
s.produces[c] = struct{}{}
|
||||
}
|
||||
for _, ss := range op.Security {
|
||||
for k := range ss {
|
||||
s.authSchemes[k] = struct{}{}
|
||||
}
|
||||
}
|
||||
if _, ok := s.operations[method]; !ok {
|
||||
s.operations[method] = make(map[string]*spec.Operation)
|
||||
}
|
||||
s.operations[method][path] = op
|
||||
prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
|
||||
for i, param := range op.Parameters {
|
||||
refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
|
||||
if param.Ref.String() != "" {
|
||||
s.references.addParamRef(refPref, ¶m)
|
||||
}
|
||||
if param.Pattern != "" {
|
||||
s.patterns.addParameterPattern(refPref, param.Pattern)
|
||||
}
|
||||
s.analyzeItems("items", param.Items, refPref, "parameter")
|
||||
if param.In == "body" && param.Schema != nil {
|
||||
s.analyzeSchema("schema", *param.Schema, refPref)
|
||||
}
|
||||
}
|
||||
if op.Responses != nil {
|
||||
if op.Responses.Default != nil {
|
||||
refPref := slashpath.Join(prefix, "responses", "default")
|
||||
if op.Responses.Default.Ref.String() != "" {
|
||||
s.references.addResponseRef(refPref, op.Responses.Default)
|
||||
}
|
||||
for k, v := range op.Responses.Default.Headers {
|
||||
hRefPref := slashpath.Join(refPref, "headers", k)
|
||||
s.analyzeItems("items", v.Items, hRefPref, "header")
|
||||
if v.Pattern != "" {
|
||||
s.patterns.addHeaderPattern(hRefPref, v.Pattern)
|
||||
}
|
||||
}
|
||||
if op.Responses.Default.Schema != nil {
|
||||
s.analyzeSchema("schema", *op.Responses.Default.Schema, refPref)
|
||||
}
|
||||
}
|
||||
for k, res := range op.Responses.StatusCodeResponses {
|
||||
refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
|
||||
if res.Ref.String() != "" {
|
||||
s.references.addResponseRef(refPref, &res)
|
||||
}
|
||||
for k, v := range res.Headers {
|
||||
hRefPref := slashpath.Join(refPref, "headers", k)
|
||||
s.analyzeItems("items", v.Items, hRefPref, "header")
|
||||
if v.Pattern != "" {
|
||||
s.patterns.addHeaderPattern(hRefPref, v.Pattern)
|
||||
}
|
||||
}
|
||||
if res.Schema != nil {
|
||||
s.analyzeSchema("schema", *res.Schema, refPref)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// analyzeSchema indexes a single schema (and, recursively, every schema nested
// inside it) under the JSON-pointer URI prefix/name. It records the schema in
// s.allSchemas, tracks $ref usages, patterns, and allOf compositions.
func (s *Spec) analyzeSchema(name string, schema spec.Schema, prefix string) {
	refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
	schRef := SchemaRef{
		Name:   name,
		Schema: &schema, // points at this call's value copy, so each entry is independent
		Ref:    spec.MustCreateRef("#" + refURI),
		// only direct children of /definitions count as top-level (named) schemas
		TopLevel: prefix == "/definitions",
	}

	s.allSchemas["#"+refURI] = schRef

	if schema.Ref.String() != "" {
		s.references.addSchemaRef(refURI, schRef)
	}
	if schema.Pattern != "" {
		s.patterns.addSchemaPattern(refURI, schema.Pattern)
	}

	// recurse into every place a sub-schema can appear, extending the pointer
	// with the JSON-schema keyword that introduced it
	for k, v := range schema.Definitions {
		s.analyzeSchema(k, v, slashpath.Join(refURI, "definitions"))
	}
	for k, v := range schema.Properties {
		s.analyzeSchema(k, v, slashpath.Join(refURI, "properties"))
	}
	for k, v := range schema.PatternProperties {
		s.analyzeSchema(k, v, slashpath.Join(refURI, "patternProperties"))
	}
	for i, v := range schema.AllOf {
		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
	}
	if len(schema.AllOf) > 0 {
		// remember composed schemas so SchemasWithAllOf can report them
		s.allOfs["#"+refURI] = schRef
	}
	for i, v := range schema.AnyOf {
		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
	}
	for i, v := range schema.OneOf {
		s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
	}
	if schema.Not != nil {
		s.analyzeSchema("not", *schema.Not, refURI)
	}
	if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
		s.analyzeSchema("additionalProperties", *schema.AdditionalProperties.Schema, refURI)
	}
	if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
		s.analyzeSchema("additionalItems", *schema.AdditionalItems.Schema, refURI)
	}
	if schema.Items != nil {
		// items may be a single schema or a tuple of schemas
		if schema.Items.Schema != nil {
			s.analyzeSchema("items", *schema.Items.Schema, refURI)
		}
		for i, sch := range schema.Items.Schemas {
			s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
		}
	}
}
|
||||
|
||||
// SecurityRequirement is a representation of a security requirement for an operation
type SecurityRequirement struct {
	Name   string   // name of the security scheme (key in securityDefinitions)
	Scopes []string // OAuth2 scopes required; nil/empty for non-scoped schemes
}
|
||||
|
||||
// SecurityRequirementsFor gets the security requirements for the operation
|
||||
func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) []SecurityRequirement {
|
||||
if s.spec.Security == nil && operation.Security == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
schemes := s.spec.Security
|
||||
if operation.Security != nil {
|
||||
schemes = operation.Security
|
||||
}
|
||||
|
||||
unique := make(map[string]SecurityRequirement)
|
||||
for _, scheme := range schemes {
|
||||
for k, v := range scheme {
|
||||
if _, ok := unique[k]; !ok {
|
||||
unique[k] = SecurityRequirement{Name: k, Scopes: v}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var result []SecurityRequirement
|
||||
for _, v := range unique {
|
||||
result = append(result, v)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
|
||||
func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
|
||||
requirements := s.SecurityRequirementsFor(operation)
|
||||
if len(requirements) == 0 {
|
||||
return nil
|
||||
}
|
||||
result := make(map[string]spec.SecurityScheme)
|
||||
for _, v := range requirements {
|
||||
if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
|
||||
if definition != nil {
|
||||
result[v.Name] = *definition
|
||||
}
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// ConsumesFor gets the mediatypes for the operation
|
||||
func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
|
||||
|
||||
if len(operation.Consumes) == 0 {
|
||||
cons := make(map[string]struct{}, len(s.spec.Consumes))
|
||||
for _, k := range s.spec.Consumes {
|
||||
cons[k] = struct{}{}
|
||||
}
|
||||
return s.structMapKeys(cons)
|
||||
}
|
||||
|
||||
cons := make(map[string]struct{}, len(operation.Consumes))
|
||||
for _, c := range operation.Consumes {
|
||||
cons[c] = struct{}{}
|
||||
}
|
||||
return s.structMapKeys(cons)
|
||||
}
|
||||
|
||||
// ProducesFor gets the mediatypes for the operation
|
||||
func (s *Spec) ProducesFor(operation *spec.Operation) []string {
|
||||
if len(operation.Produces) == 0 {
|
||||
prod := make(map[string]struct{}, len(s.spec.Produces))
|
||||
for _, k := range s.spec.Produces {
|
||||
prod[k] = struct{}{}
|
||||
}
|
||||
return s.structMapKeys(prod)
|
||||
}
|
||||
|
||||
prod := make(map[string]struct{}, len(operation.Produces))
|
||||
for _, c := range operation.Produces {
|
||||
prod[c] = struct{}{}
|
||||
}
|
||||
return s.structMapKeys(prod)
|
||||
}
|
||||
|
||||
func mapKeyFromParam(param *spec.Parameter) string {
|
||||
return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
|
||||
}
|
||||
|
||||
func fieldNameFromParam(param *spec.Parameter) string {
|
||||
if nm, ok := param.Extensions.GetString("go-name"); ok {
|
||||
return nm
|
||||
}
|
||||
return swag.ToGoName(param.Name)
|
||||
}
|
||||
|
||||
// paramsAsMap resolves the given parameters ($ref entries are looked up in the
// document) and stores them in res keyed by mapKeyFromParam ("in#FieldName"),
// so later entries with the same location+name override earlier ones.
func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter) {
	for _, param := range parameters {
		pr := param // work on a copy so the resolved value does not alias the loop variable
		if pr.Ref.String() != "" {
			obj, _, err := pr.Ref.GetPointer().Get(s.spec)
			if err != nil {
				// NOTE(review): an unresolvable $ref panics instead of returning
				// an error — confirm callers guarantee resolvable refs
				panic(err)
			}
			pr = obj.(spec.Parameter)
		}
		res[mapKeyFromParam(&pr)] = pr
	}
}
|
||||
|
||||
// ParametersFor the specified operation id
|
||||
func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
|
||||
gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
|
||||
bag := make(map[string]spec.Parameter)
|
||||
s.paramsAsMap(pi.Parameters, bag)
|
||||
s.paramsAsMap(op.Parameters, bag)
|
||||
|
||||
var res []spec.Parameter
|
||||
for _, v := range bag {
|
||||
res = append(res, v)
|
||||
}
|
||||
return res
|
||||
}
|
||||
for _, pi := range s.spec.Paths.Paths {
|
||||
if pi.Get != nil && pi.Get.ID == operationID {
|
||||
return gatherParams(&pi, pi.Get)
|
||||
}
|
||||
if pi.Head != nil && pi.Head.ID == operationID {
|
||||
return gatherParams(&pi, pi.Head)
|
||||
}
|
||||
if pi.Options != nil && pi.Options.ID == operationID {
|
||||
return gatherParams(&pi, pi.Options)
|
||||
}
|
||||
if pi.Post != nil && pi.Post.ID == operationID {
|
||||
return gatherParams(&pi, pi.Post)
|
||||
}
|
||||
if pi.Patch != nil && pi.Patch.ID == operationID {
|
||||
return gatherParams(&pi, pi.Patch)
|
||||
}
|
||||
if pi.Put != nil && pi.Put.ID == operationID {
|
||||
return gatherParams(&pi, pi.Put)
|
||||
}
|
||||
if pi.Delete != nil && pi.Delete.ID == operationID {
|
||||
return gatherParams(&pi, pi.Delete)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
|
||||
// apply for the method and path.
|
||||
func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
|
||||
res := make(map[string]spec.Parameter)
|
||||
if pi, ok := s.spec.Paths.Paths[path]; ok {
|
||||
s.paramsAsMap(pi.Parameters, res)
|
||||
s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res)
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// OperationForName gets the operation for the given id
|
||||
func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
|
||||
for method, pathItem := range s.operations {
|
||||
for path, op := range pathItem {
|
||||
if operationID == op.ID {
|
||||
return method, path, op, true
|
||||
}
|
||||
}
|
||||
}
|
||||
return "", "", nil, false
|
||||
}
|
||||
|
||||
// OperationFor the given method and path
|
||||
func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
|
||||
if mp, ok := s.operations[strings.ToUpper(method)]; ok {
|
||||
op, fn := mp[path]
|
||||
return op, fn
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// Operations gathers all the operations specified in the spec document
// as a method -> path -> operation index. The internal map is returned
// directly; callers must not mutate it.
func (s *Spec) Operations() map[string]map[string]*spec.Operation {
	return s.operations
}
|
||||
|
||||
func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
|
||||
if len(mp) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
result := make([]string, 0, len(mp))
|
||||
for k := range mp {
|
||||
result = append(result, k)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// AllPaths returns all the paths in the swagger spec
|
||||
func (s *Spec) AllPaths() map[string]spec.PathItem {
|
||||
if s.spec == nil || s.spec.Paths == nil {
|
||||
return nil
|
||||
}
|
||||
return s.spec.Paths.Paths
|
||||
}
|
||||
|
||||
// OperationIDs gets all the operation ids based on method and path
func (s *Spec) OperationIDs() []string {
	if len(s.operations) == 0 {
		return nil
	}
	result := make([]string, 0, len(s.operations))
	for method, v := range s.operations {
		for p, o := range v {
			if o.ID != "" {
				result = append(result, o.ID)
			} else {
				// fall back to "METHOD path" when the operation has no explicit id
				result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
			}
		}
	}
	return result
}
|
||||
|
||||
// OperationMethodPaths gets all the operation ids based on method an dpath
|
||||
func (s *Spec) OperationMethodPaths() []string {
|
||||
if len(s.operations) == 0 {
|
||||
return nil
|
||||
}
|
||||
result := make([]string, 0, len(s.operations))
|
||||
for method, v := range s.operations {
|
||||
for p := range v {
|
||||
result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// RequiredConsumes gets all the distinct consumes that are specified in the specification document
// (gathered at analysis time from the spec and every operation).
func (s *Spec) RequiredConsumes() []string {
	return s.structMapKeys(s.consumes)
}
|
||||
|
||||
// RequiredProduces gets all the distinct produces that are specified in the specification document
// (gathered at analysis time from the spec and every operation).
func (s *Spec) RequiredProduces() []string {
	return s.structMapKeys(s.produces)
}
|
||||
|
||||
// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
// (gathered at analysis time).
func (s *Spec) RequiredSecuritySchemes() []string {
	return s.structMapKeys(s.authSchemes)
}
|
||||
|
||||
// SchemaRef is a reference to a schema
type SchemaRef struct {
	Name     string       // key/property name the schema was found under
	Ref      spec.Ref     // JSON-pointer ref ("#/...") locating the schema in the document
	Schema   *spec.Schema // the schema itself
	TopLevel bool         // true when the schema is a direct child of /definitions
}
|
||||
|
||||
// SchemasWithAllOf returns schema references to all schemas that are defined
|
||||
// with an allOf key
|
||||
func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
|
||||
for _, v := range s.allOfs {
|
||||
result = append(result, v)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllDefinitions returns schema references for all the definitions that were discovered
|
||||
func (s *Spec) AllDefinitions() (result []SchemaRef) {
|
||||
for _, v := range s.allSchemas {
|
||||
result = append(result, v)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllDefinitionReferences returns json refs for all the discovered schemas
|
||||
func (s *Spec) AllDefinitionReferences() (result []string) {
|
||||
for _, v := range s.references.schemas {
|
||||
result = append(result, v.String())
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllParameterReferences returns json refs for all the discovered parameters
|
||||
func (s *Spec) AllParameterReferences() (result []string) {
|
||||
for _, v := range s.references.parameters {
|
||||
result = append(result, v.String())
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllResponseReferences returns json refs for all the discovered responses
|
||||
func (s *Spec) AllResponseReferences() (result []string) {
|
||||
for _, v := range s.references.responses {
|
||||
result = append(result, v.String())
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllPathItemReferences returns the references for all the path items
// (the original comment said "items", copied from AllItemsReferences).
func (s *Spec) AllPathItemReferences() (result []string) {
	for _, v := range s.references.pathItems {
		result = append(result, v.String())
	}
	return
}
|
||||
|
||||
// AllItemsReferences returns the references for all the items
|
||||
func (s *Spec) AllItemsReferences() (result []string) {
|
||||
for _, v := range s.references.items {
|
||||
result = append(result, v.String())
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllReferences returns all the references found in the document
|
||||
func (s *Spec) AllReferences() (result []string) {
|
||||
for _, v := range s.references.allRefs {
|
||||
result = append(result, v.String())
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// AllRefs returns all the unique references found in the document
|
||||
func (s *Spec) AllRefs() (result []spec.Ref) {
|
||||
set := make(map[string]struct{})
|
||||
for _, v := range s.references.allRefs {
|
||||
a := v.String()
|
||||
if a == "" {
|
||||
continue
|
||||
}
|
||||
if _, ok := set[a]; !ok {
|
||||
set[a] = struct{}{}
|
||||
result = append(result, v)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// cloneStringMap returns a shallow copy of source so callers can mutate the
// result without touching the analyzer's internal state.
func cloneStringMap(source map[string]string) map[string]string {
	clone := make(map[string]string, len(source))
	for name, value := range source {
		clone[name] = value
	}
	return clone
}
|
||||
|
||||
// ParameterPatterns returns all the patterns found in parameters,
// keyed by the JSON pointer of the parameter.
// the map is cloned to avoid accidental changes
func (s *Spec) ParameterPatterns() map[string]string {
	return cloneStringMap(s.patterns.parameters)
}
|
||||
|
||||
// HeaderPatterns returns all the patterns found in response headers,
// keyed by the JSON pointer of the header.
// the map is cloned to avoid accidental changes
func (s *Spec) HeaderPatterns() map[string]string {
	return cloneStringMap(s.patterns.headers)
}
|
||||
|
||||
// ItemsPatterns returns all the patterns found in simple array items,
// keyed by the JSON pointer of the items object.
// the map is cloned to avoid accidental changes
func (s *Spec) ItemsPatterns() map[string]string {
	return cloneStringMap(s.patterns.items)
}
|
||||
|
||||
// SchemaPatterns returns all the patterns found in schemas,
// keyed by the JSON pointer of the schema.
// the map is cloned to avoid accidental changes
func (s *Spec) SchemaPatterns() map[string]string {
	return cloneStringMap(s.patterns.schemas)
}
|
||||
|
||||
// AllPatterns returns all the patterns found in the spec,
// regardless of where they were declared.
// the map is cloned to avoid accidental changes
func (s *Spec) AllPatterns() map[string]string {
	return cloneStringMap(s.patterns.allPatterns)
}
|
284
vendor/github.com/go-openapi/analysis/analyzer_test.go
generated
vendored
Normal file
284
vendor/github.com/go-openapi/analysis/analyzer_test.go
generated
vendored
Normal file
|
@ -0,0 +1,284 @@
|
|||
// Copyright 2015 go-swagger maintainers
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package analysis
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"sort"
|
||||
"testing"
|
||||
|
||||
"github.com/go-openapi/loads/fmts"
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func schemeNames(schemes []SecurityRequirement) []string {
|
||||
var names []string
|
||||
for _, v := range schemes {
|
||||
names = append(names, v.Name)
|
||||
}
|
||||
sort.Sort(sort.StringSlice(names))
|
||||
return names
|
||||
}
|
||||
|
||||
// TestAnalyzer builds a two-path spec in memory and checks the analyzer's
// indexes and accessors (consumes/produces fallbacks, security resolution,
// parameter aggregation and operation lookups).
func TestAnalyzer(t *testing.T) {
	formatParam := spec.QueryParam("format").Typed("string", "")

	limitParam := spec.QueryParam("limit").Typed("integer", "int32")
	limitParam.Extensions = spec.Extensions(map[string]interface{}{})
	limitParam.Extensions.Add("go-name", "Limit")

	skipParam := spec.QueryParam("skip").Typed("integer", "int32")
	pi := spec.PathItem{}
	pi.Parameters = []spec.Parameter{*limitParam}

	// "/" carries operation-level consumes/produces and security overrides
	op := &spec.Operation{}
	op.Consumes = []string{"application/x-yaml"}
	op.Produces = []string{"application/x-yaml"}
	op.Security = []map[string][]string{
		map[string][]string{"oauth2": []string{}},
		map[string][]string{"basic": nil},
	}
	op.ID = "someOperation"
	op.Parameters = []spec.Parameter{*skipParam}
	pi.Get = op

	// "/items" declares nothing and must inherit the spec-level defaults
	pi2 := spec.PathItem{}
	pi2.Parameters = []spec.Parameter{*limitParam}
	op2 := &spec.Operation{}
	op2.ID = "anotherOperation"
	op2.Parameters = []spec.Parameter{*skipParam}
	pi2.Get = op2

	// NOTE: this local variable shadows the spec package for the rest of the test
	spec := &spec.Swagger{
		SwaggerProps: spec.SwaggerProps{
			Consumes: []string{"application/json"},
			Produces: []string{"application/json"},
			Security: []map[string][]string{
				map[string][]string{"apikey": nil},
			},
			SecurityDefinitions: map[string]*spec.SecurityScheme{
				"basic":  spec.BasicAuth(),
				"apiKey": spec.APIKeyAuth("api_key", "query"),
				"oauth2": spec.OAuth2AccessToken("http://authorize.com", "http://token.com"),
			},
			Parameters: map[string]spec.Parameter{"format": *formatParam},
			Paths: &spec.Paths{
				Paths: map[string]spec.PathItem{
					"/":      pi,
					"/items": pi2,
				},
			},
		},
	}
	analyzer := New(spec)

	assert.Len(t, analyzer.consumes, 2)
	assert.Len(t, analyzer.produces, 2)
	assert.Len(t, analyzer.operations, 1)
	assert.Equal(t, analyzer.operations["GET"]["/"], spec.Paths.Paths["/"].Get)

	// operation-level consumes/produces win for "/"
	expected := []string{"application/x-yaml"}
	sort.Sort(sort.StringSlice(expected))
	consumes := analyzer.ConsumesFor(spec.Paths.Paths["/"].Get)
	sort.Sort(sort.StringSlice(consumes))
	assert.Equal(t, expected, consumes)

	produces := analyzer.ProducesFor(spec.Paths.Paths["/"].Get)
	sort.Sort(sort.StringSlice(produces))
	assert.Equal(t, expected, produces)

	// "/items" falls back to the spec-level values
	expected = []string{"application/json"}
	sort.Sort(sort.StringSlice(expected))
	consumes = analyzer.ConsumesFor(spec.Paths.Paths["/items"].Get)
	sort.Sort(sort.StringSlice(consumes))
	assert.Equal(t, expected, consumes)

	produces = analyzer.ProducesFor(spec.Paths.Paths["/items"].Get)
	sort.Sort(sort.StringSlice(produces))
	assert.Equal(t, expected, produces)

	expectedSchemes := []SecurityRequirement{SecurityRequirement{"oauth2", []string{}}, SecurityRequirement{"basic", nil}}
	schemes := analyzer.SecurityRequirementsFor(spec.Paths.Paths["/"].Get)
	assert.Equal(t, schemeNames(expectedSchemes), schemeNames(schemes))

	securityDefinitions := analyzer.SecurityDefinitionsFor(spec.Paths.Paths["/"].Get)
	assert.Equal(t, securityDefinitions["basic"], *spec.SecurityDefinitions["basic"])
	assert.Equal(t, securityDefinitions["oauth2"], *spec.SecurityDefinitions["oauth2"])

	parameters := analyzer.ParamsFor("GET", "/")
	assert.Len(t, parameters, 2)

	operations := analyzer.OperationIDs()
	assert.Len(t, operations, 2)

	producers := analyzer.RequiredProduces()
	assert.Len(t, producers, 2)
	consumers := analyzer.RequiredConsumes()
	assert.Len(t, consumers, 2)
	authSchemes := analyzer.RequiredSecuritySchemes()
	assert.Len(t, authSchemes, 3)

	ops := analyzer.Operations()
	assert.Len(t, ops, 1)
	assert.Len(t, ops["GET"], 2)

	// method lookup is case-insensitive
	op, ok := analyzer.OperationFor("get", "/")
	assert.True(t, ok)
	assert.NotNil(t, op)

	op, ok = analyzer.OperationFor("delete", "/")
	assert.False(t, ok)
	assert.Nil(t, op)
}
|
||||
|
||||
// TestDefinitionAnalysis loads fixtures/definitions.yml and checks that every
// schema occurrence (parameters, responses, nested definitions, composition
// keywords) is indexed in allSchemas under its JSON-pointer key, and that
// allOf composites are tracked separately.
func TestDefinitionAnalysis(t *testing.T) {
	doc, err := loadSpec(filepath.Join("fixtures", "definitions.yml"))
	if assert.NoError(t, err) {
		analyzer := New(doc)
		definitions := analyzer.allSchemas
		// parameters
		assertSchemaRefExists(t, definitions, "#/parameters/someParam/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/parameters/1/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/parameters/1/schema")
		// responses
		assertSchemaRefExists(t, definitions, "#/responses/someResponse/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/default/schema")
		assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
		// definitions
		assertSchemaRefExists(t, definitions, "#/definitions/tag")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/id")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/value")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/id")
		assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/value")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps/additionalProperties")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/0")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/1")
		assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/additionalItems")
		assertSchemaRefExists(t, definitions, "#/definitions/withNot")
		assertSchemaRefExists(t, definitions, "#/definitions/withNot/not")
		assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf")
		assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/0")
		assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/1")
		assertSchemaRefExists(t, definitions, "#/definitions/withAllOf")
		assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/0")
		assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/1")
		allOfs := analyzer.allOfs
		assert.Len(t, allOfs, 1)
		assert.Contains(t, allOfs, "#/definitions/withAllOf")
	}
}
|
||||
|
||||
// loadSpec reads a YAML swagger document from path and unmarshals it into a
// spec.Swagger. NOTE: it mutates the package-global spec.PathLoader so that
// $ref resolution can load YAML files too — a process-wide side effect.
func loadSpec(path string) (*spec.Swagger, error) {
	spec.PathLoader = func(path string) (json.RawMessage, error) {
		ext := filepath.Ext(path)
		if ext == ".yml" || ext == ".yaml" {
			return fmts.YAMLDoc(path)
		}
		data, err := swag.LoadFromFileOrHTTP(path)
		if err != nil {
			return nil, err
		}
		return json.RawMessage(data), nil
	}
	// the top-level document itself is always loaded as YAML
	data, err := fmts.YAMLDoc(path)
	if err != nil {
		return nil, err
	}

	var sw spec.Swagger
	if err := json.Unmarshal(data, &sw); err != nil {
		return nil, err
	}
	return &sw, nil
}
|
||||
|
||||
// TestReferenceAnalysis loads fixtures/references.yml and checks that $ref
// usages are bucketed by kind (parameters, path items, responses, schemas,
// items) under their JSON-pointer location.
func TestReferenceAnalysis(t *testing.T) {
	doc, err := loadSpec(filepath.Join("fixtures", "references.yml"))
	if assert.NoError(t, err) {
		definitions := New(doc).references

		// parameters
		assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/parameters/0")
		assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0")

		// path items
		assertRefExists(t, definitions.pathItems, "#/paths/~1other~1place")

		// responses
		assertRefExists(t, definitions.responses, "#/paths/~1some~1where~1{id}/get/responses/404")

		// definitions
		assertRefExists(t, definitions.schemas, "#/responses/notFound/schema")
		assertRefExists(t, definitions.schemas, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
		assertRefExists(t, definitions.schemas, "#/definitions/tag/properties/audit")

		// items
		assertRefExists(t, definitions.allRefs, "#/paths/~1some~1where~1{id}/get/parameters/1/items")
	}
}
|
||||
|
||||
func assertRefExists(t testing.TB, data map[string]spec.Ref, key string) bool {
|
||||
if _, ok := data[key]; !ok {
|
||||
return assert.Fail(t, fmt.Sprintf("expected %q to exist in the ref bag", key))
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func assertSchemaRefExists(t testing.TB, data map[string]SchemaRef, key string) bool {
|
||||
if _, ok := data[key]; !ok {
|
||||
return assert.Fail(t, fmt.Sprintf("expected %q to exist in schema ref bag", key))
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// TestPatternAnalysis loads fixtures/patterns.yml and checks that pattern
// constraints are indexed by kind (parameters, headers, schemas, items)
// under their JSON-pointer location.
func TestPatternAnalysis(t *testing.T) {
	doc, err := loadSpec(filepath.Join("fixtures", "patterns.yml"))
	if assert.NoError(t, err) {
		pt := New(doc).patterns

		// parameters
		assertPattern(t, pt.parameters, "#/parameters/idParam", "a[A-Za-Z0-9]+")
		assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/parameters/1", "b[A-Za-z0-9]+")
		assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0", "[abc][0-9]+")

		// responses
		assertPattern(t, pt.headers, "#/responses/notFound/headers/ContentLength", "[0-9]+")
		assertPattern(t, pt.headers, "#/paths/~1some~1where~1{id}/get/responses/200/headers/X-Request-Id", "d[A-Za-z0-9]+")

		// definitions
		assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/parameters/0/schema/properties/value", "e[A-Za-z0-9]+")
		assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/responses/200/schema/properties/data", "[0-9]+[abd]")
		assertPattern(t, pt.schemas, "#/definitions/named", "f[A-Za-z0-9]+")
		assertPattern(t, pt.schemas, "#/definitions/tag/properties/value", "g[A-Za-z0-9]+")

		// items
		assertPattern(t, pt.items, "#/paths/~1some~1where~1{id}/get/parameters/1/items", "c[A-Za-z0-9]+")
		assertPattern(t, pt.items, "#/paths/~1other~1place/post/responses/default/headers/Via/items", "[A-Za-z]+")
	}
}
|
||||
|
||||
func assertPattern(t testing.TB, data map[string]string, key, pattern string) bool {
|
||||
if assert.Contains(t, data, key) {
|
||||
return assert.Equal(t, pattern, data[key])
|
||||
}
|
||||
return false
|
||||
}
|
756
vendor/github.com/go-openapi/analysis/flatten.go
generated
vendored
Normal file
756
vendor/github.com/go-openapi/analysis/flatten.go
generated
vendored
Normal file
|
@ -0,0 +1,756 @@
|
|||
package analysis
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
"strconv"
|
||||
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
swspec "github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// FlattenOpts configuration for flattening a swagger specification.
type FlattenOpts struct {
	Spec     *Spec  // the analyzed spec document to flatten (mutated in place)
	BasePath string // base path used to resolve relative references

	// forces callers to use keyed struct literals, so fields can be added later
	_ struct{} // require keys
}
|
||||
|
||||
// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
// skipSchemas is forwarded as-is: when true, schema definitions are left unexpanded.
func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *swspec.ExpandOptions {
	return &swspec.ExpandOptions{RelativeBase: f.BasePath, SkipSchemas: skipSchemas}
}
|
||||
|
||||
// Swagger gets the swagger specification for this flatten operation
// (the underlying document of the analyzed Spec).
func (f *FlattenOpts) Swagger() *swspec.Swagger {
	return f.Spec.spec
}
|
||||
|
||||
// Flatten an analyzed spec.
//
// To flatten a spec means:
//
// Expand the parameters, responses, path items, parameter items and header items.
// Import external (http, file) references so they become internal to the document.
// Move every inline schema to be a definition with an auto-generated name in a depth-first fashion.
// Rewritten schemas get a vendor extension x-go-gen-location so we know in which package they need to be rendered.
func Flatten(opts FlattenOpts) error {
	// recursively expand responses, parameters, path items and items
	// (schemas are skipped here; they are handled by the later passes)
	err := swspec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(true))
	if err != nil {
		return err
	}
	opts.Spec.reload() // re-analyze

	// at this point there are no other references left but schemas
	if err := importExternalReferences(&opts); err != nil {
		return err
	}
	opts.Spec.reload() // re-analyze

	// rewrite the inline schemas (schemas that aren't simple types or arrays of simple types)
	if err := nameInlinedSchemas(&opts); err != nil {
		return err
	}
	opts.Spec.reload() // re-analyze

	// TODO: simplify known schema patterns to flat objects with properties?
	return nil
}
|
||||
|
||||
// nameInlinedSchemas walks all discovered schemas deepest-first and moves
// every complex inline schema (no $ref, not a top-level definition, not a
// simple type) into /definitions under a generated name.
func nameInlinedSchemas(opts *FlattenOpts) error {
	namer := &inlineSchemaNamer{Spec: opts.Swagger(), Operations: opRefsByRef(gatherOperations(opts.Spec, nil))}
	depthFirst := sortDepthFirst(opts.Spec.allSchemas)

	for _, key := range depthFirst {
		sch := opts.Spec.allSchemas[key]
		if sch.Schema != nil && sch.Schema.Ref.String() == "" && !sch.TopLevel { // inline schema
			asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
			if err != nil {
				return fmt.Errorf("schema analysis [%s]: %v", sch.Ref.String(), err)
			}

			if !asch.IsSimpleSchema { // complex schemas get moved
				if err := namer.Name(key, sch.Schema, asch); err != nil {
					return err
				}
			}
		}
	}
	return nil
}
|
||||
|
||||
// depthGroupOrder fixes the emission order of the schema-key categories.
var depthGroupOrder = []string{"sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition"}

// sortDepthFirst orders the schema keys so nested (deeper) schemas come first
// within each category, which lets nameInlinedSchemas rewrite children before
// their parents.
func sortDepthFirst(data map[string]SchemaRef) (sorted []string) {
	// group by category (shared params, op param, statuscode response, default response, definitions)
	// sort groups internally by number of parts in the key and lexical names
	// flatten groups into a single list of keys
	grouped := make(map[string]keys, len(data))
	for k := range data {
		split := keyParts(k)
		// the last matching predicate wins; the checks are ordered from
		// least to most specific
		var pk string
		if split.IsSharedOperationParam() {
			pk = "sharedOpParam"
		}
		if split.IsOperationParam() {
			pk = "opParam"
		}
		if split.IsStatusCodeResponse() {
			pk = "codeResponse"
		}
		if split.IsDefaultResponse() {
			pk = "defaultResponse"
		}
		if split.IsDefinition() {
			pk = "definition"
		}
		// NOTE(review): keys matching none of the predicates land under "" and
		// are never emitted below — presumably every analyzed key matches one;
		// confirm against keyParts
		grouped[pk] = append(grouped[pk], key{len(split), k})
	}

	for _, pk := range depthGroupOrder {
		res := grouped[pk]
		sort.Sort(res)
		for _, v := range res {
			sorted = append(sorted, v.Key)
		}
	}

	return
}
|
||||
|
||||
type key struct {
|
||||
Segments int
|
||||
Key string
|
||||
}
|
||||
type keys []key
|
||||
|
||||
func (k keys) Len() int { return len(k) }
|
||||
func (k keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
|
||||
func (k keys) Less(i, j int) bool {
|
||||
return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
|
||||
}
|
||||
|
||||
// inlineSchemaNamer assigns definition names to inline (anonymous) schemas
// found in Spec, using Operations (indexed by path-item ref string) to derive
// operation-based names.
type inlineSchemaNamer struct {
	Spec       *swspec.Swagger
	Operations map[string]opRef
}
|
||||
|
||||
func opRefsByRef(oprefs map[string]opRef) map[string]opRef {
|
||||
result := make(map[string]opRef, len(oprefs))
|
||||
for _, v := range oprefs {
|
||||
result[v.Ref.String()] = v
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
// Name generates a unique definition name for the inlined schema found at
// key, rewrites the original location into a $ref pointing at the new
// definition, and saves a clone of the schema (tagged with its generation
// location) under that name.
func (isn *inlineSchemaNamer) Name(key string, schema *swspec.Schema, aschema *AnalyzedSchema) error {
	if swspec.Debug {
		log.Printf("naming inlined schema at %s", key)
	}

	parts := keyParts(key)
	for _, name := range namesFromKey(parts, aschema, isn.Operations) {
		if name != "" {
			// create unique name
			newName := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))

			// clone schema: the original location is rewritten below, so the
			// definition must hold an independent copy
			sch, err := cloneSchema(schema)
			if err != nil {
				return err
			}

			// replace values on schema
			if err := rewriteSchemaToRef(isn.Spec, key, swspec.MustCreateRef("#/definitions/"+newName)); err != nil {
				return fmt.Errorf("name inlined schema: %v", err)
			}

			sch.AddExtension("x-go-gen-location", genLocation(parts))
			// fmt.Printf("{\n  %q,\n  \"\",\n  spec.MustCreateRef(%q),\n  \"\",\n},\n", key, "#/definitions/"+newName)
			// save cloned schema to definitions
			saveSchema(isn.Spec, newName, sch)
		}
	}
	return nil
}
|
||||
|
||||
func genLocation(parts splitKey) string {
|
||||
if parts.IsOperation() {
|
||||
return "operations"
|
||||
}
|
||||
if parts.IsDefinition() {
|
||||
return "models"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func uniqifyName(definitions swspec.Definitions, name string) string {
|
||||
if name == "" {
|
||||
name = "oaiGen"
|
||||
}
|
||||
if len(definitions) == 0 {
|
||||
return name
|
||||
}
|
||||
|
||||
if _, ok := definitions[name]; !ok {
|
||||
return name
|
||||
}
|
||||
name += "OAIGen"
|
||||
var idx int
|
||||
unique := name
|
||||
_, known := definitions[unique]
|
||||
for known {
|
||||
idx++
|
||||
unique = fmt.Sprintf("%s%d", name, idx)
|
||||
_, known = definitions[unique]
|
||||
}
|
||||
return unique
|
||||
}
|
||||
|
||||
// namesFromKey proposes candidate names for the inlined schema at parts.
// Operation parameters and responses are named from the owning operation's
// ID; definition keys are named from the definition itself. startIndex marks
// the first key segment BuildName should append after the seed segments
// (it is shared across all baseNames collected in one call).
func namesFromKey(parts splitKey, aschema *AnalyzedSchema, operations map[string]opRef) []string {
	var baseNames [][]string
	var startIndex int
	if parts.IsOperation() {
		// params
		if parts.IsOperationParam() || parts.IsSharedOperationParam() {
			piref := parts.PathItemRef()
			if piref.String() != "" && parts.IsOperationParam() {
				if op, ok := operations[piref.String()]; ok {
					startIndex = 5
					baseNames = append(baseNames, []string{op.ID, "params", "body"})
				}
			} else if parts.IsSharedOperationParam() {
				pref := parts.PathRef()
				// a shared parameter belongs to every operation on the path,
				// so propose one name per matching operation
				for k, v := range operations {
					if strings.HasPrefix(k, pref.String()) {
						startIndex = 4
						baseNames = append(baseNames, []string{v.ID, "params", "body"})
					}
				}
			}
		}
		// responses
		if parts.IsOperationResponse() {
			piref := parts.PathItemRef()
			if piref.String() != "" {
				if op, ok := operations[piref.String()]; ok {
					startIndex = 6
					baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
				}
			}
		}
	}

	// definitions
	if parts.IsDefinition() {
		nm := parts.DefinitionName()
		if nm != "" {
			startIndex = 2
			baseNames = append(baseNames, []string{parts.DefinitionName()})
		}
	}

	var result []string
	for _, segments := range baseNames {
		nm := parts.BuildName(segments, startIndex, aschema)
		if nm != "" {
			result = append(result, nm)
		}
	}
	// deterministic candidate order regardless of map iteration above
	sort.Strings(result)
	return result
}
|
||||
|
||||
// JSON pointer segment names used when classifying keys.
const (
	pths        = "paths"
	responses   = "responses"
	parameters  = "parameters"
	definitions = "definitions"
)
|
||||
|
||||
// ignoredKeys lists JSON schema keywords that structure a schema without
// carrying naming information; BuildName skips them when assembling a name
// from pointer segments.
//
// Declared as a composite literal instead of being populated in init(): the
// value is constant and needs no ordering guarantees, so the init function
// was unnecessary indirection.
var ignoredKeys = map[string]struct{}{
	"schema":     {},
	"properties": {},
	"not":        {},
	"anyOf":      {},
	"oneOf":      {},
}
|
||||
|
||||
type splitKey []string
|
||||
|
||||
func (s splitKey) IsDefinition() bool {
|
||||
return len(s) > 1 && s[0] == definitions
|
||||
}
|
||||
|
||||
func (s splitKey) DefinitionName() string {
|
||||
if !s.IsDefinition() {
|
||||
return ""
|
||||
}
|
||||
return s[1]
|
||||
}
|
||||
|
||||
// BuildName assembles a readable name for an inlined schema by appending the
// key segments from startIndex onward to the seed segments. Structural
// keywords in ignoredKeys are dropped; "items"/"additionalItems" are emitted
// as "tuple" when the analyzed schema is a tuple. Segments are joined with
// spaces (callers JSON-name the result).
func (s splitKey) BuildName(segments []string, startIndex int, aschema *AnalyzedSchema) string {
	for _, part := range s[startIndex:] {
		if _, ignored := ignoredKeys[part]; !ignored {
			if part == "items" || part == "additionalItems" {
				if aschema.IsTuple || aschema.IsTupleWithExtra {
					segments = append(segments, "tuple")
				} else {
					segments = append(segments, "items")
				}
				if part == "additionalItems" {
					// keep the keyword so "tuple additionalItems" stays
					// distinct from the numbered tuple entries
					segments = append(segments, part)
				}
				continue
			}
			segments = append(segments, part)
		}
	}
	return strings.Join(segments, " ")
}
|
||||
|
||||
func (s splitKey) IsOperation() bool {
|
||||
return len(s) > 1 && s[0] == pths
|
||||
}
|
||||
|
||||
func (s splitKey) IsSharedOperationParam() bool {
|
||||
return len(s) > 2 && s[0] == pths && s[2] == parameters
|
||||
}
|
||||
|
||||
func (s splitKey) IsOperationParam() bool {
|
||||
return len(s) > 3 && s[0] == pths && s[3] == parameters
|
||||
}
|
||||
|
||||
func (s splitKey) IsOperationResponse() bool {
|
||||
return len(s) > 3 && s[0] == pths && s[3] == responses
|
||||
}
|
||||
|
||||
func (s splitKey) IsDefaultResponse() bool {
|
||||
return len(s) > 4 && s[0] == pths && s[3] == responses && s[4] == "default"
|
||||
}
|
||||
|
||||
func (s splitKey) IsStatusCodeResponse() bool {
|
||||
isInt := func() bool {
|
||||
_, err := strconv.Atoi(s[4])
|
||||
return err == nil
|
||||
}
|
||||
return len(s) > 4 && s[0] == pths && s[3] == responses && isInt()
|
||||
}
|
||||
|
||||
func (s splitKey) ResponseName() string {
|
||||
if s.IsStatusCodeResponse() {
|
||||
code, _ := strconv.Atoi(s[4])
|
||||
return http.StatusText(code)
|
||||
}
|
||||
if s.IsDefaultResponse() {
|
||||
return "Default"
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
// validMethods is the set of HTTP methods recognized as operation keys on a
// path item.
//
// Declared as a composite literal instead of being populated in init(): the
// value is constant and needs no ordering guarantees, so the init function
// was unnecessary indirection.
var validMethods = map[string]struct{}{
	"GET":     {},
	"HEAD":    {},
	"OPTIONS": {},
	"PATCH":   {},
	"POST":    {},
	"PUT":     {},
	"DELETE":  {},
}
|
||||
|
||||
func (s splitKey) PathItemRef() swspec.Ref {
|
||||
if len(s) < 3 {
|
||||
return swspec.Ref{}
|
||||
}
|
||||
pth, method := s[1], s[2]
|
||||
if _, validMethod := validMethods[strings.ToUpper(method)]; !validMethod && !strings.HasPrefix(method, "x-") {
|
||||
return swspec.Ref{}
|
||||
}
|
||||
return swspec.MustCreateRef("#" + path.Join("/", pths, jsonpointer.Escape(pth), strings.ToUpper(method)))
|
||||
}
|
||||
|
||||
func (s splitKey) PathRef() swspec.Ref {
|
||||
if !s.IsOperation() {
|
||||
return swspec.Ref{}
|
||||
}
|
||||
return swspec.MustCreateRef("#" + path.Join("/", pths, jsonpointer.Escape(s[1])))
|
||||
}
|
||||
|
||||
func keyParts(key string) splitKey {
|
||||
var res []string
|
||||
for _, part := range strings.Split(key[1:], "/") {
|
||||
if part != "" {
|
||||
res = append(res, jsonpointer.Unescape(part))
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// rewriteSchemaToRef replaces the schema at the JSON pointer key with a
// schema containing only ref. SchemaOrBool/SchemaOrArray holders are mutated
// in place; plain Schema values cannot be swapped through the pointer, so
// those cases delegate to rewriteParentRef, which mutates the container that
// holds the schema.
func rewriteSchemaToRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
	if swspec.Debug {
		log.Printf("rewriting schema to ref for %s with %s", key, ref.String())
	}
	pth := key[1:] // strip the leading "#"
	ptr, err := jsonpointer.New(pth)
	if err != nil {
		return err
	}

	value, _, err := ptr.Get(spec)
	if err != nil {
		return err
	}

	switch refable := value.(type) {
	case *swspec.Schema:
		return rewriteParentRef(spec, key, ref)
	case *swspec.SchemaOrBool:
		if refable.Schema != nil {
			refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		}
	case *swspec.SchemaOrArray:
		if refable.Schema != nil {
			refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		}
	case swspec.Schema:
		return rewriteParentRef(spec, key, ref)
	default:
		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
	}

	return nil
}
|
||||
|
||||
// rewriteParentRef replaces the schema at key with a ref-only schema by
// mutating the PARENT container (map, slice, response, parameter, ...) that
// holds it — needed when the pointer target is a value type that cannot be
// updated through the pointer itself. Response/Parameter value cases recurse
// one level up for the same reason.
func rewriteParentRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
	pth := key[1:] // strip the leading "#"
	parent, entry := path.Dir(pth), path.Base(pth)
	if swspec.Debug {
		log.Println("getting schema holder at:", parent)
	}

	pptr, err := jsonpointer.New(parent)
	if err != nil {
		return err
	}
	pvalue, _, err := pptr.Get(spec)
	if err != nil {
		return fmt.Errorf("can't get parent for %s: %v", parent, err)
	}
	if swspec.Debug {
		log.Printf("rewriting holder for %T", pvalue)
	}

	switch container := pvalue.(type) {
	// value copy of a Response: recurse so the grandparent container is mutated
	case swspec.Response:
		if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
			return err
		}

	case *swspec.Response:
		container.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case *swspec.Responses:
		statusCode, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		// map values are copies: read, modify, write back
		resp := container.StatusCodeResponses[statusCode]
		resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container.StatusCodeResponses[statusCode] = resp

	case map[string]swspec.Response:
		resp := container[entry]
		resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container[entry] = resp

	// value copy of a Parameter: recurse, as for Response above
	case swspec.Parameter:
		if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
			return err
		}

	case map[string]swspec.Parameter:
		param := container[entry]
		param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container[entry] = param

	case []swspec.Parameter:
		idx, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		param := container[idx]
		param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
		container[idx] = param

	case swspec.Definitions:
		container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case map[string]swspec.Schema:
		container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case []swspec.Schema:
		idx, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

	case *swspec.SchemaOrArray:
		// tuple schemas: entry indexes into the Schemas slice
		idx, err := strconv.Atoi(entry)
		if err != nil {
			return fmt.Errorf("%s not a number: %v", pth, err)
		}
		container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
	default:
		return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
	}
	return nil
}
|
||||
|
||||
func cloneSchema(schema *swspec.Schema) (*swspec.Schema, error) {
|
||||
var sch swspec.Schema
|
||||
if err := swag.FromDynamicJSON(schema, &sch); err != nil {
|
||||
return nil, fmt.Errorf("name inlined schema: %v", err)
|
||||
}
|
||||
return &sch, nil
|
||||
}
|
||||
|
||||
func importExternalReferences(opts *FlattenOpts) error {
|
||||
groupedRefs := reverseIndexForSchemaRefs(opts)
|
||||
|
||||
for refStr, entry := range groupedRefs {
|
||||
if !entry.Ref.HasFragmentOnly {
|
||||
if swspec.Debug {
|
||||
log.Printf("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
|
||||
}
|
||||
// resolve to actual schema
|
||||
sch, err := swspec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if sch == nil {
|
||||
return fmt.Errorf("no schema found at %s for [%s]", refStr, strings.Join(entry.Keys, ", "))
|
||||
}
|
||||
if swspec.Debug {
|
||||
log.Printf("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
|
||||
}
|
||||
|
||||
// generate a unique name
|
||||
newName := uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
|
||||
if swspec.Debug {
|
||||
log.Printf("new name for [%s]: %s", strings.Join(entry.Keys, ", "), newName)
|
||||
}
|
||||
|
||||
// rewrite the external refs to local ones
|
||||
for _, key := range entry.Keys {
|
||||
if err := updateRef(opts.Swagger(), key, swspec.MustCreateRef("#"+path.Join("/definitions", newName))); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
// add the resolved schema to the definitions
|
||||
saveSchema(opts.Swagger(), newName, sch)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// refRevIdx is a reverse-index entry: one target Ref together with all the
// document keys that reference it.
type refRevIdx struct {
	Ref  swspec.Ref
	Keys []string
}
|
||||
|
||||
func reverseIndexForSchemaRefs(opts *FlattenOpts) map[string]refRevIdx {
|
||||
collected := make(map[string]refRevIdx)
|
||||
for key, schRef := range opts.Spec.references.schemas {
|
||||
if entry, ok := collected[schRef.String()]; ok {
|
||||
entry.Keys = append(entry.Keys, key)
|
||||
collected[schRef.String()] = entry
|
||||
} else {
|
||||
collected[schRef.String()] = refRevIdx{
|
||||
Ref: schRef,
|
||||
Keys: []string{key},
|
||||
}
|
||||
}
|
||||
}
|
||||
return collected
|
||||
}
|
||||
|
||||
func nameFromRef(ref swspec.Ref) string {
|
||||
u := ref.GetURL()
|
||||
if u.Fragment != "" {
|
||||
return swag.ToJSONName(path.Base(u.Fragment))
|
||||
}
|
||||
if u.Path != "" {
|
||||
bn := path.Base(u.Path)
|
||||
if bn != "" && bn != "/" {
|
||||
ext := path.Ext(bn)
|
||||
if ext != "" {
|
||||
return swag.ToJSONName(bn[:len(bn)-len(ext)])
|
||||
}
|
||||
return swag.ToJSONName(bn)
|
||||
}
|
||||
}
|
||||
return swag.ToJSONName(strings.Replace(u.Host, ".", " ", -1))
|
||||
}
|
||||
|
||||
// saveSchema stores schema under name in the spec's definitions, allocating
// the definitions map on first use. A nil schema is a no-op.
func saveSchema(spec *swspec.Swagger, name string, schema *swspec.Schema) {
	if schema == nil {
		return
	}
	if spec.Definitions == nil {
		// pre-sized: flattening typically creates many definitions
		spec.Definitions = make(map[string]swspec.Schema, 150)
	}
	spec.Definitions[name] = *schema
}
|
||||
|
||||
// updateRef points the schema at the JSON pointer key at ref. Pointer-typed
// holders are mutated directly; when the pointer resolves to a Schema VALUE
// (a copy), the parent container is fetched and the entry is replaced with a
// ref-only schema instead.
func updateRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
	if swspec.Debug {
		log.Printf("updating ref for %s with %s", key, ref.String())
	}
	pth := key[1:] // strip the leading "#"
	ptr, err := jsonpointer.New(pth)
	if err != nil {
		return err
	}

	value, _, err := ptr.Get(spec)
	if err != nil {
		return err
	}

	switch refable := value.(type) {
	case *swspec.Schema:
		refable.Ref = ref
	case *swspec.SchemaOrBool:
		if refable.Schema != nil {
			refable.Schema.Ref = ref
		}
	case *swspec.SchemaOrArray:
		if refable.Schema != nil {
			refable.Schema.Ref = ref
		}
	case swspec.Schema:
		// value copy: mutate the container one level up instead
		parent, entry := path.Dir(pth), path.Base(pth)
		if swspec.Debug {
			log.Println("getting schema holder at:", parent)
		}

		pptr, err := jsonpointer.New(parent)
		if err != nil {
			return err
		}
		pvalue, _, err := pptr.Get(spec)
		if err != nil {
			return fmt.Errorf("can't get parent for %s: %v", parent, err)
		}

		// NOTE(review): containers not listed below are silently left
		// unchanged (no default case in this inner switch) — confirm that is
		// intended.
		switch container := pvalue.(type) {
		case swspec.Definitions:
			container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		case map[string]swspec.Schema:
			container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		case []swspec.Schema:
			idx, err := strconv.Atoi(entry)
			if err != nil {
				return fmt.Errorf("%s not a number: %v", pth, err)
			}
			container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		case *swspec.SchemaOrArray:
			// tuple schemas: entry indexes into the Schemas slice
			idx, err := strconv.Atoi(entry)
			if err != nil {
				return fmt.Errorf("%s not a number: %v", pth, err)
			}
			container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}

		}

	default:
		return fmt.Errorf("no schema with ref found at %s for %T", key, value)
	}

	return nil
}
|
||||
|
||||
// containsString reports whether name occurs in names.
func containsString(names []string, name string) bool {
	for i := range names {
		if names[i] == name {
			return true
		}
	}
	return false
}
|
||||
|
||||
// opRef collects everything known about a single operation: its HTTP method
// and path, a sortable Key derived from both, its (possibly empty) ID, the
// operation itself, and a JSON reference to it.
type opRef struct {
	Method string
	Path   string
	Key    string
	ID     string
	Op     *swspec.Operation
	Ref    swspec.Ref
}
|
||||
|
||||
// opRefs implements sort.Interface, ordering opRef values by their generated Key.
type opRefs []opRef

func (o opRefs) Len() int           { return len(o) }
func (o opRefs) Swap(i, j int)      { o[i], o[j] = o[j], o[i] }
func (o opRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
|
||||
|
||||
// gatherOperations indexes all operations in specDoc by a stable name: the
// operation ID when present, otherwise a Go-name generated from method+path.
// When operationIDs is non-empty the result is filtered to those IDs/names.
// Name collisions between different operations fall back to the generated
// key; processing order is made deterministic by sorting first.
func gatherOperations(specDoc *Spec, operationIDs []string) map[string]opRef {
	var oprefs opRefs

	for method, pathItem := range specDoc.Operations() {
		for pth, operation := range pathItem {
			// copy the operation so the ID rewrite below doesn't alias the
			// loop variable
			vv := *operation
			oprefs = append(oprefs, opRef{
				Key:    swag.ToGoName(strings.ToLower(method) + " " + pth),
				Method: method,
				Path:   pth,
				ID:     vv.ID,
				Op:     &vv,
				Ref:    swspec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
			})
		}
	}

	// deterministic order regardless of map iteration above
	sort.Sort(oprefs)

	operations := make(map[string]opRef)
	for _, opr := range oprefs {
		nm := opr.ID
		if nm == "" {
			nm = opr.Key
		}

		// a different operation already claimed this name: fall back to the
		// generated method+path key
		oo, found := operations[nm]
		if found && oo.Method != opr.Method && oo.Path != opr.Path {
			nm = opr.Key
		}
		if len(operationIDs) == 0 || containsString(operationIDs, opr.ID) || containsString(operationIDs, nm) {
			opr.ID = nm
			opr.Op.ID = nm
			operations[nm] = opr
		}
	}

	return operations
}
|
805
vendor/github.com/go-openapi/analysis/flatten_test.go
generated
vendored
Normal file
805
vendor/github.com/go-openapi/analysis/flatten_test.go
generated
vendored
Normal file
|
@ -0,0 +1,805 @@
|
|||
package analysis
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/go-openapi/jsonpointer"
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// TestSaveDefinition verifies saveSchema registers a schema under the given
// name in the spec's definitions.
func TestSaveDefinition(t *testing.T) {
	sp := &spec.Swagger{}
	saveSchema(sp, "theName", spec.StringProperty())
	assert.Contains(t, sp.Definitions, "theName")
}
|
||||
|
||||
// TestNameFromRef checks name derivation for the various ref shapes:
// fragment pointers, URLs with and without file extensions, bare paths, and
// host-only URLs (which fall back to the JSON-named host).
func TestNameFromRef(t *testing.T) {
	values := []struct{ Source, Expected string }{
		{"#/definitions/errorModel", "errorModel"},
		{"http://somewhere.com/definitions/errorModel", "errorModel"},
		{"http://somewhere.com/definitions/errorModel.json", "errorModel"},
		{"/definitions/errorModel", "errorModel"},
		{"/definitions/errorModel.json", "errorModel"},
		{"http://somewhere.com", "somewhereCom"},
		{"#", ""},
	}

	for _, v := range values {
		assert.Equal(t, v.Expected, nameFromRef(spec.MustCreateRef(v.Source)))
	}
}
|
||||
|
||||
// TestDefinitionName checks that uniqifyName leaves non-colliding names
// alone, appends "OAIGen" (then a counter) on collisions, and falls back to
// "oaiGen" for empty names.
func TestDefinitionName(t *testing.T) {
	values := []struct {
		Source, Expected string
		Definitions      spec.Definitions
	}{
		{"#/definitions/errorModel", "errorModel", map[string]spec.Schema(nil)},
		{"http://somewhere.com/definitions/errorModel", "errorModel", map[string]spec.Schema(nil)},
		{"#/definitions/errorModel", "errorModel", map[string]spec.Schema{"apples": *spec.StringProperty()}},
		{"#/definitions/errorModel", "errorModelOAIGen", map[string]spec.Schema{"errorModel": *spec.StringProperty()}},
		{"#/definitions/errorModel", "errorModelOAIGen1", map[string]spec.Schema{"errorModel": *spec.StringProperty(), "errorModelOAIGen": *spec.StringProperty()}},
		{"#", "oaiGen", nil},
	}

	for _, v := range values {
		assert.Equal(t, v.Expected, uniqifyName(v.Definitions, nameFromRef(spec.MustCreateRef(v.Source))))
	}
}
|
||||
|
||||
// TestUpdateRef rewrites a set of schema locations in the external
// definitions fixture and verifies, through a fresh JSON pointer lookup,
// that each location now carries the expected $ref regardless of the holder
// type it resolves to.
func TestUpdateRef(t *testing.T) {
	bp := filepath.Join("fixtures", "external_definitions.yml")
	sp, err := loadSpec(bp)
	if assert.NoError(t, err) {

		values := []struct {
			Key string
			Ref spec.Ref
		}{
			{"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
			{"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
		}

		for _, v := range values {
			err := updateRef(sp, v.Key, v.Ref)
			if assert.NoError(t, err) {
				ptr, err := jsonpointer.New(v.Key[1:])
				if assert.NoError(t, err) {
					vv, _, err := ptr.Get(sp)

					if assert.NoError(t, err) {
						// the holder type depends on the location; all carry
						// the rewritten ref
						switch tv := vv.(type) {
						case *spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String())
						case spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String())
						case *spec.SchemaOrBool:
							assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String())
						case *spec.SchemaOrArray:
							assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String())
						default:
							assert.Fail(t, "unknown type", "got %T", vv)
						}
					}
				}
			}
		}
	}
}
|
||||
|
||||
// TestImportExternalReferences runs importExternalReferences repeatedly over
// the external definitions fixture, checking both that the expected refs end
// up at each location and that the operation is idempotent (re-running does
// not duplicate definitions: exactly 11 remain).
func TestImportExternalReferences(t *testing.T) {
	bp := filepath.Join(".", "fixtures", "external_definitions.yml")
	sp, err := loadSpec(bp)
	if assert.NoError(t, err) {

		values := []struct {
			Key string
			Ref spec.Ref
		}{
			{"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
			{"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
		}
		for _, v := range values {
			// technically not necessary to run for each value, but if things go right
			// this is idempotent, so having it repeat shouldn't matter
			// this validates that behavior
			err := importExternalReferences(&FlattenOpts{
				Spec:     New(sp),
				BasePath: bp,
			})

			if assert.NoError(t, err) {

				ptr, err := jsonpointer.New(v.Key[1:])
				if assert.NoError(t, err) {
					vv, _, err := ptr.Get(sp)

					if assert.NoError(t, err) {
						switch tv := vv.(type) {
						case *spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "for %s", v.Key)
						case spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "for %s", v.Key)
						case *spec.SchemaOrBool:
							assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "for %s", v.Key)
						case *spec.SchemaOrArray:
							assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "for %s", v.Key)
						default:
							assert.Fail(t, "unknown type", "got %T", vv)
						}
					}
				}
			}
		}
		assert.Len(t, sp.Definitions, 11)
		assert.Contains(t, sp.Definitions, "tag")
		assert.Contains(t, sp.Definitions, "named")
		assert.Contains(t, sp.Definitions, "record")
	}
}
|
||||
|
||||
// TestRewriteSchemaRef rewrites a set of inline schema locations to refs via
// rewriteSchemaToRef and verifies each location now resolves to the expected
// $ref for every holder type.
func TestRewriteSchemaRef(t *testing.T) {
	bp := filepath.Join("fixtures", "inline_schemas.yml")
	sp, err := loadSpec(bp)
	if assert.NoError(t, err) {

		values := []struct {
			Key string
			Ref spec.Ref
		}{
			{"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
			{"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
			{"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
			{"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
		}

		for i, v := range values {
			err := rewriteSchemaToRef(sp, v.Key, v.Ref)
			if assert.NoError(t, err) {
				ptr, err := jsonpointer.New(v.Key[1:])
				if assert.NoError(t, err) {
					vv, _, err := ptr.Get(sp)

					if assert.NoError(t, err) {
						switch tv := vv.(type) {
						case *spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
						case spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
						case *spec.SchemaOrBool:
							assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "at %d for %s", i, v.Key)
						case *spec.SchemaOrArray:
							assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "at %d for %s", i, v.Key)
						default:
							assert.Fail(t, "unknown type", "got %T", vv)
						}
					}
				}
			}
		}
	}
}
|
||||
|
||||
// TestSplitKey drives the splitKey classification predicates and the
// PathRef/PathItemRef/name accessors against a table of pointer keys, with
// the expected classifications encoded as bit flags.
func TestSplitKey(t *testing.T) {

	// KeyFlag encodes which predicates must be true for a key.
	type KeyFlag uint64

	const (
		isOperation KeyFlag = 1 << iota
		isDefinition
		isSharedOperationParam
		isOperationParam
		isOperationResponse
		isDefaultResponse
		isStatusCodeResponse
	)

	values := []struct {
		Key         string
		Flags       KeyFlag
		PathItemRef spec.Ref
		PathRef     spec.Ref
		Name        string
	}{
		{
			"#/paths/~1some~1where~1{id}/parameters/1/schema",
			isOperation | isSharedOperationParam,
			spec.Ref{},
			spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
			"",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/2/schema",
			isOperation | isOperationParam,
			spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
			spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
			"",
		},
		{
			"#/paths/~1some~1where~1{id}/get/responses/default/schema",
			isOperation | isOperationResponse | isDefaultResponse,
			spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
			spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
			"Default",
		},
		{
			"#/paths/~1some~1where~1{id}/get/responses/200/schema",
			isOperation | isOperationResponse | isStatusCodeResponse,
			spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
			spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
			"OK",
		},
		{
			"#/definitions/namedAgain",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"namedAgain",
		},
		{
			"#/definitions/datedRecords/items/1",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"datedRecords",
		},
		{
			"#/definitions/datedRecords/items/1",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"datedRecords",
		},
		{
			"#/definitions/datedTaggedRecords/items/1",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"datedTaggedRecords",
		},
		{
			"#/definitions/datedTaggedRecords/additionalItems",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"datedTaggedRecords",
		},
		{
			"#/definitions/otherRecords/items",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"otherRecords",
		},
		{
			"#/definitions/tags/additionalProperties",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"tags",
		},
		{
			"#/definitions/namedThing/properties/name",
			isDefinition,
			spec.Ref{},
			spec.Ref{},
			"namedThing",
		},
	}

	for i, v := range values {
		parts := keyParts(v.Key)
		pref := parts.PathRef()
		piref := parts.PathItemRef()
		assert.Equal(t, v.PathRef.String(), pref.String(), "pathRef: %s at %d", v.Key, i)
		assert.Equal(t, v.PathItemRef.String(), piref.String(), "pathItemRef: %s at %d", v.Key, i)

		// each predicate must be true exactly when its flag is set
		if v.Flags&isOperation != 0 {
			assert.True(t, parts.IsOperation(), "isOperation: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsOperation(), "isOperation: %s at %d", v.Key, i)
		}
		if v.Flags&isDefinition != 0 {
			assert.True(t, parts.IsDefinition(), "isDefinition: %s at %d", v.Key, i)
			assert.Equal(t, v.Name, parts.DefinitionName(), "definition name: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsDefinition(), "isDefinition: %s at %d", v.Key, i)
			if v.Name != "" {
				assert.Equal(t, v.Name, parts.ResponseName(), "response name: %s at %d", v.Key, i)
			}
		}
		if v.Flags&isOperationParam != 0 {
			assert.True(t, parts.IsOperationParam(), "isOperationParam: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsOperationParam(), "isOperationParam: %s at %d", v.Key, i)
		}
		if v.Flags&isSharedOperationParam != 0 {
			assert.True(t, parts.IsSharedOperationParam(), "isSharedOperationParam: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsSharedOperationParam(), "isSharedOperationParam: %s at %d", v.Key, i)
		}
		if v.Flags&isOperationResponse != 0 {
			assert.True(t, parts.IsOperationResponse(), "isOperationResponse: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsOperationResponse(), "isOperationResponse: %s at %d", v.Key, i)
		}
		if v.Flags&isDefaultResponse != 0 {
			assert.True(t, parts.IsDefaultResponse(), "isDefaultResponse: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsDefaultResponse(), "isDefaultResponse: %s at %d", v.Key, i)
		}
		if v.Flags&isStatusCodeResponse != 0 {
			assert.True(t, parts.IsStatusCodeResponse(), "isStatusCodeResponse: %s at %d", v.Key, i)
		} else {
			assert.False(t, parts.IsStatusCodeResponse(), "isStatusCodeResponse: %s at %d", v.Key, i)
		}
	}
}
|
||||
|
||||
// definitionPtr reduces a JSON pointer that points inside a definition to
// the pointer of the definition itself (its first three segments, e.g.
// "#/definitions/foo"). Pointers outside #/definitions pass through unchanged.
func definitionPtr(key string) string {
	if !strings.HasPrefix(key, "#/definitions") {
		return key
	}
	// keep "#", "definitions" and the definition name; drop everything deeper
	segments := strings.SplitN(key, "/", 4)
	return strings.Join(segments[:3], "/")
}
|
||||
|
||||
// TestNamesFromKey checks that namesFromKey derives the expected generated
// names for schemas located at various JSON pointers in the fixture spec:
// operation parameter/response bodies, named definitions, allOf branches,
// tuple items, additionalItems and additionalProperties.
func TestNamesFromKey(t *testing.T) {
	bp := filepath.Join("fixtures", "inline_schemas.yml")
	sp, err := loadSpec(bp)
	if assert.NoError(t, err) {

		// Key is the JSON pointer to a schema; Names are the candidate names
		// expected from namesFromKey (shared parameters yield one name per
		// operation that uses them).
		values := []struct {
			Key   string
			Names []string
		}{
			{"#/paths/~1some~1where~1{id}/parameters/1/schema", []string{"GetSomeWhereID params body", "PostSomeWhereID params body"}},
			{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", []string{"GetSomeWhereID params body"}},
			{"#/paths/~1some~1where~1{id}/get/responses/default/schema", []string{"GetSomeWhereID Default body"}},
			{"#/paths/~1some~1where~1{id}/get/responses/200/schema", []string{"GetSomeWhereID OK body"}},
			{"#/definitions/namedAgain", []string{"namedAgain"}},
			{"#/definitions/datedTag/allOf/1", []string{"datedTag allOf 1"}},
			{"#/definitions/datedRecords/items/1", []string{"datedRecords tuple 1"}},
			{"#/definitions/datedTaggedRecords/items/1", []string{"datedTaggedRecords tuple 1"}},
			{"#/definitions/datedTaggedRecords/additionalItems", []string{"datedTaggedRecords tuple additionalItems"}},
			{"#/definitions/otherRecords/items", []string{"otherRecords items"}},
			{"#/definitions/tags/additionalProperties", []string{"tags additionalProperties"}},
			{"#/definitions/namedThing/properties/name", []string{"namedThing name"}},
		}

		for i, v := range values {
			// Resolve the pointer (trimmed to the definition root where
			// applicable); [1:] drops the leading '#'.
			ptr, err := jsonpointer.New(definitionPtr(v.Key)[1:])
			if assert.NoError(t, err) {
				vv, _, err := ptr.Get(sp)
				if assert.NoError(t, err) {
					// ptr.Get may yield a schema by value or by pointer
					// depending on where it lives in the document.
					switch tv := vv.(type) {
					case *spec.Schema:
						aschema, err := Schema(SchemaOpts{Schema: tv, Root: sp, BasePath: bp})
						if assert.NoError(t, err) {
							names := namesFromKey(keyParts(v.Key), aschema, opRefsByRef(gatherOperations(New(sp), nil)))
							assert.Equal(t, v.Names, names, "for %s at %d", v.Key, i)
						}
					case spec.Schema:
						aschema, err := Schema(SchemaOpts{Schema: &tv, Root: sp, BasePath: bp})
						if assert.NoError(t, err) {
							names := namesFromKey(keyParts(v.Key), aschema, opRefsByRef(gatherOperations(New(sp), nil)))
							assert.Equal(t, v.Names, names, "for %s at %d", v.Key, i)
						}
					default:
						assert.Fail(t, "unknown type", "got %T", vv)
					}
				}
			}
		}
	}
}
|
||||
|
||||
// TestDepthFirstSort verifies that sortDepthFirst orders schema refs so that
// the deepest (most nested) schemas come first, letting callers process leaf
// schemas before the schemas that contain them. The expected order below is
// exact: properties before their parents, path schemas before definitions.
func TestDepthFirstSort(t *testing.T) {
	bp := filepath.Join("fixtures", "inline_schemas.yml")
	sp, err := loadSpec(bp)
	// expected depth-first ordering of every schema ref in the fixture
	values := []string{
		"#/paths/~1some~1where~1{id}/parameters/1/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/parameters/1/schema",
		"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/get/parameters/2/schema",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/id",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/value",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema",
		"#/paths/~1some~1where~1{id}/get/responses/404/schema",
		"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/get/responses/default/schema",
		"#/definitions/datedRecords/items/1/properties/createdAt",
		"#/definitions/datedTaggedRecords/items/1/properties/createdAt",
		"#/definitions/namedThing/properties/name/properties/id",
		"#/definitions/records/items/0/properties/createdAt",
		"#/definitions/datedTaggedRecords/additionalItems/properties/id",
		"#/definitions/datedTaggedRecords/additionalItems/properties/value",
		"#/definitions/otherRecords/items/properties/createdAt",
		"#/definitions/tags/additionalProperties/properties/id",
		"#/definitions/tags/additionalProperties/properties/value",
		"#/definitions/datedRecords/items/0",
		"#/definitions/datedRecords/items/1",
		"#/definitions/datedTag/allOf/0",
		"#/definitions/datedTag/allOf/1",
		"#/definitions/datedTag/properties/id",
		"#/definitions/datedTag/properties/value",
		"#/definitions/datedTaggedRecords/items/0",
		"#/definitions/datedTaggedRecords/items/1",
		"#/definitions/namedAgain/properties/id",
		"#/definitions/namedThing/properties/name",
		"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism/properties/floccinaucinihilipilificationCreatedAt",
		"#/definitions/records/items/0",
		"#/definitions/datedTaggedRecords/additionalItems",
		"#/definitions/otherRecords/items",
		"#/definitions/tags/additionalProperties",
		"#/definitions/datedRecords",
		"#/definitions/datedTag",
		"#/definitions/datedTaggedRecords",
		"#/definitions/namedAgain",
		"#/definitions/namedThing",
		"#/definitions/otherRecords",
		"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism",
		"#/definitions/records",
		"#/definitions/tags",
	}
	if assert.NoError(t, err) {
		a := New(sp)
		result := sortDepthFirst(a.allSchemas)
		assert.Equal(t, values, result)
	}
}
|
||||
|
||||
// TestNameInlinedSchemas verifies that nameInlinedSchemas replaces every
// anonymous (inline) schema with a $ref to a generated definition. Each case
// gives the original pointer (Key), where to look after rewriting (Location),
// and the $ref expected at that location. A final sweep asserts that no
// non-simple anonymous schema is left anywhere outside top-level definitions.
func TestNameInlinedSchemas(t *testing.T) {
	bp := filepath.Join(".", "fixtures", "nested_inline_schemas.yml")
	sp, err := loadSpec(bp)
	values := []struct {
		Key      string
		Location string
		Ref      spec.Ref
	}{
		{"#/paths/~1some~1where~1{id}/parameters/1/schema/items", "#/definitions/postSomeWhereIdParamsBody/items", spec.MustCreateRef("#/definitions/postSomeWhereIdParamsBodyItems")},
		{"#/paths/~1some~1where~1{id}/parameters/1/schema", "#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/postSomeWhereIdParamsBody")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdParamsBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2Name")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/1", "#/definitions/getSomeWhereIdParamsBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems1")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/2", "#/definitions/getSomeWhereIdParamsBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record", "#/definitions/getSomeWhereIdParamsBodyOAIGen/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecord")},
		{"#/paths/~1some~1where~1{id}/get/parameters/2/schema", "#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyOAIGen")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdOKBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems2Name")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/1", "#/definitions/getSomeWhereIdOKBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems1")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/2", "#/definitions/getSomeWhereIdOKBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems2")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record", "#/definitions/getSomeWhereIdOKBody/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecord")},
		{"#/paths/~1some~1where~1{id}/get/responses/200/schema", "#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBody")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdDefaultBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems2Name")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/1", "#/definitions/getSomeWhereIdDefaultBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems1")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/2", "#/definitions/getSomeWhereIdDefaultBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems2")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record", "#/definitions/getSomeWhereIdDefaultBody/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecord")},
		{"#/paths/~1some~1where~1{id}/get/responses/default/schema", "#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBody")},
		{"#/definitions/nestedThing/properties/record/items/2/allOf/1/additionalProperties", "#/definitions/nestedThingRecordItems2AllOf1/additionalProperties", spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1AdditionalProperties")},
		{"#/definitions/nestedThing/properties/record/items/2/allOf/1", "#/definitions/nestedThingRecordItems2/allOf/1", spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1")},
		{"#/definitions/nestedThing/properties/record/items/2/properties/name", "#/definitions/nestedThingRecordItems2/properties/name", spec.MustCreateRef("#/definitions/nestedThingRecordItems2Name")},
		{"#/definitions/nestedThing/properties/record/items/1", "#/definitions/nestedThingRecord/items/1", spec.MustCreateRef("#/definitions/nestedThingRecordItems1")},
		{"#/definitions/nestedThing/properties/record/items/2", "#/definitions/nestedThingRecord/items/2", spec.MustCreateRef("#/definitions/nestedThingRecordItems2")},
		{"#/definitions/datedRecords/items/1", "#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/datedRecordsItems1")},
		{"#/definitions/datedTaggedRecords/items/1", "#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/datedTaggedRecordsItems1")},
		{"#/definitions/namedThing/properties/name", "#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/namedThingName")},
		{"#/definitions/nestedThing/properties/record", "#/definitions/nestedThing/properties/record", spec.MustCreateRef("#/definitions/nestedThingRecord")},
		{"#/definitions/records/items/0", "#/definitions/records/items/0", spec.MustCreateRef("#/definitions/recordsItems0")},
		{"#/definitions/datedTaggedRecords/additionalItems", "#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/datedTaggedRecordsItemsAdditionalItems")},
		{"#/definitions/otherRecords/items", "#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/otherRecordsItems")},
		{"#/definitions/tags/additionalProperties", "#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tagsAdditionalProperties")},
	}
	if assert.NoError(t, err) {
		// rewrite the spec in place, then inspect each expected location
		err := nameInlinedSchemas(&FlattenOpts{
			Spec:     New(sp),
			BasePath: bp,
		})

		if assert.NoError(t, err) {
			for i, v := range values {
				ptr, err := jsonpointer.New(v.Location[1:])
				if assert.NoError(t, err, "at %d for %s", i, v.Key) {
					vv, _, err := ptr.Get(sp)

					if assert.NoError(t, err, "at %d for %s", i, v.Key) {
						// the pointer may land on a schema (by value or
						// pointer) or on a SchemaOrBool/SchemaOrArray wrapper
						switch tv := vv.(type) {
						case *spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
						case spec.Schema:
							assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
						case *spec.SchemaOrBool:
							var sRef spec.Ref
							if tv != nil && tv.Schema != nil {
								sRef = tv.Schema.Ref
							}
							assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
						case *spec.SchemaOrArray:
							var sRef spec.Ref
							if tv != nil && tv.Schema != nil {
								sRef = tv.Schema.Ref
							}
							assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
						default:
							assert.Fail(t, "unknown type", "got %T", vv)
						}
					}
				}
			}
		}

		// after naming, every remaining ref-less non-top-level schema outside
		// #/responses and #/parameters must be a simple schema
		for k, rr := range New(sp).allSchemas {
			if !strings.HasPrefix(k, "#/responses") && !strings.HasPrefix(k, "#/parameters") {
				if rr.Schema != nil && rr.Schema.Ref.String() == "" && !rr.TopLevel {
					asch, err := Schema(SchemaOpts{Schema: rr.Schema, Root: sp, BasePath: bp})
					if assert.NoError(t, err, "for key: %s", k) {
						if !asch.IsSimpleSchema {
							assert.Fail(t, "not a top level schema", "for key: %s", k)
						}
					}
				}
			}
		}
	}
}
|
||||
|
||||
// TestFlatten runs the full Flatten pass over the fixture and verifies the
// resulting document. Each case names a starting Key, an optional Location to
// inspect (falls back to Key), and exactly one of: the $ref expected there
// (Ref) or the literal value expected there (Expected).
func TestFlatten(t *testing.T) {
	bp := filepath.Join(".", "fixtures", "flatten.yml")
	sp, err := loadSpec(bp)
	values := []struct {
		Key      string
		Location string
		Ref      spec.Ref
		Expected interface{}
	}{
		{
			"#/responses/notFound/schema",
			"#/responses/notFound/schema",
			spec.MustCreateRef("#/definitions/error"),
			nil,
		},
		{
			"#/paths/~1some~1where~1{id}/parameters/0",
			"#/paths/~1some~1where~1{id}/parameters/0/name",
			spec.Ref{},
			"id",
		},
		{
			"#/paths/~1other~1place",
			"#/paths/~1other~1place/get/operationId",
			spec.Ref{},
			"modelOp",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/0",
			"#/paths/~1some~1where~1{id}/get/parameters/0/name",
			spec.Ref{},
			"limit",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/1",
			"#/paths/~1some~1where~1{id}/get/parameters/1/name",
			spec.Ref{},
			"some",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/2",
			"#/paths/~1some~1where~1{id}/get/parameters/2/name",
			spec.Ref{},
			"other",
		},
		{
			"#/paths/~1some~1where~1{id}/get/parameters/3",
			"#/paths/~1some~1where~1{id}/get/parameters/3/schema",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBody"),
			"",
		},
		{
			"#/paths/~1some~1where~1{id}/get/responses/200",
			"#/paths/~1some~1where~1{id}/get/responses/200/schema",
			spec.MustCreateRef("#/definitions/getSomeWhereIdOKBody"),
			"",
		},
		{
			"#/definitions/namedAgain",
			"",
			spec.MustCreateRef("#/definitions/named"),
			"",
		},
		{
			"#/definitions/namedThing/properties/name",
			"",
			spec.MustCreateRef("#/definitions/named"),
			"",
		},
		{
			"#/definitions/namedThing/properties/namedAgain",
			"",
			spec.MustCreateRef("#/definitions/namedAgain"),
			"",
		},
		{
			"#/definitions/datedRecords/items/1",
			"",
			spec.MustCreateRef("#/definitions/record"),
			"",
		},
		{
			"#/definitions/otherRecords/items",
			"",
			spec.MustCreateRef("#/definitions/record"),
			"",
		},
		{
			"#/definitions/tags/additionalProperties",
			"",
			spec.MustCreateRef("#/definitions/tag"),
			"",
		},
		{
			"#/definitions/datedTag/allOf/1",
			"",
			spec.MustCreateRef("#/definitions/tag"),
			"",
		},
		{
			"#/definitions/nestedThingRecordItems2/allOf/1",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1"),
			"",
		},
		{
			"#/definitions/nestedThingRecord/items/1",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecordItems1"),
			"",
		},
		{
			"#/definitions/nestedThingRecord/items/2",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecordItems2"),
			"",
		},
		{
			"#/definitions/nestedThing/properties/record",
			"",
			spec.MustCreateRef("#/definitions/nestedThingRecord"),
			"",
		},
		{
			"#/definitions/named",
			"#/definitions/named/type",
			spec.Ref{},
			spec.StringOrArray{"string"},
		},
		{
			"#/definitions/error",
			"#/definitions/error/properties/id/type",
			spec.Ref{},
			spec.StringOrArray{"integer"},
		},
		{
			"#/definitions/record",
			"#/definitions/record/properties/createdAt/format",
			spec.Ref{},
			"date-time",
		},
		{
			"#/definitions/getSomeWhereIdOKBody",
			"#/definitions/getSomeWhereIdOKBody/properties/record",
			spec.MustCreateRef("#/definitions/nestedThing"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBody",
			"#/definitions/getSomeWhereIdParamsBody/properties/record",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecord"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecord",
			"#/definitions/getSomeWhereIdParamsBodyRecord/items/1",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems1"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecord",
			"#/definitions/getSomeWhereIdParamsBodyRecord/items/2",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2"),
			nil,
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2",
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2/allOf/0/format",
			spec.Ref{},
			"date",
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2Name",
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2Name/properties/createdAt/format",
			spec.Ref{},
			"date-time",
		},
		{
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2",
			"#/definitions/getSomeWhereIdParamsBodyRecordItems2/properties/name",
			spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2Name"),
			"date",
		},
	}
	if assert.NoError(t, err) {
		err := Flatten(FlattenOpts{Spec: New(sp), BasePath: bp})
		if assert.NoError(t, err) {
			for i, v := range values {
				// inspect Location when given, otherwise the Key itself;
				// [1:] strips the leading '#'
				pk := v.Key[1:]
				if v.Location != "" {
					pk = v.Location[1:]
				}
				ptr, err := jsonpointer.New(pk)
				if assert.NoError(t, err, "at %d for %s", i, v.Key) {
					d, _, err := ptr.Get(sp)
					if assert.NoError(t, err) {
						if v.Ref.String() != "" {
							// expecting a $ref: unwrap whatever schema
							// container the pointer landed on
							switch s := d.(type) {
							case *spec.Schema:
								assert.Equal(t, v.Ref.String(), s.Ref.String(), "at %d for %s", i, v.Key)
							case spec.Schema:
								assert.Equal(t, v.Ref.String(), s.Ref.String(), "at %d for %s", i, v.Key)
							case *spec.SchemaOrArray:
								var sRef spec.Ref
								if s != nil && s.Schema != nil {
									sRef = s.Schema.Ref
								}
								assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
							case *spec.SchemaOrBool:
								var sRef spec.Ref
								if s != nil && s.Schema != nil {
									sRef = s.Schema.Ref
								}
								assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
							default:
								assert.Fail(t, "unknown type", "got %T at %d for %s", d, i, v.Key)
							}
						} else {
							// expecting a literal value
							assert.Equal(t, v.Expected, d)
						}
					}
				}
			}
		}
	}
}
|
199
vendor/github.com/go-openapi/analysis/mixin.go
generated
vendored
Normal file
199
vendor/github.com/go-openapi/analysis/mixin.go
generated
vendored
Normal file
|
@ -0,0 +1,199 @@
|
|||
package analysis
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
)
|
||||
|
||||
// Mixin modifies the primary swagger spec by adding the paths and
// definitions from the mixin specs. Top level parameters and
// responses from the mixins are also carried over. Operation id
// collisions are avoided by appending "Mixin<N>" but only if
// needed. No other parts of primary are modified. Consider calling
// FixEmptyResponseDescriptions() on the modified primary if you read
// them from storage and they are valid to start with.
//
// Entries in "paths", "definitions", "parameters" and "responses" are
// added to the primary in the order of the given mixins. If the entry
// already exists in primary it is skipped with a warning message.
//
// The count of skipped entries (from collisions) is returned so any
// deviation from the number expected can flag warning in your build
// scripts. Carefully review the collisions before accepting them;
// consider renaming things if possible.
//
// No normalization of any keys takes place (paths, type defs,
// etc). Ensure they are canonical if your downstream tools do
// key normalization of any form.
func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
	var skipped []string
	// operation ids already claimed by the primary spec
	opIds := getOpIds(primary)
	// lazily initialize every map we may write into, so a sparse primary
	// (e.g. no paths at all) does not cause nil-map panics below
	if primary.Paths == nil {
		primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
	}
	if primary.Paths.Paths == nil {
		primary.Paths.Paths = make(map[string]spec.PathItem)
	}
	if primary.Definitions == nil {
		primary.Definitions = make(spec.Definitions)
	}
	if primary.Parameters == nil {
		primary.Parameters = make(map[string]spec.Parameter)
	}
	if primary.Responses == nil {
		primary.Responses = make(map[string]spec.Response)
	}

	for i, m := range mixins {
		for k, v := range m.Definitions {
			// assume name collisions represent IDENTICAL type. careful.
			if _, exists := primary.Definitions[k]; exists {
				warn := fmt.Sprintf("definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
				skipped = append(skipped, warn)
				continue
			}
			primary.Definitions[k] = v
		}
		if m.Paths != nil {
			for k, v := range m.Paths.Paths {
				if _, exists := primary.Paths.Paths[k]; exists {
					warn := fmt.Sprintf("paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
					skipped = append(skipped, warn)
					continue
				}

				// Swagger requires that operationIds be
				// unique within a spec. If we find a
				// collision we append "Mixin0" to the
				// operationId we are adding, where 0 is mixin
				// index. We assume that operationIds with
				// all the provided specs are already unique.
				piops := pathItemOps(v)
				for _, piop := range piops {
					if opIds[piop.ID] {
						piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", i)
					}
					opIds[piop.ID] = true
				}
				primary.Paths.Paths[k] = v
			}
		}
		for k, v := range m.Parameters {
			// could try to rename on conflict but would
			// have to fix $refs in the mixin. Complain
			// for now
			if _, exists := primary.Parameters[k]; exists {
				warn := fmt.Sprintf("top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
				skipped = append(skipped, warn)
				continue
			}
			primary.Parameters[k] = v
		}
		for k, v := range m.Responses {
			// could try to rename on conflict but would
			// have to fix $refs in the mixin. Complain
			// for now
			if _, exists := primary.Responses[k]; exists {
				warn := fmt.Sprintf("top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
				skipped = append(skipped, warn)
				continue
			}
			primary.Responses[k] = v
		}
	}
	return skipped
}
|
||||
|
||||
// FixEmptyResponseDescriptions replaces empty ("") response
|
||||
// descriptions in the input with "(empty)" to ensure that the
|
||||
// resulting Swagger is stays valid. The problem appears to arise
|
||||
// from reading in valid specs that have a explicit response
|
||||
// description of "" (valid, response.description is required), but
|
||||
// due to zero values being omitted upon re-serializing (omitempty) we
|
||||
// lose them unless we stick some chars in there.
|
||||
func FixEmptyResponseDescriptions(s *spec.Swagger) {
|
||||
if s.Paths != nil {
|
||||
for _, v := range s.Paths.Paths {
|
||||
if v.Get != nil {
|
||||
FixEmptyDescs(v.Get.Responses)
|
||||
}
|
||||
if v.Put != nil {
|
||||
FixEmptyDescs(v.Put.Responses)
|
||||
}
|
||||
if v.Post != nil {
|
||||
FixEmptyDescs(v.Post.Responses)
|
||||
}
|
||||
if v.Delete != nil {
|
||||
FixEmptyDescs(v.Delete.Responses)
|
||||
}
|
||||
if v.Options != nil {
|
||||
FixEmptyDescs(v.Options.Responses)
|
||||
}
|
||||
if v.Head != nil {
|
||||
FixEmptyDescs(v.Head.Responses)
|
||||
}
|
||||
if v.Patch != nil {
|
||||
FixEmptyDescs(v.Patch.Responses)
|
||||
}
|
||||
}
|
||||
}
|
||||
for k, v := range s.Responses {
|
||||
FixEmptyDesc(&v)
|
||||
s.Responses[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// FixEmptyDescs adds "(empty)" as the description for any Response in
|
||||
// the given Responses object that doesn't already have one.
|
||||
func FixEmptyDescs(rs *spec.Responses) {
|
||||
FixEmptyDesc(rs.Default)
|
||||
for k, v := range rs.StatusCodeResponses {
|
||||
FixEmptyDesc(&v)
|
||||
rs.StatusCodeResponses[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// FixEmptyDesc adds "(empty)" as the description to the given
|
||||
// Response object if it doesn't already have one and isn't a
|
||||
// ref. No-op on nil input.
|
||||
func FixEmptyDesc(rs *spec.Response) {
|
||||
if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
|
||||
return
|
||||
}
|
||||
rs.Description = "(empty)"
|
||||
}
|
||||
|
||||
// getOpIds extracts all the paths.<path>.operationIds from the given
|
||||
// spec and returns them as the keys in a map with 'true' values.
|
||||
func getOpIds(s *spec.Swagger) map[string]bool {
|
||||
rv := make(map[string]bool)
|
||||
if s.Paths == nil {
|
||||
return rv
|
||||
}
|
||||
for _, v := range s.Paths.Paths {
|
||||
piops := pathItemOps(v)
|
||||
for _, op := range piops {
|
||||
rv[op.ID] = true
|
||||
}
|
||||
}
|
||||
return rv
|
||||
}
|
||||
|
||||
func pathItemOps(p spec.PathItem) []*spec.Operation {
|
||||
var rv []*spec.Operation
|
||||
rv = appendOp(rv, p.Get)
|
||||
rv = appendOp(rv, p.Put)
|
||||
rv = appendOp(rv, p.Post)
|
||||
rv = appendOp(rv, p.Delete)
|
||||
rv = appendOp(rv, p.Head)
|
||||
rv = appendOp(rv, p.Patch)
|
||||
return rv
|
||||
}
|
||||
|
||||
func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
|
||||
if op == nil {
|
||||
return ops
|
||||
}
|
||||
return append(ops, op)
|
||||
}
|
64
vendor/github.com/go-openapi/analysis/mixin_test.go
generated
vendored
Normal file
64
vendor/github.com/go-openapi/analysis/mixin_test.go
generated
vendored
Normal file
|
@ -0,0 +1,64 @@
|
|||
package analysis
|
||||
|
||||
import "testing"
|
||||
|
||||
// Fixture spec files exercised by the mixin tests.
const (
	widgetFile     = "fixtures/widget-crud.yml"   // primary spec
	fooFile        = "fixtures/foo-crud.yml"      // mixin with overlapping entries
	barFile        = "fixtures/bar-crud.yml"      // mixin with overlapping entries
	noPathsFile    = "fixtures/no-paths.yml"      // mixin with no paths section
	emptyPathsFile = "fixtures/empty-paths.json"  // primary with an empty paths map
)
|
||||
|
||||
// TestMixin merges several fixture specs into a primary and checks the
// expected collision count plus the merged sizes of paths, definitions,
// parameters and responses. It also regression-tests merging into a primary
// whose paths map exists but is empty (was a nil-pointer panic).
func TestMixin(t *testing.T) {

	primary, err := loadSpec(widgetFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", widgetFile, err)
	}
	mixin1, err := loadSpec(fooFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", fooFile, err)
	}
	mixin2, err := loadSpec(barFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", barFile, err)
	}
	mixin3, err := loadSpec(noPathsFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", noPathsFile, err)
	}

	collisions := Mixin(primary, mixin1, mixin2, mixin3)
	if len(collisions) != 16 {
		t.Errorf("TestMixin: Expected 16 collisions, got %v\n%v", len(collisions), collisions)
	}

	if len(primary.Paths.Paths) != 7 {
		t.Errorf("TestMixin: Expected 7 paths in merged, got %v\n", len(primary.Paths.Paths))
	}

	if len(primary.Definitions) != 8 {
		t.Errorf("TestMixin: Expected 8 definitions in merged, got %v\n", len(primary.Definitions))
	}

	if len(primary.Parameters) != 4 {
		t.Errorf("TestMixin: Expected 4 top level parameters in merged, got %v\n", len(primary.Parameters))
	}

	if len(primary.Responses) != 2 {
		t.Errorf("TestMixin: Expected 2 top level responses in merged, got %v\n", len(primary.Responses))
	}

	// test that adding paths to a primary with no paths works (was NPE)
	emptyPaths, err := loadSpec(emptyPathsFile)
	if err != nil {
		t.Fatalf("Could not load '%v': %v\n", emptyPathsFile, err)
	}

	collisions = Mixin(emptyPaths, primary)
	if len(collisions) != 0 {
		t.Errorf("TestMixin: Expected 0 collisions, got %v\n%v", len(collisions), collisions)
	}

}
|
233
vendor/github.com/go-openapi/analysis/schema.go
generated
vendored
Normal file
233
vendor/github.com/go-openapi/analysis/schema.go
generated
vendored
Normal file
|
@ -0,0 +1,233 @@
|
|||
package analysis
|
||||
|
||||
import (
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/go-openapi/strfmt"
|
||||
)
|
||||
|
||||
// SchemaOpts configures the schema analyzer
type SchemaOpts struct {
	Schema   *spec.Schema // the schema to analyze
	Root     interface{}  // root document, used to resolve $ref targets
	BasePath string       // base path for resolving relative references
	_        struct{}     // forbids unkeyed struct literals, keeping the options extensible
}
|
||||
|
||||
// Schema analysis, will classify the schema according to known
|
||||
// patterns.
|
||||
func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
|
||||
a := &AnalyzedSchema{
|
||||
schema: opts.Schema,
|
||||
root: opts.Root,
|
||||
basePath: opts.BasePath,
|
||||
}
|
||||
|
||||
a.initializeFlags()
|
||||
a.inferKnownType()
|
||||
a.inferEnum()
|
||||
a.inferBaseType()
|
||||
|
||||
if err := a.inferMap(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if err := a.inferArray(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := a.inferTuple(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if err := a.inferFromRef(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
a.inferSimpleSchema()
|
||||
return a, nil
|
||||
}
|
||||
|
||||
// AnalyzedSchema indicates what the schema represents
type AnalyzedSchema struct {
	schema   *spec.Schema // the schema under analysis
	root     interface{}  // root document for $ref resolution
	basePath string       // base path for relative references

	// raw structure flags set by initializeFlags
	hasProps           bool
	hasAllOf           bool
	hasItems           bool
	hasAdditionalProps bool
	hasAdditionalItems bool
	hasRef             bool

	// classification results
	IsKnownType      bool // primitive type or registered string format
	IsSimpleSchema   bool // known type, simple array or simple map
	IsArray          bool
	IsSimpleArray    bool
	IsMap            bool
	IsSimpleMap      bool
	IsExtendedObject bool
	IsTuple          bool
	IsTupleWithExtra bool
	IsBaseType       bool
	IsEnum           bool
}
|
||||
|
||||
// inherits copies the analysis results (structure flags and classification
// fields) from other onto this schema; schema/root/basePath are untouched.
// No-op on nil input. Used when a schema is a $ref: the target's analysis
// becomes this schema's analysis.
func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
	if other == nil {
		return
	}
	a.hasProps = other.hasProps
	a.hasAllOf = other.hasAllOf
	a.hasItems = other.hasItems
	a.hasAdditionalItems = other.hasAdditionalItems
	a.hasAdditionalProps = other.hasAdditionalProps
	a.hasRef = other.hasRef

	a.IsKnownType = other.IsKnownType
	a.IsSimpleSchema = other.IsSimpleSchema
	a.IsArray = other.IsArray
	a.IsSimpleArray = other.IsSimpleArray
	a.IsMap = other.IsMap
	a.IsSimpleMap = other.IsSimpleMap
	a.IsExtendedObject = other.IsExtendedObject
	a.IsTuple = other.IsTuple
	a.IsTupleWithExtra = other.IsTupleWithExtra
	a.IsBaseType = other.IsBaseType
	a.IsEnum = other.IsEnum
}
|
||||
|
||||
func (a *AnalyzedSchema) inferFromRef() error {
|
||||
if a.hasRef {
|
||||
opts := &spec.ExpandOptions{RelativeBase: a.basePath}
|
||||
sch, err := spec.ResolveRefWithBase(a.root, &a.schema.Ref, opts)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if sch != nil {
|
||||
rsch, err := Schema(SchemaOpts{
|
||||
Schema: sch,
|
||||
Root: a.root,
|
||||
BasePath: a.basePath,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
a.inherits(rsch)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) inferSimpleSchema() {
|
||||
a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) inferKnownType() {
|
||||
tpe := a.schema.Type
|
||||
format := a.schema.Format
|
||||
a.IsKnownType = tpe.Contains("boolean") ||
|
||||
tpe.Contains("integer") ||
|
||||
tpe.Contains("number") ||
|
||||
tpe.Contains("string") ||
|
||||
(format != "" && strfmt.Default.ContainsName(format)) ||
|
||||
(a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) inferMap() error {
|
||||
if a.isObjectType() {
|
||||
hasExtra := a.hasProps || a.hasAllOf
|
||||
a.IsMap = a.hasAdditionalProps && !hasExtra
|
||||
a.IsExtendedObject = a.hasAdditionalProps && hasExtra
|
||||
if a.IsMap {
|
||||
if a.schema.AdditionalProperties.Schema != nil {
|
||||
msch, err := Schema(SchemaOpts{
|
||||
Schema: a.schema.AdditionalProperties.Schema,
|
||||
Root: a.root,
|
||||
BasePath: a.basePath,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
a.IsSimpleMap = msch.IsSimpleSchema
|
||||
} else if a.schema.AdditionalProperties.Allows {
|
||||
a.IsSimpleMap = true
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) inferArray() error {
|
||||
fromValid := a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Len() < 2)
|
||||
a.IsArray = fromValid || (a.hasItems && a.schema.Items.Len() < 2)
|
||||
if a.IsArray && a.hasItems {
|
||||
if a.schema.Items.Schema != nil {
|
||||
itsch, err := Schema(SchemaOpts{
|
||||
Schema: a.schema.Items.Schema,
|
||||
Root: a.root,
|
||||
BasePath: a.basePath,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
a.IsSimpleArray = itsch.IsSimpleSchema
|
||||
}
|
||||
if len(a.schema.Items.Schemas) > 0 {
|
||||
itsch, err := Schema(SchemaOpts{
|
||||
Schema: &a.schema.Items.Schemas[0],
|
||||
Root: a.root,
|
||||
BasePath: a.basePath,
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
a.IsSimpleArray = itsch.IsSimpleSchema
|
||||
}
|
||||
}
|
||||
if a.IsArray && !a.hasItems {
|
||||
a.IsSimpleArray = true
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) inferTuple() error {
|
||||
tuple := a.hasItems && a.schema.Items.Len() > 1
|
||||
a.IsTuple = tuple && !a.hasAdditionalItems
|
||||
a.IsTupleWithExtra = tuple && a.hasAdditionalItems
|
||||
return nil
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) inferBaseType() {
|
||||
if a.isObjectType() {
|
||||
a.IsBaseType = a.schema.Discriminator != ""
|
||||
}
|
||||
}
|
||||
|
||||
// inferEnum flags schemas that constrain their values to an enum list.
func (a *AnalyzedSchema) inferEnum() {
	a.IsEnum = len(a.schema.Enum) > 0
}
|
||||
|
||||
func (a *AnalyzedSchema) initializeFlags() {
|
||||
a.hasProps = len(a.schema.Properties) > 0
|
||||
a.hasAllOf = len(a.schema.AllOf) > 0
|
||||
a.hasRef = a.schema.Ref.String() != ""
|
||||
|
||||
a.hasItems = a.schema.Items != nil &&
|
||||
(a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
|
||||
|
||||
a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
|
||||
(a.schema.AdditionalProperties != nil || a.schema.AdditionalProperties.Allows)
|
||||
|
||||
a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
|
||||
(a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
|
||||
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) isObjectType() bool {
|
||||
return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
|
||||
}
|
||||
|
||||
func (a *AnalyzedSchema) isArrayType() bool {
|
||||
return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
|
||||
}
|
266
vendor/github.com/go-openapi/analysis/schema_test.go
generated
vendored
Normal file
266
vendor/github.com/go-openapi/analysis/schema_test.go
generated
vendored
Normal file
|
@ -0,0 +1,266 @@
|
|||
package analysis
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"path"
|
||||
"testing"
|
||||
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
|
||||
"github.com/go-openapi/spec"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// knownSchemas enumerates schemas the analyzer must classify as known types:
// every primitive property constructor, plus object-ish schemas that declare
// no structure and a schema with only a registered string format.
var knownSchemas = []*spec.Schema{
	spec.BoolProperty(),                  // 0
	spec.StringProperty(),                // 1
	spec.Int8Property(),                  // 2
	spec.Int16Property(),                 // 3
	spec.Int32Property(),                 // 4
	spec.Int64Property(),                 // 5
	spec.Float32Property(),               // 6
	spec.Float64Property(),               // 7
	spec.DateProperty(),                  // 8
	spec.DateTimeProperty(),              // 9
	(&spec.Schema{}),                     // 10
	(&spec.Schema{}).Typed("object", ""), // 11
	(&spec.Schema{}).Typed("", ""),       // 12
	(&spec.Schema{}).Typed("", "uuid"),   // 13
}
|
||||
|
||||
func newCObj() *spec.Schema {
|
||||
return (&spec.Schema{}).Typed("object", "").SetProperty("id", *spec.Int64Property())
|
||||
}
|
||||
|
||||
// complexObject is a shared instance of the complex object fixture.
var complexObject = newCObj()

// complexSchemas enumerates structured schemas that must NOT be classified
// as known/simple: the complex object plus an array and a map of it.
var complexSchemas = []*spec.Schema{
	complexObject,
	spec.ArrayProperty(complexObject),
	spec.MapProperty(complexObject),
}
|
||||
|
||||
func knownRefs(base string) []spec.Ref {
|
||||
urls := []string{"bool", "string", "integer", "float", "date", "object", "format"}
|
||||
|
||||
var result []spec.Ref
|
||||
for _, u := range urls {
|
||||
result = append(result, spec.MustCreateRef(fmt.Sprintf("%s/%s", base, path.Join("known", u))))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func complexRefs(base string) []spec.Ref {
|
||||
urls := []string{"object", "array", "map"}
|
||||
|
||||
var result []spec.Ref
|
||||
for _, u := range urls {
|
||||
result = append(result, spec.MustCreateRef(fmt.Sprintf("%s/%s", base, path.Join("complex", u))))
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func refServer() *httptest.Server {
|
||||
mux := http.NewServeMux()
|
||||
mux.Handle("/known/bool", schemaHandler(knownSchemas[0]))
|
||||
mux.Handle("/known/string", schemaHandler(knownSchemas[1]))
|
||||
mux.Handle("/known/integer", schemaHandler(knownSchemas[5]))
|
||||
mux.Handle("/known/float", schemaHandler(knownSchemas[6]))
|
||||
mux.Handle("/known/date", schemaHandler(knownSchemas[8]))
|
||||
mux.Handle("/known/object", schemaHandler(knownSchemas[11]))
|
||||
mux.Handle("/known/format", schemaHandler(knownSchemas[13]))
|
||||
|
||||
mux.Handle("/complex/object", schemaHandler(complexSchemas[0]))
|
||||
mux.Handle("/complex/array", schemaHandler(complexSchemas[1]))
|
||||
mux.Handle("/complex/map", schemaHandler(complexSchemas[2]))
|
||||
|
||||
return httptest.NewServer(mux)
|
||||
}
|
||||
|
||||
// refSchema wraps a ref in a schema that contains nothing but that $ref.
func refSchema(ref spec.Ref) *spec.Schema {
	return &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
}
|
||||
|
||||
func schemaHandler(schema *spec.Schema) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
writeJSON(w, schema)
|
||||
})
|
||||
}
|
||||
|
||||
func writeJSON(w http.ResponseWriter, data interface{}) {
|
||||
w.Header().Add("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
enc := json.NewEncoder(w)
|
||||
if err := enc.Encode(data); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
// TestSchemaAnalysis_KnownTypes verifies that primitive/format schemas are
// classified as known types while structured schemas are not — both inline
// and when fetched through remote $refs from a live test server.
func TestSchemaAnalysis_KnownTypes(t *testing.T) {
	for i, v := range knownSchemas {
		sch, err := Schema(SchemaOpts{Schema: v})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsKnownType, "item at %d should be a known type", i)
		}
	}
	for i, v := range complexSchemas {
		sch, err := Schema(SchemaOpts{Schema: v})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.False(t, sch.IsKnownType, "item at %d should not be a known type", i)
		}
	}

	// Same classifications must hold when the schemas are resolved via $ref.
	serv := refServer()
	defer serv.Close()

	for i, ref := range knownRefs(serv.URL) {
		sch, err := Schema(SchemaOpts{Schema: refSchema(ref)})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsKnownType, "item at %d should be a known type", i)
		}
	}
	for i, ref := range complexRefs(serv.URL) {
		sch, err := Schema(SchemaOpts{Schema: refSchema(ref)})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.False(t, sch.IsKnownType, "item at %d should not be a known type", i)
		}
	}
}
|
||||
|
||||
// TestSchemaAnalysis_Array verifies array classification: arrays of known
// (simple) element schemas are simple arrays, arrays of complex elements are
// arrays but not simple — inline and behind remote $refs.
func TestSchemaAnalysis_Array(t *testing.T) {
	for i, v := range append(knownSchemas, (&spec.Schema{}).Typed("array", "")) {
		sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(v)})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsArray, "item at %d should be an array type", i)
			assert.True(t, sch.IsSimpleArray, "item at %d should be a simple array type", i)
		}
	}

	for i, v := range complexSchemas {
		sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(v)})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsArray, "item at %d should be an array type", i)
			assert.False(t, sch.IsSimpleArray, "item at %d should not be a simple array type", i)
		}
	}

	// Same classifications must hold when elements are resolved via $ref.
	serv := refServer()
	defer serv.Close()

	for i, ref := range knownRefs(serv.URL) {
		sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(refSchema(ref))})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsArray, "item at %d should be an array type", i)
			assert.True(t, sch.IsSimpleArray, "item at %d should be a simple array type", i)
		}
	}
	for i, ref := range complexRefs(serv.URL) {
		sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(refSchema(ref))})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.False(t, sch.IsKnownType, "item at %d should not be a known type", i)
			assert.True(t, sch.IsArray, "item at %d should be an array type", i)
			assert.False(t, sch.IsSimpleArray, "item at %d should not be a simple array type", i)
		}
	}

}
|
||||
|
||||
// TestSchemaAnalysis_Map verifies map classification: maps of known (simple)
// value schemas are simple maps, maps of complex values are maps but not
// simple.
func TestSchemaAnalysis_Map(t *testing.T) {
	for i, v := range append(knownSchemas, spec.MapProperty(nil)) {
		sch, err := Schema(SchemaOpts{Schema: spec.MapProperty(v)})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsMap, "item at %d should be a map type", i)
			assert.True(t, sch.IsSimpleMap, "item at %d should be a simple map type", i)
		}
	}

	for i, v := range complexSchemas {
		sch, err := Schema(SchemaOpts{Schema: spec.MapProperty(v)})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsMap, "item at %d should be a map type", i)
			assert.False(t, sch.IsSimpleMap, "item at %d should not be a simple map type", i)
		}
	}
}
|
||||
|
||||
// TestSchemaAnalysis_ExtendedObject verifies that a schema combining
// additionalProperties with declared properties is classified as an extended
// object rather than a map.
func TestSchemaAnalysis_ExtendedObject(t *testing.T) {
	for i, v := range knownSchemas {
		// A map of v that ALSO declares a "name" property.
		wex := spec.MapProperty(v).SetProperty("name", *spec.StringProperty())
		sch, err := Schema(SchemaOpts{Schema: wex})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsExtendedObject, "item at %d should be an extended map object type", i)
			assert.False(t, sch.IsMap, "item at %d should not be a map type", i)
			assert.False(t, sch.IsSimpleMap, "item at %d should not be a simple map type", i)
		}
	}
}
|
||||
|
||||
// TestSchemaAnalysis_Tuple verifies that an array with multiple items
// schemas and no additionalItems is classified as a plain tuple.
func TestSchemaAnalysis_Tuple(t *testing.T) {
	at := spec.ArrayProperty(nil)
	at.Items = &spec.SchemaOrArray{}
	at.Items.Schemas = append(at.Items.Schemas, *spec.StringProperty(), *spec.Int64Property())

	sch, err := Schema(SchemaOpts{Schema: at})
	if assert.NoError(t, err) {
		assert.True(t, sch.IsTuple)
		assert.False(t, sch.IsTupleWithExtra)
		assert.False(t, sch.IsKnownType)
		assert.False(t, sch.IsSimpleSchema)
	}
}
|
||||
|
||||
// TestSchemaAnalysis_TupleWithExtra verifies that a tuple that also declares
// additionalItems is classified as tuple-with-extra, not a plain tuple.
func TestSchemaAnalysis_TupleWithExtra(t *testing.T) {
	at := spec.ArrayProperty(nil)
	at.Items = &spec.SchemaOrArray{}
	at.Items.Schemas = append(at.Items.Schemas, *spec.StringProperty(), *spec.Int64Property())
	at.AdditionalItems = &spec.SchemaOrBool{Allows: true}
	at.AdditionalItems.Schema = spec.Int32Property()

	sch, err := Schema(SchemaOpts{Schema: at})
	if assert.NoError(t, err) {
		assert.False(t, sch.IsTuple)
		assert.True(t, sch.IsTupleWithExtra)
		assert.False(t, sch.IsKnownType)
		assert.False(t, sch.IsSimpleSchema)
	}
}
|
||||
|
||||
// TestSchemaAnalysis_BaseType verifies that an object schema carrying a
// discriminator is classified as a polymorphic base type.
func TestSchemaAnalysis_BaseType(t *testing.T) {
	cl := (&spec.Schema{}).Typed("object", "").SetProperty("type", *spec.StringProperty()).WithDiscriminator("type")

	sch, err := Schema(SchemaOpts{Schema: cl})
	if assert.NoError(t, err) {
		assert.True(t, sch.IsBaseType)
		assert.False(t, sch.IsKnownType)
		assert.False(t, sch.IsSimpleSchema)
	}
}
|
||||
|
||||
// TestSchemaAnalysis_SimpleSchema verifies that known-type schemas — and
// arrays/maps built from them — are simple schemas, while complex schemas
// are not.
func TestSchemaAnalysis_SimpleSchema(t *testing.T) {
	for i, v := range append(knownSchemas, spec.ArrayProperty(nil), spec.MapProperty(nil)) {
		sch, err := Schema(SchemaOpts{Schema: v})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.True(t, sch.IsSimpleSchema, "item at %d should be a simple schema", i)
		}

		asch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(v)})
		if assert.NoError(t, err, "failed to analyze array schema at %d: %v", i, err) {
			assert.True(t, asch.IsSimpleSchema, "array item at %d should be a simple schema", i)
		}

		msch, err := Schema(SchemaOpts{Schema: spec.MapProperty(v)})
		if assert.NoError(t, err, "failed to analyze map schema at %d: %v", i, err) {
			assert.True(t, msch.IsSimpleSchema, "map item at %d should be a simple schema", i)
		}
	}

	for i, v := range complexSchemas {
		sch, err := Schema(SchemaOpts{Schema: v})
		if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
			assert.False(t, sch.IsSimpleSchema, "item at %d should not be a simple schema", i)
		}
	}

}
|
Loading…
Add table
Add a link
Reference in a new issue