[bugfix] Fix Swagger spec and add test script (#2698)

* Add Swagger spec test script

* Fix Swagger spec errors not related to statuses with polls

* Add API tests that post a status with a poll

* Fix creating a status with a poll from form params

* Fix Swagger spec errors related to statuses with polls (this is the last error)

* Fix Swagger spec warnings not related to unused definitions

* Suppress a duplicate list update params definition that was somehow causing wrong param names

* Add Swagger test to CI

- updates Drone config
- vendorizes go-swagger
- fixes a file extension issue that caused the test script to generate JSON instead of YAML with the vendorized version

* Put `Sample: ` on its own line everywhere

* Remove unused id param from emojiCategoriesGet

* Add 5 more pairs of profile fields to account update API Swagger

* Remove Swagger prefix from dummy fields

It makes the generated code look weird

* Manually annotate params for statusCreate operation

* Fix all remaining Swagger spec warnings

- Change some models into operation parameters
- Ignore models that already correspond to manually documented operation parameters but can't be trivially changed (those with file fields)

* Document that creating a status with scheduled_at isn't implemented yet

* Sign drone.yml

* Fix filter API Swagger errors

* fixup! Fix filter API Swagger errors

---------

Co-authored-by: tobi <tobi.smethurst@protonmail.com>
Author: Vyr Cossont
Date: 2024-03-06 09:05:45 -08:00
Committed by: GitHub
Commit: fc3741365c (parent 68c8fe67cc)
672 changed files with 135624 additions and 713 deletions

5
vendor/github.com/go-openapi/analysis/.codecov.yml generated vendored Normal file

@@ -0,0 +1,5 @@
coverage:
status:
patch:
default:
target: 80%

2
vendor/github.com/go-openapi/analysis/.gitattributes generated vendored Normal file

@@ -0,0 +1,2 @@
*.go text eol=lf

5
vendor/github.com/go-openapi/analysis/.gitignore generated vendored Normal file

@@ -0,0 +1,5 @@
secrets.yml
coverage.out
coverage.txt
*.cov
.idea

56
vendor/github.com/go-openapi/analysis/.golangci.yml generated vendored Normal file

@@ -0,0 +1,56 @@
linters-settings:
govet:
check-shadowing: true
golint:
min-confidence: 0
gocyclo:
min-complexity: 40
gocognit:
min-complexity: 40
maligned:
suggest-new: true
dupl:
threshold: 150
goconst:
min-len: 2
min-occurrences: 4
linters:
enable-all: true
disable:
- maligned
- lll
- gochecknoglobals
- gochecknoinits
# scopelint is useful, but also reports false positives
# that unfortunately can't be disabled. So we disable the
# linter rather than changing code that works.
# see: https://github.com/kyoh86/scopelint/issues/4
- scopelint
- godox
- gocognit
#- whitespace
- wsl
- funlen
- testpackage
- wrapcheck
#- nlreturn
- gomnd
- goerr113
- exhaustivestruct
#- errorlint
#- nestif
- gofumpt
- godot
- gci
- dogsled
- paralleltest
- tparallel
- thelper
- ifshort
- forbidigo
- cyclop
- varnamelen
- exhaustruct
- nonamedreturns
- nosnakecase

74
vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md generated vendored Normal file

@@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

202
vendor/github.com/go-openapi/analysis/LICENSE generated vendored Normal file

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

31
vendor/github.com/go-openapi/analysis/README.md generated vendored Normal file

@@ -0,0 +1,31 @@
# OpenAPI initiative analysis
[![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis)
[![Build status](https://ci.appveyor.com/api/projects/status/x377t5o9ennm847o/branch/master?svg=true)](https://ci.appveyor.com/project/casualjim/go-openapi/analysis/branch/master)
[![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis)
[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE)
[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/analysis.svg)](https://pkg.go.dev/github.com/go-openapi/analysis)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/analysis)](https://goreportcard.com/report/github.com/go-openapi/analysis)
A foundational library to analyze an OAI specification document for easier reasoning about the content.
## What's inside?
* An analyzer providing methods to walk the functional content of a specification (a short usage sketch follows below)
* A spec flattener producing a self-contained document bundle, while preserving `$ref`s
* A spec merger ("mixin") to merge several spec documents into a primary spec
* A spec "fixer" ensuring that response descriptions are non-empty
[Documentation](https://godoc.org/github.com/go-openapi/analysis)
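
A minimal usage sketch of the analyzer and the fixer (an illustration under stated assumptions: the empty `spec.Swagger` below stands in for a real document, which would normally be parsed first, e.g. with `github.com/go-openapi/loads`):

```go
package main

import (
	"log"

	"github.com/go-openapi/analysis"
	"github.com/go-openapi/spec"
)

func main() {
	// Stand-in document; a real program would parse an actual spec here.
	doc := &spec.Swagger{}

	// Fixer: ensure every response carries a non-empty description,
	// so the document stays valid after re-serialization.
	analysis.FixEmptyResponseDescriptions(doc)

	// Analyzer: walk definitions, parameters, responses and $ref usage.
	an := analysis.New(doc)
	for _, ref := range an.AllDefinitionReferences() {
		log.Printf("definition referenced: %s", ref)
	}
}
```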
## FAQ
* Does this library support OpenAPI 3?
> No.
> This package currently only supports OpenAPI 2.0 (aka Swagger 2.0).
> There is no plan to make it evolve toward supporting OpenAPI 3.x.
> This [discussion thread](https://github.com/go-openapi/spec/issues/21) relates the full story.
>

1064
vendor/github.com/go-openapi/analysis/analyzer.go generated vendored Normal file

File diff suppressed because it is too large

32
vendor/github.com/go-openapi/analysis/appveyor.yml generated vendored Normal file

@@ -0,0 +1,32 @@
version: "0.1.{build}"
clone_folder: C:\go-openapi\analysis
shallow_clone: true # for startup speed
pull_requests:
do_not_increment_build_number: true
#skip_tags: true
#skip_branch_with_pr: true
# appveyor.yml
build: off
environment:
GOPATH: c:\gopath
stack: go 1.16
test_script:
- go test -v -timeout 20m ./...
deploy: off
notifications:
- provider: Slack
incoming_webhook: https://hooks.slack.com/services/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
auth_token:
secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
channel: bots
on_build_success: false
on_build_failure: true
on_build_status_changed: true

23
vendor/github.com/go-openapi/analysis/debug.go generated vendored Normal file

@@ -0,0 +1,23 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analysis
import (
"os"
"github.com/go-openapi/analysis/internal/debug"
)
var debugLog = debug.GetLogger("analysis", os.Getenv("SWAGGER_DEBUG") != "")

43
vendor/github.com/go-openapi/analysis/doc.go generated vendored Normal file

@@ -0,0 +1,43 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package analysis provides methods to work with a Swagger specification document from
package go-openapi/spec.
Analyzing a specification
An analysed specification object (type Spec) provides methods to work with the swagger definition.
Flattening or expanding a specification
Flattening a specification bundles all remote $ref in the main spec document.
Depending on flattening options, additional preprocessing may take place:
- full flattening: replacing all inline complex constructs by a named entry in #/definitions
- expand: replace all $ref's in the document by their expanded content
Merging several specifications
Mixin several specifications merges all Swagger constructs, and warns about found conflicts.
Fixing a specification
Unmarshalling a specification with golang json unmarshalling may lead to
some unwanted result on present but empty fields.
Analyzing a Swagger schema
Swagger schemas are analyzed to determine their complexity and qualify their content.
*/
package analysis

79
vendor/github.com/go-openapi/analysis/fixer.go generated vendored Normal file

@@ -0,0 +1,79 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analysis
import "github.com/go-openapi/spec"
// FixEmptyResponseDescriptions replaces empty ("") response
// descriptions in the input with "(empty)" to ensure that the
// resulting Swagger stays valid. The problem appears to arise
// from reading in valid specs that have an explicit response
// description of "" (valid, response.description is required), but
// due to zero values being omitted upon re-serializing (omitempty) we
// lose them unless we stick some chars in there.
func FixEmptyResponseDescriptions(s *spec.Swagger) {
for k, v := range s.Responses {
FixEmptyDesc(&v) //#nosec
s.Responses[k] = v
}
if s.Paths == nil {
return
}
for _, v := range s.Paths.Paths {
if v.Get != nil {
FixEmptyDescs(v.Get.Responses)
}
if v.Put != nil {
FixEmptyDescs(v.Put.Responses)
}
if v.Post != nil {
FixEmptyDescs(v.Post.Responses)
}
if v.Delete != nil {
FixEmptyDescs(v.Delete.Responses)
}
if v.Options != nil {
FixEmptyDescs(v.Options.Responses)
}
if v.Head != nil {
FixEmptyDescs(v.Head.Responses)
}
if v.Patch != nil {
FixEmptyDescs(v.Patch.Responses)
}
}
}
// FixEmptyDescs adds "(empty)" as the description for any Response in
// the given Responses object that doesn't already have one.
func FixEmptyDescs(rs *spec.Responses) {
FixEmptyDesc(rs.Default)
for k, v := range rs.StatusCodeResponses {
FixEmptyDesc(&v) //#nosec
rs.StatusCodeResponses[k] = v
}
}
// FixEmptyDesc adds "(empty)" as the description to the given
// Response object if it doesn't already have one and isn't a
// ref. No-op on nil input.
func FixEmptyDesc(rs *spec.Response) {
if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
return
}
rs.Description = "(empty)"
}

802
vendor/github.com/go-openapi/analysis/flatten.go generated vendored Normal file

@@ -0,0 +1,802 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analysis
import (
"fmt"
"log"
"path"
"sort"
"strings"
"github.com/go-openapi/analysis/internal/flatten/normalize"
"github.com/go-openapi/analysis/internal/flatten/operations"
"github.com/go-openapi/analysis/internal/flatten/replace"
"github.com/go-openapi/analysis/internal/flatten/schutils"
"github.com/go-openapi/analysis/internal/flatten/sortref"
"github.com/go-openapi/jsonpointer"
"github.com/go-openapi/spec"
)
const definitionsPath = "#/definitions"
// newRef stores information about refs created during the flattening process
type newRef struct {
key string
newName string
path string
isOAIGen bool
resolved bool
schema *spec.Schema
parents []string
}
// context stores intermediary results from flatten
type context struct {
newRefs map[string]*newRef
warnings []string
resolved map[string]string
}
func newContext() *context {
return &context{
newRefs: make(map[string]*newRef, 150),
warnings: make([]string, 0),
resolved: make(map[string]string, 50),
}
}
// Flatten an analyzed spec and produce a self-contained spec bundle.
//
// There is a minimal and a full flattening mode.
//
//
// Minimally flattening a spec means:
// - Expanding parameters, responses, path items, parameter items and header items (references to schemas are left
// unscathed)
// - Importing external (http, file) references so they become internal to the document
// - Moving every JSON pointer to a $ref to a named definition (i.e. the reworked spec does not contain pointers
// like "$ref": "#/definitions/myObject/allOfs/1")
//
// A minimally flattened spec thus guarantees the following properties:
// - all $refs point to a local definition (i.e. '#/definitions/...')
// - definitions are unique
//
// NOTE: arbitrary JSON pointers (other than $refs to top level definitions) are rewritten as definitions if they
// represent a complex schema or express commonality in the spec.
// Otherwise, they are simply expanded.
// Self-referencing JSON pointers cannot resolve to a type and trigger an error.
//
//
// Minimal flattening is necessary and sufficient for codegen rendering using go-swagger.
//
// Fully flattening a spec means:
// - Moving every complex inline schema to be a definition with an auto-generated name in a depth-first fashion.
//
// By complex, we mean every JSON object with some properties.
// Arrays, when they do not define a tuple,
// or empty objects with or without additionalProperties, are not considered complex and remain inline.
//
// NOTE: rewritten schemas get a vendor extension x-go-gen-location so we know from which part of the spec definitions
// have been created.
//
// Available flattening options:
// - Minimal: stops flattening after minimal $ref processing, leaving schema constructs untouched
// - Expand: expand all $ref's in the document (inoperant if Minimal set to true)
// - Verbose: croaks about name conflicts detected
// - RemoveUnused: removes unused parameters, responses and definitions after expansion/flattening
//
// NOTE: expansion removes all $ref save circular $ref, which remain in place
//
// TODO: additional options
// - ProgagateNameExtensions: ensure that created entries properly follow naming rules when their parent have set a
// x-go-name extension
// - LiftAllOfs:
// - limit the flattening of allOf members when simple objects
// - merge allOf with validation only
// - merge allOf with extensions only
// - ...
//
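// Example (a minimal sketch under stated assumptions: swaggerDoc is a *spec.Swagger
// parsed beforehand, and "swagger.yaml" is a hypothetical root document location):
//
//	err := Flatten(FlattenOpts{
//		Spec:         New(swaggerDoc),
//		BasePath:     "swagger.yaml", // used to resolve relative $ref
//		Minimal:      true,           // stop after minimal $ref processing
//		RemoveUnused: true,           // drop unused parameters, responses and definitions
//	})
//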
func Flatten(opts FlattenOpts) error {
debugLog("FlattenOpts: %#v", opts)
opts.flattenContext = newContext()
// 1. Recursively expand responses, parameters, path items and items in simple schemas.
//
// This simplifies the spec and leaves only the $ref's in schema objects.
if err := expand(&opts); err != nil {
return err
}
// 2. Strip the current document from absolute $ref's that are actually in the root,
// so we can recognize them as proper definitions
//
// In particular, this works around issue go-openapi/spec#76: leading absolute file in $ref is stripped
if err := normalizeRef(&opts); err != nil {
return err
}
// 3. Optionally remove shared parameters and responses already expanded (now unused).
//
// Operation parameters (i.e. under paths) remain.
if opts.RemoveUnused {
removeUnusedShared(&opts)
}
// 4. Import all remote references.
if err := importReferences(&opts); err != nil {
return err
}
// 5. full flattening: rewrite inline schemas (schemas that aren't simple types or arrays or maps)
if !opts.Minimal && !opts.Expand {
if err := nameInlinedSchemas(&opts); err != nil {
return err
}
}
// 6. Rewrite JSON pointers other than $ref to named definitions
// and attempt to resolve conflicting names whenever possible.
if err := stripPointersAndOAIGen(&opts); err != nil {
return err
}
// 7. Strip the spec from unused definitions
if opts.RemoveUnused {
removeUnused(&opts)
}
// 8. Issue warning notifications, if any
opts.croak()
// TODO: simplify known schema patterns to flat objects with properties
// examples:
// - lift simple allOf object,
// - empty allOf with validation only or extensions only
// - rework allOf arrays
// - rework allOf additionalProperties
return nil
}
func expand(opts *FlattenOpts) error {
if err := spec.ExpandSpec(opts.Swagger(), opts.ExpandOpts(!opts.Expand)); err != nil {
return err
}
opts.Spec.reload() // re-analyze
return nil
}
// normalizeRef strips the current file from any absolute file $ref. This works around issue go-openapi/spec#76:
// leading absolute file in $ref is stripped
func normalizeRef(opts *FlattenOpts) error {
debugLog("normalizeRef")
altered := false
for k, w := range opts.Spec.references.allRefs {
if !strings.HasPrefix(w.String(), opts.BasePath+definitionsPath) { // may be a mix of / and \, depending on OS
continue
}
altered = true
debugLog("stripping absolute path for: %s", w.String())
// strip the base path from definition
if err := replace.UpdateRef(opts.Swagger(), k,
spec.MustCreateRef(path.Join(definitionsPath, path.Base(w.String())))); err != nil {
return err
}
}
if altered {
opts.Spec.reload() // re-analyze
}
return nil
}
func removeUnusedShared(opts *FlattenOpts) {
opts.Swagger().Parameters = nil
opts.Swagger().Responses = nil
opts.Spec.reload() // re-analyze
}
func importReferences(opts *FlattenOpts) error {
var (
imported bool
err error
)
for !imported && err == nil {
// iteratively import remote references until none left.
// This inlining deals with name conflicts by introducing auto-generated names ("OAIGen")
imported, err = importExternalReferences(opts)
opts.Spec.reload() // re-analyze
}
return err
}
// nameInlinedSchemas replaces every complex inline construct by a named definition.
func nameInlinedSchemas(opts *FlattenOpts) error {
debugLog("nameInlinedSchemas")
namer := &InlineSchemaNamer{
Spec: opts.Swagger(),
Operations: operations.AllOpRefsByRef(opts.Spec, nil),
flattenContext: opts.flattenContext,
opts: opts,
}
depthFirst := sortref.DepthFirst(opts.Spec.allSchemas)
for _, key := range depthFirst {
sch := opts.Spec.allSchemas[key]
if sch.Schema == nil || sch.Schema.Ref.String() != "" || sch.TopLevel {
continue
}
asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
if err != nil {
return fmt.Errorf("schema analysis [%s]: %w", key, err)
}
if asch.isAnalyzedAsComplex() { // move complex schemas to definitions
if err := namer.Name(key, sch.Schema, asch); err != nil {
return err
}
}
}
opts.Spec.reload() // re-analyze
return nil
}
func removeUnused(opts *FlattenOpts) {
expected := make(map[string]struct{})
for k := range opts.Swagger().Definitions {
expected[path.Join(definitionsPath, jsonpointer.Escape(k))] = struct{}{}
}
for _, k := range opts.Spec.AllDefinitionReferences() {
delete(expected, k)
}
for k := range expected {
debugLog("removing unused definition %s", path.Base(k))
if opts.Verbose {
log.Printf("info: removing unused definition: %s", path.Base(k))
}
delete(opts.Swagger().Definitions, path.Base(k))
}
opts.Spec.reload() // re-analyze
}
func importKnownRef(entry sortref.RefRevIdx, refStr, newName string, opts *FlattenOpts) error {
// rewrite ref with already resolved external ref (useful for cyclical refs):
// rewrite external refs to local ones
debugLog("resolving known ref [%s] to %s", refStr, newName)
for _, key := range entry.Keys {
if err := replace.UpdateRef(opts.Swagger(), key, spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
return err
}
}
return nil
}
func importNewRef(entry sortref.RefRevIdx, refStr string, opts *FlattenOpts) error {
var (
isOAIGen bool
newName string
)
debugLog("resolving schema from remote $ref [%s]", refStr)
sch, err := spec.ResolveRefWithBase(opts.Swagger(), &entry.Ref, opts.ExpandOpts(false))
if err != nil {
return fmt.Errorf("could not resolve schema: %w", err)
}
// at this stage only $ref analysis matters
partialAnalyzer := &Spec{
references: referenceAnalysis{},
patterns: patternAnalysis{},
enums: enumAnalysis{},
}
partialAnalyzer.reset()
partialAnalyzer.analyzeSchema("", sch, "/")
// now rewrite those refs with rebase
for key, ref := range partialAnalyzer.references.allRefs {
if err := replace.UpdateRef(sch, key, spec.MustCreateRef(normalize.RebaseRef(entry.Ref.String(), ref.String()))); err != nil {
return fmt.Errorf("failed to rewrite ref for key %q at %s: %w", key, entry.Ref.String(), err)
}
}
// generate a unique name - isOAIGen means that a naming conflict was resolved by changing the name
newName, isOAIGen = uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
debugLog("new name for [%s]: %s - with name conflict:%t", strings.Join(entry.Keys, ", "), newName, isOAIGen)
opts.flattenContext.resolved[refStr] = newName
// rewrite the external refs to local ones
for _, key := range entry.Keys {
if err := replace.UpdateRef(opts.Swagger(), key,
spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
return err
}
// keep track of created refs
resolved := false
if _, ok := opts.flattenContext.newRefs[key]; ok {
resolved = opts.flattenContext.newRefs[key].resolved
}
debugLog("keeping track of ref: %s (%s), resolved: %t", key, newName, resolved)
opts.flattenContext.newRefs[key] = &newRef{
key: key,
newName: newName,
path: path.Join(definitionsPath, newName),
isOAIGen: isOAIGen,
resolved: resolved,
schema: sch,
}
}
// add the resolved schema to the definitions
schutils.Save(opts.Swagger(), newName, sch)
return nil
}
// importExternalReferences iteratively digs remote references and imports them into the main schema.
//
// At every iteration, new remotes may be found when digging deeper: they are rebased to the current schema before being imported.
//
// This returns true when no more remote references can be found.
func importExternalReferences(opts *FlattenOpts) (bool, error) {
debugLog("importExternalReferences")
groupedRefs := sortref.ReverseIndex(opts.Spec.references.schemas, opts.BasePath)
sortedRefStr := make([]string, 0, len(groupedRefs))
if opts.flattenContext == nil {
opts.flattenContext = newContext()
}
// sort $ref resolution to ensure deterministic name conflict resolution
for refStr := range groupedRefs {
sortedRefStr = append(sortedRefStr, refStr)
}
sort.Strings(sortedRefStr)
complete := true
for _, refStr := range sortedRefStr {
entry := groupedRefs[refStr]
if entry.Ref.HasFragmentOnly {
continue
}
complete = false
newName := opts.flattenContext.resolved[refStr]
if newName != "" {
if err := importKnownRef(entry, refStr, newName, opts); err != nil {
return false, err
}
continue
}
// resolve schemas
if err := importNewRef(entry, refStr, opts); err != nil {
return false, err
}
}
// maintains ref index entries
for k := range opts.flattenContext.newRefs {
r := opts.flattenContext.newRefs[k]
// update tracking with resolved schemas
if r.schema.Ref.String() != "" {
ref := spec.MustCreateRef(r.path)
sch, err := spec.ResolveRefWithBase(opts.Swagger(), &ref, opts.ExpandOpts(false))
if err != nil {
return false, fmt.Errorf("could not resolve schema: %w", err)
}
r.schema = sch
}
if r.path == k {
continue
}
// update tracking with renamed keys: got a cascade of refs
renamed := *r
renamed.key = r.path
opts.flattenContext.newRefs[renamed.path] = &renamed
// indirect ref
r.newName = path.Base(k)
r.schema = spec.RefSchema(r.path)
r.path = k
r.isOAIGen = strings.Contains(k, "OAIGen")
}
return complete, nil
}
// stripPointersAndOAIGen removes anonymous JSON pointers from spec and chain with name conflicts handler.
// This loops until the spec has no such pointer and all name conflicts have been reduced as much as possible.
func stripPointersAndOAIGen(opts *FlattenOpts) error {
// name all JSON pointers to anonymous documents
if err := namePointers(opts); err != nil {
return err
}
// remove unnecessary OAIGen ref (created when flattening external refs creates name conflicts)
hasIntroducedPointerOrInline, ers := stripOAIGen(opts)
if ers != nil {
return ers
}
// iterate as pointer or OAIGen resolution may introduce inline schemas or pointers
for hasIntroducedPointerOrInline {
if !opts.Minimal {
opts.Spec.reload() // re-analyze
if err := nameInlinedSchemas(opts); err != nil {
return err
}
}
if err := namePointers(opts); err != nil {
return err
}
// restrip and re-analyze
var err error
if hasIntroducedPointerOrInline, err = stripOAIGen(opts); err != nil {
return err
}
}
return nil
}
// stripOAIGen strips the spec from unnecessary OAIGen constructs, initially created to dedupe flattened definitions.
//
// A dedupe is deemed unnecessary whenever:
// - the only conflict is with its (single) parent: OAIGen is merged into its parent (reinlining)
// - there is a conflict with multiple parents: merge OAIGen in first parent, the rewrite other parents to point to
// the first parent.
//
// This function returns true whenever it re-inlined a complex schema, so the caller may choose to iterate
// pointer and name resolution again.
func stripOAIGen(opts *FlattenOpts) (bool, error) {
debugLog("stripOAIGen")
replacedWithComplex := false
// figure out referers of OAIGen definitions (doing it before the ref start mutating)
for _, r := range opts.flattenContext.newRefs {
updateRefParents(opts.Spec.references.allRefs, r)
}
for k := range opts.flattenContext.newRefs {
r := opts.flattenContext.newRefs[k]
debugLog("newRefs[%s]: isOAIGen: %t, resolved: %t, name: %s, path:%s, #parents: %d, parents: %v, ref: %s",
k, r.isOAIGen, r.resolved, r.newName, r.path, len(r.parents), r.parents, r.schema.Ref.String())
if !r.isOAIGen || len(r.parents) == 0 {
continue
}
hasReplacedWithComplex, err := stripOAIGenForRef(opts, k, r)
if err != nil {
return replacedWithComplex, err
}
replacedWithComplex = replacedWithComplex || hasReplacedWithComplex
}
debugLog("replacedWithComplex: %t", replacedWithComplex)
opts.Spec.reload() // re-analyze
return replacedWithComplex, nil
}
// updateRefParents updates all parents of an updated $ref
func updateRefParents(allRefs map[string]spec.Ref, r *newRef) {
if !r.isOAIGen || r.resolved { // bail on already resolved entries (avoid looping)
return
}
for k, v := range allRefs {
if r.path != v.String() {
continue
}
found := false
for _, p := range r.parents {
if p == k {
found = true
break
}
}
if !found {
r.parents = append(r.parents, k)
}
}
}
func stripOAIGenForRef(opts *FlattenOpts, k string, r *newRef) (bool, error) {
replacedWithComplex := false
pr := sortref.TopmostFirst(r.parents)
// rewrite first parent schema in hierarchical then lexicographical order
debugLog("rewrite first parent %s with schema", pr[0])
if err := replace.UpdateRefWithSchema(opts.Swagger(), pr[0], r.schema); err != nil {
return false, err
}
if pa, ok := opts.flattenContext.newRefs[pr[0]]; ok && pa.isOAIGen {
// update parent in ref index entry
debugLog("update parent entry: %s", pr[0])
pa.schema = r.schema
pa.resolved = false
replacedWithComplex = true
}
// rewrite other parents to point to first parent
if len(pr) > 1 {
for _, p := range pr[1:] {
replacingRef := spec.MustCreateRef(pr[0])
// set complex when replacing ref is an anonymous jsonpointer: further processing may be required
replacedWithComplex = replacedWithComplex || path.Dir(replacingRef.String()) != definitionsPath
debugLog("rewrite parent with ref: %s", replacingRef.String())
// NOTE: it is possible at this stage to introduce json pointers (to non-definitions places).
// Those are stripped later on.
if err := replace.UpdateRef(opts.Swagger(), p, replacingRef); err != nil {
return false, err
}
if pa, ok := opts.flattenContext.newRefs[p]; ok && pa.isOAIGen {
// update parent in ref index
debugLog("update parent entry: %s", p)
pa.schema = r.schema
pa.resolved = false
replacedWithComplex = true
}
}
}
// remove OAIGen definition
debugLog("removing definition %s", path.Base(r.path))
delete(opts.Swagger().Definitions, path.Base(r.path))
// propagate changes in ref index for keys which have this one as a parent
for kk, value := range opts.flattenContext.newRefs {
if kk == k || !value.isOAIGen || value.resolved {
continue
}
found := false
newParents := make([]string, 0, len(value.parents))
for _, parent := range value.parents {
switch {
case parent == r.path:
found = true
parent = pr[0]
case strings.HasPrefix(parent, r.path+"/"):
found = true
parent = path.Join(pr[0], strings.TrimPrefix(parent, r.path))
}
newParents = append(newParents, parent)
}
if found {
value.parents = newParents
}
}
// mark naming conflict as resolved
debugLog("marking naming conflict resolved for key: %s", r.key)
opts.flattenContext.newRefs[r.key].isOAIGen = false
opts.flattenContext.newRefs[r.key].resolved = true
// determine if the previous substitution did inline a complex schema
if r.schema != nil && r.schema.Ref.String() == "" { // inline schema
asch, err := Schema(SchemaOpts{Schema: r.schema, Root: opts.Swagger(), BasePath: opts.BasePath})
if err != nil {
return false, err
}
debugLog("re-inlined schema: parent: %s, %t", pr[0], asch.isAnalyzedAsComplex())
replacedWithComplex = replacedWithComplex || !(path.Dir(pr[0]) == definitionsPath) && asch.isAnalyzedAsComplex()
}
return replacedWithComplex, nil
}
// namePointers replaces all JSON pointers to anonymous documents by a $ref to a new named definition.
//
// This is carried on depth-first. Pointers to $refs which are top level definitions are replaced by the $ref itself.
// Pointers to simple types are expanded, unless they express commonality (i.e. several such $ref are used).
func namePointers(opts *FlattenOpts) error {
debugLog("name pointers")
refsToReplace := make(map[string]SchemaRef, len(opts.Spec.references.schemas))
for k, ref := range opts.Spec.references.allRefs {
if path.Dir(ref.String()) == definitionsPath {
// this a ref to a top-level definition: ok
continue
}
result, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), ref)
if err != nil {
return fmt.Errorf("at %s, %w", k, err)
}
replacingRef := result.Ref
sch := result.Schema
if opts.flattenContext != nil {
opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
}
debugLog("planning pointer to replace at %s: %s, resolved to: %s", k, ref.String(), replacingRef.String())
refsToReplace[k] = SchemaRef{
Name: k, // caller
Ref: replacingRef, // called
Schema: sch,
TopLevel: path.Dir(replacingRef.String()) == definitionsPath,
}
}
depthFirst := sortref.DepthFirst(refsToReplace)
namer := &InlineSchemaNamer{
Spec: opts.Swagger(),
Operations: operations.AllOpRefsByRef(opts.Spec, nil),
flattenContext: opts.flattenContext,
opts: opts,
}
for _, key := range depthFirst {
v := refsToReplace[key]
// update current replacement, which may have been updated by previous changes of deeper elements
result, erd := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), v.Ref)
if erd != nil {
return fmt.Errorf("at %s, %w", key, erd)
}
if opts.flattenContext != nil {
opts.flattenContext.warnings = append(opts.flattenContext.warnings, result.Warnings...)
}
v.Ref = result.Ref
v.Schema = result.Schema
v.TopLevel = path.Dir(result.Ref.String()) == definitionsPath
debugLog("replacing pointer at %s: resolved to: %s", key, v.Ref.String())
if v.TopLevel {
debugLog("replace pointer %s by canonical definition: %s", key, v.Ref.String())
// if the schema is a $ref to a top level definition, just rewrite the pointer to this $ref
if err := replace.UpdateRef(opts.Swagger(), key, v.Ref); err != nil {
return err
}
continue
}
if err := flattenAnonPointer(key, v, refsToReplace, namer, opts); err != nil {
return err
}
}
opts.Spec.reload() // re-analyze
return nil
}
func flattenAnonPointer(key string, v SchemaRef, refsToReplace map[string]SchemaRef, namer *InlineSchemaNamer, opts *FlattenOpts) error {
// this is a JSON pointer to an anonymous document (internal or external):
// create a definition for this schema when:
// - it is a complex schema
// - or it is pointed by more than one $ref (i.e. expresses commonality)
// otherwise, expand the pointer (single reference to a simple type)
//
// The named definition for this follows the target's key, not the caller's
debugLog("namePointers at %s for %s", key, v.Ref.String())
// qualify the expanded schema
asch, ers := Schema(SchemaOpts{Schema: v.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
if ers != nil {
return fmt.Errorf("schema analysis [%s]: %w", key, ers)
}
callers := make([]string, 0, 64)
debugLog("looking for callers")
an := New(opts.Swagger())
for k, w := range an.references.allRefs {
r, err := replace.DeepestRef(opts.Swagger(), opts.ExpandOpts(false), w)
if err != nil {
return fmt.Errorf("at %s, %w", key, err)
}
if opts.flattenContext != nil {
opts.flattenContext.warnings = append(opts.flattenContext.warnings, r.Warnings...)
}
if r.Ref.String() == v.Ref.String() {
callers = append(callers, k)
}
}
debugLog("callers for %s: %d", v.Ref.String(), len(callers))
if len(callers) == 0 {
// has already been updated and resolved
return nil
}
parts := sortref.KeyParts(v.Ref.String())
debugLog("number of callers for %s: %d", v.Ref.String(), len(callers))
// identifying edge case when the namer did nothing because we point to a non-schema object
// no definition is created and we expand the $ref for all callers
if (!asch.IsSimpleSchema || len(callers) > 1) && !parts.IsSharedParam() && !parts.IsSharedResponse() {
debugLog("replace JSON pointer at [%s] by definition: %s", key, v.Ref.String())
if err := namer.Name(v.Ref.String(), v.Schema, asch); err != nil {
return err
}
// regular case: we named the $ref as a definition, and we move all callers to this new $ref
for _, caller := range callers {
if caller == key {
continue
}
// move $ref for next to resolve
debugLog("identified caller of %s at [%s]", v.Ref.String(), caller)
c := refsToReplace[caller]
c.Ref = v.Ref
refsToReplace[caller] = c
}
return nil
}
debugLog("expand JSON pointer for key=%s", key)
if err := replace.UpdateRefWithSchema(opts.Swagger(), key, v.Schema); err != nil {
return err
}
// NOTE: there is no other caller to update
return nil
}

293
vendor/github.com/go-openapi/analysis/flatten_name.go generated vendored Normal file

@@ -0,0 +1,293 @@
package analysis
import (
"fmt"
"path"
"sort"
"strings"
"github.com/go-openapi/analysis/internal/flatten/operations"
"github.com/go-openapi/analysis/internal/flatten/replace"
"github.com/go-openapi/analysis/internal/flatten/schutils"
"github.com/go-openapi/analysis/internal/flatten/sortref"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
// InlineSchemaNamer finds a new name for an inlined type
type InlineSchemaNamer struct {
Spec *spec.Swagger
Operations map[string]operations.OpRef
flattenContext *context
opts *FlattenOpts
}
// Name yields a new name for the inline schema
func (isn *InlineSchemaNamer) Name(key string, schema *spec.Schema, aschema *AnalyzedSchema) error {
debugLog("naming inlined schema at %s", key)
parts := sortref.KeyParts(key)
for _, name := range namesFromKey(parts, aschema, isn.Operations) {
if name == "" {
continue
}
// create unique name
newName, isOAIGen := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))
// clone schema
sch := schutils.Clone(schema)
// replace values on schema
if err := replace.RewriteSchemaToRef(isn.Spec, key,
spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
return fmt.Errorf("error while creating definition %q from inline schema: %w", newName, err)
}
// rewrite any dependent $ref pointing to this place,
// when not already pointing to a top-level definition.
//
// NOTE: this is important if such referers use arbitrary JSON pointers.
an := New(isn.Spec)
for k, v := range an.references.allRefs {
r, erd := replace.DeepestRef(isn.opts.Swagger(), isn.opts.ExpandOpts(false), v)
if erd != nil {
return fmt.Errorf("at %s, %w", k, erd)
}
if isn.opts.flattenContext != nil {
isn.opts.flattenContext.warnings = append(isn.opts.flattenContext.warnings, r.Warnings...)
}
if r.Ref.String() != key && (r.Ref.String() != path.Join(definitionsPath, newName) || path.Dir(v.String()) == definitionsPath) {
continue
}
debugLog("found a $ref to a rewritten schema: %s points to %s", k, v.String())
// rewrite $ref to the new target
if err := replace.UpdateRef(isn.Spec, k,
spec.MustCreateRef(path.Join(definitionsPath, newName))); err != nil {
return err
}
}
// NOTE: this extension is currently not used by go-swagger (provided for information only)
sch.AddExtension("x-go-gen-location", GenLocation(parts))
// save cloned schema to definitions
schutils.Save(isn.Spec, newName, sch)
// keep track of created refs
if isn.flattenContext == nil {
continue
}
debugLog("track created ref: key=%s, newName=%s, isOAIGen=%t", key, newName, isOAIGen)
resolved := false
if _, ok := isn.flattenContext.newRefs[key]; ok {
resolved = isn.flattenContext.newRefs[key].resolved
}
isn.flattenContext.newRefs[key] = &newRef{
key: key,
newName: newName,
path: path.Join(definitionsPath, newName),
isOAIGen: isOAIGen,
resolved: resolved,
schema: sch,
}
}
return nil
}
// uniqifyName yields a unique name for a definition
func uniqifyName(definitions spec.Definitions, name string) (string, bool) {
isOAIGen := false
if name == "" {
name = "oaiGen"
isOAIGen = true
}
if len(definitions) == 0 {
return name, isOAIGen
}
unq := true
for k := range definitions {
if strings.EqualFold(k, name) {
unq = false
break
}
}
if unq {
return name, isOAIGen
}
name += "OAIGen"
isOAIGen = true
var idx int
unique := name
_, known := definitions[unique]
for known {
idx++
unique = fmt.Sprintf("%s%d", name, idx)
_, known = definitions[unique]
}
return unique, isOAIGen
}
func namesFromKey(parts sortref.SplitKey, aschema *AnalyzedSchema, operations map[string]operations.OpRef) []string {
var (
baseNames [][]string
startIndex int
)
if parts.IsOperation() {
baseNames, startIndex = namesForOperation(parts, operations)
}
// definitions
if parts.IsDefinition() {
baseNames, startIndex = namesForDefinition(parts)
}
result := make([]string, 0, len(baseNames))
for _, segments := range baseNames {
nm := parts.BuildName(segments, startIndex, partAdder(aschema))
if nm == "" {
continue
}
result = append(result, nm)
}
sort.Strings(result)
return result
}
func namesForParam(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
var (
baseNames [][]string
startIndex int
)
piref := parts.PathItemRef()
if piref.String() != "" && parts.IsOperationParam() {
if op, ok := operations[piref.String()]; ok {
startIndex = 5
baseNames = append(baseNames, []string{op.ID, "params", "body"})
}
} else if parts.IsSharedOperationParam() {
pref := parts.PathRef()
for k, v := range operations {
if strings.HasPrefix(k, pref.String()) {
startIndex = 4
baseNames = append(baseNames, []string{v.ID, "params", "body"})
}
}
}
return baseNames, startIndex
}
func namesForOperation(parts sortref.SplitKey, operations map[string]operations.OpRef) ([][]string, int) {
var (
baseNames [][]string
startIndex int
)
// params
if parts.IsOperationParam() || parts.IsSharedOperationParam() {
baseNames, startIndex = namesForParam(parts, operations)
}
// responses
if parts.IsOperationResponse() {
piref := parts.PathItemRef()
if piref.String() != "" {
if op, ok := operations[piref.String()]; ok {
startIndex = 6
baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
}
}
}
return baseNames, startIndex
}
func namesForDefinition(parts sortref.SplitKey) ([][]string, int) {
nm := parts.DefinitionName()
if nm != "" {
return [][]string{{parts.DefinitionName()}}, 2
}
return [][]string{}, 0
}
// partAdder knows how to interpret a schema when it comes to build a name from parts
func partAdder(aschema *AnalyzedSchema) sortref.PartAdder {
return func(part string) []string {
segments := make([]string, 0, 2)
if part == "items" || part == "additionalItems" {
if aschema.IsTuple || aschema.IsTupleWithExtra {
segments = append(segments, "tuple")
} else {
segments = append(segments, "items")
}
if part == "additionalItems" {
segments = append(segments, part)
}
return segments
}
segments = append(segments, part)
return segments
}
}
func nameFromRef(ref spec.Ref) string {
u := ref.GetURL()
if u.Fragment != "" {
return swag.ToJSONName(path.Base(u.Fragment))
}
if u.Path != "" {
bn := path.Base(u.Path)
if bn != "" && bn != "/" {
ext := path.Ext(bn)
if ext != "" {
return swag.ToJSONName(bn[:len(bn)-len(ext)])
}
return swag.ToJSONName(bn)
}
}
return swag.ToJSONName(strings.ReplaceAll(u.Host, ".", " "))
}
// GenLocation indicates from which section of the specification (models or operations) a definition has been created.
//
// This is reflected in the output spec with an "x-go-gen-location" extension. At the moment, this is provided
// for information only.
func GenLocation(parts sortref.SplitKey) string {
switch {
case parts.IsOperation():
return "operations"
case parts.IsDefinition():
return "models"
default:
return ""
}
}

78
vendor/github.com/go-openapi/analysis/flatten_options.go generated vendored Normal file

@@ -0,0 +1,78 @@
package analysis
import (
"log"
"github.com/go-openapi/spec"
)
// FlattenOpts configuration for flattening a swagger specification.
//
// The BasePath parameter is used to locate remote relative $ref found in the specification.
// This path is a file: it points to the location of the root document and may be either a local
// file path or a URL.
//
// If none specified, relative references (e.g. "$ref": "folder/schema.yaml#/definitions/...")
// found in the spec are searched from the current working directory.
type FlattenOpts struct {
Spec *Spec // The analyzed spec to work with
flattenContext *context // Internal context to track flattening activity
BasePath string // The location of the root document for this spec to resolve relative $ref
// Flattening options
Expand bool // When true, skip flattening the spec and expand it instead (if Minimal is false)
Minimal bool // When true, do not decompose complex structures such as allOf
Verbose bool // enable some reporting on possible name conflicts detected
RemoveUnused bool // When true, remove unused parameters, responses and definitions after expansion/flattening
ContinueOnError bool // Continue when spec expansion issues are found
/* Extra keys */
_ struct{} // require keys
}
// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *spec.ExpandOptions {
return &spec.ExpandOptions{
RelativeBase: f.BasePath,
SkipSchemas: skipSchemas,
ContinueOnError: f.ContinueOnError,
}
}
// Swagger gets the swagger specification for this flatten operation
func (f *FlattenOpts) Swagger() *spec.Swagger {
return f.Spec.spec
}
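// Illustrative sketch (not part of the upstream file): a typical configuration for
// a minimal flatten. The base path value is an assumption for the example; the
// analyzed spec would usually come from analysis.New on a loaded document.
func exampleFlattenOpts(an *Spec) *FlattenOpts {
return &FlattenOpts{
Spec: an, // the analyzed spec to flatten
BasePath: "./swagger/swagger.yml", // where the root document lives, to resolve relative $ref
Minimal: true, // do not decompose allOf and other complex constructs
Verbose: true, // report possible name conflicts via croak()
}
}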
// croak logs notifications and warnings about valid, but possibly unwanted constructs resulting
// from flattening a spec
func (f *FlattenOpts) croak() {
if !f.Verbose {
return
}
reported := make(map[string]bool, len(f.flattenContext.newRefs))
for _, v := range f.Spec.references.allRefs {
// warns about duplicate handling
for _, r := range f.flattenContext.newRefs {
if r.isOAIGen && r.path == v.String() {
reported[r.newName] = true
}
}
}
for k := range reported {
log.Printf("warning: duplicate flattened definition name resolved as %s", k)
}
// warns about possible type mismatches
uniqueMsg := make(map[string]bool)
for _, msg := range f.flattenContext.warnings {
if _, ok := uniqueMsg[msg]; ok {
continue
}
log.Printf("warning: %s", msg)
uniqueMsg[msg] = true
}
}

View File

@ -0,0 +1,41 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package debug
import (
"fmt"
"log"
"os"
"path/filepath"
"runtime"
)
var (
output = os.Stdout
)
// GetLogger provides a prefixed debug logger
func GetLogger(prefix string, debug bool) func(string, ...interface{}) {
if debug {
logger := log.New(output, fmt.Sprintf("%s:", prefix), log.LstdFlags)
return func(msg string, args ...interface{}) {
_, file1, pos1, _ := runtime.Caller(1)
logger.Printf("%s:%d: %s", filepath.Base(file1), pos1, fmt.Sprintf(msg, args...))
}
}
return func(msg string, args ...interface{}) {}
}
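// Illustrative sketch (not part of the upstream file): how callers in this module
// typically obtain a logger gated by the SWAGGER_DEBUG environment variable.
var exampleLog = GetLogger("analysis/debug-example", os.Getenv("SWAGGER_DEBUG") != "")

func exampleLogUsage() {
exampleLog("visited %d references", 3)
}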

View File

@ -0,0 +1,87 @@
package normalize
import (
"net/url"
"path"
"path/filepath"
"strings"
"github.com/go-openapi/spec"
)
// RebaseRef rebases a remote ref relative to a base ref.
//
// NOTE: does not support JSONschema ID for $ref (we assume we are working with swagger specs here).
//
// NOTE(windows):
// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
// * "/ in paths may appear as escape sequences
func RebaseRef(baseRef string, ref string) string {
baseRef, _ = url.PathUnescape(baseRef)
ref, _ = url.PathUnescape(ref)
if baseRef == "" || baseRef == "." || strings.HasPrefix(baseRef, "#") {
return ref
}
parts := strings.Split(ref, "#")
baseParts := strings.Split(baseRef, "#")
baseURL, _ := url.Parse(baseParts[0])
if strings.HasPrefix(ref, "#") {
if baseURL.Host == "" {
return strings.Join([]string{baseParts[0], parts[1]}, "#")
}
return strings.Join([]string{baseParts[0], parts[1]}, "#")
}
refURL, _ := url.Parse(parts[0])
if refURL.Host != "" || filepath.IsAbs(parts[0]) {
// not rebasing an absolute path
return ref
}
// there is a relative path
var basePath string
if baseURL.Host != "" {
// when there is a host, standard URI rules apply (with "/")
baseURL.Path = path.Dir(baseURL.Path)
baseURL.Path = path.Join(baseURL.Path, "/"+parts[0])
return baseURL.String()
}
// this is a local relative path
// basePart[0] and parts[0] are local filesystem directories/files
basePath = filepath.Dir(baseParts[0])
relPath := filepath.Join(basePath, string(filepath.Separator)+parts[0])
if len(parts) > 1 {
return strings.Join([]string{relPath, parts[1]}, "#")
}
return relPath
}
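// Illustrative sketch (not part of the upstream file): rebasing refs found in a
// document located at spec/swagger.yml. Paths are assumptions for the example;
// the expected results in the comments assume "/"-separated local paths.
func exampleRebaseRef() []string {
return []string{
// a sibling file ref is resolved next to the base document: "spec/models.yml#/definitions/user"
RebaseRef("spec/swagger.yml", "models.yml#/definitions/user"),
// a fragment-only ref is reattached to the base document: "spec/swagger.yml#/definitions/user"
RebaseRef("spec/swagger.yml", "#/definitions/user"),
}
}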
// Path renders the absolute path for remote file refs.
//
// NOTE(windows):
// * refs are assumed to have been normalized with drive letter lower cased (from go-openapi/spec)
// * "/ in paths may appear as escape sequences
func Path(ref spec.Ref, basePath string) string {
uri, _ := url.PathUnescape(ref.String())
if ref.HasFragmentOnly || filepath.IsAbs(uri) {
return uri
}
refURL, _ := url.Parse(uri)
if refURL.Host != "" {
return uri
}
parts := strings.Split(uri, "#")
// BasePath, parts[0] are local filesystem directories, guaranteed to be absolute at this stage
parts[0] = filepath.Join(filepath.Dir(basePath), parts[0])
return strings.Join(parts, "#")
}

View File

@ -0,0 +1,90 @@
package operations
import (
"path"
"sort"
"strings"
"github.com/go-openapi/jsonpointer"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
// AllOpRefsByRef returns an index of sortable operations
func AllOpRefsByRef(specDoc Provider, operationIDs []string) map[string]OpRef {
return OpRefsByRef(GatherOperations(specDoc, operationIDs))
}
// OpRefsByRef indexes a map of sortable operations
func OpRefsByRef(oprefs map[string]OpRef) map[string]OpRef {
result := make(map[string]OpRef, len(oprefs))
for _, v := range oprefs {
result[v.Ref.String()] = v
}
return result
}
// OpRef is an indexable, sortable operation
type OpRef struct {
Method string
Path string
Key string
ID string
Op *spec.Operation
Ref spec.Ref
}
// OpRefs is a sortable collection of operations
type OpRefs []OpRef
func (o OpRefs) Len() int { return len(o) }
func (o OpRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
func (o OpRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
// Provider knows how to collect operations from a spec
type Provider interface {
Operations() map[string]map[string]*spec.Operation
}
// GatherOperations builds a map of sorted operations from a spec
func GatherOperations(specDoc Provider, operationIDs []string) map[string]OpRef {
var oprefs OpRefs
for method, pathItem := range specDoc.Operations() {
for pth, operation := range pathItem {
vv := *operation
oprefs = append(oprefs, OpRef{
Key: swag.ToGoName(strings.ToLower(method) + " " + pth),
Method: method,
Path: pth,
ID: vv.ID,
Op: &vv,
Ref: spec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
})
}
}
sort.Sort(oprefs)
operations := make(map[string]OpRef)
for _, opr := range oprefs {
nm := opr.ID
if nm == "" {
nm = opr.Key
}
oo, found := operations[nm]
if found && oo.Method != opr.Method && oo.Path != opr.Path {
nm = opr.Key
}
if len(operationIDs) == 0 || swag.ContainsStrings(operationIDs, opr.ID) || swag.ContainsStrings(operationIDs, nm) {
opr.ID = nm
opr.Op.ID = nm
operations[nm] = opr
}
}
return operations
}
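// Illustrative sketch (not part of the upstream file): collecting every operation
// ID from a spec. The doc argument is anything implementing Provider, such as an
// analyzed spec exposing Operations() (an assumption of this example).
func exampleGatherAll(doc Provider) []string {
ops := GatherOperations(doc, nil) // nil filter: keep all operations
ids := make([]string, 0, len(ops))
for id := range ops {
ids = append(ids, id)
}
sort.Strings(ids)
return ids
}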

View File

@ -0,0 +1,434 @@
package replace
import (
"fmt"
"net/url"
"os"
"path"
"strconv"
"github.com/go-openapi/analysis/internal/debug"
"github.com/go-openapi/jsonpointer"
"github.com/go-openapi/spec"
)
const definitionsPath = "#/definitions"
var debugLog = debug.GetLogger("analysis/flatten/replace", os.Getenv("SWAGGER_DEBUG") != "")
// RewriteSchemaToRef replaces a schema with a Ref
func RewriteSchemaToRef(sp *spec.Swagger, key string, ref spec.Ref) error {
debugLog("rewriting schema to ref for %s with %s", key, ref.String())
_, value, err := getPointerFromKey(sp, key)
if err != nil {
return err
}
switch refable := value.(type) {
case *spec.Schema:
return rewriteParentRef(sp, key, ref)
case spec.Schema:
return rewriteParentRef(sp, key, ref)
case *spec.SchemaOrArray:
if refable.Schema != nil {
refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
}
case *spec.SchemaOrBool:
if refable.Schema != nil {
refable.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
}
default:
return fmt.Errorf("no schema with ref found at %s for %T", key, value)
}
return nil
}
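// Illustrative sketch (not part of the upstream file): replacing an inlined 200
// response schema with a $ref to a named definition. The JSON pointer and the
// definition name are assumptions for the example.
func exampleRewriteSchemaToRef(sp *spec.Swagger) error {
key := "#/paths/~1users/get/responses/200/schema" // "~1" escapes "/" in the path segment
return RewriteSchemaToRef(sp, key, spec.MustCreateRef("#/definitions/getUsersOKBody"))
}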
func rewriteParentRef(sp *spec.Swagger, key string, ref spec.Ref) error {
parent, entry, pvalue, err := getParentFromKey(sp, key)
if err != nil {
return err
}
debugLog("rewriting holder for %T", pvalue)
switch container := pvalue.(type) {
case spec.Response:
if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
return err
}
case *spec.Response:
container.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case *spec.Responses:
statusCode, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", key[1:], err)
}
resp := container.StatusCodeResponses[statusCode]
resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
container.StatusCodeResponses[statusCode] = resp
case map[string]spec.Response:
resp := container[entry]
resp.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
container[entry] = resp
case spec.Parameter:
if err := rewriteParentRef(sp, "#"+parent, ref); err != nil {
return err
}
case map[string]spec.Parameter:
param := container[entry]
param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
container[entry] = param
case []spec.Parameter:
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", key[1:], err)
}
param := container[idx]
param.Schema = &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
container[idx] = param
case spec.Definitions:
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case map[string]spec.Schema:
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case []spec.Schema:
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", key[1:], err)
}
container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case *spec.SchemaOrArray:
// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", key[1:], err)
}
container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case spec.SchemaProperties:
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
// NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
default:
return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
}
return nil
}
// getPointerFromKey retrieves the content of the JSON pointer "key"
func getPointerFromKey(sp interface{}, key string) (string, interface{}, error) {
switch sp.(type) {
case *spec.Schema:
case *spec.Swagger:
default:
panic("unexpected type used in getPointerFromKey")
}
if key == "#/" {
return "", sp, nil
}
// unescape chars in key, e.g. "{}" from path params
pth, _ := url.PathUnescape(key[1:])
ptr, err := jsonpointer.New(pth)
if err != nil {
return "", nil, err
}
value, _, err := ptr.Get(sp)
if err != nil {
debugLog("error when getting key: %s with path: %s", key, pth)
return "", nil, err
}
return pth, value, nil
}
// getParentFromKey retrieves the container of the JSON pointer "key"
func getParentFromKey(sp interface{}, key string) (string, string, interface{}, error) {
switch sp.(type) {
case *spec.Schema:
case *spec.Swagger:
default:
panic("unexpected type used in getPointerFromKey")
}
// unescape chars in key, e.g. "{}" from path params
pth, _ := url.PathUnescape(key[1:])
parent, entry := path.Dir(pth), path.Base(pth)
debugLog("getting schema holder at: %s, with entry: %s", parent, entry)
pptr, err := jsonpointer.New(parent)
if err != nil {
return "", "", nil, err
}
pvalue, _, err := pptr.Get(sp)
if err != nil {
return "", "", nil, fmt.Errorf("can't get parent for %s: %w", parent, err)
}
return parent, entry, pvalue, nil
}
// UpdateRef replaces a ref by another one
func UpdateRef(sp interface{}, key string, ref spec.Ref) error {
switch sp.(type) {
case *spec.Schema:
case *spec.Swagger:
default:
panic("unexpected type used in getPointerFromKey")
}
debugLog("updating ref for %s with %s", key, ref.String())
pth, value, err := getPointerFromKey(sp, key)
if err != nil {
return err
}
switch refable := value.(type) {
case *spec.Schema:
refable.Ref = ref
case *spec.SchemaOrArray:
if refable.Schema != nil {
refable.Schema.Ref = ref
}
case *spec.SchemaOrBool:
if refable.Schema != nil {
refable.Schema.Ref = ref
}
case spec.Schema:
debugLog("rewriting holder for %T", refable)
_, entry, pvalue, erp := getParentFromKey(sp, key)
if erp != nil {
return erp
}
switch container := pvalue.(type) {
case spec.Definitions:
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case map[string]spec.Schema:
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case []spec.Schema:
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", pth, err)
}
container[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case *spec.SchemaOrArray:
// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", pth, err)
}
container.Schemas[idx] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
case spec.SchemaProperties:
container[entry] = spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
// NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
default:
return fmt.Errorf("unhandled container type at %s: %T", key, value)
}
default:
return fmt.Errorf("no schema with ref found at %s for %T", key, value)
}
return nil
}
// UpdateRefWithSchema replaces a ref with a schema (i.e. re-inline schema)
func UpdateRefWithSchema(sp *spec.Swagger, key string, sch *spec.Schema) error {
debugLog("updating ref for %s with schema", key)
pth, value, err := getPointerFromKey(sp, key)
if err != nil {
return err
}
switch refable := value.(type) {
case *spec.Schema:
*refable = *sch
case spec.Schema:
_, entry, pvalue, erp := getParentFromKey(sp, key)
if erp != nil {
return erp
}
switch container := pvalue.(type) {
case spec.Definitions:
container[entry] = *sch
case map[string]spec.Schema:
container[entry] = *sch
case []spec.Schema:
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", pth, err)
}
container[idx] = *sch
case *spec.SchemaOrArray:
// NOTE: this is necessarily an array - otherwise, the parent would be *Schema
idx, err := strconv.Atoi(entry)
if err != nil {
return fmt.Errorf("%s not a number: %w", pth, err)
}
container.Schemas[idx] = *sch
case spec.SchemaProperties:
container[entry] = *sch
// NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
default:
return fmt.Errorf("unhandled type for parent of [%s]: %T", key, value)
}
case *spec.SchemaOrArray:
*refable.Schema = *sch
// NOTE: can't have case *spec.SchemaOrBool = parent in this case is *Schema
case *spec.SchemaOrBool:
*refable.Schema = *sch
default:
return fmt.Errorf("no schema with ref found at %s for %T", key, value)
}
return nil
}
// DeepestRefResult holds the results from DeepestRef analysis
type DeepestRefResult struct {
Ref spec.Ref
Schema *spec.Schema
Warnings []string
}
// DeepestRef finds the first definition ref, from a cascade of nested refs which are not definitions.
// - if no definition is found, returns the deepest ref.
// - pointers to external files are expanded
//
// NOTE: all external $ref's are assumed to be already expanded at this stage.
func DeepestRef(sp *spec.Swagger, opts *spec.ExpandOptions, ref spec.Ref) (*DeepestRefResult, error) {
if !ref.HasFragmentOnly {
// we found an external $ref, which is odd at this stage:
// do nothing on external $refs
return &DeepestRefResult{Ref: ref}, nil
}
currentRef := ref
visited := make(map[string]bool, 64)
warnings := make([]string, 0, 2)
DOWNREF:
for currentRef.String() != "" {
if path.Dir(currentRef.String()) == definitionsPath {
// this is a top-level definition: stop here and return this ref
return &DeepestRefResult{Ref: currentRef}, nil
}
if _, beenThere := visited[currentRef.String()]; beenThere {
return nil,
fmt.Errorf("cannot resolve cyclic chain of pointers under %s", currentRef.String())
}
visited[currentRef.String()] = true
value, _, err := currentRef.GetPointer().Get(sp)
if err != nil {
return nil, err
}
switch refable := value.(type) {
case *spec.Schema:
if refable.Ref.String() == "" {
break DOWNREF
}
currentRef = refable.Ref
case spec.Schema:
if refable.Ref.String() == "" {
break DOWNREF
}
currentRef = refable.Ref
case *spec.SchemaOrArray:
if refable.Schema == nil || refable.Schema.Ref.String() == "" {
break DOWNREF
}
currentRef = refable.Schema.Ref
case *spec.SchemaOrBool:
if refable.Schema == nil || refable.Schema.Ref.String() == "" {
break DOWNREF
}
currentRef = refable.Schema.Ref
case spec.Response:
// a pointer points to a schema initially marshalled in responses section...
// Attempt to convert this to a schema. If this fails, the spec is invalid
asJSON, _ := refable.MarshalJSON()
var asSchema spec.Schema
err := asSchema.UnmarshalJSON(asJSON)
if err != nil {
return nil,
fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
currentRef.String(), value)
}
warnings = append(warnings, fmt.Sprintf("found $ref %q (response) interpreted as schema", currentRef.String()))
if asSchema.Ref.String() == "" {
break DOWNREF
}
currentRef = asSchema.Ref
case spec.Parameter:
// a pointer points to a schema initially marshalled in parameters section...
// Attempt to convert this to a schema. If this fails, the spec is invalid
asJSON, _ := refable.MarshalJSON()
var asSchema spec.Schema
if err := asSchema.UnmarshalJSON(asJSON); err != nil {
return nil,
fmt.Errorf("invalid type for resolved JSON pointer %s. Expected a schema a, got: %T",
currentRef.String(), value)
}
warnings = append(warnings, fmt.Sprintf("found $ref %q (parameter) interpreted as schema", currentRef.String()))
if asSchema.Ref.String() == "" {
break DOWNREF
}
currentRef = asSchema.Ref
default:
return nil,
fmt.Errorf("unhandled type to resolve JSON pointer %s. Expected a Schema, got: %T",
currentRef.String(), value)
}
}
// assess what schema we're ending with
sch, erv := spec.ResolveRefWithBase(sp, &currentRef, opts)
if erv != nil {
return nil, erv
}
if sch == nil {
return nil, fmt.Errorf("no schema found at %s", currentRef.String())
}
return &DeepestRefResult{Ref: currentRef, Schema: sch, Warnings: warnings}, nil
}
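// Illustrative sketch (not part of the upstream file): walking a chain of nested
// $ref pointers down to a #/definitions entry (or the deepest ref found). The
// starting pointer is an assumption for the example.
func exampleDeepestRef(sp *spec.Swagger) (spec.Ref, []string, error) {
start := spec.MustCreateRef("#/paths/~1users/get/responses/200/schema")
res, err := DeepestRef(sp, &spec.ExpandOptions{}, start)
if err != nil {
return spec.Ref{}, nil, err
}
// res.Ref is the resolved target; res.Warnings lists refs that had to be
// reinterpreted as schemas (e.g. pointers into responses or parameters)
return res.Ref, res.Warnings, nil
}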

View File

@ -0,0 +1,29 @@
// Package schutils provides tools to save or clone a schema
// when flattening a spec.
package schutils
import (
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
// Save registers a schema as an entry in spec #/definitions
func Save(sp *spec.Swagger, name string, schema *spec.Schema) {
if schema == nil {
return
}
if sp.Definitions == nil {
sp.Definitions = make(map[string]spec.Schema, 150)
}
sp.Definitions[name] = *schema
}
// Clone deep-clones a schema
func Clone(schema *spec.Schema) *spec.Schema {
var sch spec.Schema
_ = swag.FromDynamicJSON(schema, &sch)
return &sch
}
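// Illustrative sketch (not part of the upstream file): promoting an inlined schema
// to #/definitions under a new name, keeping the original untouched by cloning it
// first. The definition name is an assumption for the example.
func exampleSaveClone(sp *spec.Swagger, inlined *spec.Schema) {
Save(sp, "getUsersOKBody", Clone(inlined))
}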

View File

@ -0,0 +1,201 @@
package sortref
import (
"net/http"
"path"
"strconv"
"strings"
"github.com/go-openapi/jsonpointer"
"github.com/go-openapi/spec"
)
const (
paths = "paths"
responses = "responses"
parameters = "parameters"
definitions = "definitions"
)
var (
ignoredKeys map[string]struct{}
validMethods map[string]struct{}
)
func init() {
ignoredKeys = map[string]struct{}{
"schema": {},
"properties": {},
"not": {},
"anyOf": {},
"oneOf": {},
}
validMethods = map[string]struct{}{
"GET": {},
"HEAD": {},
"OPTIONS": {},
"PATCH": {},
"POST": {},
"PUT": {},
"DELETE": {},
}
}
// Key represents a key item constructed from /-separated segments
type Key struct {
Segments int
Key string
}
// Keys is a sortable collection of Keys
type Keys []Key
func (k Keys) Len() int { return len(k) }
func (k Keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
func (k Keys) Less(i, j int) bool {
return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
}
// KeyParts constructs a SplitKey with all its /-separated segments decomposed. It is sortable.
func KeyParts(key string) SplitKey {
var res []string
for _, part := range strings.Split(key[1:], "/") {
if part != "" {
res = append(res, jsonpointer.Unescape(part))
}
}
return res
}
// SplitKey holds the parts of a /-separated key, so that their location may be determined.
type SplitKey []string
// IsDefinition is true when the split key is in the #/definitions section of a spec
func (s SplitKey) IsDefinition() bool {
return len(s) > 1 && s[0] == definitions
}
// DefinitionName yields the name of the definition
func (s SplitKey) DefinitionName() string {
if !s.IsDefinition() {
return ""
}
return s[1]
}
func (s SplitKey) isKeyName(i int) bool {
if i <= 0 {
return false
}
count := 0
for idx := i - 1; idx > 0; idx-- {
if s[idx] != "properties" {
break
}
count++
}
return count%2 != 0
}
// PartAdder knows how to construct the components of a new name
type PartAdder func(string) []string
// BuildName builds a name from segments
func (s SplitKey) BuildName(segments []string, startIndex int, adder PartAdder) string {
for i, part := range s[startIndex:] {
if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
segments = append(segments, adder(part)...)
}
}
return strings.Join(segments, " ")
}
// IsOperation is true when the split key is in the operations section
func (s SplitKey) IsOperation() bool {
return len(s) > 1 && s[0] == paths
}
// IsSharedOperationParam is true when the split key is in the parameters section of a path
func (s SplitKey) IsSharedOperationParam() bool {
return len(s) > 2 && s[0] == paths && s[2] == parameters
}
// IsSharedParam is true when the split key is in the #/parameters section of a spec
func (s SplitKey) IsSharedParam() bool {
return len(s) > 1 && s[0] == parameters
}
// IsOperationParam is true when the split key is in the parameters section of an operation
func (s SplitKey) IsOperationParam() bool {
return len(s) > 3 && s[0] == paths && s[3] == parameters
}
// IsOperationResponse is true when the split key is in the responses section of an operation
func (s SplitKey) IsOperationResponse() bool {
return len(s) > 3 && s[0] == paths && s[3] == responses
}
// IsSharedResponse is true when the split key is in the #/responses section of a spec
func (s SplitKey) IsSharedResponse() bool {
return len(s) > 1 && s[0] == responses
}
// IsDefaultResponse is true when the split key is the default response for an operation
func (s SplitKey) IsDefaultResponse() bool {
return len(s) > 4 && s[0] == paths && s[3] == responses && s[4] == "default"
}
// IsStatusCodeResponse is true when the split key is an operation response with a status code
func (s SplitKey) IsStatusCodeResponse() bool {
isInt := func() bool {
_, err := strconv.Atoi(s[4])
return err == nil
}
return len(s) > 4 && s[0] == paths && s[3] == responses && isInt()
}
// ResponseName yields either the HTTP status text for a status code response, or "Default" for a default response
func (s SplitKey) ResponseName() string {
if s.IsStatusCodeResponse() {
code, _ := strconv.Atoi(s[4])
return http.StatusText(code)
}
if s.IsDefaultResponse() {
return "Default"
}
return ""
}
// PathItemRef constructs a $ref object from a split key of the form /{path}/{method}
func (s SplitKey) PathItemRef() spec.Ref {
if len(s) < 3 {
return spec.Ref{}
}
pth, method := s[1], s[2]
if _, isValidMethod := validMethods[strings.ToUpper(method)]; !isValidMethod && !strings.HasPrefix(method, "x-") {
return spec.Ref{}
}
return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(pth), strings.ToUpper(method)))
}
// PathRef constructs a $ref object from a split key of the form /paths/{reference}
func (s SplitKey) PathRef() spec.Ref {
if !s.IsOperation() {
return spec.Ref{}
}
return spec.MustCreateRef("#" + path.Join("/", paths, jsonpointer.Escape(s[1])))
}
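// Illustrative sketch (not part of the upstream file): classifying a flattened key.
// The key is an assumption for the example; for this input, IsOperationResponse()
// is expected to be true and ResponseName() to yield the status text for 200.
func exampleSplitKey() (bool, string) {
parts := KeyParts("#/paths/~1users/get/responses/200/schema")
return parts.IsOperationResponse(), parts.ResponseName()
}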

View File

@ -0,0 +1,141 @@
package sortref
import (
"reflect"
"sort"
"strings"
"github.com/go-openapi/analysis/internal/flatten/normalize"
"github.com/go-openapi/spec"
)
var depthGroupOrder = []string{
"sharedParam", "sharedResponse", "sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition",
}
type mapIterator struct {
len int
mapIter *reflect.MapIter
}
func (i *mapIterator) Next() bool {
return i.mapIter.Next()
}
func (i *mapIterator) Len() int {
return i.len
}
func (i *mapIterator) Key() string {
return i.mapIter.Key().String()
}
func mustMapIterator(anyMap interface{}) *mapIterator {
val := reflect.ValueOf(anyMap)
return &mapIterator{mapIter: val.MapRange(), len: val.Len()}
}
// DepthFirst sorts a map of anything. It groups keys by category
// (shared params, op params, status code responses, default responses, definitions),
// sorts each group internally by the number of parts in the key and then lexically,
// and flattens the groups into a single list of keys.
func DepthFirst(in interface{}) []string {
iterator := mustMapIterator(in)
sorted := make([]string, 0, iterator.Len())
grouped := make(map[string]Keys, iterator.Len())
for iterator.Next() {
k := iterator.Key()
split := KeyParts(k)
var pk string
if split.IsSharedOperationParam() {
pk = "sharedOpParam"
}
if split.IsOperationParam() {
pk = "opParam"
}
if split.IsStatusCodeResponse() {
pk = "codeResponse"
}
if split.IsDefaultResponse() {
pk = "defaultResponse"
}
if split.IsDefinition() {
pk = "definition"
}
if split.IsSharedParam() {
pk = "sharedParam"
}
if split.IsSharedResponse() {
pk = "sharedResponse"
}
grouped[pk] = append(grouped[pk], Key{Segments: len(split), Key: k})
}
for _, pk := range depthGroupOrder {
res := grouped[pk]
sort.Sort(res)
for _, v := range res {
sorted = append(sorted, v.Key)
}
}
return sorted
}
// topmostRefs sorts refs by hierarchical then lexicographic order,
// yielding refs ordered breadth-first.
type topmostRefs []string
func (k topmostRefs) Len() int { return len(k) }
func (k topmostRefs) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
func (k topmostRefs) Less(i, j int) bool {
li, lj := len(strings.Split(k[i], "/")), len(strings.Split(k[j], "/"))
if li == lj {
return k[i] < k[j]
}
return li < lj
}
// TopmostFirst sorts references by depth
func TopmostFirst(refs []string) []string {
res := topmostRefs(refs)
sort.Sort(res)
return res
}
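// Illustrative sketch (not part of the upstream file): ordering flattened keys.
// The keys are assumptions for the example; DepthFirst groups them by category
// (definitions last), while TopmostFirst orders refs from shallowest to deepest.
func exampleOrdering() ([]string, []string) {
byDepth := DepthFirst(map[string]struct{}{
"#/definitions/user": {},
"#/paths/~1users/get/responses/200/schema": {},
"#/paths/~1users/parameters/0/schema": {},
})
byTopmost := TopmostFirst([]string{
"#/definitions/user/properties/address",
"#/definitions/user",
})
return byDepth, byTopmost
}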
// RefRevIdx is a reverse index for references
type RefRevIdx struct {
Ref spec.Ref
Keys []string
}
// ReverseIndex builds a reverse index for references in schemas
func ReverseIndex(schemas map[string]spec.Ref, basePath string) map[string]RefRevIdx {
collected := make(map[string]RefRevIdx)
for key, schRef := range schemas {
// normalize paths before sorting,
// so we get together keys that are from the same external file
normalizedPath := normalize.Path(schRef, basePath)
entry, ok := collected[normalizedPath]
if ok {
entry.Keys = append(entry.Keys, key)
collected[normalizedPath] = entry
continue
}
collected[normalizedPath] = RefRevIdx{
Ref: schRef,
Keys: []string{key},
}
}
return collected
}

515
vendor/github.com/go-openapi/analysis/mixin.go generated vendored Normal file
View File

@ -0,0 +1,515 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analysis
import (
"fmt"
"reflect"
"github.com/go-openapi/spec"
)
// Mixin modifies the primary swagger spec by adding the paths and
// definitions from the mixin specs. Top level parameters and
// responses from the mixins are also carried over. Operation id
// collisions are avoided by appending "Mixin<N>" but only if
// needed.
//
// The following parts of primary are subject to merge, filling empty details
// - Info
// - BasePath
// - Host
// - ExternalDocs
//
// Consider calling FixEmptyResponseDescriptions() on the modified primary
// if you read them from storage and they are valid to start with.
//
// Entries in "paths", "definitions", "parameters" and "responses" are
// added to the primary in the order of the given mixins. If the entry
// already exists in primary it is skipped with a warning message.
//
// The count of skipped entries (from collisions) is returned so any
// deviation from the number expected can flag a warning in your build
// scripts. Carefully review the collisions before accepting them;
// consider renaming things if possible.
//
// No key normalization takes place (paths, type defs,
// etc). Ensure they are canonical if your downstream tools do
// key normalization of any form.
//
// Merging schemes (http, https), and consumers/producers do not account for
// collisions.
func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
skipped := make([]string, 0, len(mixins))
opIds := getOpIds(primary)
initPrimary(primary)
for i, m := range mixins {
skipped = append(skipped, mergeSwaggerProps(primary, m)...)
skipped = append(skipped, mergeConsumes(primary, m)...)
skipped = append(skipped, mergeProduces(primary, m)...)
skipped = append(skipped, mergeTags(primary, m)...)
skipped = append(skipped, mergeSchemes(primary, m)...)
skipped = append(skipped, mergeSecurityDefinitions(primary, m)...)
skipped = append(skipped, mergeSecurityRequirements(primary, m)...)
skipped = append(skipped, mergeDefinitions(primary, m)...)
// merging paths requires a map of operationIDs to work with
skipped = append(skipped, mergePaths(primary, m, opIds, i)...)
skipped = append(skipped, mergeParameters(primary, m)...)
skipped = append(skipped, mergeResponses(primary, m)...)
}
return skipped
}
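// Illustrative sketch (not part of the upstream file): merging two secondary specs
// into a primary one. Each returned entry describes a path, definition, parameter,
// response, tag or security item that already existed and was therefore skipped.
func exampleMixin(primary, extraA, extraB *spec.Swagger) []string {
return Mixin(primary, extraA, extraB)
}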
// getOpIds extracts all the paths.<path>.operationIds from the given
// spec and returns them as the keys in a map with 'true' values.
func getOpIds(s *spec.Swagger) map[string]bool {
rv := make(map[string]bool)
if s.Paths == nil {
return rv
}
for _, v := range s.Paths.Paths {
piops := pathItemOps(v)
for _, op := range piops {
rv[op.ID] = true
}
}
return rv
}
func pathItemOps(p spec.PathItem) []*spec.Operation {
var rv []*spec.Operation
rv = appendOp(rv, p.Get)
rv = appendOp(rv, p.Put)
rv = appendOp(rv, p.Post)
rv = appendOp(rv, p.Delete)
rv = appendOp(rv, p.Head)
rv = appendOp(rv, p.Patch)
return rv
}
func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
if op == nil {
return ops
}
return append(ops, op)
}
func mergeSecurityDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
for k, v := range m.SecurityDefinitions {
if _, exists := primary.SecurityDefinitions[k]; exists {
warn := fmt.Sprintf(
"SecurityDefinitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
skipped = append(skipped, warn)
continue
}
primary.SecurityDefinitions[k] = v
}
return
}
func mergeSecurityRequirements(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
for _, v := range m.Security {
found := false
for _, vv := range primary.Security {
if reflect.DeepEqual(v, vv) {
found = true
break
}
}
if found {
warn := fmt.Sprintf(
"Security requirement: '%v' already exists in primary or higher priority mixin, skipping\n", v)
skipped = append(skipped, warn)
continue
}
primary.Security = append(primary.Security, v)
}
return
}
func mergeDefinitions(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
for k, v := range m.Definitions {
// assume name collisions represent IDENTICAL type. careful.
if _, exists := primary.Definitions[k]; exists {
warn := fmt.Sprintf(
"definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
skipped = append(skipped, warn)
continue
}
primary.Definitions[k] = v
}
return
}
func mergePaths(primary *spec.Swagger, m *spec.Swagger, opIds map[string]bool, mixIndex int) (skipped []string) {
if m.Paths != nil {
for k, v := range m.Paths.Paths {
if _, exists := primary.Paths.Paths[k]; exists {
warn := fmt.Sprintf(
"paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
skipped = append(skipped, warn)
continue
}
// Swagger requires that operationIds be
// unique within a spec. If we find a
// collision we append "Mixin0" to the
// operationId we are adding, where 0 is the mixin
// index. We assume that operationIds within
// all the provided specs are already unique.
piops := pathItemOps(v)
for _, piop := range piops {
if opIds[piop.ID] {
piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", mixIndex)
}
opIds[piop.ID] = true
}
primary.Paths.Paths[k] = v
}
}
return
}
func mergeParameters(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
for k, v := range m.Parameters {
// could try to rename on conflict but would
// have to fix $refs in the mixin. Complain
// for now
if _, exists := primary.Parameters[k]; exists {
warn := fmt.Sprintf(
"top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
skipped = append(skipped, warn)
continue
}
primary.Parameters[k] = v
}
return
}
func mergeResponses(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
for k, v := range m.Responses {
// could try to rename on conflict but would
// have to fix $refs in the mixin. Complain
// for now
if _, exists := primary.Responses[k]; exists {
warn := fmt.Sprintf(
"top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
skipped = append(skipped, warn)
continue
}
primary.Responses[k] = v
}
return skipped
}
func mergeConsumes(primary *spec.Swagger, m *spec.Swagger) []string {
for _, v := range m.Consumes {
found := false
for _, vv := range primary.Consumes {
if v == vv {
found = true
break
}
}
if found {
// no warning here: we just skip it
continue
}
primary.Consumes = append(primary.Consumes, v)
}
return []string{}
}
func mergeProduces(primary *spec.Swagger, m *spec.Swagger) []string {
for _, v := range m.Produces {
found := false
for _, vv := range primary.Produces {
if v == vv {
found = true
break
}
}
if found {
// no warning here: we just skip it
continue
}
primary.Produces = append(primary.Produces, v)
}
return []string{}
}
func mergeTags(primary *spec.Swagger, m *spec.Swagger) (skipped []string) {
for _, v := range m.Tags {
found := false
for _, vv := range primary.Tags {
if v.Name == vv.Name {
found = true
break
}
}
if found {
warn := fmt.Sprintf(
"top level tags entry with name '%v' already exists in primary or higher priority mixin, skipping\n",
v.Name,
)
skipped = append(skipped, warn)
continue
}
primary.Tags = append(primary.Tags, v)
}
return
}
func mergeSchemes(primary *spec.Swagger, m *spec.Swagger) []string {
for _, v := range m.Schemes {
found := false
for _, vv := range primary.Schemes {
if v == vv {
found = true
break
}
}
if found {
// no warning here: we just skip it
continue
}
primary.Schemes = append(primary.Schemes, v)
}
return []string{}
}
func mergeSwaggerProps(primary *spec.Swagger, m *spec.Swagger) []string {
var skipped, skippedInfo, skippedDocs []string
primary.Extensions, skipped = mergeExtensions(primary.Extensions, m.Extensions)
// merging details in swagger top properties
if primary.Host == "" {
primary.Host = m.Host
}
if primary.BasePath == "" {
primary.BasePath = m.BasePath
}
if primary.Info == nil {
primary.Info = m.Info
} else if m.Info != nil {
skippedInfo = mergeInfo(primary.Info, m.Info)
skipped = append(skipped, skippedInfo...)
}
if primary.ExternalDocs == nil {
primary.ExternalDocs = m.ExternalDocs
} else if m.ExternalDocs != nil {
skippedDocs = mergeExternalDocs(primary.ExternalDocs, m.ExternalDocs)
skipped = append(skipped, skippedDocs...)
}
return skipped
}
// nolint: unparam
func mergeExternalDocs(primary *spec.ExternalDocumentation, m *spec.ExternalDocumentation) []string {
if primary.Description == "" {
primary.Description = m.Description
}
if primary.URL == "" {
primary.URL = m.URL
}
return nil
}
func mergeInfo(primary *spec.Info, m *spec.Info) []string {
var sk, skipped []string
primary.Extensions, sk = mergeExtensions(primary.Extensions, m.Extensions)
skipped = append(skipped, sk...)
if primary.Description == "" {
primary.Description = m.Description
}
if primary.Title == "" {
primary.Title = m.Title
}
if primary.TermsOfService == "" {
primary.TermsOfService = m.TermsOfService
}
if primary.Version == "" {
primary.Version = m.Version
}
if primary.Contact == nil {
primary.Contact = m.Contact
} else if m.Contact != nil {
var csk []string
primary.Contact.Extensions, csk = mergeExtensions(primary.Contact.Extensions, m.Contact.Extensions)
skipped = append(skipped, csk...)
if primary.Contact.Name == "" {
primary.Contact.Name = m.Contact.Name
}
if primary.Contact.URL == "" {
primary.Contact.URL = m.Contact.URL
}
if primary.Contact.Email == "" {
primary.Contact.Email = m.Contact.Email
}
}
if primary.License == nil {
primary.License = m.License
} else if m.License != nil {
var lsk []string
primary.License.Extensions, lsk = mergeExtensions(primary.License.Extensions, m.License.Extensions)
skipped = append(skipped, lsk...)
if primary.License.Name == "" {
primary.License.Name = m.License.Name
}
if primary.License.URL == "" {
primary.License.URL = m.License.URL
}
}
return skipped
}
func mergeExtensions(primary spec.Extensions, m spec.Extensions) (result spec.Extensions, skipped []string) {
if primary == nil {
result = m
return
}
if m == nil {
result = primary
return
}
result = primary
for k, v := range m {
if _, found := primary[k]; found {
skipped = append(skipped, k)
continue
}
primary[k] = v
}
return
}
func initPrimary(primary *spec.Swagger) {
if primary.SecurityDefinitions == nil {
primary.SecurityDefinitions = make(map[string]*spec.SecurityScheme)
}
if primary.Security == nil {
primary.Security = make([]map[string][]string, 0, 10)
}
if primary.Produces == nil {
primary.Produces = make([]string, 0, 10)
}
if primary.Consumes == nil {
primary.Consumes = make([]string, 0, 10)
}
if primary.Tags == nil {
primary.Tags = make([]spec.Tag, 0, 10)
}
if primary.Schemes == nil {
primary.Schemes = make([]string, 0, 10)
}
if primary.Paths == nil {
primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
}
if primary.Paths.Paths == nil {
primary.Paths.Paths = make(map[string]spec.PathItem)
}
if primary.Definitions == nil {
primary.Definitions = make(spec.Definitions)
}
if primary.Parameters == nil {
primary.Parameters = make(map[string]spec.Parameter)
}
if primary.Responses == nil {
primary.Responses = make(map[string]spec.Response)
}
}

256
vendor/github.com/go-openapi/analysis/schema.go generated vendored Normal file
View File

@ -0,0 +1,256 @@
package analysis
import (
"fmt"
"github.com/go-openapi/spec"
"github.com/go-openapi/strfmt"
)
// SchemaOpts configures the schema analyzer
type SchemaOpts struct {
Schema *spec.Schema
Root interface{}
BasePath string
_ struct{}
}
// Schema analyzes a schema and classifies it according to known
// patterns.
func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
if opts.Schema == nil {
return nil, fmt.Errorf("no schema to analyze")
}
a := &AnalyzedSchema{
schema: opts.Schema,
root: opts.Root,
basePath: opts.BasePath,
}
a.initializeFlags()
a.inferKnownType()
a.inferEnum()
a.inferBaseType()
if err := a.inferMap(); err != nil {
return nil, err
}
if err := a.inferArray(); err != nil {
return nil, err
}
a.inferTuple()
if err := a.inferFromRef(); err != nil {
return nil, err
}
a.inferSimpleSchema()
return a, nil
}
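// Illustrative sketch (not part of the upstream file): classifying a plain string
// schema. The base path is an assumption for the example; for such a schema, both
// IsKnownType and IsSimpleSchema are expected to be true.
func exampleAnalyzeSchema(root *spec.Swagger) (bool, bool, error) {
a, err := Schema(SchemaOpts{
Schema: spec.StringProperty(),
Root: root,
BasePath: "swagger.yml",
})
if err != nil {
return false, false, err
}
return a.IsKnownType, a.IsSimpleSchema, nil
}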
// AnalyzedSchema indicates what the schema represents
type AnalyzedSchema struct {
schema *spec.Schema
root interface{}
basePath string
hasProps bool
hasAllOf bool
hasItems bool
hasAdditionalProps bool
hasAdditionalItems bool
hasRef bool
IsKnownType bool
IsSimpleSchema bool
IsArray bool
IsSimpleArray bool
IsMap bool
IsSimpleMap bool
IsExtendedObject bool
IsTuple bool
IsTupleWithExtra bool
IsBaseType bool
IsEnum bool
}
// inherits copies value fields from other onto this schema
func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
if other == nil {
return
}
a.hasProps = other.hasProps
a.hasAllOf = other.hasAllOf
a.hasItems = other.hasItems
a.hasAdditionalItems = other.hasAdditionalItems
a.hasAdditionalProps = other.hasAdditionalProps
a.hasRef = other.hasRef
a.IsKnownType = other.IsKnownType
a.IsSimpleSchema = other.IsSimpleSchema
a.IsArray = other.IsArray
a.IsSimpleArray = other.IsSimpleArray
a.IsMap = other.IsMap
a.IsSimpleMap = other.IsSimpleMap
a.IsExtendedObject = other.IsExtendedObject
a.IsTuple = other.IsTuple
a.IsTupleWithExtra = other.IsTupleWithExtra
a.IsBaseType = other.IsBaseType
a.IsEnum = other.IsEnum
}
func (a *AnalyzedSchema) inferFromRef() error {
if a.hasRef {
sch := new(spec.Schema)
sch.Ref = a.schema.Ref
err := spec.ExpandSchema(sch, a.root, nil)
if err != nil {
return err
}
rsch, err := Schema(SchemaOpts{
Schema: sch,
Root: a.root,
BasePath: a.basePath,
})
if err != nil {
// NOTE(fredbi): currently the only cause for errors is
// unresolved ref. Since spec.ExpandSchema() expands the
// schema recursively, there is no chance to get there,
// until we add more causes for error in this schema analysis.
return err
}
a.inherits(rsch)
}
return nil
}
func (a *AnalyzedSchema) inferSimpleSchema() {
a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
}
func (a *AnalyzedSchema) inferKnownType() {
tpe := a.schema.Type
format := a.schema.Format
a.IsKnownType = tpe.Contains("boolean") ||
tpe.Contains("integer") ||
tpe.Contains("number") ||
tpe.Contains("string") ||
(format != "" && strfmt.Default.ContainsName(format)) ||
(a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
}
func (a *AnalyzedSchema) inferMap() error {
if !a.isObjectType() {
return nil
}
hasExtra := a.hasProps || a.hasAllOf
a.IsMap = a.hasAdditionalProps && !hasExtra
a.IsExtendedObject = a.hasAdditionalProps && hasExtra
if !a.IsMap {
return nil
}
// maps
if a.schema.AdditionalProperties.Schema != nil {
msch, err := Schema(SchemaOpts{
Schema: a.schema.AdditionalProperties.Schema,
Root: a.root,
BasePath: a.basePath,
})
if err != nil {
return err
}
a.IsSimpleMap = msch.IsSimpleSchema
} else if a.schema.AdditionalProperties.Allows {
a.IsSimpleMap = true
}
return nil
}
func (a *AnalyzedSchema) inferArray() error {
// an array has Items defined as an object schema, otherwise we qualify this JSON array as a tuple
// (yes, even if the Items array contains only one element).
// arrays in JSON schema may be unrestricted (i.e no Items specified).
// Note that arrays in Swagger MUST have Items. Nonetheless, we analyze unrestricted arrays.
//
// NOTE: the spec package misses the distinction between:
// items: [] and items: {}, so we consider both arrays here.
a.IsArray = a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Schemas == nil)
if a.IsArray && a.hasItems {
if a.schema.Items.Schema != nil {
itsch, err := Schema(SchemaOpts{
Schema: a.schema.Items.Schema,
Root: a.root,
BasePath: a.basePath,
})
if err != nil {
return err
}
a.IsSimpleArray = itsch.IsSimpleSchema
}
}
if a.IsArray && !a.hasItems {
a.IsSimpleArray = true
}
return nil
}
func (a *AnalyzedSchema) inferTuple() {
tuple := a.hasItems && a.schema.Items.Schemas != nil
a.IsTuple = tuple && !a.hasAdditionalItems
a.IsTupleWithExtra = tuple && a.hasAdditionalItems
}
func (a *AnalyzedSchema) inferBaseType() {
if a.isObjectType() {
a.IsBaseType = a.schema.Discriminator != ""
}
}
func (a *AnalyzedSchema) inferEnum() {
a.IsEnum = len(a.schema.Enum) > 0
}
func (a *AnalyzedSchema) initializeFlags() {
a.hasProps = len(a.schema.Properties) > 0
a.hasAllOf = len(a.schema.AllOf) > 0
a.hasRef = a.schema.Ref.String() != ""
a.hasItems = a.schema.Items != nil &&
(a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
(a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)
a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
(a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
}
func (a *AnalyzedSchema) isObjectType() bool {
return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
}
func (a *AnalyzedSchema) isArrayType() bool {
return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
}
// isAnalyzedAsComplex determines if an analyzed schema is eligible to flattening (i.e. it is "complex").
//
// Complex means the schema is none of the following:
// - a simple type (primitive)
// - an array of something (items are possibly complex ; if this is the case, items will generate a definition)
// - a map of something (additionalProperties are possibly complex ; if this is the case, additionalProperties will
// generate a definition)
func (a *AnalyzedSchema) isAnalyzedAsComplex() bool {
return !a.IsSimpleSchema && !a.IsArray && !a.IsMap
}

1
vendor/github.com/go-openapi/errors/.gitattributes generated vendored Normal file
View File

@ -0,0 +1 @@
*.go text eol=lf

2
vendor/github.com/go-openapi/errors/.gitignore generated vendored Normal file
View File

@ -0,0 +1,2 @@
secrets.yml
coverage.out

48
vendor/github.com/go-openapi/errors/.golangci.yml generated vendored Normal file
View File

@ -0,0 +1,48 @@
linters-settings:
govet:
check-shadowing: true
golint:
min-confidence: 0
gocyclo:
min-complexity: 30
maligned:
suggest-new: true
dupl:
threshold: 100
goconst:
min-len: 2
min-occurrences: 4
linters:
enable-all: true
disable:
- maligned
- lll
- gochecknoglobals
- godox
- gocognit
- whitespace
- wsl
- funlen
- gochecknoinits
- scopelint
- wrapcheck
- exhaustivestruct
- exhaustive
- nlreturn
- testpackage
- gci
- gofumpt
- goerr113
- gomnd
- tparallel
- nestif
- godot
- errorlint
- paralleltest
- tparallel
- cyclop
- errname
- varnamelen
- exhaustruct
- maintidx

74
vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md generated vendored Normal file
View File

@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

202
vendor/github.com/go-openapi/errors/LICENSE generated vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

11
vendor/github.com/go-openapi/errors/README.md generated vendored Normal file
View File

@ -0,0 +1,11 @@
# OpenAPI errors
[![Build Status](https://travis-ci.org/go-openapi/errors.svg?branch=master)](https://travis-ci.org/go-openapi/errors)
[![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors)
[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE)
[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/errors.svg)](https://pkg.go.dev/github.com/go-openapi/errors)
[![GolangCI](https://golangci.com/badges/github.com/go-openapi/errors.svg)](https://golangci.com)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/errors)](https://goreportcard.com/report/github.com/go-openapi/errors)
Shared errors and error interface used throughout the various libraries found in the go-openapi toolkit.
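As a quick orientation for the vendored package below, here is a minimal usage sketch, not part of the vendored files: it builds a typed error and hands it to ServeError, which writes a JSON body with a matching HTTP status. The route, port, and message text are illustrative assumptions.

```go
package main

import (
	"log"
	"net/http"

	"github.com/go-openapi/errors"
)

func main() {
	http.HandleFunc("/widgets/", func(w http.ResponseWriter, r *http.Request) {
		// NotFound builds an errors.Error carrying code 404; ServeError then
		// serializes it as JSON and sets the matching HTTP status.
		errors.ServeError(w, r, errors.NotFound("no widget at %q", r.URL.Path))
	})
	log.Fatal(http.ListenAndServe(":8080", nil)) // port is an assumption for the sketch
}
```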

182
vendor/github.com/go-openapi/errors/api.go generated vendored Normal file
View File

@ -0,0 +1,182 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package errors
import (
"encoding/json"
"fmt"
"net/http"
"reflect"
"strings"
)
// DefaultHTTPCode is used when the error Code cannot be used as an HTTP code.
var DefaultHTTPCode = http.StatusUnprocessableEntity
// Error represents an error interface that all swagger framework errors implement
type Error interface {
error
Code() int32
}
type apiError struct {
code int32
message string
}
func (a *apiError) Error() string {
return a.message
}
func (a *apiError) Code() int32 {
return a.code
}
// MarshalJSON implements the JSON encoding interface
func (a apiError) MarshalJSON() ([]byte, error) {
return json.Marshal(map[string]interface{}{
"code": a.code,
"message": a.message,
})
}
// New creates a new API error with a code and a message
func New(code int32, message string, args ...interface{}) Error {
if len(args) > 0 {
return &apiError{code, fmt.Sprintf(message, args...)}
}
return &apiError{code, message}
}
// NotFound creates a new not found error
func NotFound(message string, args ...interface{}) Error {
if message == "" {
message = "Not found"
}
return New(http.StatusNotFound, fmt.Sprintf(message, args...))
}
// NotImplemented creates a new not implemented error
func NotImplemented(message string) Error {
return New(http.StatusNotImplemented, message)
}
// MethodNotAllowedError represents an error for when the path matches but the method doesn't
type MethodNotAllowedError struct {
code int32
Allowed []string
message string
}
func (m *MethodNotAllowedError) Error() string {
return m.message
}
// Code the error code
func (m *MethodNotAllowedError) Code() int32 {
return m.code
}
// MarshalJSON implements the JSON encoding interface
func (m MethodNotAllowedError) MarshalJSON() ([]byte, error) {
return json.Marshal(map[string]interface{}{
"code": m.code,
"message": m.message,
"allowed": m.Allowed,
})
}
func errorAsJSON(err Error) []byte {
//nolint:errchkjson
b, _ := json.Marshal(struct {
Code int32 `json:"code"`
Message string `json:"message"`
}{err.Code(), err.Error()})
return b
}
func flattenComposite(errs *CompositeError) *CompositeError {
var res []error
for _, er := range errs.Errors {
switch e := er.(type) {
case *CompositeError:
if e != nil && len(e.Errors) > 0 {
flat := flattenComposite(e)
if len(flat.Errors) > 0 {
res = append(res, flat.Errors...)
}
}
default:
if e != nil {
res = append(res, e)
}
}
}
return CompositeValidationError(res...)
}
// MethodNotAllowed creates a new method not allowed error
func MethodNotAllowed(requested string, allow []string) Error {
msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ","))
return &MethodNotAllowedError{code: http.StatusMethodNotAllowed, Allowed: allow, message: msg}
}
// ServeError the error handler interface implementation
func ServeError(rw http.ResponseWriter, r *http.Request, err error) {
rw.Header().Set("Content-Type", "application/json")
switch e := err.(type) {
case *CompositeError:
er := flattenComposite(e)
// strips composite errors to first element only
if len(er.Errors) > 0 {
ServeError(rw, r, er.Errors[0])
} else {
// guard against empty CompositeError (invalid construct)
ServeError(rw, r, nil)
}
case *MethodNotAllowedError:
rw.Header().Add("Allow", strings.Join(e.Allowed, ","))
rw.WriteHeader(asHTTPCode(int(e.Code())))
if r == nil || r.Method != http.MethodHead {
_, _ = rw.Write(errorAsJSON(e))
}
case Error:
value := reflect.ValueOf(e)
if value.Kind() == reflect.Ptr && value.IsNil() {
rw.WriteHeader(http.StatusInternalServerError)
_, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
return
}
rw.WriteHeader(asHTTPCode(int(e.Code())))
if r == nil || r.Method != http.MethodHead {
_, _ = rw.Write(errorAsJSON(e))
}
case nil:
rw.WriteHeader(http.StatusInternalServerError)
_, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
default:
rw.WriteHeader(http.StatusInternalServerError)
if r == nil || r.Method != http.MethodHead {
_, _ = rw.Write(errorAsJSON(New(http.StatusInternalServerError, err.Error())))
}
}
}
func asHTTPCode(input int) int {
if input >= 600 {
return DefaultHTTPCode
}
return input
}
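A hedged sketch of how ServeError above treats two of the cases in its switch: a CompositeError is flattened and only its first leaf is written, and a MethodNotAllowedError also sets the Allow header. The httptest harness and field values are illustrative only.

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/go-openapi/errors"
)

func main() {
	// The composite is flattened and only its first element is served;
	// validation codes >= 600 are mapped to DefaultHTTPCode (422).
	composite := errors.CompositeValidationError(
		errors.Required("name", "body", nil),
		errors.Required("status", "body", nil),
	)
	rec := httptest.NewRecorder()
	errors.ServeError(rec, httptest.NewRequest(http.MethodPost, "/", nil), composite)
	fmt.Println(rec.Code, rec.Body.String()) // 422 {"code":602,"message":"name in body is required"}

	// MethodNotAllowedError additionally advertises the allowed methods.
	rec = httptest.NewRecorder()
	errors.ServeError(rec, httptest.NewRequest(http.MethodDelete, "/", nil),
		errors.MethodNotAllowed(http.MethodDelete, []string{http.MethodGet, http.MethodPost}))
	fmt.Println(rec.Code, rec.Header().Get("Allow")) // 405 GET,POST
}
```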

22
vendor/github.com/go-openapi/errors/auth.go generated vendored Normal file
View File

@ -0,0 +1,22 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package errors
import "net/http"
// Unauthenticated returns an unauthenticated error
func Unauthenticated(scheme string) Error {
return New(http.StatusUnauthorized, "unauthenticated for %s", scheme)
}

26
vendor/github.com/go-openapi/errors/doc.go generated vendored Normal file
View File

@ -0,0 +1,26 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package errors provides an Error interface and several concrete types
implementing this interface to manage API errors and JSON-schema validation
errors.
A middleware handler ServeError() is provided to serve the error types
it defines.
It is used throughout the various go-openapi toolkit libraries
(https://github.com/go-openapi).
*/
package errors

103
vendor/github.com/go-openapi/errors/headers.go generated vendored Normal file
View File

@ -0,0 +1,103 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package errors
import (
"encoding/json"
"fmt"
"net/http"
)
// Validation represents a failure of a precondition
type Validation struct {
code int32
Name string
In string
Value interface{}
message string
Values []interface{}
}
func (e *Validation) Error() string {
return e.message
}
// Code the error code
func (e *Validation) Code() int32 {
return e.code
}
// MarshalJSON implements the JSON encoding interface
func (e Validation) MarshalJSON() ([]byte, error) {
return json.Marshal(map[string]interface{}{
"code": e.code,
"message": e.message,
"in": e.In,
"name": e.Name,
"value": e.Value,
"values": e.Values,
})
}
// ValidateName sets the name for a validation or updates it for a nested property
func (e *Validation) ValidateName(name string) *Validation {
if name != "" {
if e.Name == "" {
e.Name = name
e.message = name + e.message
} else {
e.Name = name + "." + e.Name
e.message = name + "." + e.message
}
}
return e
}
const (
contentTypeFail = `unsupported media type %q, only %v are allowed`
responseFormatFail = `unsupported media type requested, only %v are available`
)
// InvalidContentType error for an invalid content type
func InvalidContentType(value string, allowed []string) *Validation {
values := make([]interface{}, 0, len(allowed))
for _, v := range allowed {
values = append(values, v)
}
return &Validation{
code: http.StatusUnsupportedMediaType,
Name: "Content-Type",
In: "header",
Value: value,
Values: values,
message: fmt.Sprintf(contentTypeFail, value, allowed),
}
}
// InvalidResponseFormat error for an unacceptable response format request
func InvalidResponseFormat(value string, allowed []string) *Validation {
values := make([]interface{}, 0, len(allowed))
for _, v := range allowed {
values = append(values, v)
}
return &Validation{
code: http.StatusNotAcceptable,
Name: "Accept",
In: "header",
Value: value,
Values: values,
message: fmt.Sprintf(responseFormatFail, allowed),
}
}
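For illustration only, a small sketch showing how the two header validators above pair with ServeError for content-negotiation failures; the media types are assumed values.

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"

	"github.com/go-openapi/errors"
)

func main() {
	allowed := []string{"application/json"}

	// Unsupported request body media type -> 415 Validation error.
	rec := httptest.NewRecorder()
	errors.ServeError(rec, httptest.NewRequest(http.MethodPost, "/", nil),
		errors.InvalidContentType("text/xml", allowed))
	fmt.Println(rec.Code) // 415

	// Accept header we cannot satisfy -> 406.
	fmt.Println(errors.InvalidResponseFormat("text/html", allowed).Code()) // 406
}
```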

50
vendor/github.com/go-openapi/errors/middleware.go generated vendored Normal file
View File

@ -0,0 +1,50 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package errors
import (
"bytes"
"fmt"
"strings"
)
// APIVerificationFailed is an error that contains all the missing info for a mismatched section
// between the api registrations and the api spec
type APIVerificationFailed struct {
Section string `json:"section,omitempty"`
MissingSpecification []string `json:"missingSpecification,omitempty"`
MissingRegistration []string `json:"missingRegistration,omitempty"`
}
func (v *APIVerificationFailed) Error() string {
buf := bytes.NewBuffer(nil)
hasRegMissing := len(v.MissingRegistration) > 0
hasSpecMissing := len(v.MissingSpecification) > 0
if hasRegMissing {
buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section))
}
if hasRegMissing && hasSpecMissing {
buf.WriteString("\n")
}
if hasSpecMissing {
buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section))
}
return buf.String()
}
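A brief, illustrative sketch of the message APIVerificationFailed composes when both lists are populated; the operation IDs are made-up placeholders.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/errors"
)

func main() {
	err := &errors.APIVerificationFailed{
		Section:              "operation",
		MissingSpecification: []string{"getWidget"},   // registered but absent from the spec
		MissingRegistration:  []string{"listWidgets"}, // in the spec but never registered
	}
	fmt.Println(err.Error())
	// missing [listWidgets] operation registrations
	// missing from spec file [getWidget] operation
}
```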

78
vendor/github.com/go-openapi/errors/parsing.go generated vendored Normal file
View File

@ -0,0 +1,78 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package errors
import (
"encoding/json"
"fmt"
)
// ParseError represents a parsing error
type ParseError struct {
code int32
Name string
In string
Value string
Reason error
message string
}
func (e *ParseError) Error() string {
return e.message
}
// Code returns the http status code for this error
func (e *ParseError) Code() int32 {
return e.code
}
// MarshalJSON implements the JSON encoding interface
func (e ParseError) MarshalJSON() ([]byte, error) {
var reason string
if e.Reason != nil {
reason = e.Reason.Error()
}
return json.Marshal(map[string]interface{}{
"code": e.code,
"message": e.message,
"in": e.In,
"name": e.Name,
"value": e.Value,
"reason": reason,
})
}
const (
parseErrorTemplContent = `parsing %s %s from %q failed, because %s`
parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s`
)
// NewParseError creates a new parse error
func NewParseError(name, in, value string, reason error) *ParseError {
var msg string
if in == "" {
msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason)
} else {
msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason)
}
return &ParseError{
code: 400,
Name: name,
In: in,
Value: value,
Reason: reason,
message: msg,
}
}
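An illustrative sketch of wrapping a failed conversion in a ParseError; the parameter name and value are assumptions for the example.

```go
package main

import (
	"fmt"
	"strconv"

	"github.com/go-openapi/errors"
)

func main() {
	_, convErr := strconv.ParseInt("ten", 10, 64)
	perr := errors.NewParseError("limit", "query", "ten", convErr)
	fmt.Println(perr.Code())  // 400
	fmt.Println(perr.Error()) // parsing limit query from "ten" failed, because strconv.ParseInt: parsing "ten": invalid syntax
}
```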

611
vendor/github.com/go-openapi/errors/schema.go generated vendored Normal file
View File

@ -0,0 +1,611 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package errors
import (
"encoding/json"
"fmt"
"strings"
)
const (
invalidType = "%s is an invalid type name"
typeFail = "%s in %s must be of type %s"
typeFailWithData = "%s in %s must be of type %s: %q"
typeFailWithError = "%s in %s must be of type %s, because: %s"
requiredFail = "%s in %s is required"
readOnlyFail = "%s in %s is readOnly"
tooLongMessage = "%s in %s should be at most %d chars long"
tooShortMessage = "%s in %s should be at least %d chars long"
patternFail = "%s in %s should match '%s'"
enumFail = "%s in %s should be one of %v"
multipleOfFail = "%s in %s should be a multiple of %v"
maxIncFail = "%s in %s should be less than or equal to %v"
maxExcFail = "%s in %s should be less than %v"
minIncFail = "%s in %s should be greater than or equal to %v"
minExcFail = "%s in %s should be greater than %v"
uniqueFail = "%s in %s shouldn't contain duplicates"
maxItemsFail = "%s in %s should have at most %d items"
minItemsFail = "%s in %s should have at least %d items"
typeFailNoIn = "%s must be of type %s"
typeFailWithDataNoIn = "%s must be of type %s: %q"
typeFailWithErrorNoIn = "%s must be of type %s, because: %s"
requiredFailNoIn = "%s is required"
readOnlyFailNoIn = "%s is readOnly"
tooLongMessageNoIn = "%s should be at most %d chars long"
tooShortMessageNoIn = "%s should be at least %d chars long"
patternFailNoIn = "%s should match '%s'"
enumFailNoIn = "%s should be one of %v"
multipleOfFailNoIn = "%s should be a multiple of %v"
maxIncFailNoIn = "%s should be less than or equal to %v"
maxExcFailNoIn = "%s should be less than %v"
minIncFailNoIn = "%s should be greater than or equal to %v"
minExcFailNoIn = "%s should be greater than %v"
uniqueFailNoIn = "%s shouldn't contain duplicates"
maxItemsFailNoIn = "%s should have at most %d items"
minItemsFailNoIn = "%s should have at least %d items"
noAdditionalItems = "%s in %s can't have additional items"
noAdditionalItemsNoIn = "%s can't have additional items"
tooFewProperties = "%s in %s should have at least %d properties"
tooFewPropertiesNoIn = "%s should have at least %d properties"
tooManyProperties = "%s in %s should have at most %d properties"
tooManyPropertiesNoIn = "%s should have at most %d properties"
unallowedProperty = "%s.%s in %s is a forbidden property"
unallowedPropertyNoIn = "%s.%s is a forbidden property"
failedAllPatternProps = "%s.%s in %s failed all pattern properties"
failedAllPatternPropsNoIn = "%s.%s failed all pattern properties"
multipleOfMustBePositive = "factor MultipleOf declared for %s must be positive: %v"
)
// All code responses can be used to differentiate errors for different handling
// by the consuming program
const (
// CompositeErrorCode remains 422 for backwards-compatibility
// and to separate it from validation errors with cause
CompositeErrorCode = 422
// InvalidTypeCode is used for any subclass of invalid types
InvalidTypeCode = 600 + iota
RequiredFailCode
TooLongFailCode
TooShortFailCode
PatternFailCode
EnumFailCode
MultipleOfFailCode
MaxFailCode
MinFailCode
UniqueFailCode
MaxItemsFailCode
MinItemsFailCode
NoAdditionalItemsCode
TooFewPropertiesCode
TooManyPropertiesCode
UnallowedPropertyCode
FailedAllPatternPropsCode
MultipleOfMustBePositiveCode
ReadOnlyFailCode
)
// CompositeError is an error that groups several errors together
type CompositeError struct {
Errors []error
code int32
message string
}
// Code for this error
func (c *CompositeError) Code() int32 {
return c.code
}
func (c *CompositeError) Error() string {
if len(c.Errors) > 0 {
msgs := []string{c.message + ":"}
for _, e := range c.Errors {
msgs = append(msgs, e.Error())
}
return strings.Join(msgs, "\n")
}
return c.message
}
// MarshalJSON implements the JSON encoding interface
func (c CompositeError) MarshalJSON() ([]byte, error) {
return json.Marshal(map[string]interface{}{
"code": c.code,
"message": c.message,
"errors": c.Errors,
})
}
// CompositeValidationError an error to wrap a bunch of other errors
func CompositeValidationError(errors ...error) *CompositeError {
return &CompositeError{
code: CompositeErrorCode,
Errors: append([]error{}, errors...),
message: "validation failure list",
}
}
// ValidateName recursively sets the name for all validations or updates them for nested properties
func (c *CompositeError) ValidateName(name string) *CompositeError {
for i, e := range c.Errors {
if ve, ok := e.(*Validation); ok {
c.Errors[i] = ve.ValidateName(name)
} else if ce, ok := e.(*CompositeError); ok {
c.Errors[i] = ce.ValidateName(name)
}
}
return c
}
// FailedAllPatternProperties an error for when the property doesn't match a pattern
func FailedAllPatternProperties(name, in, key string) *Validation {
msg := fmt.Sprintf(failedAllPatternProps, name, key, in)
if in == "" {
msg = fmt.Sprintf(failedAllPatternPropsNoIn, name, key)
}
return &Validation{
code: FailedAllPatternPropsCode,
Name: name,
In: in,
Value: key,
message: msg,
}
}
// PropertyNotAllowed an error for when a property is not allowed
func PropertyNotAllowed(name, in, key string) *Validation {
msg := fmt.Sprintf(unallowedProperty, name, key, in)
if in == "" {
msg = fmt.Sprintf(unallowedPropertyNoIn, name, key)
}
return &Validation{
code: UnallowedPropertyCode,
Name: name,
In: in,
Value: key,
message: msg,
}
}
// TooFewProperties an error for an object with too few properties
func TooFewProperties(name, in string, n int64) *Validation {
msg := fmt.Sprintf(tooFewProperties, name, in, n)
if in == "" {
msg = fmt.Sprintf(tooFewPropertiesNoIn, name, n)
}
return &Validation{
code: TooFewPropertiesCode,
Name: name,
In: in,
Value: n,
message: msg,
}
}
// TooManyProperties an error for an object with too many properties
func TooManyProperties(name, in string, n int64) *Validation {
msg := fmt.Sprintf(tooManyProperties, name, in, n)
if in == "" {
msg = fmt.Sprintf(tooManyPropertiesNoIn, name, n)
}
return &Validation{
code: TooManyPropertiesCode,
Name: name,
In: in,
Value: n,
message: msg,
}
}
// AdditionalItemsNotAllowed an error for invalid additional items
func AdditionalItemsNotAllowed(name, in string) *Validation {
msg := fmt.Sprintf(noAdditionalItems, name, in)
if in == "" {
msg = fmt.Sprintf(noAdditionalItemsNoIn, name)
}
return &Validation{
code: NoAdditionalItemsCode,
Name: name,
In: in,
message: msg,
}
}
// InvalidCollectionFormat another flavor of invalid type error
func InvalidCollectionFormat(name, in, format string) *Validation {
return &Validation{
code: InvalidTypeCode,
Name: name,
In: in,
Value: format,
message: fmt.Sprintf("the collection format %q is not supported for the %s param %q", format, in, name),
}
}
// InvalidTypeName an error for when the type is invalid
func InvalidTypeName(typeName string) *Validation {
return &Validation{
code: InvalidTypeCode,
Value: typeName,
message: fmt.Sprintf(invalidType, typeName),
}
}
// InvalidType creates an error for when the type is invalid
func InvalidType(name, in, typeName string, value interface{}) *Validation {
var message string
if in != "" {
switch value.(type) {
case string:
message = fmt.Sprintf(typeFailWithData, name, in, typeName, value)
case error:
message = fmt.Sprintf(typeFailWithError, name, in, typeName, value)
default:
message = fmt.Sprintf(typeFail, name, in, typeName)
}
} else {
switch value.(type) {
case string:
message = fmt.Sprintf(typeFailWithDataNoIn, name, typeName, value)
case error:
message = fmt.Sprintf(typeFailWithErrorNoIn, name, typeName, value)
default:
message = fmt.Sprintf(typeFailNoIn, name, typeName)
}
}
return &Validation{
code: InvalidTypeCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// DuplicateItems error for when an array contains duplicates
func DuplicateItems(name, in string) *Validation {
msg := fmt.Sprintf(uniqueFail, name, in)
if in == "" {
msg = fmt.Sprintf(uniqueFailNoIn, name)
}
return &Validation{
code: UniqueFailCode,
Name: name,
In: in,
message: msg,
}
}
// TooManyItems error for when an array contains too many items
func TooManyItems(name, in string, max int64, value interface{}) *Validation {
msg := fmt.Sprintf(maxItemsFail, name, in, max)
if in == "" {
msg = fmt.Sprintf(maxItemsFailNoIn, name, max)
}
return &Validation{
code: MaxItemsFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// TooFewItems error for when an array contains too few items
func TooFewItems(name, in string, min int64, value interface{}) *Validation {
msg := fmt.Sprintf(minItemsFail, name, in, min)
if in == "" {
msg = fmt.Sprintf(minItemsFailNoIn, name, min)
}
return &Validation{
code: MinItemsFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// ExceedsMaximumInt error for when maximum validation fails
func ExceedsMaximumInt(name, in string, max int64, exclusive bool, value interface{}) *Validation {
var message string
if in == "" {
m := maxIncFailNoIn
if exclusive {
m = maxExcFailNoIn
}
message = fmt.Sprintf(m, name, max)
} else {
m := maxIncFail
if exclusive {
m = maxExcFail
}
message = fmt.Sprintf(m, name, in, max)
}
return &Validation{
code: MaxFailCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// ExceedsMaximumUint error for when maximum validation fails
func ExceedsMaximumUint(name, in string, max uint64, exclusive bool, value interface{}) *Validation {
var message string
if in == "" {
m := maxIncFailNoIn
if exclusive {
m = maxExcFailNoIn
}
message = fmt.Sprintf(m, name, max)
} else {
m := maxIncFail
if exclusive {
m = maxExcFail
}
message = fmt.Sprintf(m, name, in, max)
}
return &Validation{
code: MaxFailCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// ExceedsMaximum error for when maximum validation fails
func ExceedsMaximum(name, in string, max float64, exclusive bool, value interface{}) *Validation {
var message string
if in == "" {
m := maxIncFailNoIn
if exclusive {
m = maxExcFailNoIn
}
message = fmt.Sprintf(m, name, max)
} else {
m := maxIncFail
if exclusive {
m = maxExcFail
}
message = fmt.Sprintf(m, name, in, max)
}
return &Validation{
code: MaxFailCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// ExceedsMinimumInt error for when minimum validation fails
func ExceedsMinimumInt(name, in string, min int64, exclusive bool, value interface{}) *Validation {
var message string
if in == "" {
m := minIncFailNoIn
if exclusive {
m = minExcFailNoIn
}
message = fmt.Sprintf(m, name, min)
} else {
m := minIncFail
if exclusive {
m = minExcFail
}
message = fmt.Sprintf(m, name, in, min)
}
return &Validation{
code: MinFailCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// ExceedsMinimumUint error for when minimum validation fails
func ExceedsMinimumUint(name, in string, min uint64, exclusive bool, value interface{}) *Validation {
var message string
if in == "" {
m := minIncFailNoIn
if exclusive {
m = minExcFailNoIn
}
message = fmt.Sprintf(m, name, min)
} else {
m := minIncFail
if exclusive {
m = minExcFail
}
message = fmt.Sprintf(m, name, in, min)
}
return &Validation{
code: MinFailCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// ExceedsMinimum error for when minimum validation fails
func ExceedsMinimum(name, in string, min float64, exclusive bool, value interface{}) *Validation {
var message string
if in == "" {
m := minIncFailNoIn
if exclusive {
m = minExcFailNoIn
}
message = fmt.Sprintf(m, name, min)
} else {
m := minIncFail
if exclusive {
m = minExcFail
}
message = fmt.Sprintf(m, name, in, min)
}
return &Validation{
code: MinFailCode,
Name: name,
In: in,
Value: value,
message: message,
}
}
// NotMultipleOf error for when multiple of validation fails
func NotMultipleOf(name, in string, multiple, value interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(multipleOfFailNoIn, name, multiple)
} else {
msg = fmt.Sprintf(multipleOfFail, name, in, multiple)
}
return &Validation{
code: MultipleOfFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// EnumFail error for when an enum validation fails
func EnumFail(name, in string, value interface{}, values []interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(enumFailNoIn, name, values)
} else {
msg = fmt.Sprintf(enumFail, name, in, values)
}
return &Validation{
code: EnumFailCode,
Name: name,
In: in,
Value: value,
Values: values,
message: msg,
}
}
// Required error for when a value is missing
func Required(name, in string, value interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(requiredFailNoIn, name)
} else {
msg = fmt.Sprintf(requiredFail, name, in)
}
return &Validation{
code: RequiredFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// ReadOnly error for when a value is present in request
func ReadOnly(name, in string, value interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(readOnlyFailNoIn, name)
} else {
msg = fmt.Sprintf(readOnlyFail, name, in)
}
return &Validation{
code: ReadOnlyFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// TooLong error for when a string is too long
func TooLong(name, in string, max int64, value interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(tooLongMessageNoIn, name, max)
} else {
msg = fmt.Sprintf(tooLongMessage, name, in, max)
}
return &Validation{
code: TooLongFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// TooShort error for when a string is too short
func TooShort(name, in string, min int64, value interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(tooShortMessageNoIn, name, min)
} else {
msg = fmt.Sprintf(tooShortMessage, name, in, min)
}
return &Validation{
code: TooShortFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// FailedPattern error for when a string fails a regex pattern match
// the pattern that is returned is the ECMA syntax version of the pattern not the golang version.
func FailedPattern(name, in, pattern string, value interface{}) *Validation {
var msg string
if in == "" {
msg = fmt.Sprintf(patternFailNoIn, name, pattern)
} else {
msg = fmt.Sprintf(patternFail, name, in, pattern)
}
return &Validation{
code: PatternFailCode,
Name: name,
In: in,
Value: value,
message: msg,
}
}
// MultipleOfMustBePositive error for when a
// multipleOf factor is negative
func MultipleOfMustBePositive(name, in string, factor interface{}) *Validation {
return &Validation{
code: MultipleOfMustBePositiveCode,
Name: name,
In: in,
Value: factor,
message: fmt.Sprintf(multipleOfMustBePositive, name, factor),
}
}
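To show how the validation constructors and ValidateName above compose, a hedged sketch; the field names and limits are invented for the example.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/errors"
)

func main() {
	// Collect field-level failures, then prefix them with the parent object name.
	composite := errors.CompositeValidationError(
		errors.Required("title", "body", nil),
		errors.TooLong("summary", "body", 500, "…"),
	)
	composite.ValidateName("createWidgetRequest")
	fmt.Println(composite.Error())
	// validation failure list:
	// createWidgetRequest.title in body is required
	// createWidgetRequest.summary in body should be at most 500 chars long
}
```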

1
vendor/github.com/go-openapi/inflect/.hgignore generated vendored Normal file
View File

@ -0,0 +1 @@
swp$

7
vendor/github.com/go-openapi/inflect/LICENCE generated vendored Normal file
View File

@ -0,0 +1,7 @@
Copyright (c) 2011 Chris Farmiloe
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

168
vendor/github.com/go-openapi/inflect/README generated vendored Normal file
View File

@ -0,0 +1,168 @@
INSTALLATION
go get bitbucket.org/pkg/inflect
PACKAGE
package inflect
FUNCTIONS
func AddAcronym(word string)
func AddHuman(suffix, replacement string)
func AddIrregular(singular, plural string)
func AddPlural(suffix, replacement string)
func AddSingular(suffix, replacement string)
func AddUncountable(word string)
func Asciify(word string) string
func Camelize(word string) string
func CamelizeDownFirst(word string) string
func Capitalize(word string) string
func Dasherize(word string) string
func ForeignKey(word string) string
func ForeignKeyCondensed(word string) string
func Humanize(word string) string
func Ordinalize(word string) string
func Parameterize(word string) string
func ParameterizeJoin(word, sep string) string
func Pluralize(word string) string
func Singularize(word string) string
func Tableize(word string) string
func Titleize(word string) string
func Typeify(word string) string
func Uncountables() map[string]bool
func Underscore(word string) string
TYPES
type Rule struct {
// contains filtered or unexported fields
}
used by rulesets
type Ruleset struct {
// contains filtered or unexported fields
}
a Ruleset is the config of pluralization rules
you can extend the rules with the Add* methods
func NewDefaultRuleset() *Ruleset
create a new ruleset and load it with the default
set of common English pluralization rules
func NewRuleset() *Ruleset
create a blank ruleset. Unless you are going to
build your own rules from scratch you probably
won't need this and can just use the defaultRuleset
via the global inflect.* methods
func (rs *Ruleset) AddAcronym(word string)
if you use acronyms you may need to add them to the ruleset
to prevent Underscored words of things like "HTML" coming out
as "h_t_m_l"
func (rs *Ruleset) AddHuman(suffix, replacement string)
Human rules are applied by humanize to show more friendly
versions of words
func (rs *Ruleset) AddIrregular(singular, plural string)
Add any inconsistent pluralizing/singularizing rules
to the set here.
func (rs *Ruleset) AddPlural(suffix, replacement string)
add a pluralization rule
func (rs *Ruleset) AddPluralExact(suffix, replacement string, exact bool)
add a pluralization rule with full string match
func (rs *Ruleset) AddSingular(suffix, replacement string)
add a singular rule
func (rs *Ruleset) AddSingularExact(suffix, replacement string, exact bool)
same as AddSingular but you can set `exact` to force
a full string match
func (rs *Ruleset) AddUncountable(word string)
add a word to this ruleset that has the same singular and plural form
for example: "rice"
func (rs *Ruleset) Asciify(word string) string
transforms latin characters like é -> e
func (rs *Ruleset) Camelize(word string) string
"dino_party" -> "DinoParty"
func (rs *Ruleset) CamelizeDownFirst(word string) string
same as Camelcase but with first letter downcased
func (rs *Ruleset) Capitalize(word string) string
uppercase first character
func (rs *Ruleset) Dasherize(word string) string
"SomeText" -> "some-text"
func (rs *Ruleset) ForeignKey(word string) string
an underscored foreign key name "Person" -> "person_id"
func (rs *Ruleset) ForeignKeyCondensed(word string) string
a foreign key (with an underscore) "Person" -> "personid"
func (rs *Ruleset) Humanize(word string) string
First letter of sentence capitalized
Uses custom friendly replacements via AddHuman()
func (rs *Ruleset) Ordinalize(str string) string
"1031" -> "1031st"
func (rs *Ruleset) Parameterize(word string) string
param safe dasherized names like "my-param"
func (rs *Ruleset) ParameterizeJoin(word, sep string) string
param safe dasherized names with custom separator
func (rs *Ruleset) Pluralize(word string) string
returns the plural form of a singular word
func (rs *Ruleset) Singularize(word string) string
returns the singular form of a plural word
func (rs *Ruleset) Tableize(word string) string
Rails style pluralized table names: "SuperPerson" -> "super_people"
func (rs *Ruleset) Titleize(word string) string
Capitalize every word in sentence "hello there" -> "Hello There"
func (rs *Ruleset) Typeify(word string) string
"something_like_this" -> "SomethingLikeThis"
func (rs *Ruleset) Uncountables() map[string]bool
func (rs *Ruleset) Underscore(word string) string
lowercase underscore version "BigBen" -> "big_ben"
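As a quick illustration of the package-level helpers listed above, a minimal sketch; the expected outputs are inferred from the default ruleset in inflect.go below.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/inflect"
)

func main() {
	// The package-level helpers delegate to a shared default English ruleset.
	fmt.Println(inflect.Pluralize("status"))     // statuses
	fmt.Println(inflect.Singularize("indices"))  // index
	fmt.Println(inflect.Camelize("dino_party"))  // DinoParty
	fmt.Println(inflect.Underscore("BigBen"))    // big_ben
	fmt.Println(inflect.Tableize("SuperPerson")) // super_people
	fmt.Println(inflect.Ordinalize("1031"))      // 1031st
}
```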

713
vendor/github.com/go-openapi/inflect/inflect.go generated vendored Normal file
View File

@ -0,0 +1,713 @@
package inflect
import (
"fmt"
"regexp"
"strconv"
"strings"
"unicode"
"unicode/utf8"
)
// used by rulesets
type Rule struct {
suffix string
replacement string
exact bool
}
// a Ruleset is the config of pluralization rules
// you can extend the rules with the Add* methods
type Ruleset struct {
uncountables map[string]bool
plurals []*Rule
singulars []*Rule
humans []*Rule
acronyms []*Rule
acronymMatcher *regexp.Regexp
}
// create a blank ruleset. Unless you are going to
// build your own rules from scratch you probably
// won't need this and can just use the defaultRuleset
// via the global inflect.* methods
func NewRuleset() *Ruleset {
rs := new(Ruleset)
rs.uncountables = make(map[string]bool)
rs.plurals = make([]*Rule, 0)
rs.singulars = make([]*Rule, 0)
rs.humans = make([]*Rule, 0)
rs.acronyms = make([]*Rule, 0)
return rs
}
// create a new ruleset and load it with the default
// set of common English pluralization rules
func NewDefaultRuleset() *Ruleset {
rs := NewRuleset()
rs.AddPlural("s", "s")
rs.AddPlural("testis", "testes")
rs.AddPlural("axis", "axes")
rs.AddPlural("octopus", "octopi")
rs.AddPlural("virus", "viri")
rs.AddPlural("octopi", "octopi")
rs.AddPlural("viri", "viri")
rs.AddPlural("alias", "aliases")
rs.AddPlural("status", "statuses")
rs.AddPlural("bus", "buses")
rs.AddPlural("buffalo", "buffaloes")
rs.AddPlural("tomato", "tomatoes")
rs.AddPlural("tum", "ta")
rs.AddPlural("ium", "ia")
rs.AddPlural("ta", "ta")
rs.AddPlural("ia", "ia")
rs.AddPlural("sis", "ses")
rs.AddPlural("lf", "lves")
rs.AddPlural("rf", "rves")
rs.AddPlural("afe", "aves")
rs.AddPlural("bfe", "bves")
rs.AddPlural("cfe", "cves")
rs.AddPlural("dfe", "dves")
rs.AddPlural("efe", "eves")
rs.AddPlural("gfe", "gves")
rs.AddPlural("hfe", "hves")
rs.AddPlural("ife", "ives")
rs.AddPlural("jfe", "jves")
rs.AddPlural("kfe", "kves")
rs.AddPlural("lfe", "lves")
rs.AddPlural("mfe", "mves")
rs.AddPlural("nfe", "nves")
rs.AddPlural("ofe", "oves")
rs.AddPlural("pfe", "pves")
rs.AddPlural("qfe", "qves")
rs.AddPlural("rfe", "rves")
rs.AddPlural("sfe", "sves")
rs.AddPlural("tfe", "tves")
rs.AddPlural("ufe", "uves")
rs.AddPlural("vfe", "vves")
rs.AddPlural("wfe", "wves")
rs.AddPlural("xfe", "xves")
rs.AddPlural("yfe", "yves")
rs.AddPlural("zfe", "zves")
rs.AddPlural("hive", "hives")
rs.AddPlural("quy", "quies")
rs.AddPlural("by", "bies")
rs.AddPlural("cy", "cies")
rs.AddPlural("dy", "dies")
rs.AddPlural("fy", "fies")
rs.AddPlural("gy", "gies")
rs.AddPlural("hy", "hies")
rs.AddPlural("jy", "jies")
rs.AddPlural("ky", "kies")
rs.AddPlural("ly", "lies")
rs.AddPlural("my", "mies")
rs.AddPlural("ny", "nies")
rs.AddPlural("py", "pies")
rs.AddPlural("qy", "qies")
rs.AddPlural("ry", "ries")
rs.AddPlural("sy", "sies")
rs.AddPlural("ty", "ties")
rs.AddPlural("vy", "vies")
rs.AddPlural("wy", "wies")
rs.AddPlural("xy", "xies")
rs.AddPlural("zy", "zies")
rs.AddPlural("x", "xes")
rs.AddPlural("ch", "ches")
rs.AddPlural("ss", "sses")
rs.AddPlural("sh", "shes")
rs.AddPlural("matrix", "matrices")
rs.AddPlural("vertix", "vertices")
rs.AddPlural("indix", "indices")
rs.AddPlural("matrex", "matrices")
rs.AddPlural("vertex", "vertices")
rs.AddPlural("index", "indices")
rs.AddPlural("mouse", "mice")
rs.AddPlural("louse", "lice")
rs.AddPlural("mice", "mice")
rs.AddPlural("lice", "lice")
rs.AddPluralExact("ox", "oxen", true)
rs.AddPluralExact("oxen", "oxen", true)
rs.AddPluralExact("quiz", "quizzes", true)
rs.AddSingular("s", "")
rs.AddSingular("news", "news")
rs.AddSingular("ta", "tum")
rs.AddSingular("ia", "ium")
rs.AddSingular("analyses", "analysis")
rs.AddSingular("bases", "basis")
rs.AddSingular("diagnoses", "diagnosis")
rs.AddSingular("parentheses", "parenthesis")
rs.AddSingular("prognoses", "prognosis")
rs.AddSingular("synopses", "synopsis")
rs.AddSingular("theses", "thesis")
rs.AddSingular("analyses", "analysis")
rs.AddSingular("aves", "afe")
rs.AddSingular("bves", "bfe")
rs.AddSingular("cves", "cfe")
rs.AddSingular("dves", "dfe")
rs.AddSingular("eves", "efe")
rs.AddSingular("gves", "gfe")
rs.AddSingular("hves", "hfe")
rs.AddSingular("ives", "ife")
rs.AddSingular("jves", "jfe")
rs.AddSingular("kves", "kfe")
rs.AddSingular("lves", "lfe")
rs.AddSingular("mves", "mfe")
rs.AddSingular("nves", "nfe")
rs.AddSingular("oves", "ofe")
rs.AddSingular("pves", "pfe")
rs.AddSingular("qves", "qfe")
rs.AddSingular("rves", "rfe")
rs.AddSingular("sves", "sfe")
rs.AddSingular("tves", "tfe")
rs.AddSingular("uves", "ufe")
rs.AddSingular("vves", "vfe")
rs.AddSingular("wves", "wfe")
rs.AddSingular("xves", "xfe")
rs.AddSingular("yves", "yfe")
rs.AddSingular("zves", "zfe")
rs.AddSingular("hives", "hive")
rs.AddSingular("tives", "tive")
rs.AddSingular("lves", "lf")
rs.AddSingular("rves", "rf")
rs.AddSingular("quies", "quy")
rs.AddSingular("bies", "by")
rs.AddSingular("cies", "cy")
rs.AddSingular("dies", "dy")
rs.AddSingular("fies", "fy")
rs.AddSingular("gies", "gy")
rs.AddSingular("hies", "hy")
rs.AddSingular("jies", "jy")
rs.AddSingular("kies", "ky")
rs.AddSingular("lies", "ly")
rs.AddSingular("mies", "my")
rs.AddSingular("nies", "ny")
rs.AddSingular("pies", "py")
rs.AddSingular("qies", "qy")
rs.AddSingular("ries", "ry")
rs.AddSingular("sies", "sy")
rs.AddSingular("ties", "ty")
rs.AddSingular("vies", "vy")
rs.AddSingular("wies", "wy")
rs.AddSingular("xies", "xy")
rs.AddSingular("zies", "zy")
rs.AddSingular("series", "series")
rs.AddSingular("movies", "movie")
rs.AddSingular("xes", "x")
rs.AddSingular("ches", "ch")
rs.AddSingular("sses", "ss")
rs.AddSingular("shes", "sh")
rs.AddSingular("mice", "mouse")
rs.AddSingular("lice", "louse")
rs.AddSingular("buses", "bus")
rs.AddSingular("oes", "o")
rs.AddSingular("shoes", "shoe")
rs.AddSingular("crises", "crisis")
rs.AddSingular("axes", "axis")
rs.AddSingular("testes", "testis")
rs.AddSingular("octopi", "octopus")
rs.AddSingular("viri", "virus")
rs.AddSingular("statuses", "status")
rs.AddSingular("aliases", "alias")
rs.AddSingularExact("oxen", "ox", true)
rs.AddSingular("vertices", "vertex")
rs.AddSingular("indices", "index")
rs.AddSingular("matrices", "matrix")
rs.AddSingularExact("quizzes", "quiz", true)
rs.AddSingular("databases", "database")
rs.AddIrregular("person", "people")
rs.AddIrregular("man", "men")
rs.AddIrregular("child", "children")
rs.AddIrregular("sex", "sexes")
rs.AddIrregular("move", "moves")
rs.AddIrregular("zombie", "zombies")
rs.AddUncountable("equipment")
rs.AddUncountable("information")
rs.AddUncountable("rice")
rs.AddUncountable("money")
rs.AddUncountable("species")
rs.AddUncountable("series")
rs.AddUncountable("fish")
rs.AddUncountable("sheep")
rs.AddUncountable("jeans")
rs.AddUncountable("police")
return rs
}
func (rs *Ruleset) Uncountables() map[string]bool {
return rs.uncountables
}
// add a pluralization rule
func (rs *Ruleset) AddPlural(suffix, replacement string) {
rs.AddPluralExact(suffix, replacement, false)
}
// add a pluralization rule with full string match
func (rs *Ruleset) AddPluralExact(suffix, replacement string, exact bool) {
// remove uncountable
delete(rs.uncountables, suffix)
// create rule
r := new(Rule)
r.suffix = suffix
r.replacement = replacement
r.exact = exact
// prepend
rs.plurals = append([]*Rule{r}, rs.plurals...)
}
// add a singular rule
func (rs *Ruleset) AddSingular(suffix, replacement string) {
rs.AddSingularExact(suffix, replacement, false)
}
// same as AddSingular but you can set `exact` to force
// a full string match
func (rs *Ruleset) AddSingularExact(suffix, replacement string, exact bool) {
// remove from uncountable
delete(rs.uncountables, suffix)
// create rule
r := new(Rule)
r.suffix = suffix
r.replacement = replacement
r.exact = exact
rs.singulars = append([]*Rule{r}, rs.singulars...)
}
// Human rules are applied by humanize to show more friendly
// versions of words
func (rs *Ruleset) AddHuman(suffix, replacement string) {
r := new(Rule)
r.suffix = suffix
r.replacement = replacement
rs.humans = append([]*Rule{r}, rs.humans...)
}
// Add any inconsistent pluralizing/singularizing rules
// to the set here.
func (rs *Ruleset) AddIrregular(singular, plural string) {
delete(rs.uncountables, singular)
delete(rs.uncountables, plural)
rs.AddPlural(singular, plural)
rs.AddPlural(plural, plural)
rs.AddSingular(plural, singular)
}
// if you use acronyms you may need to add them to the ruleset
// to prevent Underscored words of things like "HTML" coming out
// as "h_t_m_l"
func (rs *Ruleset) AddAcronym(word string) {
r := new(Rule)
r.suffix = word
r.replacement = rs.Titleize(strings.ToLower(word))
rs.acronyms = append(rs.acronyms, r)
}
// add a word to this ruleset that has the same singular and plural form
// for example: "rice"
func (rs *Ruleset) AddUncountable(word string) {
rs.uncountables[strings.ToLower(word)] = true
}
func (rs *Ruleset) isUncountable(word string) bool {
// handle multiple words by using the last one
words := strings.Split(word, " ")
if _, exists := rs.uncountables[strings.ToLower(words[len(words)-1])]; exists {
return true
}
return false
}
// returns the plural form of a singular word
func (rs *Ruleset) Pluralize(word string) string {
if len(word) == 0 {
return word
}
if rs.isUncountable(word) {
return word
}
for _, rule := range rs.plurals {
if rule.exact {
if word == rule.suffix {
return rule.replacement
}
} else {
if strings.HasSuffix(word, rule.suffix) {
return replaceLast(word, rule.suffix, rule.replacement)
}
}
}
return word + "s"
}
// returns the singular form of a plural word
func (rs *Ruleset) Singularize(word string) string {
if len(word) == 0 {
return word
}
if rs.isUncountable(word) {
return word
}
for _, rule := range rs.singulars {
if rule.exact {
if word == rule.suffix {
return rule.replacement
}
} else {
if strings.HasSuffix(word, rule.suffix) {
return replaceLast(word, rule.suffix, rule.replacement)
}
}
}
return word
}
// uppercase first character
func (rs *Ruleset) Capitalize(word string) string {
return strings.ToUpper(word[:1]) + word[1:]
}
// "dino_party" -> "DinoParty"
func (rs *Ruleset) Camelize(word string) string {
words := splitAtCaseChangeWithTitlecase(word)
return strings.Join(words, "")
}
// same as Camelcase but with first letter downcased
func (rs *Ruleset) CamelizeDownFirst(word string) string {
word = Camelize(word)
return strings.ToLower(word[:1]) + word[1:]
}
// Capitalize every word in sentence "hello there" -> "Hello There"
func (rs *Ruleset) Titleize(word string) string {
words := splitAtCaseChangeWithTitlecase(word)
return strings.Join(words, " ")
}
func (rs *Ruleset) safeCaseAcronyms(word string) string {
// convert an acronym like HTML into Html
for _, rule := range rs.acronyms {
word = strings.Replace(word, rule.suffix, rule.replacement, -1)
}
return word
}
func (rs *Ruleset) seperatedWords(word, sep string) string {
word = rs.safeCaseAcronyms(word)
words := splitAtCaseChange(word)
return strings.Join(words, sep)
}
// lowercase underscore version "BigBen" -> "big_ben"
func (rs *Ruleset) Underscore(word string) string {
return rs.seperatedWords(word, "_")
}
// First letter of sentence capitalized
// Uses custom friendly replacements via AddHuman()
func (rs *Ruleset) Humanize(word string) string {
word = replaceLast(word, "_id", "") // strip foreign key kinds
// replace and strings in humans list
for _, rule := range rs.humans {
word = strings.Replace(word, rule.suffix, rule.replacement, -1)
}
sentance := rs.seperatedWords(word, " ")
return strings.ToUpper(sentance[:1]) + sentance[1:]
}
// an underscored foreign key name "Person" -> "person_id"
func (rs *Ruleset) ForeignKey(word string) string {
return rs.Underscore(rs.Singularize(word)) + "_id"
}
// a foreign key (with an underscore) "Person" -> "personid"
func (rs *Ruleset) ForeignKeyCondensed(word string) string {
return rs.Underscore(word) + "id"
}
// Rails style pluralized table names: "SuperPerson" -> "super_people"
func (rs *Ruleset) Tableize(word string) string {
return rs.Pluralize(rs.Underscore(rs.Typeify(word)))
}
var notUrlSafe *regexp.Regexp = regexp.MustCompile(`[^\w\d\-_ ]`)
// param safe dasherized names like "my-param"
func (rs *Ruleset) Parameterize(word string) string {
return ParameterizeJoin(word, "-")
}
// param safe dasherized names with custom separator
func (rs *Ruleset) ParameterizeJoin(word, sep string) string {
word = strings.ToLower(word)
word = rs.Asciify(word)
word = notUrlSafe.ReplaceAllString(word, "")
word = strings.Replace(word, " ", sep, -1)
if len(sep) > 0 {
squash, err := regexp.Compile(sep + "+")
if err == nil {
word = squash.ReplaceAllString(word, sep)
}
}
word = strings.Trim(word, sep+" ")
return word
}
var lookalikes map[string]*regexp.Regexp = map[string]*regexp.Regexp{
"A": regexp.MustCompile(`À|Á|Â|Ã|Ä|Å`),
"AE": regexp.MustCompile(`Æ`),
"C": regexp.MustCompile(`Ç`),
"E": regexp.MustCompile(`È|É|Ê|Ë`),
"G": regexp.MustCompile(`Ğ`),
"I": regexp.MustCompile(`Ì|Í|Î|Ï|İ`),
"N": regexp.MustCompile(`Ñ`),
"O": regexp.MustCompile(`Ò|Ó|Ô|Õ|Ö|Ø`),
"S": regexp.MustCompile(`Ş`),
"U": regexp.MustCompile(`Ù|Ú|Û|Ü`),
"Y": regexp.MustCompile(`Ý`),
"ss": regexp.MustCompile(`ß`),
"a": regexp.MustCompile(`à|á|â|ã|ä|å`),
"ae": regexp.MustCompile(`æ`),
"c": regexp.MustCompile(`ç`),
"e": regexp.MustCompile(`è|é|ê|ë`),
"g": regexp.MustCompile(`ğ`),
"i": regexp.MustCompile(`ì|í|î|ï|ı`),
"n": regexp.MustCompile(`ñ`),
"o": regexp.MustCompile(`ò|ó|ô|õ|ö|ø`),
"s": regexp.MustCompile(`ş`),
"u": regexp.MustCompile(`ù|ú|û|ü|ũ|ū|ŭ|ů|ű|ų`),
"y": regexp.MustCompile(`ý|ÿ`),
}
// transforms latin characters like é -> e
func (rs *Ruleset) Asciify(word string) string {
for repl, regex := range lookalikes {
word = regex.ReplaceAllString(word, repl)
}
return word
}
var tablePrefix *regexp.Regexp = regexp.MustCompile(`^[^.]*\.`)
// "something_like_this" -> "SomethingLikeThis"
func (rs *Ruleset) Typeify(word string) string {
word = tablePrefix.ReplaceAllString(word, "")
return rs.Camelize(rs.Singularize(word))
}
// "SomeText" -> "some-text"
func (rs *Ruleset) Dasherize(word string) string {
return rs.seperatedWords(word, "-")
}
// "1031" -> "1031st"
func (rs *Ruleset) Ordinalize(str string) string {
number, err := strconv.Atoi(str)
if err != nil {
return str
}
switch abs(number) % 100 {
case 11, 12, 13:
return fmt.Sprintf("%dth", number)
default:
switch abs(number) % 10 {
case 1:
return fmt.Sprintf("%dst", number)
case 2:
return fmt.Sprintf("%dnd", number)
case 3:
return fmt.Sprintf("%drd", number)
}
}
return fmt.Sprintf("%dth", number)
}
/////////////////////////////////////////
// the default global ruleset
//////////////////////////////////////////
var defaultRuleset *Ruleset
func init() {
defaultRuleset = NewDefaultRuleset()
}
func Uncountables() map[string]bool {
return defaultRuleset.Uncountables()
}
func AddPlural(suffix, replacement string) {
defaultRuleset.AddPlural(suffix, replacement)
}
func AddSingular(suffix, replacement string) {
defaultRuleset.AddSingular(suffix, replacement)
}
func AddHuman(suffix, replacement string) {
defaultRuleset.AddHuman(suffix, replacement)
}
func AddIrregular(singular, plural string) {
defaultRuleset.AddIrregular(singular, plural)
}
func AddAcronym(word string) {
defaultRuleset.AddAcronym(word)
}
func AddUncountable(word string) {
defaultRuleset.AddUncountable(word)
}
func Pluralize(word string) string {
return defaultRuleset.Pluralize(word)
}
func Singularize(word string) string {
return defaultRuleset.Singularize(word)
}
func Capitalize(word string) string {
return defaultRuleset.Capitalize(word)
}
func Camelize(word string) string {
return defaultRuleset.Camelize(word)
}
func CamelizeDownFirst(word string) string {
return defaultRuleset.CamelizeDownFirst(word)
}
func Titleize(word string) string {
return defaultRuleset.Titleize(word)
}
func Underscore(word string) string {
return defaultRuleset.Underscore(word)
}
func Humanize(word string) string {
return defaultRuleset.Humanize(word)
}
func ForeignKey(word string) string {
return defaultRuleset.ForeignKey(word)
}
func ForeignKeyCondensed(word string) string {
return defaultRuleset.ForeignKeyCondensed(word)
}
func Tableize(word string) string {
return defaultRuleset.Tableize(word)
}
func Parameterize(word string) string {
return defaultRuleset.Parameterize(word)
}
func ParameterizeJoin(word, sep string) string {
return defaultRuleset.ParameterizeJoin(word, sep)
}
func Typeify(word string) string {
return defaultRuleset.Typeify(word)
}
func Dasherize(word string) string {
return defaultRuleset.Dasherize(word)
}
func Ordinalize(word string) string {
return defaultRuleset.Ordinalize(word)
}
func Asciify(word string) string {
return defaultRuleset.Asciify(word)
}
// helper funcs
func reverse(s string) string {
o := make([]rune, utf8.RuneCountInString(s))
i := len(o)
for _, c := range s {
i--
o[i] = c
}
return string(o)
}
func isSpacerChar(c rune) bool {
switch {
case c == rune("_"[0]):
return true
case c == rune(" "[0]):
return true
case c == rune(":"[0]):
return true
case c == rune("-"[0]):
return true
}
return false
}
func splitAtCaseChange(s string) []string {
words := make([]string, 0)
word := make([]rune, 0)
for _, c := range s {
spacer := isSpacerChar(c)
if len(word) > 0 {
if unicode.IsUpper(c) || spacer {
words = append(words, string(word))
word = make([]rune, 0)
}
}
if !spacer {
word = append(word, unicode.ToLower(c))
}
}
words = append(words, string(word))
return words
}
func splitAtCaseChangeWithTitlecase(s string) []string {
words := make([]string, 0)
word := make([]rune, 0)
for _, c := range s {
spacer := isSpacerChar(c)
if len(word) > 0 {
if unicode.IsUpper(c) || spacer {
words = append(words, string(word))
word = make([]rune, 0)
}
}
if !spacer {
if len(word) > 0 {
word = append(word, unicode.ToLower(c))
} else {
word = append(word, unicode.ToUpper(c))
}
}
}
words = append(words, string(word))
return words
}
func replaceLast(s, match, repl string) string {
// reverse strings
srev := reverse(s)
mrev := reverse(match)
rrev := reverse(repl)
// match first and reverse back
return reverse(strings.Replace(srev, mrev, rrev, 1))
}
func abs(x int) int {
if x < 0 {
return -x
}
return x
}
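A hedged sketch of extending a private ruleset with the Add* methods above so custom acronyms and irregular forms don't leak into the global helpers; the example words are assumptions.

```go
package main

import (
	"fmt"

	"github.com/go-openapi/inflect"
)

func main() {
	rs := inflect.NewDefaultRuleset()
	rs.AddAcronym("HTML")            // keeps "HTML" from underscoring to "h_t_m_l"
	rs.AddIrregular("datum", "data") // irregular singular/plural pair

	fmt.Println(rs.Underscore("HTMLPage")) // html_page
	fmt.Println(rs.Pluralize("datum"))     // data
	fmt.Println(rs.Singularize("data"))    // datum
}
```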

26
vendor/github.com/go-openapi/jsonpointer/.editorconfig generated vendored Normal file
View File

@ -0,0 +1,26 @@
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
# Set default charset
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
charset = utf-8
# Tab indentation (no size specified)
[*.go]
indent_style = tab
[*.md]
trim_trailing_whitespace = false
# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2

1
vendor/github.com/go-openapi/jsonpointer/.gitignore generated vendored Normal file
View File

@ -0,0 +1 @@
secrets.yml

74
vendor/github.com/go-openapi/jsonpointer/CODE_OF_CONDUCT.md generated vendored Normal file
View File

@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

202
vendor/github.com/go-openapi/jsonpointer/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

15
vendor/github.com/go-openapi/jsonpointer/README.md generated vendored Normal file
View File

@@ -0,0 +1,15 @@
# gojsonpointer [![Build Status](https://travis-ci.org/go-openapi/jsonpointer.svg?branch=master)](https://travis-ci.org/go-openapi/jsonpointer) [![codecov](https://codecov.io/gh/go-openapi/jsonpointer/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonpointer) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonpointer/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonpointer?status.svg)](http://godoc.org/github.com/go-openapi/jsonpointer)
An implementation of JSON Pointer - Go language
## Status
Completed YES
Tested YES
## References
http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
### Note
The 4.Evaluation part of the previous reference, starting with 'If the currently referenced value is a JSON array, the reference token MUST contain either...' is not implemented.
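For orientation, a minimal usage sketch follows, assuming only the exported New and Get API defined in pointer.go below; the document contents and the pointer string are invented for illustration.

package main

import (
	"fmt"

	"github.com/go-openapi/jsonpointer"
)

func main() {
	// An arbitrary document, shaped like JSON unmarshalled into interface{} values
	// (hypothetical data, for illustration only).
	doc := map[string]interface{}{
		"definitions": map[string]interface{}{
			"Pet": map[string]interface{}{"type": "object"},
		},
	}

	// Parse a pointer and resolve it against the document.
	p, err := jsonpointer.New("/definitions/Pet/type")
	if err != nil {
		panic(err)
	}
	v, kind, err := p.Get(doc)
	if err != nil {
		panic(err)
	}
	fmt.Println(v, kind) // object string
}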

390
vendor/github.com/go-openapi/jsonpointer/pointer.go generated vendored Normal file
View File

@@ -0,0 +1,390 @@
// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// author sigu-399
// author-github https://github.com/sigu-399
// author-mail sigu.399@gmail.com
//
// repository-name jsonpointer
// repository-desc An implementation of JSON Pointer - Go language
//
// description Main and unique file.
//
// created 25-02-2013
package jsonpointer
import (
"errors"
"fmt"
"reflect"
"strconv"
"strings"
"github.com/go-openapi/swag"
)
const (
emptyPointer = ``
pointerSeparator = `/`
invalidStart = `JSON pointer must be empty or start with a "` + pointerSeparator
)
var jsonPointableType = reflect.TypeOf(new(JSONPointable)).Elem()
var jsonSetableType = reflect.TypeOf(new(JSONSetable)).Elem()
// JSONPointable is an interface for structs to implement when they need to customize the
// json pointer process
type JSONPointable interface {
JSONLookup(string) (interface{}, error)
}
// JSONSetable is an interface for structs to implement when they need to customize the
// json pointer process
type JSONSetable interface {
JSONSet(string, interface{}) error
}
// New creates a new json pointer for the given string
func New(jsonPointerString string) (Pointer, error) {
var p Pointer
err := p.parse(jsonPointerString)
return p, err
}
// Pointer the json pointer representation
type Pointer struct {
referenceTokens []string
}
// "Constructor", parses the given string JSON pointer
func (p *Pointer) parse(jsonPointerString string) error {
var err error
if jsonPointerString != emptyPointer {
if !strings.HasPrefix(jsonPointerString, pointerSeparator) {
err = errors.New(invalidStart)
} else {
referenceTokens := strings.Split(jsonPointerString, pointerSeparator)
for _, referenceToken := range referenceTokens[1:] {
p.referenceTokens = append(p.referenceTokens, referenceToken)
}
}
}
return err
}
// Get uses the pointer to retrieve a value from a JSON document
func (p *Pointer) Get(document interface{}) (interface{}, reflect.Kind, error) {
return p.get(document, swag.DefaultJSONNameProvider)
}
// Set uses the pointer to set a value in a JSON document
func (p *Pointer) Set(document interface{}, value interface{}) (interface{}, error) {
return document, p.set(document, value, swag.DefaultJSONNameProvider)
}
// GetForToken gets a value for a json pointer token 1 level deep
func GetForToken(document interface{}, decodedToken string) (interface{}, reflect.Kind, error) {
return getSingleImpl(document, decodedToken, swag.DefaultJSONNameProvider)
}
// SetForToken sets a value for a json pointer token 1 level deep
func SetForToken(document interface{}, decodedToken string, value interface{}) (interface{}, error) {
return document, setSingleImpl(document, value, decodedToken, swag.DefaultJSONNameProvider)
}
func getSingleImpl(node interface{}, decodedToken string, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) {
rValue := reflect.Indirect(reflect.ValueOf(node))
kind := rValue.Kind()
if rValue.Type().Implements(jsonPointableType) {
r, err := node.(JSONPointable).JSONLookup(decodedToken)
if err != nil {
return nil, kind, err
}
return r, kind, nil
}
switch kind {
case reflect.Struct:
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
if !ok {
return nil, kind, fmt.Errorf("object has no field %q", decodedToken)
}
fld := rValue.FieldByName(nm)
return fld.Interface(), kind, nil
case reflect.Map:
kv := reflect.ValueOf(decodedToken)
mv := rValue.MapIndex(kv)
if mv.IsValid() {
return mv.Interface(), kind, nil
}
return nil, kind, fmt.Errorf("object has no key %q", decodedToken)
case reflect.Slice:
tokenIndex, err := strconv.Atoi(decodedToken)
if err != nil {
return nil, kind, err
}
sLength := rValue.Len()
if tokenIndex < 0 || tokenIndex >= sLength {
return nil, kind, fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength-1, tokenIndex)
}
elem := rValue.Index(tokenIndex)
return elem.Interface(), kind, nil
default:
return nil, kind, fmt.Errorf("invalid token reference %q", decodedToken)
}
}
func setSingleImpl(node, data interface{}, decodedToken string, nameProvider *swag.NameProvider) error {
rValue := reflect.Indirect(reflect.ValueOf(node))
if ns, ok := node.(JSONSetable); ok { // pointer impl
return ns.JSONSet(decodedToken, data)
}
if rValue.Type().Implements(jsonSetableType) {
return node.(JSONSetable).JSONSet(decodedToken, data)
}
switch rValue.Kind() {
case reflect.Struct:
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
if !ok {
return fmt.Errorf("object has no field %q", decodedToken)
}
fld := rValue.FieldByName(nm)
if fld.IsValid() {
fld.Set(reflect.ValueOf(data))
}
return nil
case reflect.Map:
kv := reflect.ValueOf(decodedToken)
rValue.SetMapIndex(kv, reflect.ValueOf(data))
return nil
case reflect.Slice:
tokenIndex, err := strconv.Atoi(decodedToken)
if err != nil {
return err
}
sLength := rValue.Len()
if tokenIndex < 0 || tokenIndex >= sLength {
return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
}
elem := rValue.Index(tokenIndex)
if !elem.CanSet() {
return fmt.Errorf("can't set slice index %s to %v", decodedToken, data)
}
elem.Set(reflect.ValueOf(data))
return nil
default:
return fmt.Errorf("invalid token reference %q", decodedToken)
}
}
func (p *Pointer) get(node interface{}, nameProvider *swag.NameProvider) (interface{}, reflect.Kind, error) {
if nameProvider == nil {
nameProvider = swag.DefaultJSONNameProvider
}
kind := reflect.Invalid
// Full document when empty
if len(p.referenceTokens) == 0 {
return node, kind, nil
}
for _, token := range p.referenceTokens {
decodedToken := Unescape(token)
r, knd, err := getSingleImpl(node, decodedToken, nameProvider)
if err != nil {
return nil, knd, err
}
node, kind = r, knd
}
rValue := reflect.ValueOf(node)
kind = rValue.Kind()
return node, kind, nil
}
func (p *Pointer) set(node, data interface{}, nameProvider *swag.NameProvider) error {
knd := reflect.ValueOf(node).Kind()
if knd != reflect.Ptr && knd != reflect.Struct && knd != reflect.Map && knd != reflect.Slice && knd != reflect.Array {
return fmt.Errorf("only structs, pointers, maps and slices are supported for setting values")
}
if nameProvider == nil {
nameProvider = swag.DefaultJSONNameProvider
}
// Full document when empty
if len(p.referenceTokens) == 0 {
return nil
}
lastI := len(p.referenceTokens) - 1
for i, token := range p.referenceTokens {
isLastToken := i == lastI
decodedToken := Unescape(token)
if isLastToken {
return setSingleImpl(node, data, decodedToken, nameProvider)
}
rValue := reflect.Indirect(reflect.ValueOf(node))
kind := rValue.Kind()
if rValue.Type().Implements(jsonPointableType) {
r, err := node.(JSONPointable).JSONLookup(decodedToken)
if err != nil {
return err
}
fld := reflect.ValueOf(r)
if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
node = fld.Addr().Interface()
continue
}
node = r
continue
}
switch kind {
case reflect.Struct:
nm, ok := nameProvider.GetGoNameForType(rValue.Type(), decodedToken)
if !ok {
return fmt.Errorf("object has no field %q", decodedToken)
}
fld := rValue.FieldByName(nm)
if fld.CanAddr() && fld.Kind() != reflect.Interface && fld.Kind() != reflect.Map && fld.Kind() != reflect.Slice && fld.Kind() != reflect.Ptr {
node = fld.Addr().Interface()
continue
}
node = fld.Interface()
case reflect.Map:
kv := reflect.ValueOf(decodedToken)
mv := rValue.MapIndex(kv)
if !mv.IsValid() {
return fmt.Errorf("object has no key %q", decodedToken)
}
if mv.CanAddr() && mv.Kind() != reflect.Interface && mv.Kind() != reflect.Map && mv.Kind() != reflect.Slice && mv.Kind() != reflect.Ptr {
node = mv.Addr().Interface()
continue
}
node = mv.Interface()
case reflect.Slice:
tokenIndex, err := strconv.Atoi(decodedToken)
if err != nil {
return err
}
sLength := rValue.Len()
if tokenIndex < 0 || tokenIndex >= sLength {
return fmt.Errorf("index out of bounds array[0,%d] index '%d'", sLength, tokenIndex)
}
elem := rValue.Index(tokenIndex)
if elem.CanAddr() && elem.Kind() != reflect.Interface && elem.Kind() != reflect.Map && elem.Kind() != reflect.Slice && elem.Kind() != reflect.Ptr {
node = elem.Addr().Interface()
continue
}
node = elem.Interface()
default:
return fmt.Errorf("invalid token reference %q", decodedToken)
}
}
return nil
}
// DecodedTokens returns the decoded tokens
func (p *Pointer) DecodedTokens() []string {
result := make([]string, 0, len(p.referenceTokens))
for _, t := range p.referenceTokens {
result = append(result, Unescape(t))
}
return result
}
// IsEmpty returns true if this is an empty json pointer
// this indicates that it points to the root document
func (p *Pointer) IsEmpty() bool {
return len(p.referenceTokens) == 0
}
// Pointer to string representation function
func (p *Pointer) String() string {
if len(p.referenceTokens) == 0 {
return emptyPointer
}
pointerString := pointerSeparator + strings.Join(p.referenceTokens, pointerSeparator)
return pointerString
}
// Specific JSON pointer encoding here
// ~0 => ~
// ~1 => /
// ... and vice versa
const (
encRefTok0 = `~0`
encRefTok1 = `~1`
decRefTok0 = `~`
decRefTok1 = `/`
)
// Unescape unescapes a json pointer reference token string to the original representation
func Unescape(token string) string {
step1 := strings.Replace(token, encRefTok1, decRefTok1, -1)
step2 := strings.Replace(step1, encRefTok0, decRefTok0, -1)
return step2
}
// Escape escapes a pointer reference token string
func Escape(token string) string {
step1 := strings.Replace(token, decRefTok0, encRefTok0, -1)
step2 := strings.Replace(step1, decRefTok1, encRefTok1, -1)
return step2
}
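As a quick illustration of the ~0/~1 token encoding documented in the constants above, here is a short sketch using the exported Escape and Unescape helpers; the token values are invented.

package main

import (
	"fmt"

	"github.com/go-openapi/jsonpointer"
)

func main() {
	// "~" becomes "~0" and "/" becomes "~1" when a token is escaped, and vice versa.
	fmt.Println(jsonpointer.Escape("a/b~c"))     // a~1b~0c
	fmt.Println(jsonpointer.Unescape("a~1b~0c")) // a/b~c
}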

1
vendor/github.com/go-openapi/jsonreference/.gitignore generated vendored Normal file
View File

@@ -0,0 +1 @@
secrets.yml

50
vendor/github.com/go-openapi/jsonreference/.golangci.yml generated vendored Normal file
View File

@@ -0,0 +1,50 @@
linters-settings:
govet:
check-shadowing: true
gocyclo:
min-complexity: 30
maligned:
suggest-new: true
dupl:
threshold: 100
goconst:
min-len: 2
min-occurrences: 4
paralleltest:
ignore-missing: true
linters:
enable-all: true
disable:
- maligned
- lll
- gochecknoglobals
- godox
- gocognit
- whitespace
- wsl
- funlen
- gochecknoglobals
- gochecknoinits
- scopelint
- wrapcheck
- exhaustivestruct
- exhaustive
- nlreturn
- testpackage
- gci
- gofumpt
- goerr113
- gomnd
- tparallel
- nestif
- godot
- errorlint
- varcheck
- interfacer
- deadcode
- golint
- ifshort
- structcheck
- nosnakecase
- varnamelen
- exhaustruct

74
vendor/github.com/go-openapi/jsonreference/CODE_OF_CONDUCT.md generated vendored Normal file
View File

@@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

202
vendor/github.com/go-openapi/jsonreference/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

15
vendor/github.com/go-openapi/jsonreference/README.md generated vendored Normal file
View File

@@ -0,0 +1,15 @@
# gojsonreference [![Build Status](https://travis-ci.org/go-openapi/jsonreference.svg?branch=master)](https://travis-ci.org/go-openapi/jsonreference) [![codecov](https://codecov.io/gh/go-openapi/jsonreference/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/jsonreference) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/jsonreference/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/jsonreference?status.svg)](http://godoc.org/github.com/go-openapi/jsonreference)
An implementation of JSON Reference - Go language
## Status
Feature complete. Stable API
## Dependencies
https://github.com/go-openapi/jsonpointer
## References
http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07
http://tools.ietf.org/html/draft-pbryan-zyp-json-ref-03
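A minimal parsing sketch, assuming the exported New API and the Has* flags defined in reference.go further below; the reference strings are invented for illustration.

package main

import (
	"fmt"

	"github.com/go-openapi/jsonreference"
)

func main() {
	// Errors are ignored here only to keep the sketch short.
	full, _ := jsonreference.New("https://example.com/spec.json#/definitions/Pet")
	frag, _ := jsonreference.New("#/definitions/Pet")

	fmt.Println(full.HasFullURL, full.IsCanonical()) // true true
	fmt.Println(frag.HasFragmentOnly)                // true
	fmt.Println(frag.GetPointer().String())          // /definitions/Pet
}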

69
vendor/github.com/go-openapi/jsonreference/internal/normalize_url.go generated vendored Normal file
View File

@@ -0,0 +1,69 @@
package internal
import (
"net/url"
"regexp"
"strings"
)
const (
defaultHTTPPort = ":80"
defaultHTTPSPort = ":443"
)
// Regular expressions used by the normalizations
var rxPort = regexp.MustCompile(`(:\d+)/?$`)
var rxDupSlashes = regexp.MustCompile(`/{2,}`)
// NormalizeURL will normalize the specified URL
// This was added to replace a previous call to the no longer maintained purell library:
// The call that was used looked like the following:
//
// url.Parse(purell.NormalizeURL(parsed, purell.FlagsSafe|purell.FlagRemoveDuplicateSlashes))
//
// To explain all that was included in the call above, purell.FlagsSafe was really just the following:
// - FlagLowercaseScheme
// - FlagLowercaseHost
// - FlagRemoveDefaultPort
// - FlagRemoveDuplicateSlashes (and this was mixed in with the |)
//
// This also normalizes the URL into its urlencoded form by removing RawPath and RawFragment.
func NormalizeURL(u *url.URL) {
lowercaseScheme(u)
lowercaseHost(u)
removeDefaultPort(u)
removeDuplicateSlashes(u)
u.RawPath = ""
u.RawFragment = ""
}
func lowercaseScheme(u *url.URL) {
if len(u.Scheme) > 0 {
u.Scheme = strings.ToLower(u.Scheme)
}
}
func lowercaseHost(u *url.URL) {
if len(u.Host) > 0 {
u.Host = strings.ToLower(u.Host)
}
}
func removeDefaultPort(u *url.URL) {
if len(u.Host) > 0 {
scheme := strings.ToLower(u.Scheme)
u.Host = rxPort.ReplaceAllStringFunc(u.Host, func(val string) string {
if (scheme == "http" && val == defaultHTTPPort) || (scheme == "https" && val == defaultHTTPSPort) {
return ""
}
return val
})
}
}
func removeDuplicateSlashes(u *url.URL) {
if len(u.Path) > 0 {
u.Path = rxDupSlashes.ReplaceAllString(u.Path, "/")
}
}
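To make the normalization steps described in the NormalizeURL comment concrete, here is a test-style sketch; it assumes it sits alongside this internal package (which cannot be imported from outside the jsonreference module), and the input URL is invented.

package internal

import (
	"fmt"
	"net/url"
)

// demoNormalizeURL is illustrative only: it shows the lowercasing,
// default-port removal and duplicate-slash collapsing performed by
// NormalizeURL above.
func demoNormalizeURL() {
	u, _ := url.Parse("HTTPS://Example.COM:443//spec//swagger.json")
	NormalizeURL(u)
	fmt.Println(u.String()) // https://example.com/spec/swagger.json
}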

158
vendor/github.com/go-openapi/jsonreference/reference.go generated vendored Normal file
View File

@@ -0,0 +1,158 @@
// Copyright 2013 sigu-399 ( https://github.com/sigu-399 )
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// author sigu-399
// author-github https://github.com/sigu-399
// author-mail sigu.399@gmail.com
//
// repository-name jsonreference
// repository-desc An implementation of JSON Reference - Go language
//
// description Main and unique file.
//
// created 26-02-2013
package jsonreference
import (
"errors"
"net/url"
"strings"
"github.com/go-openapi/jsonpointer"
"github.com/go-openapi/jsonreference/internal"
)
const (
fragmentRune = `#`
)
// New creates a new reference for the given string
func New(jsonReferenceString string) (Ref, error) {
var r Ref
err := r.parse(jsonReferenceString)
return r, err
}
// MustCreateRef parses the ref string and panics when it's invalid.
// Use the New method for a version that returns an error
func MustCreateRef(ref string) Ref {
r, err := New(ref)
if err != nil {
panic(err)
}
return r
}
// Ref represents a json reference object
type Ref struct {
referenceURL *url.URL
referencePointer jsonpointer.Pointer
HasFullURL bool
HasURLPathOnly bool
HasFragmentOnly bool
HasFileScheme bool
HasFullFilePath bool
}
// GetURL gets the URL for this reference
func (r *Ref) GetURL() *url.URL {
return r.referenceURL
}
// GetPointer gets the json pointer for this reference
func (r *Ref) GetPointer() *jsonpointer.Pointer {
return &r.referencePointer
}
// String returns the best version of the url for this reference
func (r *Ref) String() string {
if r.referenceURL != nil {
return r.referenceURL.String()
}
if r.HasFragmentOnly {
return fragmentRune + r.referencePointer.String()
}
return r.referencePointer.String()
}
// IsRoot returns true if this reference is a root document
func (r *Ref) IsRoot() bool {
return r.referenceURL != nil &&
!r.IsCanonical() &&
!r.HasURLPathOnly &&
r.referenceURL.Fragment == ""
}
// IsCanonical returns true when this pointer starts with http(s):// or file://
func (r *Ref) IsCanonical() bool {
return (r.HasFileScheme && r.HasFullFilePath) || (!r.HasFileScheme && r.HasFullURL)
}
// "Constructor", parses the given string JSON reference
func (r *Ref) parse(jsonReferenceString string) error {
parsed, err := url.Parse(jsonReferenceString)
if err != nil {
return err
}
internal.NormalizeURL(parsed)
r.referenceURL = parsed
refURL := r.referenceURL
if refURL.Scheme != "" && refURL.Host != "" {
r.HasFullURL = true
} else {
if refURL.Path != "" {
r.HasURLPathOnly = true
} else if refURL.RawQuery == "" && refURL.Fragment != "" {
r.HasFragmentOnly = true
}
}
r.HasFileScheme = refURL.Scheme == "file"
r.HasFullFilePath = strings.HasPrefix(refURL.Path, "/")
// invalid json-pointer error means url has no json-pointer fragment. simply ignore error
r.referencePointer, _ = jsonpointer.New(refURL.Fragment)
return nil
}
// Inherits creates a new reference from a parent and a child
// If the child cannot inherit from the parent, an error is returned
func (r *Ref) Inherits(child Ref) (*Ref, error) {
childURL := child.GetURL()
parentURL := r.GetURL()
if childURL == nil {
return nil, errors.New("child url is nil")
}
if parentURL == nil {
return &child, nil
}
ref, err := New(parentURL.ResolveReference(childURL).String())
if err != nil {
return nil, err
}
return &ref, nil
}
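A short resolution sketch, assuming the MustCreateRef and Inherits API defined above; the parent and child reference strings are invented for illustration.

package main

import (
	"fmt"

	"github.com/go-openapi/jsonreference"
)

func main() {
	parent := jsonreference.MustCreateRef("https://example.com/api/swagger.json")
	child := jsonreference.MustCreateRef("definitions.json#/Pet")

	// Resolve the child reference against the parent document's URL.
	resolved, err := parent.Inherits(child)
	if err != nil {
		panic(err)
	}
	fmt.Println(resolved.String()) // https://example.com/api/definitions.json#/Pet
}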

26
vendor/github.com/go-openapi/loads/.editorconfig generated vendored Normal file
View File

@@ -0,0 +1,26 @@
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
# Set default charset
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
charset = utf-8
# Tab indentation (no size specified)
[*.go]
indent_style = tab
[*.md]
trim_trailing_whitespace = false
# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2

4
vendor/github.com/go-openapi/loads/.gitignore generated vendored Normal file
View File

@@ -0,0 +1,4 @@
secrets.yml
coverage.out
profile.cov
profile.out

44
vendor/github.com/go-openapi/loads/.golangci.yml generated vendored Normal file
View File

@@ -0,0 +1,44 @@
linters-settings:
govet:
check-shadowing: true
golint:
min-confidence: 0
gocyclo:
min-complexity: 30
maligned:
suggest-new: true
dupl:
threshold: 100
goconst:
min-len: 2
min-occurrences: 4
linters:
enable-all: true
disable:
- maligned
- lll
- gochecknoglobals
- gochecknoinits
- godox
- gocognit
- whitespace
- wsl
- funlen
- gochecknoglobals
- gochecknoinits
- scopelint
- wrapcheck
- exhaustivestruct
- exhaustive
- nlreturn
- testpackage
- gci
- gofumpt
- goerr113
- gomnd
- tparallel
- nestif
- godot
- errorlint
- paralleltest

25
vendor/github.com/go-openapi/loads/.travis.yml generated vendored Normal file
View File

@@ -0,0 +1,25 @@
after_success:
- bash <(curl -s https://codecov.io/bash)
go:
- 1.16.x
- 1.x
install:
- go get gotest.tools/gotestsum
language: go
arch:
- amd64
- ppc64le
jobs:
include:
# include linting job, but only for latest go version and amd64 arch
- go: 1.x
arch: amd64
install:
go get github.com/golangci/golangci-lint/cmd/golangci-lint
script:
- golangci-lint run --new-from-rev master
notifications:
slack:
secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM=
script:
- gotestsum -f short-verbose -- -race -timeout=20m -coverprofile=coverage.txt -covermode=atomic ./...

74
vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md generated vendored Normal file
View File

@@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

202
vendor/github.com/go-openapi/loads/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

6
vendor/github.com/go-openapi/loads/README.md generated vendored Normal file
View File

@ -0,0 +1,6 @@
# Loads OAI specs [![Build Status](https://travis-ci.org/go-openapi/loads.svg?branch=master)](https://travis-ci.org/go-openapi/loads) [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io) [![Actions/Go Test Status](https://github.com/go-openapi/loads/workflows/Go%20Test/badge.svg)](https://github.com/go-openapi/loads/actions?query=workflow%3A"Go+Test")
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads)
[![Go Report Card](https://goreportcard.com/badge/github.com/go-openapi/loads)](https://goreportcard.com/report/github.com/go-openapi/loads)
Loading of OAI specification documents from local or remote locations. Supports JSON and YAML documents.
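A rough standalone sketch of what this README describes (added for illustration, not part of the vendored file): load a spec from a path and read a few top-level fields. The swagger.yaml path is a placeholder.

package main

import (
    "fmt"
    "log"

    "github.com/go-openapi/loads"
)

func main() {
    // Load a local or remote OAI spec; JSON and YAML are matched by the
    // default loader chain. "swagger.yaml" is a placeholder path.
    doc, err := loads.Spec("swagger.yaml")
    if err != nil {
        log.Fatal(err)
    }

    // A few accessors exposed by the Document type.
    fmt.Println("swagger version:", doc.Version())
    fmt.Println("base path:", doc.BasePath())
    fmt.Println("host:", doc.Host())
}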

21
vendor/github.com/go-openapi/loads/doc.go generated vendored Normal file
View File

@ -0,0 +1,21 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*
Package loads provides document loading methods for swagger (OAI) specifications.
It is used by other go-openapi packages to load and run analysis on local or remote spec documents.
*/
package loads

30
vendor/github.com/go-openapi/loads/fmts/yaml.go generated vendored Normal file
View File

@ -0,0 +1,30 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package fmts
import "github.com/go-openapi/swag"
var (
// YAMLMatcher matches yaml
YAMLMatcher = swag.YAMLMatcher
// YAMLToJSON converts YAML unmarshaled data into json compatible data
YAMLToJSON = swag.YAMLToJSON
// BytesToYAMLDoc converts raw bytes to a map[string]interface{}
BytesToYAMLDoc = swag.BytesToYAMLDoc
// YAMLDoc loads a yaml document from either http or a file and converts it to json
YAMLDoc = swag.YAMLDoc
// YAMLData loads a yaml document from either http or a file
YAMLData = swag.YAMLData
)

134
vendor/github.com/go-openapi/loads/loaders.go generated vendored Normal file
View File

@ -0,0 +1,134 @@
package loads
import (
"encoding/json"
"errors"
"net/url"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
var (
// Default chain of loaders, defined at the package level.
//
// By default this matches json and yaml documents.
//
// May be altered with AddLoader().
loaders *loader
)
func init() {
jsonLoader := &loader{
DocLoaderWithMatch: DocLoaderWithMatch{
Match: func(pth string) bool {
return true
},
Fn: JSONDoc,
},
}
loaders = jsonLoader.WithHead(&loader{
DocLoaderWithMatch: DocLoaderWithMatch{
Match: swag.YAMLMatcher,
Fn: swag.YAMLDoc,
},
})
// sets the global default loader for go-openapi/spec
spec.PathLoader = loaders.Load
}
// DocLoader represents a doc loader type
type DocLoader func(string) (json.RawMessage, error)
// DocMatcher represents a predicate to check if a loader matches
type DocMatcher func(string) bool
// DocLoaderWithMatch describes a loading function for a given extension match.
type DocLoaderWithMatch struct {
Fn DocLoader
Match DocMatcher
}
// NewDocLoaderWithMatch builds a DocLoaderWithMatch to be used in load options
func NewDocLoaderWithMatch(fn DocLoader, matcher DocMatcher) DocLoaderWithMatch {
return DocLoaderWithMatch{
Fn: fn,
Match: matcher,
}
}
type loader struct {
DocLoaderWithMatch
Next *loader
}
// WithHead adds a loader at the head of the current stack
func (l *loader) WithHead(head *loader) *loader {
if head == nil {
return l
}
head.Next = l
return head
}
// WithNext adds a loader at the trail of the current stack
func (l *loader) WithNext(next *loader) *loader {
l.Next = next
return next
}
// Load the raw document from path
func (l *loader) Load(path string) (json.RawMessage, error) {
_, erp := url.Parse(path)
if erp != nil {
return nil, erp
}
var lastErr error = errors.New("no loader matched") // default error if no match was found
for ldr := l; ldr != nil; ldr = ldr.Next {
if ldr.Match != nil && !ldr.Match(path) {
continue
}
// try then move to next one if there is an error
b, err := ldr.Fn(path)
if err == nil {
return b, nil
}
lastErr = err
}
return nil, lastErr
}
// JSONDoc loads a json document from either a file or a remote url
func JSONDoc(path string) (json.RawMessage, error) {
data, err := swag.LoadFromFileOrHTTP(path)
if err != nil {
return nil, err
}
return json.RawMessage(data), nil
}
// AddLoader for a document, executed before other previously set loaders.
//
// This sets the configuration at the package level.
//
// NOTE:
// * this updates the default loader used by github.com/go-openapi/spec
// * since this sets package level globals, you shouldn't call this concurrently
//
func AddLoader(predicate DocMatcher, load DocLoader) {
loaders = loaders.WithHead(&loader{
DocLoaderWithMatch: DocLoaderWithMatch{
Match: predicate,
Fn: load,
},
})
// sets the global default loader for go-openapi/spec
spec.PathLoader = loaders.Load
}
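As a sketch of how the package-level chain above can be extended (added here for illustration only), the following registers a hypothetical loader for a made-up ".spec" extension ahead of the defaults; the extension, the loader body, and the api.spec path are all placeholders.

package main

import (
    "encoding/json"
    "log"
    "strings"

    "github.com/go-openapi/loads"
    "github.com/go-openapi/swag"
)

func main() {
    // Route ".spec" files through a custom loader, ahead of the default
    // JSON/YAML loaders. Matcher and loader are placeholders.
    loads.AddLoader(
        func(pth string) bool { return strings.HasSuffix(pth, ".spec") },
        func(pth string) (json.RawMessage, error) {
            b, err := swag.LoadFromFileOrHTTP(pth)
            if err != nil {
                return nil, err
            }
            return json.RawMessage(b), nil
        },
    )

    // Subsequent loads (and the go-openapi/spec resolver) consult the
    // custom loader first; "api.spec" is a placeholder path.
    doc, err := loads.Spec("api.spec")
    if err != nil {
        log.Fatal(err)
    }
    log.Println("loaded spec version", doc.Version())
}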

61
vendor/github.com/go-openapi/loads/options.go generated vendored Normal file
View File

@ -0,0 +1,61 @@
package loads
type options struct {
loader *loader
}
func defaultOptions() *options {
return &options{
loader: loaders,
}
}
func loaderFromOptions(options []LoaderOption) *loader {
opts := defaultOptions()
for _, apply := range options {
apply(opts)
}
return opts.loader
}
// LoaderOption allows to fine-tune the spec loader behavior
type LoaderOption func(*options)
// WithDocLoader sets a custom loader for loading specs
func WithDocLoader(l DocLoader) LoaderOption {
return func(opt *options) {
if l == nil {
return
}
opt.loader = &loader{
DocLoaderWithMatch: DocLoaderWithMatch{
Fn: l,
},
}
}
}
// WithDocLoaderMatches sets a chain of custom loaders for loading specs
// for different extension matches.
//
// Loaders are executed in the order of provided DocLoaderWithMatch'es.
func WithDocLoaderMatches(l ...DocLoaderWithMatch) LoaderOption {
return func(opt *options) {
var final, prev *loader
for _, ldr := range l {
if ldr.Fn == nil {
continue
}
if prev == nil {
final = &loader{DocLoaderWithMatch: ldr}
prev = final
continue
}
prev = prev.WithNext(&loader{DocLoaderWithMatch: ldr})
}
opt.loader = final
}
}
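A minimal sketch of the per-call option above (illustration only, outside the vendored source): build a YAML-then-JSON chain with NewDocLoaderWithMatch and pass it to Spec. The swagger.yaml path is a placeholder.

package main

import (
    "log"

    "github.com/go-openapi/loads"
    "github.com/go-openapi/swag"
)

func main() {
    // Per-call loader chain instead of touching the package-level default:
    // YAML first, then JSON as a catch-all.
    yamlLoader := loads.NewDocLoaderWithMatch(swag.YAMLDoc, swag.YAMLMatcher)
    jsonLoader := loads.NewDocLoaderWithMatch(loads.JSONDoc, func(string) bool { return true })

    doc, err := loads.Spec("swagger.yaml", loads.WithDocLoaderMatches(yamlLoader, jsonLoader))
    if err != nil {
        log.Fatal(err)
    }
    log.Println("loaded spec version", doc.Version())
}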

266
vendor/github.com/go-openapi/loads/spec.go generated vendored Normal file
View File

@ -0,0 +1,266 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package loads
import (
"bytes"
"encoding/gob"
"encoding/json"
"fmt"
"github.com/go-openapi/analysis"
"github.com/go-openapi/spec"
"github.com/go-openapi/swag"
)
func init() {
gob.Register(map[string]interface{}{})
gob.Register([]interface{}{})
}
// Document represents a swagger spec document
type Document struct {
// specAnalyzer
Analyzer *analysis.Spec
spec *spec.Swagger
specFilePath string
origSpec *spec.Swagger
schema *spec.Schema
raw json.RawMessage
pathLoader *loader
}
// JSONSpec loads a spec from a json document
func JSONSpec(path string, options ...LoaderOption) (*Document, error) {
data, err := JSONDoc(path)
if err != nil {
return nil, err
}
// convert to json
return Analyzed(data, "", options...)
}
// Embedded returns a Document based on embedded specs. No analysis is required
func Embedded(orig, flat json.RawMessage, options ...LoaderOption) (*Document, error) {
var origSpec, flatSpec spec.Swagger
if err := json.Unmarshal(orig, &origSpec); err != nil {
return nil, err
}
if err := json.Unmarshal(flat, &flatSpec); err != nil {
return nil, err
}
return &Document{
raw: orig,
origSpec: &origSpec,
spec: &flatSpec,
pathLoader: loaderFromOptions(options),
}, nil
}
// Spec loads a new spec document from a local or remote path
func Spec(path string, options ...LoaderOption) (*Document, error) {
ldr := loaderFromOptions(options)
b, err := ldr.Load(path)
if err != nil {
return nil, err
}
document, err := Analyzed(b, "", options...)
if err != nil {
return nil, err
}
if document != nil {
document.specFilePath = path
document.pathLoader = ldr
}
return document, err
}
// Analyzed creates a new analyzed spec document for a root json.RawMessage.
func Analyzed(data json.RawMessage, version string, options ...LoaderOption) (*Document, error) {
if version == "" {
version = "2.0"
}
if version != "2.0" {
return nil, fmt.Errorf("spec version %q is not supported", version)
}
raw, err := trimData(data) // trim blanks, then convert yaml docs into json
if err != nil {
return nil, err
}
swspec := new(spec.Swagger)
if err = json.Unmarshal(raw, swspec); err != nil {
return nil, err
}
origsqspec, err := cloneSpec(swspec)
if err != nil {
return nil, err
}
d := &Document{
Analyzer: analysis.New(swspec),
schema: spec.MustLoadSwagger20Schema(),
spec: swspec,
raw: raw,
origSpec: origsqspec,
pathLoader: loaderFromOptions(options),
}
return d, nil
}
func trimData(in json.RawMessage) (json.RawMessage, error) {
trimmed := bytes.TrimSpace(in)
if len(trimmed) == 0 {
return in, nil
}
if trimmed[0] == '{' || trimmed[0] == '[' {
return trimmed, nil
}
// assume yaml doc: convert it to json
yml, err := swag.BytesToYAMLDoc(trimmed)
if err != nil {
return nil, fmt.Errorf("analyzed: %v", err)
}
d, err := swag.YAMLToJSON(yml)
if err != nil {
return nil, fmt.Errorf("analyzed: %v", err)
}
return d, nil
}
// Expanded expands the ref fields in the spec document and returns a new spec document
func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {
swspec := new(spec.Swagger)
if err := json.Unmarshal(d.raw, swspec); err != nil {
return nil, err
}
var expandOptions *spec.ExpandOptions
if len(options) > 0 {
expandOptions = options[0]
} else {
expandOptions = &spec.ExpandOptions{
RelativeBase: d.specFilePath,
}
}
if expandOptions.PathLoader == nil {
if d.pathLoader != nil {
// use loader from Document options
expandOptions.PathLoader = d.pathLoader.Load
} else {
// use package level loader
expandOptions.PathLoader = loaders.Load
}
}
if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
return nil, err
}
dd := &Document{
Analyzer: analysis.New(swspec),
spec: swspec,
specFilePath: d.specFilePath,
schema: spec.MustLoadSwagger20Schema(),
raw: d.raw,
origSpec: d.origSpec,
}
return dd, nil
}
// BasePath the base path for this spec
func (d *Document) BasePath() string {
return d.spec.BasePath
}
// Version returns the version of this spec
func (d *Document) Version() string {
return d.spec.Swagger
}
// Schema returns the swagger 2.0 schema
func (d *Document) Schema() *spec.Schema {
return d.schema
}
// Spec returns the swagger spec object model
func (d *Document) Spec() *spec.Swagger {
return d.spec
}
// Host returns the host for the API
func (d *Document) Host() string {
return d.spec.Host
}
// Raw returns the raw swagger spec as json bytes
func (d *Document) Raw() json.RawMessage {
return d.raw
}
// OrigSpec yields the original spec
func (d *Document) OrigSpec() *spec.Swagger {
return d.origSpec
}
// ResetDefinitions gives a shallow copy with the models reset to the original spec
func (d *Document) ResetDefinitions() *Document {
defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
for k, v := range d.origSpec.Definitions {
defs[k] = v
}
d.spec.Definitions = defs
return d
}
// Pristine creates a new pristine document instance based on the input data
func (d *Document) Pristine() *Document {
dd, _ := Analyzed(d.Raw(), d.Version())
dd.pathLoader = d.pathLoader
return dd
}
// SpecFilePath returns the file path of the spec if one is defined
func (d *Document) SpecFilePath() string {
return d.specFilePath
}
func cloneSpec(src *spec.Swagger) (*spec.Swagger, error) {
var b bytes.Buffer
if err := gob.NewEncoder(&b).Encode(src); err != nil {
return nil, err
}
var dst spec.Swagger
if err := gob.NewDecoder(&b).Decode(&dst); err != nil {
return nil, err
}
return &dst, nil
}
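To illustrate the Document lifecycle above (a sketch added for context, not vendored code): load a spec, expand its $ref pointers, and count the resulting paths. The swagger.yaml path is a placeholder.

package main

import (
    "fmt"
    "log"

    "github.com/go-openapi/loads"
)

func main() {
    // Load, then expand all $ref pointers into a self-contained document.
    doc, err := loads.Spec("swagger.yaml")
    if err != nil {
        log.Fatal(err)
    }

    expanded, err := doc.Expanded()
    if err != nil {
        log.Fatal(err)
    }

    // The expanded document carries its own analyzer and spec object model.
    if paths := expanded.Spec().Paths; paths != nil {
        fmt.Println("paths after expansion:", len(paths.Paths))
    }
}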

26
vendor/github.com/go-openapi/runtime/.editorconfig generated vendored Normal file
View File

@ -0,0 +1,26 @@
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
indent_style = space
indent_size = 2
trim_trailing_whitespace = true
# Set default charset
[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
charset = utf-8
# Tab indentation (no size specified)
[*.go]
indent_style = tab
[*.md]
trim_trailing_whitespace = false
# Matches the exact files either package.json or .travis.yml
[{package.json,.travis.yml}]
indent_style = space
indent_size = 2

1
vendor/github.com/go-openapi/runtime/.gitattributes generated vendored Normal file
View File

@ -0,0 +1 @@
*.go text eol=lf

5
vendor/github.com/go-openapi/runtime/.gitignore generated vendored Normal file
View File

@ -0,0 +1,5 @@
secrets.yml
coverage.out
*.cov
*.out
playground

44
vendor/github.com/go-openapi/runtime/.golangci.yml generated vendored Normal file
View File

@ -0,0 +1,44 @@
linters-settings:
govet:
# Using err repeatedly considered as shadowing.
check-shadowing: false
golint:
min-confidence: 0
gocyclo:
min-complexity: 30
maligned:
suggest-new: true
dupl:
threshold: 100
goconst:
min-len: 2
min-occurrences: 4
linters:
disable:
- maligned
- lll
- gochecknoglobals
- godox
- gocognit
- whitespace
- wsl
- funlen
- gochecknoglobals
- gochecknoinits
- scopelint
- wrapcheck
- exhaustivestruct
- exhaustive
- nlreturn
- testpackage
- gci
- gofumpt
- goerr113
- gomnd
- tparallel
- nestif
- godot
- errorlint
- noctx
- interfacer
- nilerr

View File

@ -0,0 +1,74 @@
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at ivan+abuse@flanders.co.nz. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/

202
vendor/github.com/go-openapi/runtime/LICENSE generated vendored Normal file
View File

@ -0,0 +1,202 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

7
vendor/github.com/go-openapi/runtime/README.md generated vendored Normal file
View File

@ -0,0 +1,7 @@
# runtime [![Build Status](https://travis-ci.org/go-openapi/runtime.svg?branch=client-context)](https://travis-ci.org/go-openapi/runtime) [![codecov](https://codecov.io/gh/go-openapi/runtime/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/runtime) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/runtime/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/runtime?status.svg)](http://godoc.org/github.com/go-openapi/runtime)
# golang Open-API toolkit - runtime
The runtime component for use in codegeneration or as untyped usage.

169
vendor/github.com/go-openapi/runtime/bytestream.go generated vendored Normal file
View File

@ -0,0 +1,169 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"bytes"
"encoding"
"errors"
"fmt"
"io"
"reflect"
"github.com/go-openapi/swag"
)
func defaultCloser() error { return nil }
type byteStreamOpt func(opts *byteStreamOpts)
// ClosesStream when the bytestream consumer or producer is finished
func ClosesStream(opts *byteStreamOpts) {
opts.Close = true
}
type byteStreamOpts struct {
Close bool
}
// ByteStreamConsumer creates a consumer for byte streams,
// takes a Writer/BinaryUnmarshaler interface or binary slice by reference,
// and reads from the provided reader
func ByteStreamConsumer(opts ...byteStreamOpt) Consumer {
var vals byteStreamOpts
for _, opt := range opts {
opt(&vals)
}
return ConsumerFunc(func(reader io.Reader, data interface{}) error {
if reader == nil {
return errors.New("ByteStreamConsumer requires a reader") // early exit
}
close := defaultCloser
if vals.Close {
if cl, ok := reader.(io.Closer); ok {
close = cl.Close
}
}
//nolint:errcheck // closing a reader wouldn't fail.
defer close()
if wrtr, ok := data.(io.Writer); ok {
_, err := io.Copy(wrtr, reader)
return err
}
buf := new(bytes.Buffer)
_, err := buf.ReadFrom(reader)
if err != nil {
return err
}
b := buf.Bytes()
if bu, ok := data.(encoding.BinaryUnmarshaler); ok {
return bu.UnmarshalBinary(b)
}
if data != nil {
if str, ok := data.(*string); ok {
*str = string(b)
return nil
}
}
if t := reflect.TypeOf(data); data != nil && t.Kind() == reflect.Ptr {
v := reflect.Indirect(reflect.ValueOf(data))
if t = v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {
v.SetBytes(b)
return nil
}
}
return fmt.Errorf("%v (%T) is not supported by the ByteStreamConsumer, %s",
data, data, "can be resolved by supporting Writer/BinaryUnmarshaler interface")
})
}
// ByteStreamProducer creates a producer for byte streams,
// takes a Reader/BinaryMarshaler interface or binary slice,
// and writes to a writer (essentially a pipe)
func ByteStreamProducer(opts ...byteStreamOpt) Producer {
var vals byteStreamOpts
for _, opt := range opts {
opt(&vals)
}
return ProducerFunc(func(writer io.Writer, data interface{}) error {
if writer == nil {
return errors.New("ByteStreamProducer requires a writer") // early exit
}
close := defaultCloser
if vals.Close {
if cl, ok := writer.(io.Closer); ok {
close = cl.Close
}
}
//nolint:errcheck // TODO: closing a writer would fail.
defer close()
if rc, ok := data.(io.ReadCloser); ok {
defer rc.Close()
}
if rdr, ok := data.(io.Reader); ok {
_, err := io.Copy(writer, rdr)
return err
}
if bm, ok := data.(encoding.BinaryMarshaler); ok {
bytes, err := bm.MarshalBinary()
if err != nil {
return err
}
_, err = writer.Write(bytes)
return err
}
if data != nil {
if str, ok := data.(string); ok {
_, err := writer.Write([]byte(str))
return err
}
if e, ok := data.(error); ok {
_, err := writer.Write([]byte(e.Error()))
return err
}
v := reflect.Indirect(reflect.ValueOf(data))
if t := v.Type(); t.Kind() == reflect.Slice && t.Elem().Kind() == reflect.Uint8 {
_, err := writer.Write(v.Bytes())
return err
}
if t := v.Type(); t.Kind() == reflect.Struct || t.Kind() == reflect.Slice {
b, err := swag.WriteJSON(data)
if err != nil {
return err
}
_, err = writer.Write(b)
return err
}
}
return fmt.Errorf("%v (%T) is not supported by the ByteStreamProducer, %s",
data, data, "can be resolved by supporting Reader/BinaryMarshaler interface")
})
}
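A small usage sketch for the consumer/producer pair above (illustration only): stream a payload into a bytes.Buffer and back out through an io.Reader. The payload text is a placeholder.

package main

import (
    "bytes"
    "log"
    "strings"

    "github.com/go-openapi/runtime"
)

func main() {
    // Consume: copy a body into anything that implements io.Writer.
    var buf bytes.Buffer
    if err := runtime.ByteStreamConsumer().Consume(strings.NewReader("raw payload"), &buf); err != nil {
        log.Fatal(err)
    }

    // Produce: write anything that implements io.Reader (or a byte slice,
    // string, or encoding.BinaryMarshaler) to the output writer.
    var out bytes.Buffer
    if err := runtime.ByteStreamProducer().Produce(&out, bytes.NewReader(buf.Bytes())); err != nil {
        log.Fatal(err)
    }
    log.Println(out.String())
}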

View File

@ -0,0 +1,30 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import "github.com/go-openapi/strfmt"
// A ClientAuthInfoWriterFunc converts a function to a request writer interface
type ClientAuthInfoWriterFunc func(ClientRequest, strfmt.Registry) error
// AuthenticateRequest adds authentication data to the request
func (fn ClientAuthInfoWriterFunc) AuthenticateRequest(req ClientRequest, reg strfmt.Registry) error {
return fn(req, reg)
}
// A ClientAuthInfoWriter implementor knows how to write authentication info to a request
type ClientAuthInfoWriter interface {
AuthenticateRequest(ClientRequest, strfmt.Registry) error
}
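As an illustration of the interface above (a sketch, not vendored code), a bearer-token auth writer exercised against the package's TestClientRequest helper; the token value is a placeholder.

package main

import (
    "fmt"
    "log"

    "github.com/go-openapi/runtime"
    "github.com/go-openapi/strfmt"
)

// bearerAuth returns an auth writer that sets an Authorization header.
// The token value is supplied by the caller and is a placeholder here.
func bearerAuth(token string) runtime.ClientAuthInfoWriter {
    return runtime.ClientAuthInfoWriterFunc(func(req runtime.ClientRequest, _ strfmt.Registry) error {
        return req.SetHeaderParam(runtime.HeaderAuthorization, "Bearer "+token)
    })
}

func main() {
    // Exercise the writer against the in-package TestClientRequest helper.
    req := &runtime.TestClientRequest{}
    if err := bearerAuth("example-token").AuthenticateRequest(req, strfmt.Default); err != nil {
        log.Fatal(err)
    }
    fmt.Println(req.GetHeaderParams().Get(runtime.HeaderAuthorization))
}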

View File

@ -0,0 +1,41 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"context"
"net/http"
)
// ClientOperation represents the context for a swagger operation to be submitted to the transport
type ClientOperation struct {
ID string
Method string
PathPattern string
ProducesMediaTypes []string
ConsumesMediaTypes []string
Schemes []string
AuthInfo ClientAuthInfoWriter
Params ClientRequestWriter
Reader ClientResponseReader
Context context.Context
Client *http.Client
}
// A ClientTransport implementor knows how to submit Request objects to some destination
type ClientTransport interface {
//Submit(string, RequestWriter, ResponseReader, AuthInfoWriter) (interface{}, error)
Submit(*ClientOperation) (interface{}, error)
}

152
vendor/github.com/go-openapi/runtime/client_request.go generated vendored Normal file
View File

@ -0,0 +1,152 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"io"
"net/http"
"net/url"
"time"
"github.com/go-openapi/strfmt"
)
// ClientRequestWriterFunc converts a function to a request writer interface
type ClientRequestWriterFunc func(ClientRequest, strfmt.Registry) error
// WriteToRequest adds data to the request
func (fn ClientRequestWriterFunc) WriteToRequest(req ClientRequest, reg strfmt.Registry) error {
return fn(req, reg)
}
// ClientRequestWriter is an interface for things that know how to write to a request
type ClientRequestWriter interface {
WriteToRequest(ClientRequest, strfmt.Registry) error
}
// ClientRequest is an interface for things that know how to
// add information to a swagger client request
type ClientRequest interface {
SetHeaderParam(string, ...string) error
GetHeaderParams() http.Header
SetQueryParam(string, ...string) error
SetFormParam(string, ...string) error
SetPathParam(string, string) error
GetQueryParams() url.Values
SetFileParam(string, ...NamedReadCloser) error
SetBodyParam(interface{}) error
SetTimeout(time.Duration) error
GetMethod() string
GetPath() string
GetBody() []byte
GetBodyParam() interface{}
GetFileParam() map[string][]NamedReadCloser
}
// NamedReadCloser represents a named ReadCloser interface
type NamedReadCloser interface {
io.ReadCloser
Name() string
}
// NamedReader creates a NamedReadCloser for use as file upload
func NamedReader(name string, rdr io.Reader) NamedReadCloser {
rc, ok := rdr.(io.ReadCloser)
if !ok {
rc = io.NopCloser(rdr)
}
return &namedReadCloser{
name: name,
cr: rc,
}
}
type namedReadCloser struct {
name string
cr io.ReadCloser
}
func (n *namedReadCloser) Close() error {
return n.cr.Close()
}
func (n *namedReadCloser) Read(p []byte) (int, error) {
return n.cr.Read(p)
}
func (n *namedReadCloser) Name() string {
return n.name
}
type TestClientRequest struct {
Headers http.Header
Body interface{}
}
func (t *TestClientRequest) SetHeaderParam(name string, values ...string) error {
if t.Headers == nil {
t.Headers = make(http.Header)
}
t.Headers.Set(name, values[0])
return nil
}
func (t *TestClientRequest) SetQueryParam(_ string, _ ...string) error { return nil }
func (t *TestClientRequest) SetFormParam(_ string, _ ...string) error { return nil }
func (t *TestClientRequest) SetPathParam(_ string, _ string) error { return nil }
func (t *TestClientRequest) SetFileParam(_ string, _ ...NamedReadCloser) error { return nil }
func (t *TestClientRequest) SetBodyParam(body interface{}) error {
t.Body = body
return nil
}
func (t *TestClientRequest) SetTimeout(time.Duration) error {
return nil
}
func (t *TestClientRequest) GetQueryParams() url.Values { return nil }
func (t *TestClientRequest) GetMethod() string { return "" }
func (t *TestClientRequest) GetPath() string { return "" }
func (t *TestClientRequest) GetBody() []byte { return nil }
func (t *TestClientRequest) GetBodyParam() interface{} {
return t.Body
}
func (t *TestClientRequest) GetFileParam() map[string][]NamedReadCloser {
return nil
}
func (t *TestClientRequest) GetHeaderParams() http.Header {
return t.Headers
}
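A sketch of a ClientRequestWriterFunc in action (added for illustration), using the TestClientRequest helper defined above; the parameter names and values are placeholders rather than a real API contract.

package main

import (
    "fmt"
    "log"

    "github.com/go-openapi/runtime"
    "github.com/go-openapi/strfmt"
)

func main() {
    // A request writer that fills a header, a query parameter, and the body.
    writer := runtime.ClientRequestWriterFunc(func(req runtime.ClientRequest, _ strfmt.Registry) error {
        if err := req.SetHeaderParam(runtime.HeaderAccept, runtime.JSONMime); err != nil {
            return err
        }
        if err := req.SetQueryParam("limit", "20"); err != nil {
            return err
        }
        return req.SetBodyParam(map[string]string{"status": "hello world"})
    })

    // TestClientRequest records only headers and the body, which is enough
    // to see the writer in action without a real transport.
    req := &runtime.TestClientRequest{}
    if err := writer.WriteToRequest(req, strfmt.Default); err != nil {
        log.Fatal(err)
    }
    fmt.Println(req.Headers.Get(runtime.HeaderAccept), req.Body)
}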

110
vendor/github.com/go-openapi/runtime/client_response.go generated vendored Normal file
View File

@ -0,0 +1,110 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"encoding/json"
"fmt"
"io"
)
// A ClientResponse represents a client response
// This bridges between responses obtained from different transports
type ClientResponse interface {
Code() int
Message() string
GetHeader(string) string
GetHeaders(string) []string
Body() io.ReadCloser
}
// A ClientResponseReaderFunc turns a function into a ClientResponseReader interface implementation
type ClientResponseReaderFunc func(ClientResponse, Consumer) (interface{}, error)
// ReadResponse reads the response
func (read ClientResponseReaderFunc) ReadResponse(resp ClientResponse, consumer Consumer) (interface{}, error) {
return read(resp, consumer)
}
// A ClientResponseReader is an interface for things that want to read a response.
// An application of this is to create structs from response values
type ClientResponseReader interface {
ReadResponse(ClientResponse, Consumer) (interface{}, error)
}
// NewAPIError creates a new API error
func NewAPIError(opName string, payload interface{}, code int) *APIError {
return &APIError{
OperationName: opName,
Response: payload,
Code: code,
}
}
// APIError wraps an error model and captures the status code
type APIError struct {
OperationName string
Response interface{}
Code int
}
func (o *APIError) Error() string {
var resp []byte
if err, ok := o.Response.(error); ok {
resp = []byte("'" + err.Error() + "'")
} else {
resp, _ = json.Marshal(o.Response)
}
return fmt.Sprintf("%s (status %d): %s", o.OperationName, o.Code, resp)
}
func (o *APIError) String() string {
return o.Error()
}
// IsSuccess returns true when this API error has a 2xx status code
func (o *APIError) IsSuccess() bool {
return o.Code/100 == 2
}
// IsRedirect returns true when this API error has a 3xx status code
func (o *APIError) IsRedirect() bool {
return o.Code/100 == 3
}
// IsClientError returns true when this API error has a 4xx status code
func (o *APIError) IsClientError() bool {
return o.Code/100 == 4
}
// IsServerError returns true when this API error has a 5xx status code
func (o *APIError) IsServerError() bool {
return o.Code/100 == 5
}
// IsCode returns true when this API error has the given status code
func (o *APIError) IsCode(code int) bool {
return o.Code == code
}
// A ClientResponseStatus is a common interface implemented by all responses on the generated code
// You can use this to treat any client response based on status code
type ClientResponseStatus interface {
IsSuccess() bool
IsRedirect() bool
IsClientError() bool
IsServerError() bool
IsCode(int) bool
}
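To show how the APIError helpers above classify responses (a sketch added for context): the operation name and payload below are placeholders.

package main

import (
    "fmt"

    "github.com/go-openapi/runtime"
)

func main() {
    // Wrap an error payload returned by a hypothetical "statusCreate"
    // operation with its HTTP status code.
    apiErr := runtime.NewAPIError("statusCreate", map[string]string{"error": "validation failed"}, 422)

    // The ClientResponseStatus helpers classify the status code.
    fmt.Println(apiErr.IsClientError()) // true: 4xx
    fmt.Println(apiErr.IsCode(422))     // true
    fmt.Println(apiErr.Error())         // "statusCreate (status 422): ..."
}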

49
vendor/github.com/go-openapi/runtime/constants.go generated vendored Normal file
View File

@ -0,0 +1,49 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
const (
// HeaderContentType represents an http content-type header; its value is supposed to be a mime type
HeaderContentType = "Content-Type"
// HeaderTransferEncoding represents a http transfer-encoding header.
HeaderTransferEncoding = "Transfer-Encoding"
// HeaderAccept the Accept header
HeaderAccept = "Accept"
// HeaderAuthorization the Authorization header
HeaderAuthorization = "Authorization"
charsetKey = "charset"
// DefaultMime the default fallback mime type
DefaultMime = "application/octet-stream"
// JSONMime the json mime type
JSONMime = "application/json"
// YAMLMime the yaml mime type
YAMLMime = "application/x-yaml"
// XMLMime the xml mime type
XMLMime = "application/xml"
// TextMime the text mime type
TextMime = "text/plain"
// HTMLMime the html mime type
HTMLMime = "text/html"
// CSVMime the csv mime type
CSVMime = "text/csv"
// MultipartFormMime the multipart form mime type
MultipartFormMime = "multipart/form-data"
// URLencodedFormMime the url encoded form mime type
URLencodedFormMime = "application/x-www-form-urlencoded"
)

77
vendor/github.com/go-openapi/runtime/csv.go generated vendored Normal file
View File

@ -0,0 +1,77 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"bytes"
"encoding/csv"
"errors"
"io"
)
// CSVConsumer creates a new CSV consumer
func CSVConsumer() Consumer {
return ConsumerFunc(func(reader io.Reader, data interface{}) error {
if reader == nil {
return errors.New("CSVConsumer requires a reader")
}
csvReader := csv.NewReader(reader)
writer, ok := data.(io.Writer)
if !ok {
return errors.New("data type must be io.Writer")
}
csvWriter := csv.NewWriter(writer)
records, err := csvReader.ReadAll()
if err != nil {
return err
}
for _, r := range records {
if err := csvWriter.Write(r); err != nil {
return err
}
}
csvWriter.Flush()
return nil
})
}
// CSVProducer creates a new CSV producer
func CSVProducer() Producer {
return ProducerFunc(func(writer io.Writer, data interface{}) error {
if writer == nil {
return errors.New("CSVProducer requires a writer")
}
dataBytes, ok := data.([]byte)
if !ok {
return errors.New("data type must be byte array")
}
csvReader := csv.NewReader(bytes.NewBuffer(dataBytes))
records, err := csvReader.ReadAll()
if err != nil {
return err
}
csvWriter := csv.NewWriter(writer)
for _, r := range records {
if err := csvWriter.Write(r); err != nil {
return err
}
}
csvWriter.Flush()
return nil
})
}
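A usage sketch for the CSV pair above (illustration only): the consumer copies records into an io.Writer target, while the producer expects a []byte payload. The sample rows are placeholders.

package main

import (
    "bytes"
    "log"
    "strings"

    "github.com/go-openapi/runtime"
)

func main() {
    csvBody := "id,name\n1,alice\n2,bob\n"

    // The consumer parses the incoming CSV and writes it to an io.Writer.
    var parsed bytes.Buffer
    if err := runtime.CSVConsumer().Consume(strings.NewReader(csvBody), &parsed); err != nil {
        log.Fatal(err)
    }

    // The producer takes a []byte payload and re-emits it as CSV.
    var out bytes.Buffer
    if err := runtime.CSVProducer().Produce(&out, parsed.Bytes()); err != nil {
        log.Fatal(err)
    }
    log.Print(out.String())
}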

9
vendor/github.com/go-openapi/runtime/discard.go generated vendored Normal file
View File

@ -0,0 +1,9 @@
package runtime
import "io"
// DiscardConsumer does absolutely nothing, it's a black hole.
var DiscardConsumer = ConsumerFunc(func(_ io.Reader, _ interface{}) error { return nil })
// DiscardProducer does absolutely nothing, it's a black hole.
var DiscardProducer = ProducerFunc(func(_ io.Writer, _ interface{}) error { return nil })

19
vendor/github.com/go-openapi/runtime/file.go generated vendored Normal file
View File

@ -0,0 +1,19 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import "github.com/go-openapi/swag"
type File = swag.File

45
vendor/github.com/go-openapi/runtime/headers.go generated vendored Normal file
View File

@ -0,0 +1,45 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"mime"
"net/http"
"github.com/go-openapi/errors"
)
// ContentType parses a content type header
func ContentType(headers http.Header) (string, string, error) {
ct := headers.Get(HeaderContentType)
orig := ct
if ct == "" {
ct = DefaultMime
}
if ct == "" {
return "", "", nil
}
mt, opts, err := mime.ParseMediaType(ct)
if err != nil {
return "", "", errors.NewParseError(HeaderContentType, "header", orig, err)
}
if cs, ok := opts[charsetKey]; ok {
return mt, cs, nil
}
return mt, "", nil
}
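A quick sketch of ContentType above (added for illustration), splitting a header value into media type and charset.

package main

import (
    "fmt"
    "log"
    "net/http"

    "github.com/go-openapi/runtime"
)

func main() {
    hdr := http.Header{}
    hdr.Set(runtime.HeaderContentType, "application/json; charset=utf-8")

    // ContentType splits the header into media type and charset, falling
    // back to application/octet-stream when the header is absent.
    mediaType, charset, err := runtime.ContentType(hdr)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(mediaType, charset) // application/json utf-8
}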

112
vendor/github.com/go-openapi/runtime/interfaces.go generated vendored Normal file
View File

@ -0,0 +1,112 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"context"
"io"
"net/http"
"github.com/go-openapi/strfmt"
)
// OperationHandlerFunc an adapter for a function to the OperationHandler interface
type OperationHandlerFunc func(interface{}) (interface{}, error)
// Handle implements the operation handler interface
func (s OperationHandlerFunc) Handle(data interface{}) (interface{}, error) {
return s(data)
}
// OperationHandler a handler for a swagger operation
type OperationHandler interface {
Handle(interface{}) (interface{}, error)
}
// ConsumerFunc represents a function that can be used as a consumer
type ConsumerFunc func(io.Reader, interface{}) error
// Consume consumes the reader into the data parameter
func (fn ConsumerFunc) Consume(reader io.Reader, data interface{}) error {
return fn(reader, data)
}
// Consumer implementations know how to bind the values on the provided interface to
// data provided by the request body
type Consumer interface {
// Consume performs the binding of request values
Consume(io.Reader, interface{}) error
}
// ProducerFunc represents a function that can be used as a producer
type ProducerFunc func(io.Writer, interface{}) error
// Produce produces the response for the provided data
func (f ProducerFunc) Produce(writer io.Writer, data interface{}) error {
return f(writer, data)
}
// Producer implementations know how to turn the provided interface into a valid
// HTTP response
type Producer interface {
// Produce writes to the http response
Produce(io.Writer, interface{}) error
}
// AuthenticatorFunc turns a function into an authenticator
type AuthenticatorFunc func(interface{}) (bool, interface{}, error)
// Authenticate authenticates the request with the provided data
func (f AuthenticatorFunc) Authenticate(params interface{}) (bool, interface{}, error) {
return f(params)
}
// Authenticator represents an authentication strategy
// implementations of Authenticator know how to authenticate the
// request data and translate that into a valid principal object or an error
type Authenticator interface {
Authenticate(interface{}) (bool, interface{}, error)
}
// AuthorizerFunc turns a function into an authorizer
type AuthorizerFunc func(*http.Request, interface{}) error
// Authorize authorizes the processing of the request for the principal
func (f AuthorizerFunc) Authorize(r *http.Request, principal interface{}) error {
return f(r, principal)
}
// Authorizer represents an authorization strategy
// implementations of Authorizer know how to authorize the principal object
// using the request data and returns error if unauthorized
type Authorizer interface {
Authorize(*http.Request, interface{}) error
}
// Validatable types implementing this interface allow customizing their validation
// this will be used instead of the reflective validation based on the spec document.
// the implementations are assumed to have been generated by the swagger tool so they should
// contain all the validations obtained from the spec
type Validatable interface {
Validate(strfmt.Registry) error
}
// ContextValidatable types implementing this interface allow customizing their validation
// this will be used instead of the reflective validation based on the spec document.
// the implementations are assumed to have been generated by the swagger tool so they should
// contain all the context validations obtained from the spec
type ContextValidatable interface {
ContextValidate(context.Context, strfmt.Registry) error
}
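As a quick, illustrative sketch (not part of the vendored file), the Func adapters above let plain closures satisfy these interfaces; the token value and principal below are made up for the example:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-openapi/runtime"
)

func main() {
	// wrap a closure as an Authenticator: (applies?, principal, error)
	authn := runtime.AuthenticatorFunc(func(params interface{}) (bool, interface{}, error) {
		token, ok := params.(string)
		if !ok {
			return false, nil, nil // this authenticator does not apply to other inputs
		}
		if token != "s3cr3t" { // assumed token, for illustration only
			return true, nil, nil // applies, but yields no principal
		}
		return true, "alice", nil
	})

	// wrap a closure as an Authorizer that lets every principal through
	authz := runtime.AuthorizerFunc(func(_ *http.Request, principal interface{}) error {
		fmt.Printf("authorizing %v\n", principal)
		return nil
	})

	applies, principal, _ := authn.Authenticate("s3cr3t")
	fmt.Println(applies, principal)              // true alice
	fmt.Println(authz.Authorize(nil, principal)) // <nil>
}
```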

38
vendor/github.com/go-openapi/runtime/json.go generated vendored Normal file
View File

@ -0,0 +1,38 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package runtime
import (
"encoding/json"
"io"
)
// JSONConsumer creates a new JSON consumer
func JSONConsumer() Consumer {
return ConsumerFunc(func(reader io.Reader, data interface{}) error {
dec := json.NewDecoder(reader)
dec.UseNumber() // preserve number formats
return dec.Decode(data)
})
}
// JSONProducer creates a new JSON producer
func JSONProducer() Producer {
return ProducerFunc(func(writer io.Writer, data interface{}) error {
enc := json.NewEncoder(writer)
enc.SetEscapeHTML(false)
return enc.Encode(data)
})
}
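A minimal round-trip sketch (the payload type is made up for illustration) showing how these two constructors satisfy the Consumer and Producer interfaces from the previous file:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/go-openapi/runtime"
)

func main() {
	type note struct {
		Text string `json:"text"`
	}

	// encode with the producer ...
	var buf bytes.Buffer
	if err := runtime.JSONProducer().Produce(&buf, note{Text: "hello"}); err != nil {
		panic(err)
	}

	// ... and decode the same bytes with the consumer
	var out note
	if err := runtime.JSONConsumer().Consume(&buf, &out); err != nil {
		panic(err)
	}
	fmt.Println(out.Text) // hello
}
```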

20
vendor/github.com/go-openapi/runtime/logger/logger.go generated vendored Normal file
View File

@ -0,0 +1,20 @@
package logger
import "os"
type Logger interface {
Printf(format string, args ...interface{})
Debugf(format string, args ...interface{})
}
func DebugEnabled() bool {
d := os.Getenv("SWAGGER_DEBUG")
if d != "" && d != "false" && d != "0" {
return true
}
d = os.Getenv("DEBUG")
if d != "" && d != "false" && d != "0" {
return true
}
return false
}

View File

@ -0,0 +1,22 @@
package logger
import (
"fmt"
"os"
)
type StandardLogger struct{}
func (StandardLogger) Printf(format string, args ...interface{}) {
if len(format) == 0 || format[len(format)-1] != '\n' {
format += "\n"
}
fmt.Fprintf(os.Stderr, format, args...)
}
func (StandardLogger) Debugf(format string, args ...interface{}) {
if len(format) == 0 || format[len(format)-1] != '\n' {
format += "\n"
}
fmt.Fprintf(os.Stderr, format, args...)
}

View File

@ -0,0 +1,635 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package middleware
import (
stdContext "context"
"fmt"
"net/http"
"strings"
"sync"
"github.com/go-openapi/analysis"
"github.com/go-openapi/errors"
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/runtime"
"github.com/go-openapi/runtime/logger"
"github.com/go-openapi/runtime/middleware/untyped"
"github.com/go-openapi/runtime/security"
)
// Debug when true turns on verbose logging
var Debug = logger.DebugEnabled()
var Logger logger.Logger = logger.StandardLogger{}
func debugLog(format string, args ...interface{}) {
if Debug {
Logger.Printf(format, args...)
}
}
// A Builder can create middlewares
type Builder func(http.Handler) http.Handler
// PassthroughBuilder returns the handler, aka the builder identity function
func PassthroughBuilder(handler http.Handler) http.Handler { return handler }
// RequestBinder is an interface for types to implement
// when they want to be able to bind from a request
type RequestBinder interface {
BindRequest(*http.Request, *MatchedRoute) error
}
// Responder is an interface for types to implement
// when they want to be considered for writing HTTP responses
type Responder interface {
WriteResponse(http.ResponseWriter, runtime.Producer)
}
// ResponderFunc wraps a func as a Responder interface
type ResponderFunc func(http.ResponseWriter, runtime.Producer)
// WriteResponse writes to the response
func (fn ResponderFunc) WriteResponse(rw http.ResponseWriter, pr runtime.Producer) {
fn(rw, pr)
}
// Context is a type safe wrapper around an untyped request context
// used throughout to store request context with the standard context attached
// to the http.Request
type Context struct {
spec *loads.Document
analyzer *analysis.Spec
api RoutableAPI
router Router
}
type routableUntypedAPI struct {
api *untyped.API
hlock *sync.Mutex
handlers map[string]map[string]http.Handler
defaultConsumes string
defaultProduces string
}
func newRoutableUntypedAPI(spec *loads.Document, api *untyped.API, context *Context) *routableUntypedAPI {
var handlers map[string]map[string]http.Handler
if spec == nil || api == nil {
return nil
}
analyzer := analysis.New(spec.Spec())
for method, hls := range analyzer.Operations() {
um := strings.ToUpper(method)
for path, op := range hls {
schemes := analyzer.SecurityRequirementsFor(op)
if oh, ok := api.OperationHandlerFor(method, path); ok {
if handlers == nil {
handlers = make(map[string]map[string]http.Handler)
}
if b, ok := handlers[um]; !ok || b == nil {
handlers[um] = make(map[string]http.Handler)
}
var handler http.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// lookup route info in the context
route, rCtx, _ := context.RouteInfo(r)
if rCtx != nil {
r = rCtx
}
// bind and validate the request using reflection
var bound interface{}
var validation error
bound, r, validation = context.BindAndValidate(r, route)
if validation != nil {
context.Respond(w, r, route.Produces, route, validation)
return
}
// actually handle the request
result, err := oh.Handle(bound)
if err != nil {
// respond with failure
context.Respond(w, r, route.Produces, route, err)
return
}
// respond with success
context.Respond(w, r, route.Produces, route, result)
})
if len(schemes) > 0 {
handler = newSecureAPI(context, handler)
}
handlers[um][path] = handler
}
}
}
return &routableUntypedAPI{
api: api,
hlock: new(sync.Mutex),
handlers: handlers,
defaultProduces: api.DefaultProduces,
defaultConsumes: api.DefaultConsumes,
}
}
func (r *routableUntypedAPI) HandlerFor(method, path string) (http.Handler, bool) {
r.hlock.Lock()
paths, ok := r.handlers[strings.ToUpper(method)]
if !ok {
r.hlock.Unlock()
return nil, false
}
handler, ok := paths[path]
r.hlock.Unlock()
return handler, ok
}
func (r *routableUntypedAPI) ServeErrorFor(operationID string) func(http.ResponseWriter, *http.Request, error) {
return r.api.ServeError
}
func (r *routableUntypedAPI) ConsumersFor(mediaTypes []string) map[string]runtime.Consumer {
return r.api.ConsumersFor(mediaTypes)
}
func (r *routableUntypedAPI) ProducersFor(mediaTypes []string) map[string]runtime.Producer {
return r.api.ProducersFor(mediaTypes)
}
func (r *routableUntypedAPI) AuthenticatorsFor(schemes map[string]spec.SecurityScheme) map[string]runtime.Authenticator {
return r.api.AuthenticatorsFor(schemes)
}
func (r *routableUntypedAPI) Authorizer() runtime.Authorizer {
return r.api.Authorizer()
}
func (r *routableUntypedAPI) Formats() strfmt.Registry {
return r.api.Formats()
}
func (r *routableUntypedAPI) DefaultProduces() string {
return r.defaultProduces
}
func (r *routableUntypedAPI) DefaultConsumes() string {
return r.defaultConsumes
}
// NewRoutableContext creates a new context for a routable API
func NewRoutableContext(spec *loads.Document, routableAPI RoutableAPI, routes Router) *Context {
var an *analysis.Spec
if spec != nil {
an = analysis.New(spec.Spec())
}
return NewRoutableContextWithAnalyzedSpec(spec, an, routableAPI, routes)
}
// NewRoutableContextWithAnalyzedSpec is like NewRoutableContext but also takes the analyzed spec as input
func NewRoutableContextWithAnalyzedSpec(spec *loads.Document, an *analysis.Spec, routableAPI RoutableAPI, routes Router) *Context {
// Either both the spec doc and the analysis are provided, or neither of them is.
if !((spec == nil && an == nil) || (spec != nil && an != nil)) {
panic(errors.New(http.StatusInternalServerError, "routable context requires either both spec doc and analysis, or none of them"))
}
ctx := &Context{spec: spec, api: routableAPI, analyzer: an, router: routes}
return ctx
}
// NewContext creates a new context wrapper
func NewContext(spec *loads.Document, api *untyped.API, routes Router) *Context {
var an *analysis.Spec
if spec != nil {
an = analysis.New(spec.Spec())
}
ctx := &Context{spec: spec, analyzer: an}
ctx.api = newRoutableUntypedAPI(spec, api, ctx)
ctx.router = routes
return ctx
}
// Serve serves the specified spec with the specified api registrations as a http.Handler
func Serve(spec *loads.Document, api *untyped.API) http.Handler {
return ServeWithBuilder(spec, api, PassthroughBuilder)
}
// ServeWithBuilder serves the specified spec with the specified api registrations as a http.Handler that is decorated
// by the Builder
func ServeWithBuilder(spec *loads.Document, api *untyped.API, builder Builder) http.Handler {
context := NewContext(spec, api, nil)
return context.APIHandler(builder)
}
type contextKey int8
const (
_ contextKey = iota
ctxContentType
ctxResponseFormat
ctxMatchedRoute
ctxBoundParams
ctxSecurityPrincipal
ctxSecurityScopes
)
// MatchedRouteFrom request context value.
func MatchedRouteFrom(req *http.Request) *MatchedRoute {
mr := req.Context().Value(ctxMatchedRoute)
if mr == nil {
return nil
}
if res, ok := mr.(*MatchedRoute); ok {
return res
}
return nil
}
// SecurityPrincipalFrom request context value.
func SecurityPrincipalFrom(req *http.Request) interface{} {
return req.Context().Value(ctxSecurityPrincipal)
}
// SecurityScopesFrom request context value.
func SecurityScopesFrom(req *http.Request) []string {
rs := req.Context().Value(ctxSecurityScopes)
if res, ok := rs.([]string); ok {
return res
}
return nil
}
type contentTypeValue struct {
MediaType string
Charset string
}
// BasePath returns the base path for this API
func (c *Context) BasePath() string {
return c.spec.BasePath()
}
// RequiredProduces returns the accepted content types for responses
func (c *Context) RequiredProduces() []string {
return c.analyzer.RequiredProduces()
}
// BindValidRequest binds a params object to a request, but only when the request is valid.
// If the request is not valid, an error is returned.
func (c *Context) BindValidRequest(request *http.Request, route *MatchedRoute, binder RequestBinder) error {
var res []error
var requestContentType string
// check and validate content type, select consumer
if runtime.HasBody(request) {
ct, _, err := runtime.ContentType(request.Header)
if err != nil {
res = append(res, err)
} else {
if err := validateContentType(route.Consumes, ct); err != nil {
res = append(res, err)
}
if len(res) == 0 {
cons, ok := route.Consumers[ct]
if !ok {
res = append(res, errors.New(500, "no consumer registered for %s", ct))
} else {
route.Consumer = cons
requestContentType = ct
}
}
}
}
// check and validate the response format
if len(res) == 0 {
// if the route does not provide Produces and a default contentType could not be identified
// based on a body, typical for GET and DELETE requests, then default the contentType to */*.
if len(route.Produces) == 0 && requestContentType == "" {
requestContentType = "*/*"
}
if str := NegotiateContentType(request, route.Produces, requestContentType); str == "" {
res = append(res, errors.InvalidResponseFormat(request.Header.Get(runtime.HeaderAccept), route.Produces))
}
}
// now bind the request with the provided binder
// it's assumed the binder will also validate the request and return an error if the
// request is invalid
if binder != nil && len(res) == 0 {
if err := binder.BindRequest(request, route); err != nil {
return err
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
// ContentType gets the parsed value of a content type
// Returns the media type, its charset and a shallow copy of the request
// when its context doesn't contain the content type value, otherwise it returns
// the same request
// Returns any error that runtime.ContentType may return.
func (c *Context) ContentType(request *http.Request) (string, string, *http.Request, error) {
var rCtx = request.Context()
if v, ok := rCtx.Value(ctxContentType).(*contentTypeValue); ok {
return v.MediaType, v.Charset, request, nil
}
mt, cs, err := runtime.ContentType(request.Header)
if err != nil {
return "", "", nil, err
}
rCtx = stdContext.WithValue(rCtx, ctxContentType, &contentTypeValue{mt, cs})
return mt, cs, request.WithContext(rCtx), nil
}
// LookupRoute looks a route up and returns true when it is found
func (c *Context) LookupRoute(request *http.Request) (*MatchedRoute, bool) {
if route, ok := c.router.Lookup(request.Method, request.URL.EscapedPath()); ok {
return route, ok
}
return nil, false
}
// RouteInfo tries to match a route for this request
// Returns the matched route, a shallow copy of the request if its context
// contains the matched route (otherwise the same request), and a bool indicating
// whether the request matches one of the routes. If it doesn't,
// it returns false and nil for the other two return values.
func (c *Context) RouteInfo(request *http.Request) (*MatchedRoute, *http.Request, bool) {
var rCtx = request.Context()
if v, ok := rCtx.Value(ctxMatchedRoute).(*MatchedRoute); ok {
return v, request, ok
}
if route, ok := c.LookupRoute(request); ok {
rCtx = stdContext.WithValue(rCtx, ctxMatchedRoute, route)
return route, request.WithContext(rCtx), ok
}
return nil, nil, false
}
// ResponseFormat negotiates the response content type
// Returns the response format and a shallow copy of the request if its context
// doesn't contain the response format, otherwise the same request
func (c *Context) ResponseFormat(r *http.Request, offers []string) (string, *http.Request) {
var rCtx = r.Context()
if v, ok := rCtx.Value(ctxResponseFormat).(string); ok {
debugLog("[%s %s] found response format %q in context", r.Method, r.URL.Path, v)
return v, r
}
format := NegotiateContentType(r, offers, "")
if format != "" {
debugLog("[%s %s] set response format %q in context", r.Method, r.URL.Path, format)
r = r.WithContext(stdContext.WithValue(rCtx, ctxResponseFormat, format))
}
debugLog("[%s %s] negotiated response format %q", r.Method, r.URL.Path, format)
return format, r
}
// AllowedMethods gets the allowed methods for the path of this request
func (c *Context) AllowedMethods(request *http.Request) []string {
return c.router.OtherMethods(request.Method, request.URL.EscapedPath())
}
// ResetAuth removes the current principal from the request context
func (c *Context) ResetAuth(request *http.Request) *http.Request {
rctx := request.Context()
rctx = stdContext.WithValue(rctx, ctxSecurityPrincipal, nil)
rctx = stdContext.WithValue(rctx, ctxSecurityScopes, nil)
return request.WithContext(rctx)
}
// Authorize authorizes the request
// Returns the principal object and a shallow copy of the request when its
// context doesn't contain the principal (otherwise the same request), or an error
// if one of the authenticators returns one or the request is unauthenticated.
func (c *Context) Authorize(request *http.Request, route *MatchedRoute) (interface{}, *http.Request, error) {
if route == nil || !route.HasAuth() {
return nil, nil, nil
}
var rCtx = request.Context()
if v := rCtx.Value(ctxSecurityPrincipal); v != nil {
return v, request, nil
}
applies, usr, err := route.Authenticators.Authenticate(request, route)
if !applies || err != nil || !route.Authenticators.AllowsAnonymous() && usr == nil {
if err != nil {
return nil, nil, err
}
return nil, nil, errors.Unauthenticated("invalid credentials")
}
if route.Authorizer != nil {
if err := route.Authorizer.Authorize(request, usr); err != nil {
if _, ok := err.(errors.Error); ok {
return nil, nil, err
}
return nil, nil, errors.New(http.StatusForbidden, err.Error())
}
}
rCtx = request.Context()
rCtx = stdContext.WithValue(rCtx, ctxSecurityPrincipal, usr)
rCtx = stdContext.WithValue(rCtx, ctxSecurityScopes, route.Authenticator.AllScopes())
return usr, request.WithContext(rCtx), nil
}
// BindAndValidate binds and validates the request
// Returns the validation map and a shallow copy of the request when its context
// doesn't contain the validation, otherwise it returns the same request or a
// CompositeValidationError
func (c *Context) BindAndValidate(request *http.Request, matched *MatchedRoute) (interface{}, *http.Request, error) {
var rCtx = request.Context()
if v, ok := rCtx.Value(ctxBoundParams).(*validation); ok {
debugLog("got cached validation (valid: %t)", len(v.result) == 0)
if len(v.result) > 0 {
return v.bound, request, errors.CompositeValidationError(v.result...)
}
return v.bound, request, nil
}
result := validateRequest(c, request, matched)
rCtx = stdContext.WithValue(rCtx, ctxBoundParams, result)
request = request.WithContext(rCtx)
if len(result.result) > 0 {
return result.bound, request, errors.CompositeValidationError(result.result...)
}
debugLog("no validation errors found")
return result.bound, request, nil
}
// NotFound the default not found responder for when no route has been matched yet
func (c *Context) NotFound(rw http.ResponseWriter, r *http.Request) {
c.Respond(rw, r, []string{c.api.DefaultProduces()}, nil, errors.NotFound("not found"))
}
// Respond renders the response after doing some content negotiation
func (c *Context) Respond(rw http.ResponseWriter, r *http.Request, produces []string, route *MatchedRoute, data interface{}) {
debugLog("responding to %s %s with produces: %v", r.Method, r.URL.Path, produces)
offers := []string{}
for _, mt := range produces {
if mt != c.api.DefaultProduces() {
offers = append(offers, mt)
}
}
// the default producer is last so more specific producers take precedence
offers = append(offers, c.api.DefaultProduces())
debugLog("offers: %v", offers)
var format string
format, r = c.ResponseFormat(r, offers)
rw.Header().Set(runtime.HeaderContentType, format)
if resp, ok := data.(Responder); ok {
producers := route.Producers
// producers contains keys with normalized formats; if a format has a MIME type parameter such as `text/plain; charset=utf-8`,
// then you must provide `text/plain` to get the correct producer. HOWEVER, format here is not normalized.
prod, ok := producers[normalizeOffer(format)]
if !ok {
prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()}))
pr, ok := prods[c.api.DefaultProduces()]
if !ok {
panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format))
}
prod = pr
}
resp.WriteResponse(rw, prod)
return
}
if err, ok := data.(error); ok {
if format == "" {
rw.Header().Set(runtime.HeaderContentType, runtime.JSONMime)
}
if realm := security.FailedBasicAuth(r); realm != "" {
rw.Header().Set("WWW-Authenticate", fmt.Sprintf("Basic realm=%q", realm))
}
if route == nil || route.Operation == nil {
c.api.ServeErrorFor("")(rw, r, err)
return
}
c.api.ServeErrorFor(route.Operation.ID)(rw, r, err)
return
}
if route == nil || route.Operation == nil {
rw.WriteHeader(200)
if r.Method == "HEAD" {
return
}
producers := c.api.ProducersFor(normalizeOffers(offers))
prod, ok := producers[format]
if !ok {
panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format))
}
if err := prod.Produce(rw, data); err != nil {
panic(err) // let the recovery middleware deal with this
}
return
}
if _, code, ok := route.Operation.SuccessResponse(); ok {
rw.WriteHeader(code)
if code == 204 || r.Method == "HEAD" {
return
}
producers := route.Producers
prod, ok := producers[format]
if !ok {
prods := c.api.ProducersFor(normalizeOffers([]string{c.api.DefaultProduces()}))
pr, ok := prods[c.api.DefaultProduces()]
if !ok {
panic(errors.New(http.StatusInternalServerError, "can't find a producer for "+format))
}
prod = pr
}
if err := prod.Produce(rw, data); err != nil {
panic(err) // let the recovery middleware deal with this
}
return
}
c.api.ServeErrorFor(route.Operation.ID)(rw, r, errors.New(http.StatusInternalServerError, "can't produce response"))
}
func (c *Context) APIHandlerSwaggerUI(builder Builder) http.Handler {
b := builder
if b == nil {
b = PassthroughBuilder
}
var title string
sp := c.spec.Spec()
if sp != nil && sp.Info != nil && sp.Info.Title != "" {
title = sp.Info.Title
}
swaggerUIOpts := SwaggerUIOpts{
BasePath: c.BasePath(),
Title: title,
}
return Spec("", c.spec.Raw(), SwaggerUI(swaggerUIOpts, c.RoutesHandler(b)))
}
// APIHandler returns a handler to serve the API, this includes a swagger spec, router and the contract defined in the swagger spec
func (c *Context) APIHandler(builder Builder) http.Handler {
b := builder
if b == nil {
b = PassthroughBuilder
}
var title string
sp := c.spec.Spec()
if sp != nil && sp.Info != nil && sp.Info.Title != "" {
title = sp.Info.Title
}
redocOpts := RedocOpts{
BasePath: c.BasePath(),
Title: title,
}
return Spec("", c.spec.Raw(), Redoc(redocOpts, c.RoutesHandler(b)))
}
// RoutesHandler returns a handler to serve the API, just the routes and the contract defined in the swagger spec
func (c *Context) RoutesHandler(builder Builder) http.Handler {
b := builder
if b == nil {
b = PassthroughBuilder
}
return NewRouter(c, b(NewOperationExecutor(c)))
}

View File

@ -0,0 +1,19 @@
Copyright (c) 2014 Naoya Inada <naoina@kuune.org>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

View File

@ -0,0 +1,180 @@
# Denco [![Build Status](https://travis-ci.org/naoina/denco.png?branch=master)](https://travis-ci.org/naoina/denco)
The fast and flexible HTTP request router for [Go](http://golang.org).
Denco is based on the Double-Array implementation of [Kocha-urlrouter](https://github.com/naoina/kocha-urlrouter).
However, Denco is further optimized and adds some features.
## Features
* Fast (See [go-http-routing-benchmark](https://github.com/naoina/go-http-routing-benchmark))
* [URL patterns](#url-patterns) (`/foo/:bar` and `/foo/*wildcard`)
* Small (but enough) URL router API
* HTTP request multiplexer like `http.ServeMux`
## Installation
go get -u github.com/go-openapi/runtime/middleware/denco
## Using as HTTP request multiplexer
```go
package main
import (
"fmt"
"log"
"net/http"
"github.com/go-openapi/runtime/middleware/denco"
)
func Index(w http.ResponseWriter, r *http.Request, params denco.Params) {
fmt.Fprintf(w, "Welcome to Denco!\n")
}
func User(w http.ResponseWriter, r *http.Request, params denco.Params) {
fmt.Fprintf(w, "Hello %s!\n", params.Get("name"))
}
func main() {
mux := denco.NewMux()
handler, err := mux.Build([]denco.Handler{
mux.GET("/", Index),
mux.GET("/user/:name", User),
mux.POST("/user/:name", User),
})
if err != nil {
panic(err)
}
log.Fatal(http.ListenAndServe(":8080", handler))
}
```
## Using as URL router
```go
package main
import (
"fmt"
"github.com/go-openapi/runtime/middleware/denco"
)
type route struct {
name string
}
func main() {
router := denco.New()
router.Build([]denco.Record{
{"/", &route{"root"}},
{"/user/:id", &route{"user"}},
{"/user/:name/:id", &route{"username"}},
{"/static/*filepath", &route{"static"}},
})
data, params, found := router.Lookup("/")
// print `&main.route{name:"root"}, denco.Params(nil), true`.
fmt.Printf("%#v, %#v, %#v\n", data, params, found)
data, params, found = router.Lookup("/user/hoge")
// print `&main.route{name:"user"}, denco.Params{denco.Param{Name:"id", Value:"hoge"}}, true`.
fmt.Printf("%#v, %#v, %#v\n", data, params, found)
data, params, found = router.Lookup("/user/hoge/7")
// print `&main.route{name:"username"}, denco.Params{denco.Param{Name:"name", Value:"hoge"}, denco.Param{Name:"id", Value:"7"}}, true`.
fmt.Printf("%#v, %#v, %#v\n", data, params, found)
data, params, found = router.Lookup("/static/path/to/file")
// print `&main.route{name:"static"}, denco.Params{denco.Param{Name:"filepath", Value:"path/to/file"}}, true`.
fmt.Printf("%#v, %#v, %#v\n", data, params, found)
}
```
See [Godoc](http://godoc.org/github.com/go-openapi/runtime/middleware/denco) for more details.
## Getting the value of path parameter
You can get the value of a path parameter in two ways.
1. Using [`denco.Params.Get`](http://godoc.org/github.com/go-openapi/runtime/middleware/denco#Params.Get) method
2. Find by loop
```go
package main
import (
"fmt"
"github.com/go-openapi/runtime/middleware/denco"
)
func main() {
router := denco.New()
if err := router.Build([]denco.Record{
{"/user/:name/:id", "route1"},
}); err != nil {
panic(err)
}
// 1. Using denco.Params.Get method.
_, params, _ := router.Lookup("/user/alice/1")
name := params.Get("name")
if name != "" {
fmt.Printf("Hello %s.\n", name) // prints "Hello alice.".
}
// 2. Find by loop.
for _, param := range params {
if param.Name == "name" {
fmt.Printf("Hello %s.\n", name) // prints "Hello alice.".
}
}
}
```
## URL patterns
Denco's route matching strategy is "most nearly matching".
When the routes `/:name` and `/alice` have been built, the URI `/alice` matches the route `/alice`, not `/:name`,
because `/alice` is a closer match for the literal route `/alice` than for `/:name`.
For a larger example, when the routes below have been built (a small sketch after the listings demonstrates this):
```
/user/alice
/user/:name
/user/:name/:id
/user/alice/:id
/user/:id/bob
```
Routes match as follows:
```
/user/alice => "/user/alice" (no match with "/user/:name")
/user/bob => "/user/:name"
/user/naoina/1 => "/user/:name/1"
/user/alice/1 => "/user/alice/:id" (no match with "/user/:name/:id")
/user/1/bob => "/user/:id/bob" (no match with "/user/:name/:id")
/user/alice/bob => "/user/alice/:id" (no match with "/user/:name/:id" and "/user/:id/bob")
```
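The behaviour in the table above can be checked with a small sketch using the API shown earlier in this README:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/runtime/middleware/denco"
)

func main() {
	router := denco.New()
	if err := router.Build([]denco.Record{
		{"/user/alice", "static route"},
		{"/user/:name", "param route"},
	}); err != nil {
		panic(err)
	}

	data, _, _ := router.Lookup("/user/alice")
	fmt.Println(data) // "static route": the literal route wins over /user/:name

	data, params, _ := router.Lookup("/user/bob")
	fmt.Println(data, params.Get("name")) // "param route" bob
}
```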
## Limitations
Denco has the following limitations:
* Number of param records (such as `/:name`) must be less than 2^22
* Number of elements of internal slice must be less than 2^22
## Benchmarks
cd $GOPATH/github.com/go-openapi/runtime/middleware/denco
go test -bench . -benchmem
## License
Denco is licensed under the MIT License.

View File

@ -0,0 +1,460 @@
// Package denco provides a fast URL router.
package denco
import (
"fmt"
"sort"
"strings"
)
const (
// ParamCharacter is a special character for path parameter.
ParamCharacter = ':'
// WildcardCharacter is a special character for wildcard path parameter.
WildcardCharacter = '*'
// TerminationCharacter is a special character for end of path.
TerminationCharacter = '#'
// SeparatorCharacter separates path segments.
SeparatorCharacter = '/'
// PathParamCharacter indicates a RESTCONF path param
PathParamCharacter = '='
// MaxSize is max size of records and internal slice.
MaxSize = (1 << 22) - 1
)
// Router represents a URL router.
type Router struct {
// SizeHint is the expected maximum number of path parameters in the records passed to Build.
// SizeHint is used to determine the capacity of the memory to allocate.
// By default, SizeHint is determined from the records given to Build.
SizeHint int
static map[string]interface{}
param *doubleArray
}
// New returns a new Router.
func New() *Router {
return &Router{
SizeHint: -1,
static: make(map[string]interface{}),
param: newDoubleArray(),
}
}
// Lookup returns the data and path parameters associated with path.
// params is a slice of Param arranged in the order in which the parameters appeared.
// e.g. when built routing path is "/path/to/:id/:name" and given path is "/path/to/1/alice". params order is [{"id": "1"}, {"name": "alice"}], not [{"name": "alice"}, {"id": "1"}].
func (rt *Router) Lookup(path string) (data interface{}, params Params, found bool) {
if data, found := rt.static[path]; found {
return data, nil, true
}
if len(rt.param.node) == 1 {
return nil, nil, false
}
nd, params, found := rt.param.lookup(path, make([]Param, 0, rt.SizeHint), 1)
if !found {
return nil, nil, false
}
for i := 0; i < len(params); i++ {
params[i].Name = nd.paramNames[i]
}
return nd.data, params, true
}
// Build builds URL router from records.
func (rt *Router) Build(records []Record) error {
statics, params := makeRecords(records)
if len(params) > MaxSize {
return fmt.Errorf("denco: too many records")
}
if rt.SizeHint < 0 {
rt.SizeHint = 0
for _, p := range params {
size := 0
for _, k := range p.Key {
if k == ParamCharacter || k == WildcardCharacter {
size++
}
}
if size > rt.SizeHint {
rt.SizeHint = size
}
}
}
for _, r := range statics {
rt.static[r.Key] = r.Value
}
if err := rt.param.build(params, 1, 0, make(map[int]struct{})); err != nil {
return err
}
return nil
}
// Param represents name and value of path parameter.
type Param struct {
Name string
Value string
}
// Params represents the name and value of path parameters.
type Params []Param
// Get gets the first value associated with the given name.
// If there are no values associated with the key, Get returns "".
func (ps Params) Get(name string) string {
for _, p := range ps {
if p.Name == name {
return p.Value
}
}
return ""
}
type doubleArray struct {
bc []baseCheck
node []*node
}
func newDoubleArray() *doubleArray {
return &doubleArray{
bc: []baseCheck{0},
node: []*node{nil}, // The start index is adjusted to 1 because 0 is used as a mark for a non-existent node.
}
}
// baseCheck contains BASE, CHECK and Extra flags.
// From the top: 22 bits of BASE, 2 bits of Extra flags and 8 bits of CHECK.
//
// BASE (22bit) | Extra flags (2bit) | CHECK (8bit)
// |----------------------|--|--------|
// 32 10 8 0
type baseCheck uint32
func (bc baseCheck) Base() int {
return int(bc >> 10)
}
func (bc *baseCheck) SetBase(base int) {
*bc |= baseCheck(base) << 10
}
func (bc baseCheck) Check() byte {
return byte(bc)
}
func (bc *baseCheck) SetCheck(check byte) {
*bc |= baseCheck(check)
}
func (bc baseCheck) IsEmpty() bool {
return bc&0xfffffcff == 0
}
func (bc baseCheck) IsSingleParam() bool {
return bc&paramTypeSingle == paramTypeSingle
}
func (bc baseCheck) IsWildcardParam() bool {
return bc&paramTypeWildcard == paramTypeWildcard
}
func (bc baseCheck) IsAnyParam() bool {
return bc&paramTypeAny != 0
}
func (bc *baseCheck) SetSingleParam() {
*bc |= (1 << 8)
}
func (bc *baseCheck) SetWildcardParam() {
*bc |= (1 << 9)
}
const (
paramTypeSingle = 0x0100
paramTypeWildcard = 0x0200
paramTypeAny = 0x0300
)
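The packing described in the baseCheck comment above can be sanity-checked with a small snippet; since the type is unexported, a sketch like this would have to live inside package denco (for example in a test file):

```go
package denco

import "fmt"

// demonstrates the BASE (22 bit) / flags (2 bit) / CHECK (8 bit) packing
func baseCheckLayoutDemo() {
	var bc baseCheck
	bc.SetBase(42)      // stored in the upper 22 bits
	bc.SetCheck('a')    // stored in the lower 8 bits
	bc.SetSingleParam() // sets one of the two flag bits in between

	fmt.Println(bc.Base(), string(bc.Check()), bc.IsSingleParam(), bc.IsEmpty())
	// prints: 42 a true false
}
```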
func (da *doubleArray) lookup(path string, params []Param, idx int) (*node, []Param, bool) {
indices := make([]uint64, 0, 1)
for i := 0; i < len(path); i++ {
if da.bc[idx].IsAnyParam() {
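// remember this position for backtracking: the path offset goes in the high 32 bits and the node index in the low 32 bits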
indices = append(indices, (uint64(i)<<32)|(uint64(idx)&0xffffffff))
}
c := path[i]
if idx = nextIndex(da.bc[idx].Base(), c); idx >= len(da.bc) || da.bc[idx].Check() != c {
goto BACKTRACKING
}
}
if next := nextIndex(da.bc[idx].Base(), TerminationCharacter); next < len(da.bc) && da.bc[next].Check() == TerminationCharacter {
return da.node[da.bc[next].Base()], params, true
}
BACKTRACKING:
for j := len(indices) - 1; j >= 0; j-- {
i, idx := int(indices[j]>>32), int(indices[j]&0xffffffff)
if da.bc[idx].IsSingleParam() {
idx := nextIndex(da.bc[idx].Base(), ParamCharacter)
if idx >= len(da.bc) {
break
}
next := NextSeparator(path, i)
params := append(params, Param{Value: path[i:next]})
if nd, params, found := da.lookup(path[next:], params, idx); found {
return nd, params, true
}
}
if da.bc[idx].IsWildcardParam() {
idx := nextIndex(da.bc[idx].Base(), WildcardCharacter)
params := append(params, Param{Value: path[i:]})
return da.node[da.bc[idx].Base()], params, true
}
}
return nil, nil, false
}
// build builds double-array from records.
func (da *doubleArray) build(srcs []*record, idx, depth int, usedBase map[int]struct{}) error {
sort.Stable(recordSlice(srcs))
base, siblings, leaf, err := da.arrange(srcs, idx, depth, usedBase)
if err != nil {
return err
}
if leaf != nil {
nd, err := makeNode(leaf)
if err != nil {
return err
}
da.bc[idx].SetBase(len(da.node))
da.node = append(da.node, nd)
}
for _, sib := range siblings {
da.setCheck(nextIndex(base, sib.c), sib.c)
}
for _, sib := range siblings {
records := srcs[sib.start:sib.end]
switch sib.c {
case ParamCharacter:
for _, r := range records {
next := NextSeparator(r.Key, depth+1)
name := r.Key[depth+1 : next]
r.paramNames = append(r.paramNames, name)
r.Key = r.Key[next:]
}
da.bc[idx].SetSingleParam()
if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil {
return err
}
case WildcardCharacter:
r := records[0]
name := r.Key[depth+1 : len(r.Key)-1]
r.paramNames = append(r.paramNames, name)
r.Key = ""
da.bc[idx].SetWildcardParam()
if err := da.build(records, nextIndex(base, sib.c), 0, usedBase); err != nil {
return err
}
default:
if err := da.build(records, nextIndex(base, sib.c), depth+1, usedBase); err != nil {
return err
}
}
}
return nil
}
// setBase sets BASE.
func (da *doubleArray) setBase(i, base int) {
da.bc[i].SetBase(base)
}
// setCheck sets CHECK.
func (da *doubleArray) setCheck(i int, check byte) {
da.bc[i].SetCheck(check)
}
// findEmptyIndex returns an index of unused BASE/CHECK node.
func (da *doubleArray) findEmptyIndex(start int) int {
i := start
for ; i < len(da.bc); i++ {
if da.bc[i].IsEmpty() {
break
}
}
return i
}
// findBase returns good BASE.
func (da *doubleArray) findBase(siblings []sibling, start int, usedBase map[int]struct{}) (base int) {
for idx, firstChar := start+1, siblings[0].c; ; idx = da.findEmptyIndex(idx + 1) {
base = nextIndex(idx, firstChar)
if _, used := usedBase[base]; used {
continue
}
i := 0
for ; i < len(siblings); i++ {
next := nextIndex(base, siblings[i].c)
if len(da.bc) <= next {
da.bc = append(da.bc, make([]baseCheck, next-len(da.bc)+1)...)
}
if !da.bc[next].IsEmpty() {
break
}
}
if i == len(siblings) {
break
}
}
usedBase[base] = struct{}{}
return base
}
func (da *doubleArray) arrange(records []*record, idx, depth int, usedBase map[int]struct{}) (base int, siblings []sibling, leaf *record, err error) {
siblings, leaf, err = makeSiblings(records, depth)
if err != nil {
return -1, nil, nil, err
}
if len(siblings) < 1 {
return -1, nil, leaf, nil
}
base = da.findBase(siblings, idx, usedBase)
if base > MaxSize {
return -1, nil, nil, fmt.Errorf("denco: too many elements of internal slice")
}
da.setBase(idx, base)
return base, siblings, leaf, err
}
// node represents a node of Double-Array.
type node struct {
data interface{}
// Names of path parameters.
paramNames []string
}
// makeNode returns a new node from record.
func makeNode(r *record) (*node, error) {
dups := make(map[string]bool)
for _, name := range r.paramNames {
if dups[name] {
return nil, fmt.Errorf("denco: path parameter `%v' is duplicated in the key `%v'", name, r.Key)
}
dups[name] = true
}
return &node{data: r.Value, paramNames: r.paramNames}, nil
}
// sibling represents an intermediate data of build for Double-Array.
type sibling struct {
// An index of start of duplicated characters.
start int
// An index of end of duplicated characters.
end int
// A character of sibling.
c byte
}
// nextIndex returns the next index into the BASE/CHECK array.
func nextIndex(base int, c byte) int {
return base ^ int(c)
}
// makeSiblings returns a slice of siblings.
func makeSiblings(records []*record, depth int) (sib []sibling, leaf *record, err error) {
var (
pc byte
n int
)
for i, r := range records {
if len(r.Key) <= depth {
leaf = r
continue
}
c := r.Key[depth]
switch {
case pc < c:
sib = append(sib, sibling{start: i, c: c})
case pc == c:
continue
default:
return nil, nil, fmt.Errorf("denco: BUG: routing table hasn't been sorted")
}
if n > 0 {
sib[n-1].end = i
}
pc = c
n++
}
if n == 0 {
return nil, leaf, nil
}
sib[n-1].end = len(records)
return sib, leaf, nil
}
// Record represents record data for router construction.
type Record struct {
// Key for router construction.
Key string
// Result value for Key.
Value interface{}
}
// NewRecord returns a new Record.
func NewRecord(key string, value interface{}) Record {
return Record{
Key: key,
Value: value,
}
}
// record represents a record used to build the Double-Array.
type record struct {
Record
paramNames []string
}
// makeRecords returns the records used to build the Double-Arrays.
func makeRecords(srcs []Record) (statics, params []*record) {
termChar := string(TerminationCharacter)
paramPrefix := string(SeparatorCharacter) + string(ParamCharacter)
wildcardPrefix := string(SeparatorCharacter) + string(WildcardCharacter)
restconfPrefix := string(PathParamCharacter) + string(ParamCharacter)
for _, r := range srcs {
if strings.Contains(r.Key, paramPrefix) || strings.Contains(r.Key, wildcardPrefix) || strings.Contains(r.Key, restconfPrefix) {
r.Key += termChar
params = append(params, &record{Record: r})
} else {
statics = append(statics, &record{Record: r})
}
}
return statics, params
}
// recordSlice represents a slice of records for sorting and implements sort.Interface.
type recordSlice []*record
// Len implements the sort.Interface.Len.
func (rs recordSlice) Len() int {
return len(rs)
}
// Less implements the sort.Interface.Less.
func (rs recordSlice) Less(i, j int) bool {
return rs[i].Key < rs[j].Key
}
// Swap implements the sort.Interface.Swap.
func (rs recordSlice) Swap(i, j int) {
rs[i], rs[j] = rs[j], rs[i]
}

View File

@ -0,0 +1,106 @@
package denco
import (
"net/http"
)
// Mux represents a multiplexer for HTTP request.
type Mux struct{}
// NewMux returns a new Mux.
func NewMux() *Mux {
return &Mux{}
}
// GET is shorthand of Mux.Handler("GET", path, handler).
func (m *Mux) GET(path string, handler HandlerFunc) Handler {
return m.Handler("GET", path, handler)
}
// POST is shorthand of Mux.Handler("POST", path, handler).
func (m *Mux) POST(path string, handler HandlerFunc) Handler {
return m.Handler("POST", path, handler)
}
// PUT is shorthand of Mux.Handler("PUT", path, handler).
func (m *Mux) PUT(path string, handler HandlerFunc) Handler {
return m.Handler("PUT", path, handler)
}
// HEAD is shorthand of Mux.Handler("HEAD", path, handler).
func (m *Mux) HEAD(path string, handler HandlerFunc) Handler {
return m.Handler("HEAD", path, handler)
}
// Handler returns a Handler for the given HTTP method and path.
func (m *Mux) Handler(method, path string, handler HandlerFunc) Handler {
return Handler{
Method: method,
Path: path,
Func: handler,
}
}
// Build builds a http.Handler.
func (m *Mux) Build(handlers []Handler) (http.Handler, error) {
recordMap := make(map[string][]Record)
for _, h := range handlers {
recordMap[h.Method] = append(recordMap[h.Method], NewRecord(h.Path, h.Func))
}
mux := newServeMux()
for m, records := range recordMap {
router := New()
if err := router.Build(records); err != nil {
return nil, err
}
mux.routers[m] = router
}
return mux, nil
}
// Handler represents a handler of HTTP request.
type Handler struct {
// Method is an HTTP method.
Method string
// Path is a routing path for handler.
Path string
// Func is a function of handler of HTTP request.
Func HandlerFunc
}
// HandlerFunc is the type of a handler function for an HTTP request.
type HandlerFunc func(w http.ResponseWriter, r *http.Request, params Params)
type serveMux struct {
routers map[string]*Router
}
func newServeMux() *serveMux {
return &serveMux{
routers: make(map[string]*Router),
}
}
// ServeHTTP implements http.Handler interface.
func (mux *serveMux) ServeHTTP(w http.ResponseWriter, r *http.Request) {
handler, params := mux.handler(r.Method, r.URL.Path)
handler(w, r, params)
}
func (mux *serveMux) handler(method, path string) (HandlerFunc, []Param) {
if router, found := mux.routers[method]; found {
if handler, params, found := router.Lookup(path); found {
return handler.(HandlerFunc), params
}
}
return NotFound, nil
}
// NotFound replies to the request with an HTTP 404 not found error.
// NotFound is called when the HTTP method is unknown or no handler is found.
// If you want to use your own NotFound handler, overwrite this variable.
var NotFound = func(w http.ResponseWriter, r *http.Request, _ Params) {
http.NotFound(w, r)
}
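For example (a minimal sketch), the fallback can be replaced before any mux is built:

```go
package main

import (
	"net/http"

	"github.com/go-openapi/runtime/middleware/denco"
)

func main() {
	// swap in a custom 404 handler; this affects every mux built afterwards
	denco.NotFound = func(w http.ResponseWriter, r *http.Request, _ denco.Params) {
		http.Error(w, `{"error":"not found"}`, http.StatusNotFound)
	}
}
```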

View File

@ -0,0 +1,12 @@
package denco
// NextSeparator returns the index of the next separator in path.
func NextSeparator(path string, start int) int {
for start < len(path) {
if c := path[start]; c == '/' || c == TerminationCharacter {
break
}
start++
}
return start
}

62
vendor/github.com/go-openapi/runtime/middleware/doc.go generated vendored Normal file
View File

@ -0,0 +1,62 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/*Package middleware provides the library with helper functions for serving swagger APIs.
Pseudo middleware handler
import (
"net/http"
"github.com/go-openapi/errors"
)
func newCompleteMiddleware(ctx *Context) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
// use context to lookup routes
if matched, ok := ctx.RouteInfo(r); ok {
if matched.NeedsAuth() {
if _, err := ctx.Authorize(r, matched); err != nil {
ctx.Respond(rw, r, matched.Produces, matched, err)
return
}
}
bound, validation := ctx.BindAndValidate(r, matched)
if validation != nil {
ctx.Respond(rw, r, matched.Produces, matched, validation)
return
}
result, err := matched.Handler.Handle(bound)
if err != nil {
ctx.Respond(rw, r, matched.Produces, matched, err)
return
}
ctx.Respond(rw, r, matched.Produces, matched, result)
return
}
// Not found, check if it exists in the other methods first
if others := ctx.AllowedMethods(r); len(others) > 0 {
ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.MethodNotAllowed(r.Method, others))
return
}
ctx.Respond(rw, r, ctx.spec.RequiredProduces(), nil, errors.NotFound("path %s was not found", r.URL.Path))
})
}
*/
package middleware

View File

@ -0,0 +1,9 @@
// +build go1.8
package middleware
import "net/url"
func pathUnescape(path string) (string, error) {
return url.PathUnescape(path)
}

View File

@ -0,0 +1,329 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// this file was taken from the github.com/golang/gddo repository
// Package header provides functions for parsing HTTP headers.
package header
import (
"net/http"
"strings"
"time"
)
// Octet types from RFC 2616.
var octetTypes [256]octetType
type octetType byte
const (
isToken octetType = 1 << iota
isSpace
)
func init() {
// OCTET = <any 8-bit sequence of data>
// CHAR = <any US-ASCII character (octets 0 - 127)>
// CTL = <any US-ASCII control character (octets 0 - 31) and DEL (127)>
// CR = <US-ASCII CR, carriage return (13)>
// LF = <US-ASCII LF, linefeed (10)>
// SP = <US-ASCII SP, space (32)>
// HT = <US-ASCII HT, horizontal-tab (9)>
// <"> = <US-ASCII double-quote mark (34)>
// CRLF = CR LF
// LWS = [CRLF] 1*( SP | HT )
// TEXT = <any OCTET except CTLs, but including LWS>
// separators = "(" | ")" | "<" | ">" | "@" | "," | ";" | ":" | "\" | <">
// | "/" | "[" | "]" | "?" | "=" | "{" | "}" | SP | HT
// token = 1*<any CHAR except CTLs or separators>
// qdtext = <any TEXT except <">>
for c := 0; c < 256; c++ {
var t octetType
isCtl := c <= 31 || c == 127
isChar := 0 <= c && c <= 127
isSeparator := strings.ContainsRune(" \t\"(),/:;<=>?@[]\\{}", rune(c))
if strings.ContainsRune(" \t\r\n", rune(c)) {
t |= isSpace
}
if isChar && !isCtl && !isSeparator {
t |= isToken
}
octetTypes[c] = t
}
}
// Copy returns a shallow copy of the header.
func Copy(header http.Header) http.Header {
h := make(http.Header)
for k, vs := range header {
h[k] = vs
}
return h
}
var timeLayouts = []string{"Mon, 02 Jan 2006 15:04:05 GMT", time.RFC850, time.ANSIC}
// ParseTime parses the header as time. The zero value is returned if the
// header is not present or there is an error parsing the
// header.
func ParseTime(header http.Header, key string) time.Time {
if s := header.Get(key); s != "" {
for _, layout := range timeLayouts {
if t, err := time.Parse(layout, s); err == nil {
return t.UTC()
}
}
}
return time.Time{}
}
// ParseList parses a comma separated list of values. Commas are ignored in
// quoted strings. Quoted values are not unescaped or unquoted. Whitespace is
// trimmed.
func ParseList(header http.Header, key string) []string {
var result []string
for _, s := range header[http.CanonicalHeaderKey(key)] {
begin := 0
end := 0
escape := false
quote := false
for i := 0; i < len(s); i++ {
b := s[i]
switch {
case escape:
escape = false
end = i + 1
case quote:
switch b {
case '\\':
escape = true
case '"':
quote = false
}
end = i + 1
case b == '"':
quote = true
end = i + 1
case octetTypes[b]&isSpace != 0:
if begin == end {
begin = i + 1
end = begin
}
case b == ',':
if begin < end {
result = append(result, s[begin:end])
}
begin = i + 1
end = begin
default:
end = i + 1
}
}
if begin < end {
result = append(result, s[begin:end])
}
}
return result
}
// ParseValueAndParams parses a comma separated list of values with optional
// semicolon separated name-value pairs. Content-Type and Content-Disposition
// headers are in this format.
func ParseValueAndParams(header http.Header, key string) (string, map[string]string) {
return parseValueAndParams(header.Get(key))
}
func parseValueAndParams(s string) (value string, params map[string]string) {
params = make(map[string]string)
value, s = expectTokenSlash(s)
if value == "" {
return
}
value = strings.ToLower(value)
s = skipSpace(s)
for strings.HasPrefix(s, ";") {
var pkey string
pkey, s = expectToken(skipSpace(s[1:]))
if pkey == "" {
return
}
if !strings.HasPrefix(s, "=") {
return
}
var pvalue string
pvalue, s = expectTokenOrQuoted(s[1:])
if pvalue == "" {
return
}
pkey = strings.ToLower(pkey)
params[pkey] = pvalue
s = skipSpace(s)
}
return
}
// AcceptSpec ...
type AcceptSpec struct {
Value string
Q float64
}
// ParseAccept2 ...
func ParseAccept2(header http.Header, key string) (specs []AcceptSpec) {
for _, en := range ParseList(header, key) {
v, p := parseValueAndParams(en)
var spec AcceptSpec
spec.Value = v
spec.Q = 1.0
if p != nil {
if q, ok := p["q"]; ok {
spec.Q, _ = expectQuality(q)
}
}
if spec.Q < 0.0 {
continue
}
specs = append(specs, spec)
}
return
}
// ParseAccept parses Accept* headers.
func ParseAccept(header http.Header, key string) (specs []AcceptSpec) {
loop:
for _, s := range header[key] {
for {
var spec AcceptSpec
spec.Value, s = expectTokenSlash(s)
if spec.Value == "" {
continue loop
}
spec.Q = 1.0
s = skipSpace(s)
if strings.HasPrefix(s, ";") {
s = skipSpace(s[1:])
for !strings.HasPrefix(s, "q=") && s != "" && !strings.HasPrefix(s, ",") {
s = skipSpace(s[1:])
}
if strings.HasPrefix(s, "q=") {
spec.Q, s = expectQuality(s[2:])
if spec.Q < 0.0 {
continue loop
}
}
}
specs = append(specs, spec)
s = skipSpace(s)
if !strings.HasPrefix(s, ",") {
continue loop
}
s = skipSpace(s[1:])
}
}
return
}
func skipSpace(s string) (rest string) {
i := 0
for ; i < len(s); i++ {
if octetTypes[s[i]]&isSpace == 0 {
break
}
}
return s[i:]
}
func expectToken(s string) (token, rest string) {
i := 0
for ; i < len(s); i++ {
if octetTypes[s[i]]&isToken == 0 {
break
}
}
return s[:i], s[i:]
}
func expectTokenSlash(s string) (token, rest string) {
i := 0
for ; i < len(s); i++ {
b := s[i]
if (octetTypes[b]&isToken == 0) && b != '/' {
break
}
}
return s[:i], s[i:]
}
func expectQuality(s string) (q float64, rest string) {
switch {
case len(s) == 0:
return -1, ""
case s[0] == '0':
// q is already 0
s = s[1:]
case s[0] == '1':
s = s[1:]
q = 1
case s[0] == '.':
// q is already 0
default:
return -1, ""
}
if !strings.HasPrefix(s, ".") {
return q, s
}
s = s[1:]
i := 0
n := 0
d := 1
for ; i < len(s); i++ {
b := s[i]
if b < '0' || b > '9' {
break
}
n = n*10 + int(b) - '0'
d *= 10
}
return q + float64(n)/float64(d), s[i:]
}
func expectTokenOrQuoted(s string) (value string, rest string) {
if !strings.HasPrefix(s, "\"") {
return expectToken(s)
}
s = s[1:]
for i := 0; i < len(s); i++ {
switch s[i] {
case '"':
return s[:i], s[i+1:]
case '\\':
p := make([]byte, len(s)-1)
j := copy(p, s[:i])
escape := true
for i = i + 1; i < len(s); i++ {
b := s[i]
switch {
case escape:
escape = false
p[j] = b
j++
case b == '\\':
escape = true
case b == '"':
return string(p[:j]), s[i+1:]
default:
p[j] = b
j++
}
}
return "", ""
}
}
return "", ""
}

View File

@ -0,0 +1,98 @@
// Copyright 2013 The Go Authors. All rights reserved.
//
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file or at
// https://developers.google.com/open-source/licenses/bsd.
// this file was taken from the github.com/golang/gddo repository
package middleware
import (
"net/http"
"strings"
"github.com/go-openapi/runtime/middleware/header"
)
// NegotiateContentEncoding returns the best offered content encoding for the
// request's Accept-Encoding header. If two offers match with equal weight,
// then the offer earlier in the list is preferred. If no offers are
// acceptable, then "" is returned.
func NegotiateContentEncoding(r *http.Request, offers []string) string {
bestOffer := "identity"
bestQ := -1.0
specs := header.ParseAccept(r.Header, "Accept-Encoding")
for _, offer := range offers {
for _, spec := range specs {
if spec.Q > bestQ &&
(spec.Value == "*" || spec.Value == offer) {
bestQ = spec.Q
bestOffer = offer
}
}
}
if bestQ == 0 {
bestOffer = ""
}
return bestOffer
}
// NegotiateContentType returns the best offered content type for the request's
// Accept header. If two offers match with equal weight, then the more specific
// offer is preferred. For example, text/* trumps */*. If two offers match
// with equal weight and specificity, then the offer earlier in the list is
// preferred. If no offers match, then defaultOffer is returned.
func NegotiateContentType(r *http.Request, offers []string, defaultOffer string) string {
bestOffer := defaultOffer
bestQ := -1.0
bestWild := 3
specs := header.ParseAccept(r.Header, "Accept")
for _, rawOffer := range offers {
offer := normalizeOffer(rawOffer)
// No Accept header: just return the first offer.
if len(specs) == 0 {
return rawOffer
}
for _, spec := range specs {
switch {
case spec.Q == 0.0:
// ignore
case spec.Q < bestQ:
// a better match was already found; skip this spec
case spec.Value == "*/*":
if spec.Q > bestQ || bestWild > 2 {
bestQ = spec.Q
bestWild = 2
bestOffer = rawOffer
}
case strings.HasSuffix(spec.Value, "/*"):
if strings.HasPrefix(offer, spec.Value[:len(spec.Value)-1]) &&
(spec.Q > bestQ || bestWild > 1) {
bestQ = spec.Q
bestWild = 1
bestOffer = rawOffer
}
default:
if spec.Value == offer &&
(spec.Q > bestQ || bestWild > 0) {
bestQ = spec.Q
bestWild = 0
bestOffer = rawOffer
}
}
}
}
return bestOffer
}
func normalizeOffers(orig []string) (norm []string) {
for _, o := range orig {
norm = append(norm, normalizeOffer(o))
}
return
}
func normalizeOffer(orig string) string {
return strings.SplitN(orig, ";", 2)[0]
}
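A self-contained sketch of how the negotiation helper above behaves; the offers and the Accept header are made up for illustration:

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/go-openapi/runtime/middleware"
)

func main() {
	r, _ := http.NewRequest(http.MethodGet, "/things", nil)
	r.Header.Set("Accept", "text/*;q=0.5, application/json")

	offers := []string{"text/plain", "application/json"}
	fmt.Println(middleware.NegotiateContentType(r, offers, "application/json"))
	// application/json: the exact offer with the higher q wins over the text/* wildcard
}
```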

View File

@ -0,0 +1,67 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package middleware
import (
"net/http"
"github.com/go-openapi/runtime"
)
type errorResp struct {
code int
response interface{}
headers http.Header
}
func (e *errorResp) WriteResponse(rw http.ResponseWriter, producer runtime.Producer) {
for k, v := range e.headers {
for _, val := range v {
rw.Header().Add(k, val)
}
}
if e.code > 0 {
rw.WriteHeader(e.code)
} else {
rw.WriteHeader(http.StatusInternalServerError)
}
if err := producer.Produce(rw, e.response); err != nil {
Logger.Printf("failed to write error response: %v", err)
}
}
// NotImplemented returns the error response used when an operation is not implemented
func NotImplemented(message string) Responder {
return Error(http.StatusNotImplemented, message)
}
// Error creates a generic responder for returning errors; the data will be serialized
// with the matching producer for the request
func Error(code int, data interface{}, headers ...http.Header) Responder {
var hdr http.Header
for _, h := range headers {
for k, v := range h {
if hdr == nil {
hdr = make(http.Header)
}
hdr[k] = v
}
}
return &errorResp{
code: code,
response: data,
headers: hdr,
}
}
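A brief sketch of how these helpers are typically returned from a handler; the handler names and payloads below are made up for illustration:

```go
package handlers

import (
	"net/http"

	"github.com/go-openapi/runtime/middleware"
)

// a hypothetical operation that is not wired up yet
func scheduleExport() middleware.Responder {
	return middleware.NotImplemented("scheduled exports are not implemented yet")
}

// a hypothetical operation that fails with a payload and an extra header
func createExport() middleware.Responder {
	hdr := make(http.Header)
	hdr.Set("Retry-After", "30")
	return middleware.Error(http.StatusConflict, map[string]string{"error": "an export is already running"}, hdr)
}
```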

View File

@ -0,0 +1,30 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package middleware
import "net/http"
// NewOperationExecutor creates a context-aware middleware that handles the operations after routing
func NewOperationExecutor(ctx *Context) http.Handler {
return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
// use context to lookup routes
route, rCtx, _ := ctx.RouteInfo(r)
if rCtx != nil {
r = rCtx
}
route.Handler.ServeHTTP(rw, r)
})
}

View File

@ -0,0 +1,485 @@
// Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package middleware
import (
"encoding"
"encoding/base64"
"fmt"
"io"
"net/http"
"reflect"
"strconv"
"github.com/go-openapi/errors"
"github.com/go-openapi/spec"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
"github.com/go-openapi/validate"
"github.com/go-openapi/runtime"
)
const defaultMaxMemory = 32 << 20
var textUnmarshalType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
func newUntypedParamBinder(param spec.Parameter, spec *spec.Swagger, formats strfmt.Registry) *untypedParamBinder {
binder := new(untypedParamBinder)
binder.Name = param.Name
binder.parameter = &param
binder.formats = formats
if param.In != "body" {
binder.validator = validate.NewParamValidator(&param, formats)
} else {
binder.validator = validate.NewSchemaValidator(param.Schema, spec, param.Name, formats)
}
return binder
}
type untypedParamBinder struct {
parameter *spec.Parameter
formats strfmt.Registry
Name string
validator validate.EntityValidator
}
func (p *untypedParamBinder) Type() reflect.Type {
return p.typeForSchema(p.parameter.Type, p.parameter.Format, p.parameter.Items)
}
func (p *untypedParamBinder) typeForSchema(tpe, format string, items *spec.Items) reflect.Type {
switch tpe {
case "boolean":
return reflect.TypeOf(true)
case "string":
if tt, ok := p.formats.GetType(format); ok {
return tt
}
return reflect.TypeOf("")
case "integer":
switch format {
case "int8":
return reflect.TypeOf(int8(0))
case "int16":
return reflect.TypeOf(int16(0))
case "int32":
return reflect.TypeOf(int32(0))
case "int64":
return reflect.TypeOf(int64(0))
default:
return reflect.TypeOf(int64(0))
}
case "number":
switch format {
case "float":
return reflect.TypeOf(float32(0))
case "double":
return reflect.TypeOf(float64(0))
}
case "array":
if items == nil {
return nil
}
itemsType := p.typeForSchema(items.Type, items.Format, items.Items)
if itemsType == nil {
return nil
}
return reflect.MakeSlice(reflect.SliceOf(itemsType), 0, 0).Type()
case "file":
return reflect.TypeOf(&runtime.File{}).Elem()
case "object":
return reflect.TypeOf(map[string]interface{}{})
}
return nil
}
func (p *untypedParamBinder) allowsMulti() bool {
return p.parameter.In == "query" || p.parameter.In == "formData"
}
func (p *untypedParamBinder) readValue(values runtime.Gettable, target reflect.Value) ([]string, bool, bool, error) {
name, in, cf, tpe := p.parameter.Name, p.parameter.In, p.parameter.CollectionFormat, p.parameter.Type
if tpe == "array" {
if cf == "multi" {
if !p.allowsMulti() {
return nil, false, false, errors.InvalidCollectionFormat(name, in, cf)
}
vv, hasKey, _ := values.GetOK(name)
return vv, false, hasKey, nil
}
v, hk, hv := values.GetOK(name)
if !hv {
return nil, false, hk, nil
}
d, c, e := p.readFormattedSliceFieldValue(v[len(v)-1], target)
return d, c, hk, e
}
vv, hk, _ := values.GetOK(name)
return vv, false, hk, nil
}
func (p *untypedParamBinder) Bind(request *http.Request, routeParams RouteParams, consumer runtime.Consumer, target reflect.Value) error {
// fmt.Println("binding", p.name, "as", p.Type())
switch p.parameter.In {
case "query":
data, custom, hasKey, err := p.readValue(runtime.Values(request.URL.Query()), target)
if err != nil {
return err
}
if custom {
return nil
}
return p.bindValue(data, hasKey, target)
case "header":
data, custom, hasKey, err := p.readValue(runtime.Values(request.Header), target)
if err != nil {
return err
}
if custom {
return nil
}
return p.bindValue(data, hasKey, target)
case "path":
data, custom, hasKey, err := p.readValue(routeParams, target)
if err != nil {
return err
}
if custom {
return nil
}
return p.bindValue(data, hasKey, target)
case "formData":
var err error
var mt string
mt, _, e := runtime.ContentType(request.Header)
if e != nil {
// Because of the interface conversion, Go considers the error non-nil,
// so we check e for nil first and only then assign it to err.
err = e
}
if err != nil {
return errors.InvalidContentType("", []string{"multipart/form-data", "application/x-www-form-urlencoded"})
}
if mt != "multipart/form-data" && mt != "application/x-www-form-urlencoded" {
return errors.InvalidContentType(mt, []string{"multipart/form-data", "application/x-www-form-urlencoded"})
}
if mt == "multipart/form-data" {
if err = request.ParseMultipartForm(defaultMaxMemory); err != nil {
return errors.NewParseError(p.Name, p.parameter.In, "", err)
}
}
if err = request.ParseForm(); err != nil {
return errors.NewParseError(p.Name, p.parameter.In, "", err)
}
if p.parameter.Type == "file" {
file, header, ffErr := request.FormFile(p.parameter.Name)
if ffErr != nil {
if p.parameter.Required {
return errors.NewParseError(p.Name, p.parameter.In, "", ffErr)
} else {
return nil
}
}
target.Set(reflect.ValueOf(runtime.File{Data: file, Header: header}))
return nil
}
if request.MultipartForm != nil {
data, custom, hasKey, rvErr := p.readValue(runtime.Values(request.MultipartForm.Value), target)
if rvErr != nil {
return rvErr
}
if custom {
return nil
}
return p.bindValue(data, hasKey, target)
}
data, custom, hasKey, err := p.readValue(runtime.Values(request.PostForm), target)
if err != nil {
return err
}
if custom {
return nil
}
return p.bindValue(data, hasKey, target)
case "body":
newValue := reflect.New(target.Type())
if !runtime.HasBody(request) {
if p.parameter.Default != nil {
target.Set(reflect.ValueOf(p.parameter.Default))
}
return nil
}
if err := consumer.Consume(request.Body, newValue.Interface()); err != nil {
if err == io.EOF && p.parameter.Default != nil {
target.Set(reflect.ValueOf(p.parameter.Default))
return nil
}
tpe := p.parameter.Type
if p.parameter.Format != "" {
tpe = p.parameter.Format
}
return errors.InvalidType(p.Name, p.parameter.In, tpe, nil)
}
target.Set(reflect.Indirect(newValue))
return nil
default:
return errors.New(500, fmt.Sprintf("invalid parameter location %q", p.parameter.In))
}
}
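// Illustrative sketch (not part of the upstream file): binding a simple query
// parameter with this binder. It is written as if inside this package, since
// newUntypedParamBinder is unexported; the /pets?limit=50 request and the
// "limit" parameter are invented for the example.
//
//	param := *spec.QueryParam("limit").Typed("integer", "int32")
//	binder := newUntypedParamBinder(param, nil, strfmt.Default)
//
//	req := httptest.NewRequest(http.MethodGet, "/pets?limit=50", nil)
//	var limit int32
//	// query parameters need no consumer, so nil is passed for it
//	if err := binder.Bind(req, nil, nil, reflect.ValueOf(&limit).Elem()); err == nil {
//		// limit is now 50
//	}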
func (p *untypedParamBinder) bindValue(data []string, hasKey bool, target reflect.Value) error {
if p.parameter.Type == "array" {
return p.setSliceFieldValue(target, p.parameter.Default, data, hasKey)
}
var d string
if len(data) > 0 {
d = data[len(data)-1]
}
return p.setFieldValue(target, p.parameter.Default, d, hasKey)
}
func (p *untypedParamBinder) setFieldValue(target reflect.Value, defaultValue interface{}, data string, hasKey bool) error {
tpe := p.parameter.Type
if p.parameter.Format != "" {
tpe = p.parameter.Format
}
if (!hasKey || (!p.parameter.AllowEmptyValue && data == "")) && p.parameter.Required && p.parameter.Default == nil {
return errors.Required(p.Name, p.parameter.In, data)
}
ok, err := p.tryUnmarshaler(target, defaultValue, data)
if err != nil {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if ok {
return nil
}
defVal := reflect.Zero(target.Type())
if defaultValue != nil {
defVal = reflect.ValueOf(defaultValue)
}
if tpe == "byte" {
if data == "" {
if target.CanSet() {
target.SetBytes(defVal.Bytes())
}
return nil
}
b, err := base64.StdEncoding.DecodeString(data)
if err != nil {
b, err = base64.URLEncoding.DecodeString(data)
if err != nil {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
}
if target.CanSet() {
target.SetBytes(b)
}
return nil
}
switch target.Kind() {
case reflect.Bool:
if data == "" {
if target.CanSet() {
target.SetBool(defVal.Bool())
}
return nil
}
b, err := swag.ConvertBool(data)
if err != nil {
return err
}
if target.CanSet() {
target.SetBool(b)
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
if data == "" {
if target.CanSet() {
rd := defVal.Convert(reflect.TypeOf(int64(0)))
target.SetInt(rd.Int())
}
return nil
}
i, err := strconv.ParseInt(data, 10, 64)
if err != nil {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if target.OverflowInt(i) {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if target.CanSet() {
target.SetInt(i)
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
if data == "" {
if target.CanSet() {
rd := defVal.Convert(reflect.TypeOf(uint64(0)))
target.SetUint(rd.Uint())
}
return nil
}
u, err := strconv.ParseUint(data, 10, 64)
if err != nil {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if target.OverflowUint(u) {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if target.CanSet() {
target.SetUint(u)
}
case reflect.Float32, reflect.Float64:
if data == "" {
if target.CanSet() {
rd := defVal.Convert(reflect.TypeOf(float64(0)))
target.SetFloat(rd.Float())
}
return nil
}
f, err := strconv.ParseFloat(data, 64)
if err != nil {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if target.OverflowFloat(f) {
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
if target.CanSet() {
target.SetFloat(f)
}
case reflect.String:
value := data
if value == "" {
value = defVal.String()
}
// validate string
if target.CanSet() {
target.SetString(value)
}
case reflect.Ptr:
if data == "" && defVal.Kind() == reflect.Ptr {
if target.CanSet() {
target.Set(defVal)
}
return nil
}
newVal := reflect.New(target.Type().Elem())
if err := p.setFieldValue(reflect.Indirect(newVal), defVal, data, hasKey); err != nil {
return err
}
if target.CanSet() {
target.Set(newVal)
}
default:
return errors.InvalidType(p.Name, p.parameter.In, tpe, data)
}
return nil
}
func (p *untypedParamBinder) tryUnmarshaler(target reflect.Value, defaultValue interface{}, data string) (bool, error) {
if !target.CanSet() {
return false, nil
}
// When a type implements encoding.TextUnmarshaler we'll use that instead of reflecting some more
if reflect.PtrTo(target.Type()).Implements(textUnmarshalType) {
if defaultValue != nil && len(data) == 0 {
target.Set(reflect.ValueOf(defaultValue))
return true, nil
}
value := reflect.New(target.Type())
if err := value.Interface().(encoding.TextUnmarshaler).UnmarshalText([]byte(data)); err != nil {
return true, err
}
target.Set(reflect.Indirect(value))
return true, nil
}
return false, nil
}
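// Hedged aside (not in the upstream file): this is the hook that lets
// strfmt-backed values such as strfmt.Date or strfmt.UUID bind straight from
// their string form, and that lets a caller-provided target type opt in by
// implementing encoding.TextUnmarshaler. A minimal, hypothetical example:
//
//	type commaTags []string
//
//	func (t *commaTags) UnmarshalText(b []byte) error {
//		*t = strings.Split(string(b), ",")
//		return nil
//	}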
func (p *untypedParamBinder) readFormattedSliceFieldValue(data string, target reflect.Value) ([]string, bool, error) {
ok, err := p.tryUnmarshaler(target, p.parameter.Default, data)
if err != nil {
return nil, true, err
}
if ok {
return nil, true, nil
}
return swag.SplitByFormat(data, p.parameter.CollectionFormat), false, nil
}
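// For orientation (descriptive comment added here, not upstream): as read from
// the swag package, SplitByFormat applies the Swagger collectionFormat, so a
// csv value like "a,b,c" becomes []string{"a", "b", "c"}, ssv splits on
// spaces, tsv on tabs, pipes on "|", and anything else falls back to csv.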
func (p *untypedParamBinder) setSliceFieldValue(target reflect.Value, defaultValue interface{}, data []string, hasKey bool) error {
sz := len(data)
if (!hasKey || (!p.parameter.AllowEmptyValue && (sz == 0 || (sz == 1 && data[0] == "")))) && p.parameter.Required && defaultValue == nil {
return errors.Required(p.Name, p.parameter.In, data)
}
defVal := reflect.Zero(target.Type())
if defaultValue != nil {
defVal = reflect.ValueOf(defaultValue)
}
if !target.CanSet() {
return nil
}
if sz == 0 {
target.Set(defVal)
return nil
}
value := reflect.MakeSlice(reflect.SliceOf(target.Type().Elem()), sz, sz)
for i := 0; i < sz; i++ {
if err := p.setFieldValue(value.Index(i), nil, data[i], hasKey); err != nil {
return err
}
}
target.Set(value)
return nil
}
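// Illustrative sketch (not upstream; written as if inside this package, with a
// hypothetical "tags" query parameter) showing the slice path end to end:
//
//	param := *spec.QueryParam("tags").CollectionOf(spec.NewItems().Typed("string", ""), "csv")
//	binder := newUntypedParamBinder(param, nil, strfmt.Default)
//
//	req := httptest.NewRequest(http.MethodGet, "/pets?tags=cat,dog", nil)
//	var tags []string
//	_ = binder.Bind(req, nil, nil, reflect.ValueOf(&tags).Elem())
//	// tags == []string{"cat", "dog"}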

Some files were not shown because too many files have changed in this diff.