[chore] Bump go swagger (#2871)

* bump go swagger version

* bump swagger version
Author: tobi
Date: 2024-04-26 11:31:10 +02:00 (committed by GitHub)
Parent: 3a369d834a
Commit: fd8a724e77
251 changed files with 10841 additions and 11896 deletions


@ -1,12 +1,14 @@
linters-settings:
govet:
check-shadowing: true
golint:
min-confidence: 0
gocyclo:
min-complexity: 50
min-complexity: 45
maligned:
suggest-new: true
dupl:
threshold: 100
threshold: 200
goconst:
min-len: 2
min-occurrences: 3
@ -15,36 +17,45 @@ linters:
enable-all: true
disable:
- maligned
- unparam
- lll
- gochecknoinits
- gochecknoglobals
- funlen
- godox
- gocognit
- whitespace
- wsl
- funlen
- gochecknoglobals
- gochecknoinits
- scopelint
- wrapcheck
- exhaustivestruct
- exhaustive
- nlreturn
- testpackage
- gci
- gofumpt
- goerr113
- nlreturn
- gomnd
- tparallel
- exhaustivestruct
- goerr113
- errorlint
- nestif
- godot
- tparallel
- gofumpt
- paralleltest
- cyclop # because we have gocyclo already
# TODO: review the linters below. We disabled them to make the CI pass first.
- ireturn
- varnamelen
- forcetypeassert
- tparallel
- thelper
# Disable deprecated linters.
# They will be removed from golangci-lint in future.
- ifshort
- exhaustruct
- varnamelen
- gci
- depguard
- errchkjson
- inamedparam
- nonamedreturns
- musttag
- ireturn
- forcetypeassert
- cyclop
# deprecated linters
- deadcode
- interfacer
- golint
- scopelint
- varcheck
- structcheck
- golint
- nosnakecase

vendor/github.com/go-openapi/validate/BENCHMARK.md (new vendored file)

@ -0,0 +1,31 @@
# Benchmark
Validating the Kubernetes Swagger API
## v0.22.6: 60,000,000 allocs
```
goos: linux
goarch: amd64
pkg: github.com/go-openapi/validate
cpu: AMD Ryzen 7 5800X 8-Core Processor
Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 8549863982 ns/op 7067424936 B/op 59583275 allocs/op
```
## After refactoring PR: minor but noticeable improvements: 25,000,000 allocs
```
go test -bench Spec
goos: linux
goarch: amd64
pkg: github.com/go-openapi/validate
cpu: AMD Ryzen 7 5800X 8-Core Processor
Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 4064535557 ns/op 3379715592 B/op 25320330 allocs/op
```
## After reduce GC pressure PR: 17,000,000 allocs
```
goos: linux
goarch: amd64
pkg: github.com/go-openapi/validate
cpu: AMD Ryzen 7 5800X 8-Core Processor
Benchmark_KubernetesSpec/validating_kubernetes_API-16 1 3758414145 ns/op 2593881496 B/op 17111373 allocs/op
```
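
For reference, a minimal sketch of how such a benchmark can be reproduced against the public API. This is not the upstream benchmark itself; the fixture path is an assumption, and the spec is reloaded on every iteration so validation always starts from a pristine document:

```go
package validate_test

import (
	"testing"

	"github.com/go-openapi/loads"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

// Benchmark_KubernetesSpec loads and validates a local copy of the
// Kubernetes swagger document on every iteration.
func Benchmark_KubernetesSpec(b *testing.B) {
	b.ReportAllocs()
	for i := 0; i < b.N; i++ {
		doc, err := loads.Spec("fixtures/kubernetes/swagger.json") // assumed fixture location
		if err != nil {
			b.Fatal(err)
		}
		// The validation outcome is not the point of the benchmark.
		_ = validate.Spec(doc, strfmt.Default)
	}
}
```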


@ -1,7 +1,5 @@
# Validation helpers
[![Build Status](https://travis-ci.org/go-openapi/validate.svg?branch=master)](https://travis-ci.org/go-openapi/validate)
[![Build status](https://ci.appveyor.com/api/projects/status/d6epy6vipueyh5fs/branch/master?svg=true)](https://ci.appveyor.com/project/fredbi/validate/branch/master)
[![codecov](https://codecov.io/gh/go-openapi/validate/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/validate)
# Validation helpers [![Build Status](https://github.com/go-openapi/validate/actions/workflows/go-test.yml/badge.svg)](https://github.com/go-openapi/validate/actions?query=workflow%3A"go+test") [![codecov](https://codecov.io/gh/go-openapi/validate/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/validate)
[![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/validate/master/LICENSE)
[![Go Reference](https://pkg.go.dev/badge/github.com/go-openapi/validate.svg)](https://pkg.go.dev/github.com/go-openapi/validate)
@ -24,7 +22,7 @@ Reference can be found here: https://github.com/OAI/OpenAPI-Specification/blob/m
* Minimum, Maximum, MultipleOf
* FormatOf
[Documentation](https://godoc.org/github.com/go-openapi/validate)
[Documentation](https://pkg.go.dev/github.com/go-openapi/validate)
## FAQ
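
Aside from the README excerpt above: the helpers it lists (Minimum/Maximum, MultipleOf, FormatOf) are exposed as standalone functions. A short sketch of their use; the field names "age", "price" and "contact" are arbitrary:

```go
package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	// Each helper returns nil when the value is valid, or a typed validation error.
	if err := validate.MaximumInt("age", "body", 130, 120, false); err != nil {
		fmt.Println(err) // maximum exceeded: 130 > 120
	}
	if err := validate.MultipleOf("price", "body", 10.10, 0.25); err != nil {
		fmt.Println(err) // 10.10 is not a multiple of 0.25
	}
	if err := validate.FormatOf("contact", "body", "email", "not-an-email", strfmt.Default); err != nil {
		fmt.Println(err) // not a valid email
	}
}
```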


@ -1,32 +0,0 @@
version: "0.1.{build}"
clone_folder: C:\go-openapi\validate
shallow_clone: true # for startup speed
pull_requests:
do_not_increment_build_number: true
#skip_tags: true
#skip_branch_with_pr: true
# appveyor.yml
build: off
environment:
GOPATH: c:\gopath
stack: go 1.15
test_script:
- go test -v -timeout 20m -args -enable-long ./...
deploy: off
notifications:
- provider: Slack
incoming_webhook: https://hooks.slack.com/services/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
auth_token:
secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
channel: bots
on_build_success: false
on_build_failure: true
on_build_status_changed: true


@ -25,48 +25,55 @@ import (
// According to Swagger spec, default values MUST validate their schema.
type defaultValidator struct {
SpecValidator *SpecValidator
visitedSchemas map[string]bool
visitedSchemas map[string]struct{}
schemaOptions *SchemaValidatorOptions
}
// resetVisited resets the internal state of visited schemas
func (d *defaultValidator) resetVisited() {
d.visitedSchemas = map[string]bool{}
if d.visitedSchemas == nil {
d.visitedSchemas = make(map[string]struct{})
return
}
// TODO(go1.21): clear(ex.visitedSchemas)
for k := range d.visitedSchemas {
delete(d.visitedSchemas, k)
}
}
func isVisited(path string, visitedSchemas map[string]bool) bool {
found := visitedSchemas[path]
if !found {
// search for overlapping paths
frags := strings.Split(path, ".")
if len(frags) < 2 {
// shortcut exit on smaller paths
return found
func isVisited(path string, visitedSchemas map[string]struct{}) bool {
_, found := visitedSchemas[path]
if found {
return true
}
// search for overlapping paths
var (
parent string
suffix string
)
for i := len(path) - 2; i >= 0; i-- {
r := path[i]
if r != '.' {
continue
}
last := len(frags) - 1
var currentFragStr, parent string
for i := range frags {
if i == 0 {
currentFragStr = frags[last]
} else {
currentFragStr = strings.Join([]string{frags[last-i], currentFragStr}, ".")
}
if i < last {
parent = strings.Join(frags[0:last-i], ".")
} else {
parent = ""
}
if strings.HasSuffix(parent, currentFragStr) {
found = true
break
}
parent = path[0:i]
suffix = path[i+1:]
if strings.HasSuffix(parent, suffix) {
return true
}
}
return found
return false
}
// beingVisited asserts a schema is being visited
func (d *defaultValidator) beingVisited(path string) {
d.visitedSchemas[path] = true
d.visitedSchemas[path] = struct{}{}
}
// isVisited tells if a path has already been visited
@ -75,8 +82,9 @@ func (d *defaultValidator) isVisited(path string) bool {
}
// Validate validates the default values declared in the swagger spec
func (d *defaultValidator) Validate() (errs *Result) {
errs = new(Result)
func (d *defaultValidator) Validate() *Result {
errs := pools.poolOfResults.BorrowResult() // will redeem when merged
if d == nil || d.SpecValidator == nil {
return errs
}
@ -89,7 +97,7 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
// every default value that is specified must validate against the schema for that property
// headers, items, parameters, schema
res := new(Result)
res := pools.poolOfResults.BorrowResult() // will redeem when merged
s := d.SpecValidator
for method, pathItem := range s.expandedAnalyzer().Operations() {
@ -107,10 +115,12 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
// default values provided must validate against their inline definition (no explicit schema)
if param.Default != nil && param.Schema == nil {
// check param default value is valid
red := NewParamValidator(&param, s.KnownFormats).Validate(param.Default) //#nosec
red := newParamValidator(&param, s.KnownFormats, d.schemaOptions).Validate(param.Default) //#nosec
if red.HasErrorsOrWarnings() {
res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -120,6 +130,8 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
if red.HasErrorsOrWarnings() {
res.AddErrors(defaultValueItemsDoesNotValidateMsg(param.Name, param.In))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -129,6 +141,8 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
if red.HasErrorsOrWarnings() {
res.AddErrors(defaultValueDoesNotValidateMsg(param.Name, param.In))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
}
@ -154,7 +168,7 @@ func (d *defaultValidator) validateDefaultValueValidAgainstSchema() *Result {
// reset explored schemas to get depth-first recursive-proof exploration
d.resetVisited()
for nm, sch := range s.spec.Spec().Definitions {
res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("definitions.%s", nm), "body", &sch)) //#nosec
res.Merge(d.validateDefaultValueSchemaAgainstSchema("definitions."+nm, "body", &sch)) //#nosec
}
}
return res
@ -170,17 +184,18 @@ func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, respon
responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode)
// nolint: dupl
if response.Headers != nil { // Safeguard
for nm, h := range response.Headers {
// reset explored schemas to get depth-first recursive-proof exploration
d.resetVisited()
if h.Default != nil {
red := NewHeaderValidator(nm, &h, s.KnownFormats).Validate(h.Default) //#nosec
red := newHeaderValidator(nm, &h, s.KnownFormats, d.schemaOptions).Validate(h.Default) //#nosec
if red.HasErrorsOrWarnings() {
res.AddErrors(defaultValueHeaderDoesNotValidateMsg(operationID, nm, responseName))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -190,6 +205,8 @@ func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, respon
if red.HasErrorsOrWarnings() {
res.AddErrors(defaultValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -209,6 +226,8 @@ func (d *defaultValidator) validateDefaultInResponse(resp *spec.Response, respon
// Additional message to make sure the context of the error is not lost
res.AddErrors(defaultValueInDoesNotValidateMsg(operationID, responseName))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
return res
@ -220,11 +239,13 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri
return nil
}
d.beingVisited(path)
res := new(Result)
res := pools.poolOfResults.BorrowResult()
s := d.SpecValidator
if schema.Default != nil {
res.Merge(NewSchemaValidator(schema, s.spec.Spec(), path+".default", s.KnownFormats, SwaggerSchema(true)).Validate(schema.Default))
res.Merge(
newSchemaValidator(schema, s.spec.Spec(), path+".default", s.KnownFormats, d.schemaOptions).Validate(schema.Default),
)
}
if schema.Items != nil {
if schema.Items.Schema != nil {
@ -242,7 +263,7 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri
}
if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
// NOTE: we keep validating values, even though additionalItems is not supported by Swagger 2.0 (and 3.0 as well)
res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalItems", path), in, schema.AdditionalItems.Schema))
res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".additionalItems", in, schema.AdditionalItems.Schema))
}
for propName, prop := range schema.Properties {
res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
@ -251,7 +272,7 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri
res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
}
if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
res.Merge(d.validateDefaultValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalProperties", path), in, schema.AdditionalProperties.Schema))
res.Merge(d.validateDefaultValueSchemaAgainstSchema(path+".additionalProperties", in, schema.AdditionalProperties.Schema))
}
if schema.AllOf != nil {
for i, aoSch := range schema.AllOf {
@ -262,13 +283,15 @@ func (d *defaultValidator) validateDefaultValueSchemaAgainstSchema(path, in stri
}
// TODO: Temporary duplicated code. Need to refactor with examples
// nolint: dupl
func (d *defaultValidator) validateDefaultValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result {
res := new(Result)
res := pools.poolOfResults.BorrowResult()
s := d.SpecValidator
if items != nil {
if items.Default != nil {
res.Merge(newItemsValidator(path, in, items, root, s.KnownFormats).Validate(0, items.Default))
res.Merge(
newItemsValidator(path, in, items, root, s.KnownFormats, d.schemaOptions).Validate(0, items.Default),
)
}
if items.Items != nil {
res.Merge(d.validateDefaultValueItemsAgainstSchema(path+"[0].default", in, root, items.Items))


@ -19,7 +19,7 @@ as well as tools to validate data against their schema.
This package follows Swagger 2.0. specification (aka OpenAPI 2.0). Reference
can be found here: https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md.
Validating a specification
# Validating a specification
Validates a spec document (from JSON or YAML) against the JSON schema for swagger,
then checks a number of extra rules that can't be expressed in JSON schema.
@ -30,34 +30,36 @@ Entry points:
- SpecValidator.Validate()
Reported as errors:
[x] definition can't declare a property that's already defined by one of its ancestors
[x] definition's ancestor can't be a descendant of the same model
[x] path uniqueness: each api path should be non-verbatim (account for path param names) unique per method
[x] each security reference should contain only unique scopes
[x] each security scope in a security definition should be unique
[x] parameters in path must be unique
[x] each path parameter must correspond to a parameter placeholder and vice versa
[x] each referenceable definition must have references
[x] each definition property listed in the required array must be defined in the properties of the model
[x] each parameter should have a unique `name` and `type` combination
[x] each operation should have only 1 parameter of type body
[x] each reference must point to a valid object
[x] every default value that is specified must validate against the schema for that property
[x] items property is required for all schemas/definitions of type `array`
[x] path parameters must be declared as required
[x] headers must not contain $ref
[x] schema and property examples provided must validate against their respective object's schema
[x] examples provided must validate their schema
[x] definition can't declare a property that's already defined by one of its ancestors
[x] definition's ancestor can't be a descendant of the same model
[x] path uniqueness: each api path should be non-verbatim (account for path param names) unique per method. Validation can be relaxed by disabling StrictPathParamUniqueness.
[x] each security reference should contain only unique scopes
[x] each security scope in a security definition should be unique
[x] parameters in path must be unique
[x] each path parameter must correspond to a parameter placeholder and vice versa
[x] each referenceable definition must have references
[x] each definition property listed in the required array must be defined in the properties of the model
[x] each parameter should have a unique `name` and `type` combination
[x] each operation should have only 1 parameter of type body
[x] each reference must point to a valid object
[x] every default value that is specified must validate against the schema for that property
[x] items property is required for all schemas/definitions of type `array`
[x] path parameters must be declared as required
[x] headers must not contain $ref
[x] schema and property examples provided must validate against their respective object's schema
[x] examples provided must validate their schema
Reported as warnings:
[x] path parameters should not contain any of [{,},\w]
[x] empty path
[x] unused definitions
[x] unsupported validation of examples on non-JSON media types
[x] examples in response without schema
[x] readOnly properties should not be required
Validating a schema
[x] path parameters should not contain any of [{,},\w]
[x] empty path
[x] unused definitions
[x] unsupported validation of examples on non-JSON media types
[x] examples in response without schema
[x] readOnly properties should not be required
# Validating a schema
The schema validation toolkit validates data against JSON-schema-draft 04 schema.
@ -70,16 +72,16 @@ Entry points:
- AgainstSchema()
- ...
Known limitations
# Known limitations
With the current version of this package, the following aspects of swagger are not yet supported:
[ ] errors and warnings are not reported with key/line number in spec
[ ] default values and examples on responses only support application/json producer type
[ ] invalid numeric constraints (such as Minimum, etc..) are not checked except for default and example values
[ ] rules for collectionFormat are not implemented
[ ] no validation rule for polymorphism support (discriminator) [not done here]
[ ] valid js ECMA regexp not supported by Go regexp engine are considered invalid
[ ] arbitrary large numbers are not supported: max is math.MaxFloat64
[ ] errors and warnings are not reported with key/line number in spec
[ ] default values and examples on responses only support application/json producer type
[ ] invalid numeric constraints (such as Minimum, etc..) are not checked except for default and example values
[ ] rules for collectionFormat are not implemented
[ ] no validation rule for polymorphism support (discriminator) [not done here]
[ ] valid js ECMA regexp not supported by Go regexp engine are considered invalid
[ ] arbitrary large numbers are not supported: max is math.MaxFloat64
*/
package validate
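
A compact sketch of the two entry points described above, Spec() for whole documents and AgainstSchema() for data; the "./swagger.json" path is an assumption:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/loads"
	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	// Validate a full specification document.
	doc, err := loads.Spec("./swagger.json") // path is an assumption
	if err != nil {
		log.Fatal(err)
	}
	if err := validate.Spec(doc, strfmt.Default); err != nil {
		fmt.Println("spec is invalid:", err)
	}

	// Validate arbitrary data against a JSON-schema-draft-04 schema.
	schema := new(spec.Schema)
	if err := json.Unmarshal([]byte(`{"type": "object", "required": ["name"]}`), schema); err != nil {
		log.Fatal(err)
	}
	if err := validate.AgainstSchema(schema, map[string]interface{}{}, strfmt.Default); err != nil {
		fmt.Println("data is invalid:", err) // "name" is required
	}
}
```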


@ -23,17 +23,27 @@ import (
// ExampleValidator validates example values defined in a spec
type exampleValidator struct {
SpecValidator *SpecValidator
visitedSchemas map[string]bool
visitedSchemas map[string]struct{}
schemaOptions *SchemaValidatorOptions
}
// resetVisited resets the internal state of visited schemas
func (ex *exampleValidator) resetVisited() {
ex.visitedSchemas = map[string]bool{}
if ex.visitedSchemas == nil {
ex.visitedSchemas = make(map[string]struct{})
return
}
// TODO(go1.21): clear(ex.visitedSchemas)
for k := range ex.visitedSchemas {
delete(ex.visitedSchemas, k)
}
}
// beingVisited asserts a schema is being visited
func (ex *exampleValidator) beingVisited(path string) {
ex.visitedSchemas[path] = true
ex.visitedSchemas[path] = struct{}{}
}
// isVisited tells if a path has already been visited
@ -48,9 +58,9 @@ func (ex *exampleValidator) isVisited(path string) bool {
// - schemas
// - individual property
// - responses
//
func (ex *exampleValidator) Validate() (errs *Result) {
errs = new(Result)
func (ex *exampleValidator) Validate() *Result {
errs := pools.poolOfResults.BorrowResult()
if ex == nil || ex.SpecValidator == nil {
return errs
}
@ -65,7 +75,7 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
// in: schemas, properties, object, items
// not in: headers, parameters without schema
res := new(Result)
res := pools.poolOfResults.BorrowResult()
s := ex.SpecValidator
for method, pathItem := range s.expandedAnalyzer().Operations() {
@ -83,10 +93,12 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
// default values provided must validate against their inline definition (no explicit schema)
if param.Example != nil && param.Schema == nil {
// check param default value is valid
red := NewParamValidator(&param, s.KnownFormats).Validate(param.Example) //#nosec
red := newParamValidator(&param, s.KnownFormats, ex.schemaOptions).Validate(param.Example) //#nosec
if red.HasErrorsOrWarnings() {
res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In))
res.MergeAsWarnings(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -96,6 +108,8 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
if red.HasErrorsOrWarnings() {
res.AddWarnings(exampleValueItemsDoesNotValidateMsg(param.Name, param.In))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -105,6 +119,8 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
if red.HasErrorsOrWarnings() {
res.AddWarnings(exampleValueDoesNotValidateMsg(param.Name, param.In))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
}
@ -130,7 +146,7 @@ func (ex *exampleValidator) validateExampleValueValidAgainstSchema() *Result {
// reset explored schemas to get depth-first recursive-proof exploration
ex.resetVisited()
for nm, sch := range s.spec.Spec().Definitions {
res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("definitions.%s", nm), "body", &sch)) //#nosec
res.Merge(ex.validateExampleValueSchemaAgainstSchema("definitions."+nm, "body", &sch)) //#nosec
}
}
return res
@ -146,17 +162,18 @@ func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, respo
responseName, responseCodeAsStr := responseHelp.responseMsgVariants(responseType, responseCode)
// nolint: dupl
if response.Headers != nil { // Safeguard
for nm, h := range response.Headers {
// reset explored schemas to get depth-first recursive-proof exploration
ex.resetVisited()
if h.Example != nil {
red := NewHeaderValidator(nm, &h, s.KnownFormats).Validate(h.Example) //#nosec
red := newHeaderValidator(nm, &h, s.KnownFormats, ex.schemaOptions).Validate(h.Example) //#nosec
if red.HasErrorsOrWarnings() {
res.AddWarnings(exampleValueHeaderDoesNotValidateMsg(operationID, nm, responseName))
res.MergeAsWarnings(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -166,6 +183,8 @@ func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, respo
if red.HasErrorsOrWarnings() {
res.AddWarnings(exampleValueHeaderItemsDoesNotValidateMsg(operationID, nm, responseName))
res.MergeAsWarnings(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
@ -185,13 +204,17 @@ func (ex *exampleValidator) validateExampleInResponse(resp *spec.Response, respo
// Additional message to make sure the context of the error is not lost
res.AddWarnings(exampleValueInDoesNotValidateMsg(operationID, responseName))
res.Merge(red)
} else if red.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(red)
}
}
if response.Examples != nil {
if response.Schema != nil {
if example, ok := response.Examples["application/json"]; ok {
res.MergeAsWarnings(NewSchemaValidator(response.Schema, s.spec.Spec(), path+".examples", s.KnownFormats, SwaggerSchema(true)).Validate(example))
res.MergeAsWarnings(
newSchemaValidator(response.Schema, s.spec.Spec(), path+".examples", s.KnownFormats, s.schemaOptions).Validate(example),
)
} else {
// TODO: validate other media types too
res.AddWarnings(examplesMimeNotSupportedMsg(operationID, responseName))
@ -210,10 +233,12 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str
}
ex.beingVisited(path)
s := ex.SpecValidator
res := new(Result)
res := pools.poolOfResults.BorrowResult()
if schema.Example != nil {
res.MergeAsWarnings(NewSchemaValidator(schema, s.spec.Spec(), path+".example", s.KnownFormats, SwaggerSchema(true)).Validate(schema.Example))
res.MergeAsWarnings(
newSchemaValidator(schema, s.spec.Spec(), path+".example", s.KnownFormats, ex.schemaOptions).Validate(schema.Example),
)
}
if schema.Items != nil {
if schema.Items.Schema != nil {
@ -231,7 +256,7 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str
}
if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
// NOTE: we keep validating values, even though additionalItems is unsupported in Swagger 2.0 (and 3.0 as well)
res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalItems", path), in, schema.AdditionalItems.Schema))
res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".additionalItems", in, schema.AdditionalItems.Schema))
}
for propName, prop := range schema.Properties {
res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
@ -240,7 +265,7 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str
res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+"."+propName, in, &prop)) //#nosec
}
if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
res.Merge(ex.validateExampleValueSchemaAgainstSchema(fmt.Sprintf("%s.additionalProperties", path), in, schema.AdditionalProperties.Schema))
res.Merge(ex.validateExampleValueSchemaAgainstSchema(path+".additionalProperties", in, schema.AdditionalProperties.Schema))
}
if schema.AllOf != nil {
for i, aoSch := range schema.AllOf {
@ -251,13 +276,16 @@ func (ex *exampleValidator) validateExampleValueSchemaAgainstSchema(path, in str
}
// TODO: Temporary duplicated code. Need to refactor with examples
// nolint: dupl
//
func (ex *exampleValidator) validateExampleValueItemsAgainstSchema(path, in string, root interface{}, items *spec.Items) *Result {
res := new(Result)
res := pools.poolOfResults.BorrowResult()
s := ex.SpecValidator
if items != nil {
if items.Example != nil {
res.MergeAsWarnings(newItemsValidator(path, in, items, root, s.KnownFormats).Validate(0, items.Example))
res.MergeAsWarnings(
newItemsValidator(path, in, items, root, s.KnownFormats, ex.schemaOptions).Validate(0, items.Example),
)
}
if items.Items != nil {
res.Merge(ex.validateExampleValueItemsAgainstSchema(path+"[0].example", in, root, items.Items))
@ -266,5 +294,6 @@ func (ex *exampleValidator) validateExampleValueItemsAgainstSchema(path, in stri
res.AddErrors(invalidPatternInMsg(path, in, items.Pattern))
}
}
return res
}


@ -22,10 +22,32 @@ import (
)
type formatValidator struct {
Format string
Path string
In string
Format string
KnownFormats strfmt.Registry
Options *SchemaValidatorOptions
}
func newFormatValidator(path, in, format string, formats strfmt.Registry, opts *SchemaValidatorOptions) *formatValidator {
if opts == nil {
opts = new(SchemaValidatorOptions)
}
var f *formatValidator
if opts.recycleValidators {
f = pools.poolOfFormatValidators.BorrowValidator()
} else {
f = new(formatValidator)
}
f.Path = path
f.In = in
f.Format = format
f.KnownFormats = formats
f.Options = opts
return f
}
func (f *formatValidator) SetPath(path string) {
@ -33,37 +55,45 @@ func (f *formatValidator) SetPath(path string) {
}
func (f *formatValidator) Applies(source interface{}, kind reflect.Kind) bool {
doit := func() bool {
if source == nil {
return false
}
switch source := source.(type) {
case *spec.Items:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
case *spec.Parameter:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
case *spec.Schema:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
case *spec.Header:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
}
if source == nil || f.KnownFormats == nil {
return false
}
switch source := source.(type) {
case *spec.Items:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
case *spec.Parameter:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
case *spec.Schema:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
case *spec.Header:
return kind == reflect.String && f.KnownFormats.ContainsName(source.Format)
default:
return false
}
r := doit()
debugLog("format validator for %q applies %t for %T (kind: %v)\n", f.Path, r, source, kind)
return r
}
func (f *formatValidator) Validate(val interface{}) *Result {
result := new(Result)
debugLog("validating \"%v\" against format: %s", val, f.Format)
if f.Options.recycleValidators {
defer func() {
f.redeem()
}()
}
var result *Result
if f.Options.recycleResult {
result = pools.poolOfResults.BorrowResult()
} else {
result = new(Result)
}
if err := FormatOf(f.Path, f.In, f.Format, val.(string), f.KnownFormats); err != nil {
result.AddErrors(err)
}
if result.HasErrors() {
return result
}
return nil
return result
}
func (f *formatValidator) redeem() {
pools.poolOfFormatValidators.RedeemValidator(f)
}


@ -101,9 +101,17 @@ type errorHelper struct {
// A collection of unexported helpers for error construction
}
func (h *errorHelper) sErr(err errors.Error) *Result {
func (h *errorHelper) sErr(err errors.Error, recycle bool) *Result {
// Builds a Result from standard errors.Error
return &Result{Errors: []error{err}}
var result *Result
if recycle {
result = pools.poolOfResults.BorrowResult()
} else {
result = new(Result)
}
result.Errors = []error{err}
return result
}
func (h *errorHelper) addPointerError(res *Result, err error, ref string, fromPath string) *Result {
@ -157,7 +165,7 @@ func (h *valueHelper) asInt64(val interface{}) int64 {
// Number conversion function for int64, without error checking
// (implements an implicit type upgrade).
v := reflect.ValueOf(val)
switch v.Kind() {
switch v.Kind() { //nolint:exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return v.Int()
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
@ -174,7 +182,7 @@ func (h *valueHelper) asUint64(val interface{}) uint64 {
// Number conversion function for uint64, without error checking
// (implements an implicit type upgrade).
v := reflect.ValueOf(val)
switch v.Kind() {
switch v.Kind() { //nolint:exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return uint64(v.Int())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
@ -192,7 +200,7 @@ func (h *valueHelper) asFloat64(val interface{}) float64 {
// Number conversion function for float64, without error checking
// (implements an implicit type upgrade).
v := reflect.ValueOf(val)
switch v.Kind() {
switch v.Kind() { //nolint:exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return float64(v.Int())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
@ -225,7 +233,7 @@ func (h *paramHelper) safeExpandedParamsFor(path, method, operationID string, re
operation.Parameters = resolvedParams
for _, ppr := range s.expandedAnalyzer().SafeParamsFor(method, path,
func(p spec.Parameter, err error) bool {
func(_ spec.Parameter, err error) bool {
// since params have already been expanded, there are few causes for error
res.AddErrors(someParametersBrokenMsg(path, method, operationID))
// original error from analyzer
@ -250,7 +258,7 @@ func (h *paramHelper) resolveParam(path, method, operationID string, param *spec
}
if err != nil { // Safeguard
// NOTE: we may enter enter here when the whole parameter is an unresolved $ref
// NOTE: we may enter here when the whole parameter is an unresolved $ref
refPath := strings.Join([]string{"\"" + path + "\"", method}, ".")
errorHelp.addPointerError(res, err, param.Ref.String(), refPath)
return nil, res
@ -306,6 +314,7 @@ func (r *responseHelper) expandResponseRef(
errorHelp.addPointerError(res, err, response.Ref.String(), path)
return nil, res
}
return response, res
}


@ -15,8 +15,8 @@
package validate
import (
"fmt"
"reflect"
"regexp"
"strings"
"github.com/go-openapi/errors"
@ -35,62 +35,116 @@ type objectValidator struct {
PatternProperties map[string]spec.Schema
Root interface{}
KnownFormats strfmt.Registry
Options SchemaValidatorOptions
Options *SchemaValidatorOptions
splitPath []string
}
func newObjectValidator(path, in string,
maxProperties, minProperties *int64, required []string, properties spec.SchemaProperties,
additionalProperties *spec.SchemaOrBool, patternProperties spec.SchemaProperties,
root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *objectValidator {
if opts == nil {
opts = new(SchemaValidatorOptions)
}
var v *objectValidator
if opts.recycleValidators {
v = pools.poolOfObjectValidators.BorrowValidator()
} else {
v = new(objectValidator)
}
v.Path = path
v.In = in
v.MaxProperties = maxProperties
v.MinProperties = minProperties
v.Required = required
v.Properties = properties
v.AdditionalProperties = additionalProperties
v.PatternProperties = patternProperties
v.Root = root
v.KnownFormats = formats
v.Options = opts
v.splitPath = strings.Split(v.Path, ".")
return v
}
func (o *objectValidator) SetPath(path string) {
o.Path = path
o.splitPath = strings.Split(path, ".")
}
func (o *objectValidator) Applies(source interface{}, kind reflect.Kind) bool {
// TODO: this should also work for structs
// there is a problem in the type validator where it will be unhappy about null values
// so that requires more testing
r := reflect.TypeOf(source) == specSchemaType && (kind == reflect.Map || kind == reflect.Struct)
debugLog("object validator for %q applies %t for %T (kind: %v)\n", o.Path, r, source, kind)
return r
_, isSchema := source.(*spec.Schema)
return isSchema && (kind == reflect.Map || kind == reflect.Struct)
}
func (o *objectValidator) isProperties() bool {
p := strings.Split(o.Path, ".")
p := o.splitPath
return len(p) > 1 && p[len(p)-1] == jsonProperties && p[len(p)-2] != jsonProperties
}
func (o *objectValidator) isDefault() bool {
p := strings.Split(o.Path, ".")
p := o.splitPath
return len(p) > 1 && p[len(p)-1] == jsonDefault && p[len(p)-2] != jsonDefault
}
func (o *objectValidator) isExample() bool {
p := strings.Split(o.Path, ".")
p := o.splitPath
return len(p) > 1 && (p[len(p)-1] == swaggerExample || p[len(p)-1] == swaggerExamples) && p[len(p)-2] != swaggerExample
}
func (o *objectValidator) checkArrayMustHaveItems(res *Result, val map[string]interface{}) {
// for swagger 2.0 schemas, there is an additional constraint to have array items defined explicitly.
// with pure jsonschema draft 4, one may have arrays with undefined items (i.e. any type).
if t, typeFound := val[jsonType]; typeFound {
if tpe, ok := t.(string); ok && tpe == arrayType {
if item, itemsKeyFound := val[jsonItems]; !itemsKeyFound {
res.AddErrors(errors.Required(jsonItems, o.Path, item))
}
}
if val == nil {
return
}
t, typeFound := val[jsonType]
if !typeFound {
return
}
tpe, isString := t.(string)
if !isString || tpe != arrayType {
return
}
item, itemsKeyFound := val[jsonItems]
if itemsKeyFound {
return
}
res.AddErrors(errors.Required(jsonItems, o.Path, item))
}
func (o *objectValidator) checkItemsMustBeTypeArray(res *Result, val map[string]interface{}) {
if !o.isProperties() && !o.isDefault() && !o.isExample() {
if _, itemsKeyFound := val[jsonItems]; itemsKeyFound {
t, typeFound := val[jsonType]
if typeFound {
if tpe, ok := t.(string); !ok || tpe != arrayType {
res.AddErrors(errors.InvalidType(o.Path, o.In, arrayType, nil))
}
} else {
// there is no type
res.AddErrors(errors.Required(jsonType, o.Path, t))
}
}
if val == nil {
return
}
if o.isProperties() || o.isDefault() || o.isExample() {
return
}
_, itemsKeyFound := val[jsonItems]
if !itemsKeyFound {
return
}
t, typeFound := val[jsonType]
if !typeFound {
// there is no type
res.AddErrors(errors.Required(jsonType, o.Path, t))
}
if tpe, isString := t.(string); !isString || tpe != arrayType {
res.AddErrors(errors.InvalidType(o.Path, o.In, arrayType, nil))
}
}
@ -104,176 +158,274 @@ func (o *objectValidator) precheck(res *Result, val map[string]interface{}) {
}
func (o *objectValidator) Validate(data interface{}) *Result {
val := data.(map[string]interface{})
// TODO: guard against nil data
if o.Options.recycleValidators {
defer func() {
o.redeem()
}()
}
var val map[string]interface{}
if data != nil {
var ok bool
val, ok = data.(map[string]interface{})
if !ok {
return errorHelp.sErr(invalidObjectMsg(o.Path, o.In), o.Options.recycleResult)
}
}
numKeys := int64(len(val))
if o.MinProperties != nil && numKeys < *o.MinProperties {
return errorHelp.sErr(errors.TooFewProperties(o.Path, o.In, *o.MinProperties))
return errorHelp.sErr(errors.TooFewProperties(o.Path, o.In, *o.MinProperties), o.Options.recycleResult)
}
if o.MaxProperties != nil && numKeys > *o.MaxProperties {
return errorHelp.sErr(errors.TooManyProperties(o.Path, o.In, *o.MaxProperties))
return errorHelp.sErr(errors.TooManyProperties(o.Path, o.In, *o.MaxProperties), o.Options.recycleResult)
}
res := new(Result)
var res *Result
if o.Options.recycleResult {
res = pools.poolOfResults.BorrowResult()
} else {
res = new(Result)
}
o.precheck(res, val)
// check validity of field names
if o.AdditionalProperties != nil && !o.AdditionalProperties.Allows {
// Case: additionalProperties: false
for k := range val {
_, regularProperty := o.Properties[k]
matched := false
for pk := range o.PatternProperties {
if matches, _ := regexp.MatchString(pk, k); matches {
matched = true
break
}
}
if !regularProperty && k != "$schema" && k != "id" && !matched {
// Special properties "$schema" and "id" are ignored
res.AddErrors(errors.PropertyNotAllowed(o.Path, o.In, k))
// BUG(fredbi): This section should move to a part dedicated to spec validation as
// it will conflict with regular schemas where a property "headers" is defined.
//
// Croaks a more explicit message on top of the standard one
// on some recognized cases.
//
// NOTE: edge cases with invalid type assertion are simply ignored here.
// NOTE: prefix your messages here by "IMPORTANT!" so there are not filtered
// by higher level callers (the IMPORTANT! tag will be eventually
// removed).
if k == "headers" && val[k] != nil {
// $ref is forbidden in header
if headers, mapOk := val[k].(map[string]interface{}); mapOk {
for headerKey, headerBody := range headers {
if headerBody != nil {
if headerSchema, mapOfMapOk := headerBody.(map[string]interface{}); mapOfMapOk {
if _, found := headerSchema["$ref"]; found {
var msg string
if refString, stringOk := headerSchema["$ref"].(string); stringOk {
msg = strings.Join([]string{", one may not use $ref=\":", refString, "\""}, "")
}
res.AddErrors(refNotAllowedInHeaderMsg(o.Path, headerKey, msg))
}
}
}
}
}
/*
case "$ref":
if val[k] != nil {
// TODO: check context of that ref: warn about siblings, check against invalid context
}
*/
}
}
}
o.validateNoAdditionalProperties(val, res)
} else {
// Cases: no additionalProperties (implying: true), or additionalProperties: true, or additionalProperties: { <<schema>> }
for key, value := range val {
_, regularProperty := o.Properties[key]
// Validates property against "patternProperties" if applicable
// BUG(fredbi): succeededOnce is always false
// NOTE: how about regular properties which do not match patternProperties?
matched, succeededOnce, _ := o.validatePatternProperty(key, value, res)
if !(regularProperty || matched || succeededOnce) {
// Cases: properties which are not regular properties and have not been matched by the PatternProperties validator
if o.AdditionalProperties != nil && o.AdditionalProperties.Schema != nil {
// AdditionalProperties as Schema
r := NewSchemaValidator(o.AdditionalProperties.Schema, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...).Validate(value)
res.mergeForField(data.(map[string]interface{}), key, r)
} else if regularProperty && !(matched || succeededOnce) {
// TODO: this is dead code since regularProperty=false here
res.AddErrors(errors.FailedAllPatternProperties(o.Path, o.In, key))
}
}
}
// Valid cases: additionalProperties: true or undefined
// Cases: empty additionalProperties (implying: true), or additionalProperties: true, or additionalProperties: { <<schema>> }
o.validateAdditionalProperties(val, res)
}
createdFromDefaults := map[string]bool{}
// Property types:
// - regular Property
for pName := range o.Properties {
pSchema := o.Properties[pName] // one instance per iteration
rName := pName
if o.Path != "" {
rName = o.Path + "." + pName
}
// Recursively validates each property against its schema
if v, ok := val[pName]; ok {
r := NewSchemaValidator(&pSchema, o.Root, rName, o.KnownFormats, o.Options.Options()...).Validate(v)
res.mergeForField(data.(map[string]interface{}), pName, r)
} else if pSchema.Default != nil {
// If a default value is defined, creates the property from defaults
// NOTE: JSON schema does not enforce default values to be valid against schema. Swagger does.
createdFromDefaults[pName] = true
res.addPropertySchemata(data.(map[string]interface{}), pName, &pSchema)
}
}
// Check required properties
if len(o.Required) > 0 {
for _, k := range o.Required {
if v, ok := val[k]; !ok && !createdFromDefaults[k] {
res.AddErrors(errors.Required(o.Path+"."+k, o.In, v))
continue
}
}
}
o.validatePropertiesSchema(val, res)
// Check patternProperties
// TODO: it looks like we have done that twice in many cases
for key, value := range val {
_, regularProperty := o.Properties[key]
matched, _ /*succeededOnce*/, patterns := o.validatePatternProperty(key, value, res)
if !regularProperty && (matched /*|| succeededOnce*/) {
for _, pName := range patterns {
if v, ok := o.PatternProperties[pName]; ok {
r := NewSchemaValidator(&v, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...).Validate(value)
res.mergeForField(data.(map[string]interface{}), key, r)
}
matched, _, patterns := o.validatePatternProperty(key, value, res) // applies to regular properties as well
if regularProperty || !matched {
continue
}
for _, pName := range patterns {
if v, ok := o.PatternProperties[pName]; ok {
r := newSchemaValidator(&v, o.Root, o.Path+"."+key, o.KnownFormats, o.Options).Validate(value)
res.mergeForField(data.(map[string]interface{}), key, r)
}
}
}
return res
}
func (o *objectValidator) validateNoAdditionalProperties(val map[string]interface{}, res *Result) {
for k := range val {
if k == "$schema" || k == "id" {
// special properties "$schema" and "id" are ignored
continue
}
_, regularProperty := o.Properties[k]
if regularProperty {
continue
}
matched := false
for pk := range o.PatternProperties {
re, err := compileRegexp(pk)
if err != nil {
continue
}
if matches := re.MatchString(k); matches {
matched = true
break
}
}
if matched {
continue
}
res.AddErrors(errors.PropertyNotAllowed(o.Path, o.In, k))
// BUG(fredbi): This section should move to a part dedicated to spec validation as
// it will conflict with regular schemas where a property "headers" is defined.
//
// Croaks a more explicit message on top of the standard one
// on some recognized cases.
//
// NOTE: edge cases with invalid type assertion are simply ignored here.
// NOTE: prefix your messages here by "IMPORTANT!" so there are not filtered
// by higher level callers (the IMPORTANT! tag will be eventually
// removed).
if k != "headers" || val[k] == nil {
continue
}
// $ref is forbidden in header
headers, mapOk := val[k].(map[string]interface{})
if !mapOk {
continue
}
for headerKey, headerBody := range headers {
if headerBody == nil {
continue
}
headerSchema, mapOfMapOk := headerBody.(map[string]interface{})
if !mapOfMapOk {
continue
}
_, found := headerSchema["$ref"]
if !found {
continue
}
refString, stringOk := headerSchema["$ref"].(string)
if !stringOk {
continue
}
msg := strings.Join([]string{", one may not use $ref=\":", refString, "\""}, "")
res.AddErrors(refNotAllowedInHeaderMsg(o.Path, headerKey, msg))
/*
case "$ref":
if val[k] != nil {
// TODO: check context of that ref: warn about siblings, check against invalid context
}
*/
}
}
}
func (o *objectValidator) validateAdditionalProperties(val map[string]interface{}, res *Result) {
for key, value := range val {
_, regularProperty := o.Properties[key]
if regularProperty {
continue
}
// Validates property against "patternProperties" if applicable
// BUG(fredbi): succeededOnce is always false
// NOTE: how about regular properties which do not match patternProperties?
matched, succeededOnce, _ := o.validatePatternProperty(key, value, res)
if matched || succeededOnce {
continue
}
if o.AdditionalProperties == nil || o.AdditionalProperties.Schema == nil {
continue
}
// Cases: properties which are not regular properties and have not been matched by the PatternProperties validator
// AdditionalProperties as Schema
r := newSchemaValidator(o.AdditionalProperties.Schema, o.Root, o.Path+"."+key, o.KnownFormats, o.Options).Validate(value)
res.mergeForField(val, key, r)
}
// Valid cases: additionalProperties: true or undefined
}
func (o *objectValidator) validatePropertiesSchema(val map[string]interface{}, res *Result) {
createdFromDefaults := map[string]struct{}{}
// Property types:
// - regular Property
pSchema := pools.poolOfSchemas.BorrowSchema() // recycle a spec.Schema object which lifespan extends only to the validation of properties
defer func() {
pools.poolOfSchemas.RedeemSchema(pSchema)
}()
for pName := range o.Properties {
*pSchema = o.Properties[pName]
var rName string
if o.Path == "" {
rName = pName
} else {
rName = o.Path + "." + pName
}
// Recursively validates each property against its schema
v, ok := val[pName]
if ok {
r := newSchemaValidator(pSchema, o.Root, rName, o.KnownFormats, o.Options).Validate(v)
res.mergeForField(val, pName, r)
continue
}
if pSchema.Default != nil {
// if a default value is defined, creates the property from defaults
// NOTE: JSON schema does not enforce default values to be valid against schema. Swagger does.
createdFromDefaults[pName] = struct{}{}
if !o.Options.skipSchemataResult {
res.addPropertySchemata(val, pName, pSchema) // this shallow-clones the content of the pSchema pointer
}
}
}
if len(o.Required) == 0 {
return
}
// Check required properties
for _, k := range o.Required {
v, ok := val[k]
if ok {
continue
}
_, isCreatedFromDefaults := createdFromDefaults[k]
if isCreatedFromDefaults {
continue
}
res.AddErrors(errors.Required(fmt.Sprintf("%s.%s", o.Path, k), o.In, v))
}
}
// TODO: succeededOnce is not used anywhere
func (o *objectValidator) validatePatternProperty(key string, value interface{}, result *Result) (bool, bool, []string) {
matched := false
succeededOnce := false
var patterns []string
for k, schema := range o.PatternProperties {
sch := schema
if match, _ := regexp.MatchString(k, key); match {
patterns = append(patterns, k)
matched = true
validator := NewSchemaValidator(&sch, o.Root, o.Path+"."+key, o.KnownFormats, o.Options.Options()...)
res := validator.Validate(value)
result.Merge(res)
}
if len(o.PatternProperties) == 0 {
return false, false, nil
}
// BUG(fredbi): can't get to here. Should remove dead code (commented out).
matched := false
succeededOnce := false
patterns := make([]string, 0, len(o.PatternProperties))
// if succeededOnce {
// result.Inc()
// }
schema := pools.poolOfSchemas.BorrowSchema()
defer func() {
pools.poolOfSchemas.RedeemSchema(schema)
}()
for k := range o.PatternProperties {
re, err := compileRegexp(k)
if err != nil {
continue
}
match := re.MatchString(key)
if !match {
continue
}
*schema = o.PatternProperties[k]
patterns = append(patterns, k)
matched = true
validator := newSchemaValidator(schema, o.Root, fmt.Sprintf("%s.%s", o.Path, key), o.KnownFormats, o.Options)
res := validator.Validate(value)
result.Merge(res)
}
return matched, succeededOnce, patterns
}
func (o *objectValidator) redeem() {
pools.poolOfObjectValidators.RedeemValidator(o)
}
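
The behaviour implemented above (patternProperties, additionalProperties: false) is reachable through the public AgainstSchema entry point. A small sketch under that assumption: the "x-" pattern and the sample keys are illustrative only:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	schema := new(spec.Schema)
	_ = json.Unmarshal([]byte(`{
		"type": "object",
		"patternProperties": {"^x-": {"type": "string"}},
		"additionalProperties": false
	}`), schema)

	// "x-trace" matches a pattern property, so it is accepted (prints <nil>).
	fmt.Println(validate.AgainstSchema(schema, map[string]interface{}{"x-trace": "abc"}, strfmt.Default))

	// "other" matches neither a regular nor a pattern property and
	// additionalProperties is false, so a property-not-allowed error is reported.
	fmt.Println(validate.AgainstSchema(schema, map[string]interface{}{"other": 1}, strfmt.Default))
}
```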


@ -21,10 +21,29 @@ import "sync"
// NOTE: other options might be needed, for example a go-swagger specific mode.
type Opts struct {
ContinueOnErrors bool // true: continue reporting errors, even if spec is invalid
// StrictPathParamUniqueness enables a strict validation of paths that include
// path parameters. When true, it will enforce that for each method, the path
// is unique, regardless of path parameters such that GET:/petstore/{id} and
// GET:/petstore/{pet} are considered duplicate paths.
//
// Consider disabling if path parameters can include slashes such as
// GET:/v1/{shelve} and GET:/v1/{book}, where the IDs are "shelve/*" and
// "shelve/*/book/*" respectively.
StrictPathParamUniqueness bool
SkipSchemataResult bool
}
var (
defaultOpts = Opts{ContinueOnErrors: false} // default is to stop validation on errors
defaultOpts = Opts{
// default is to stop validation on errors
ContinueOnErrors: false,
// StrictPathParamUniqueness is defaulted to true. This maintains existing
// behavior.
StrictPathParamUniqueness: true,
}
defaultOptsMutex = &sync.Mutex{}
)
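
A hedged sketch of how these options are typically applied: either globally with SetContinueOnErrors, or per validator through the exported Options field on SpecValidator, as below. The "./swagger.json" path is an assumption:

```go
package main

import (
	"log"

	"github.com/go-openapi/loads"
	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	doc, err := loads.Spec("./swagger.json") // path is an assumption
	if err != nil {
		log.Fatal(err)
	}

	v := validate.NewSpecValidator(spec.MustLoadSwagger20Schema(), strfmt.Default)
	v.Options.ContinueOnErrors = true           // keep reporting errors after the first failures
	v.Options.StrictPathParamUniqueness = false // tolerate GET:/v1/{shelve} vs GET:/v1/{book}

	res, _ := v.Validate(doc) // the second return value carries warnings
	if !res.IsValid() {
		log.Printf("spec is invalid: %v", res.Errors)
	}
}
```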

vendor/github.com/go-openapi/validate/pools.go (new vendored file)

@ -0,0 +1,366 @@
//go:build !validatedebug
package validate
import (
"sync"
"github.com/go-openapi/spec"
)
var pools allPools
func init() {
resetPools()
}
func resetPools() {
// NOTE: for testing purpose, we might want to reset pools after calling Validate twice.
// The pool is corrupted in that case: calling Put twice inserts a duplicate in the pool
// and further calls to Get are mishandled.
pools = allPools{
poolOfSchemaValidators: schemaValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &SchemaValidator{}
return s
},
},
},
poolOfObjectValidators: objectValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &objectValidator{}
return s
},
},
},
poolOfSliceValidators: sliceValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &schemaSliceValidator{}
return s
},
},
},
poolOfItemsValidators: itemsValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &itemsValidator{}
return s
},
},
},
poolOfBasicCommonValidators: basicCommonValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &basicCommonValidator{}
return s
},
},
},
poolOfHeaderValidators: headerValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &HeaderValidator{}
return s
},
},
},
poolOfParamValidators: paramValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &ParamValidator{}
return s
},
},
},
poolOfBasicSliceValidators: basicSliceValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &basicSliceValidator{}
return s
},
},
},
poolOfNumberValidators: numberValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &numberValidator{}
return s
},
},
},
poolOfStringValidators: stringValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &stringValidator{}
return s
},
},
},
poolOfSchemaPropsValidators: schemaPropsValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &schemaPropsValidator{}
return s
},
},
},
poolOfFormatValidators: formatValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &formatValidator{}
return s
},
},
},
poolOfTypeValidators: typeValidatorsPool{
Pool: &sync.Pool{
New: func() any {
s := &typeValidator{}
return s
},
},
},
poolOfSchemas: schemasPool{
Pool: &sync.Pool{
New: func() any {
s := &spec.Schema{}
return s
},
},
},
poolOfResults: resultsPool{
Pool: &sync.Pool{
New: func() any {
s := &Result{}
return s
},
},
},
}
}
type (
allPools struct {
// memory pools for all validator objects.
//
// Each pool can be borrowed from and redeemed to.
poolOfSchemaValidators schemaValidatorsPool
poolOfObjectValidators objectValidatorsPool
poolOfSliceValidators sliceValidatorsPool
poolOfItemsValidators itemsValidatorsPool
poolOfBasicCommonValidators basicCommonValidatorsPool
poolOfHeaderValidators headerValidatorsPool
poolOfParamValidators paramValidatorsPool
poolOfBasicSliceValidators basicSliceValidatorsPool
poolOfNumberValidators numberValidatorsPool
poolOfStringValidators stringValidatorsPool
poolOfSchemaPropsValidators schemaPropsValidatorsPool
poolOfFormatValidators formatValidatorsPool
poolOfTypeValidators typeValidatorsPool
poolOfSchemas schemasPool
poolOfResults resultsPool
}
schemaValidatorsPool struct {
*sync.Pool
}
objectValidatorsPool struct {
*sync.Pool
}
sliceValidatorsPool struct {
*sync.Pool
}
itemsValidatorsPool struct {
*sync.Pool
}
basicCommonValidatorsPool struct {
*sync.Pool
}
headerValidatorsPool struct {
*sync.Pool
}
paramValidatorsPool struct {
*sync.Pool
}
basicSliceValidatorsPool struct {
*sync.Pool
}
numberValidatorsPool struct {
*sync.Pool
}
stringValidatorsPool struct {
*sync.Pool
}
schemaPropsValidatorsPool struct {
*sync.Pool
}
formatValidatorsPool struct {
*sync.Pool
}
typeValidatorsPool struct {
*sync.Pool
}
schemasPool struct {
*sync.Pool
}
resultsPool struct {
*sync.Pool
}
)
func (p schemaValidatorsPool) BorrowValidator() *SchemaValidator {
return p.Get().(*SchemaValidator)
}
func (p schemaValidatorsPool) RedeemValidator(s *SchemaValidator) {
// NOTE: s might be nil. In that case, Put is a noop.
p.Put(s)
}
func (p objectValidatorsPool) BorrowValidator() *objectValidator {
return p.Get().(*objectValidator)
}
func (p objectValidatorsPool) RedeemValidator(s *objectValidator) {
p.Put(s)
}
func (p sliceValidatorsPool) BorrowValidator() *schemaSliceValidator {
return p.Get().(*schemaSliceValidator)
}
func (p sliceValidatorsPool) RedeemValidator(s *schemaSliceValidator) {
p.Put(s)
}
func (p itemsValidatorsPool) BorrowValidator() *itemsValidator {
return p.Get().(*itemsValidator)
}
func (p itemsValidatorsPool) RedeemValidator(s *itemsValidator) {
p.Put(s)
}
func (p basicCommonValidatorsPool) BorrowValidator() *basicCommonValidator {
return p.Get().(*basicCommonValidator)
}
func (p basicCommonValidatorsPool) RedeemValidator(s *basicCommonValidator) {
p.Put(s)
}
func (p headerValidatorsPool) BorrowValidator() *HeaderValidator {
return p.Get().(*HeaderValidator)
}
func (p headerValidatorsPool) RedeemValidator(s *HeaderValidator) {
p.Put(s)
}
func (p paramValidatorsPool) BorrowValidator() *ParamValidator {
return p.Get().(*ParamValidator)
}
func (p paramValidatorsPool) RedeemValidator(s *ParamValidator) {
p.Put(s)
}
func (p basicSliceValidatorsPool) BorrowValidator() *basicSliceValidator {
return p.Get().(*basicSliceValidator)
}
func (p basicSliceValidatorsPool) RedeemValidator(s *basicSliceValidator) {
p.Put(s)
}
func (p numberValidatorsPool) BorrowValidator() *numberValidator {
return p.Get().(*numberValidator)
}
func (p numberValidatorsPool) RedeemValidator(s *numberValidator) {
p.Put(s)
}
func (p stringValidatorsPool) BorrowValidator() *stringValidator {
return p.Get().(*stringValidator)
}
func (p stringValidatorsPool) RedeemValidator(s *stringValidator) {
p.Put(s)
}
func (p schemaPropsValidatorsPool) BorrowValidator() *schemaPropsValidator {
return p.Get().(*schemaPropsValidator)
}
func (p schemaPropsValidatorsPool) RedeemValidator(s *schemaPropsValidator) {
p.Put(s)
}
func (p formatValidatorsPool) BorrowValidator() *formatValidator {
return p.Get().(*formatValidator)
}
func (p formatValidatorsPool) RedeemValidator(s *formatValidator) {
p.Put(s)
}
func (p typeValidatorsPool) BorrowValidator() *typeValidator {
return p.Get().(*typeValidator)
}
func (p typeValidatorsPool) RedeemValidator(s *typeValidator) {
p.Put(s)
}
func (p schemasPool) BorrowSchema() *spec.Schema {
return p.Get().(*spec.Schema)
}
func (p schemasPool) RedeemSchema(s *spec.Schema) {
p.Put(s)
}
func (p resultsPool) BorrowResult() *Result {
return p.Get().(*Result).cleared()
}
func (p resultsPool) RedeemResult(s *Result) {
if s == emptyResult {
return
}
p.Put(s)
}
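
The pools above follow a simple borrow/redeem discipline around sync.Pool. An illustrative, self-contained sketch of that pattern (not the package's internal API, which is unexported):

```go
package main

import (
	"fmt"
	"sync"
)

// result stands in for the validator/Result objects being recycled.
type result struct{ errors []error }

// cleared resets the object before it is handed out again.
func (r *result) cleared() *result { r.errors = r.errors[:0]; return r }

type resultsPool struct{ *sync.Pool }

func (p resultsPool) Borrow() *result  { return p.Get().(*result).cleared() }
func (p resultsPool) Redeem(r *result) { p.Put(r) }

func main() {
	pool := resultsPool{Pool: &sync.Pool{New: func() any { return &result{} }}}

	r := pool.Borrow() // reuses a previously redeemed object when one is available
	r.errors = append(r.errors, fmt.Errorf("boom"))
	fmt.Println(len(r.errors)) // 1

	pool.Redeem(r) // hand it back once the caller no longer references it
}
```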

vendor/github.com/go-openapi/validate/pools_debug.go (new vendored file)

File diff suppressed because it is too large.


@ -15,7 +15,7 @@
package validate
import (
"fmt"
stderrors "errors"
"reflect"
"strings"
@ -23,6 +23,8 @@ import (
"github.com/go-openapi/spec"
)
var emptyResult = &Result{MatchCount: 1}
// Result represents a validation result set, composed of
// errors and warnings.
//
@ -50,8 +52,10 @@ type Result struct {
// Schemata for slice items
itemSchemata []itemSchemata
cachedFieldSchemta map[FieldKey][]*spec.Schema
cachedItemSchemata map[ItemKey][]*spec.Schema
cachedFieldSchemata map[FieldKey][]*spec.Schema
cachedItemSchemata map[ItemKey][]*spec.Schema
wantsRedeemOnMerge bool
}
// FieldKey is a pair of an object and a field, usable as a key for a map.
@ -116,6 +120,9 @@ func (r *Result) Merge(others ...*Result) *Result {
}
r.mergeWithoutRootSchemata(other)
r.rootObjectSchemata.Append(other.rootObjectSchemata)
if other.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(other)
}
}
return r
}
@ -132,10 +139,9 @@ func (r *Result) RootObjectSchemata() []*spec.Schema {
}
// FieldSchemata returns the schemata which apply to fields in objects.
// nolint: dupl
func (r *Result) FieldSchemata() map[FieldKey][]*spec.Schema {
if r.cachedFieldSchemta != nil {
return r.cachedFieldSchemta
if r.cachedFieldSchemata != nil {
return r.cachedFieldSchemata
}
ret := make(map[FieldKey][]*spec.Schema, len(r.fieldSchemata))
@ -147,12 +153,12 @@ func (r *Result) FieldSchemata() map[FieldKey][]*spec.Schema {
ret[key] = append(ret[key], fs.schemata.multiple...)
}
}
r.cachedFieldSchemta = ret
r.cachedFieldSchemata = ret
return ret
}
// ItemSchemata returns the schemata which apply to items in slices.
// nolint: dupl
func (r *Result) ItemSchemata() map[ItemKey][]*spec.Schema {
if r.cachedItemSchemata != nil {
return r.cachedItemSchemata
@ -172,12 +178,13 @@ func (r *Result) ItemSchemata() map[ItemKey][]*spec.Schema {
}
func (r *Result) resetCaches() {
r.cachedFieldSchemta = nil
r.cachedFieldSchemata = nil
r.cachedItemSchemata = nil
}
// mergeForField merges other into r, assigning other's root schemata to the given Object and field name.
// nolint: unparam
//
//nolint:unparam
func (r *Result) mergeForField(obj map[string]interface{}, field string, other *Result) *Result {
if other == nil {
return r
@ -188,18 +195,23 @@ func (r *Result) mergeForField(obj map[string]interface{}, field string, other *
if r.fieldSchemata == nil {
r.fieldSchemata = make([]fieldSchemata, len(obj))
}
// clone other schemata, as other is about to be redeemed to the pool
r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{
obj: obj,
field: field,
schemata: other.rootObjectSchemata,
schemata: other.rootObjectSchemata.Clone(),
})
}
if other.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(other)
}
return r
}
// mergeForSlice merges other into r, assigning other's root schemata to the given slice and index.
// nolint: unparam
//
//nolint:unparam
func (r *Result) mergeForSlice(slice reflect.Value, i int, other *Result) *Result {
if other == nil {
return r
@ -210,29 +222,38 @@ func (r *Result) mergeForSlice(slice reflect.Value, i int, other *Result) *Resul
if r.itemSchemata == nil {
r.itemSchemata = make([]itemSchemata, slice.Len())
}
// clone other schemata, as other is about to be redeemed to the pool
r.itemSchemata = append(r.itemSchemata, itemSchemata{
slice: slice,
index: i,
schemata: other.rootObjectSchemata,
schemata: other.rootObjectSchemata.Clone(),
})
}
if other.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(other)
}
return r
}
// addRootObjectSchemata adds the given schemata for the root object of the result.
// The slice schemata might be reused. I.e. do not modify it after being added to a result.
//
// Since the slice schemata might be reused, it is shallow-cloned before saving it into the result.
func (r *Result) addRootObjectSchemata(s *spec.Schema) {
r.rootObjectSchemata.Append(schemata{one: s})
clone := *s
r.rootObjectSchemata.Append(schemata{one: &clone})
}
// addPropertySchemata adds the given schemata for the object and field.
// The slice schemata might be reused. I.e. do not modify it after being added to a result.
//
// Since the slice schemata might be reused, it is shallow-cloned before saving it into the result.
func (r *Result) addPropertySchemata(obj map[string]interface{}, fld string, schema *spec.Schema) {
if r.fieldSchemata == nil {
r.fieldSchemata = make([]fieldSchemata, 0, len(obj))
}
r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{obj: obj, field: fld, schemata: schemata{one: schema}})
clone := *schema
r.fieldSchemata = append(r.fieldSchemata, fieldSchemata{obj: obj, field: fld, schemata: schemata{one: &clone}})
}
/*
@ -255,17 +276,21 @@ func (r *Result) mergeWithoutRootSchemata(other *Result) {
if other.fieldSchemata != nil {
if r.fieldSchemata == nil {
r.fieldSchemata = other.fieldSchemata
} else {
r.fieldSchemata = append(r.fieldSchemata, other.fieldSchemata...)
r.fieldSchemata = make([]fieldSchemata, 0, len(other.fieldSchemata))
}
for _, field := range other.fieldSchemata {
field.schemata = field.schemata.Clone()
r.fieldSchemata = append(r.fieldSchemata, field)
}
}
if other.itemSchemata != nil {
if r.itemSchemata == nil {
r.itemSchemata = other.itemSchemata
} else {
r.itemSchemata = append(r.itemSchemata, other.itemSchemata...)
r.itemSchemata = make([]itemSchemata, 0, len(other.itemSchemata))
}
for _, field := range other.itemSchemata {
field.schemata = field.schemata.Clone()
r.itemSchemata = append(r.itemSchemata, field)
}
}
}
@ -280,6 +305,9 @@ func (r *Result) MergeAsErrors(others ...*Result) *Result {
r.AddErrors(other.Errors...)
r.AddErrors(other.Warnings...)
r.MatchCount += other.MatchCount
if other.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(other)
}
}
}
return r
@ -295,6 +323,9 @@ func (r *Result) MergeAsWarnings(others ...*Result) *Result {
r.AddWarnings(other.Errors...)
r.AddWarnings(other.Warnings...)
r.MatchCount += other.MatchCount
if other.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(other)
}
}
}
return r
@ -356,16 +387,21 @@ func (r *Result) keepRelevantErrors() *Result {
strippedErrors := []error{}
for _, e := range r.Errors {
if strings.HasPrefix(e.Error(), "IMPORTANT!") {
strippedErrors = append(strippedErrors, fmt.Errorf(strings.TrimPrefix(e.Error(), "IMPORTANT!")))
strippedErrors = append(strippedErrors, stderrors.New(strings.TrimPrefix(e.Error(), "IMPORTANT!")))
}
}
strippedWarnings := []error{}
for _, e := range r.Warnings {
if strings.HasPrefix(e.Error(), "IMPORTANT!") {
strippedWarnings = append(strippedWarnings, fmt.Errorf(strings.TrimPrefix(e.Error(), "IMPORTANT!")))
strippedWarnings = append(strippedWarnings, stderrors.New(strings.TrimPrefix(e.Error(), "IMPORTANT!")))
}
}
strippedResult := new(Result)
var strippedResult *Result
if r.wantsRedeemOnMerge {
strippedResult = pools.poolOfResults.BorrowResult()
} else {
strippedResult = new(Result)
}
strippedResult.Errors = strippedErrors
strippedResult.Warnings = strippedWarnings
return strippedResult
@ -427,6 +463,27 @@ func (r *Result) AsError() error {
return errors.CompositeValidationError(r.Errors...)
}
func (r *Result) cleared() *Result {
// clear the Result to be reusable. Keep allocated capacity.
r.Errors = r.Errors[:0]
r.Warnings = r.Warnings[:0]
r.MatchCount = 0
r.data = nil
r.rootObjectSchemata.one = nil
r.rootObjectSchemata.multiple = r.rootObjectSchemata.multiple[:0]
r.fieldSchemata = r.fieldSchemata[:0]
r.itemSchemata = r.itemSchemata[:0]
for k := range r.cachedFieldSchemata {
delete(r.cachedFieldSchemata, k)
}
for k := range r.cachedItemSchemata {
delete(r.cachedItemSchemata, k)
}
r.wantsRedeemOnMerge = true // mark this result as eligible for redeem when merged into another
return r
}
// schemata is an arbitrary number of schemata. It does a distinction between zero,
// one and many schemata to avoid slice allocations.
type schemata struct {
@ -453,7 +510,7 @@ func (s *schemata) Slice() []*spec.Schema {
return s.multiple
}
// appendSchemata appends the schemata in other to s. It mutated s in-place.
// appendSchemata appends the schemata in other to s. It mutates s in-place.
func (s *schemata) Append(other schemata) {
if other.one == nil && len(other.multiple) == 0 {
return
@ -484,3 +541,23 @@ func (s *schemata) Append(other schemata) {
}
}
}
func (s schemata) Clone() schemata {
var clone schemata
if s.one != nil {
clone.one = new(spec.Schema)
*clone.one = *s.one
}
if len(s.multiple) > 0 {
clone.multiple = make([]*spec.Schema, len(s.multiple))
for idx := 0; idx < len(s.multiple); idx++ {
sp := new(spec.Schema)
*sp = *s.multiple[idx]
clone.multiple[idx] = sp
}
}
return clone
}
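
Result is part of the public API, and Merge is the aggregation point the recycling logic hooks into. A small usage sketch; plain `new(Result)` values are never redeemed to the pool, since only internally borrowed results set `wantsRedeemOnMerge`:

```go
package main

import (
	"errors"
	"fmt"

	"github.com/go-openapi/validate"
)

func main() {
	a := new(validate.Result)
	a.AddErrors(errors.New("first problem"))

	b := new(validate.Result)
	b.AddWarnings(errors.New("just a warning"))

	// Merge folds b's errors, warnings and match count into a.
	a.Merge(b)

	fmt.Println(a.IsValid())     // false: a still carries an error
	fmt.Println(len(a.Warnings)) // 1: the warning was merged in
}
```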

View File

@ -24,32 +24,32 @@ import (
"github.com/go-openapi/swag"
)
var (
specSchemaType = reflect.TypeOf(&spec.Schema{})
specParameterType = reflect.TypeOf(&spec.Parameter{})
specHeaderType = reflect.TypeOf(&spec.Header{})
// specItemsType = reflect.TypeOf(&spec.Items{})
)
// SchemaValidator validates data against a JSON schema
type SchemaValidator struct {
Path string
in string
Schema *spec.Schema
validators []valueValidator
validators [8]valueValidator
Root interface{}
KnownFormats strfmt.Registry
Options SchemaValidatorOptions
Options *SchemaValidatorOptions
}
// AgainstSchema validates the specified data against the provided schema, using a registry of supported formats.
//
// When no pre-parsed *spec.Schema structure is provided, it uses a JSON schema as default. See example.
func AgainstSchema(schema *spec.Schema, data interface{}, formats strfmt.Registry, options ...Option) error {
res := NewSchemaValidator(schema, nil, "", formats, options...).Validate(data)
res := NewSchemaValidator(schema, nil, "", formats,
append(options, WithRecycleValidators(true), withRecycleResults(true))...,
).Validate(data)
defer func() {
pools.poolOfResults.RedeemResult(res)
}()
if res.HasErrors() {
return errors.CompositeValidationError(res.Errors...)
}
return nil
}
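
A usage sketch of AgainstSchema, which now borrows its validators and result from the pools and redeems them before returning; the JSON schema below is illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	raw := []byte(`{
		"type": "object",
		"required": ["name"],
		"properties": {"name": {"type": "string", "minLength": 1}}
	}`)

	schema := new(spec.Schema)
	if err := json.Unmarshal(raw, schema); err != nil {
		log.Fatal(err)
	}

	data := map[string]interface{}{"name": ""}
	if err := validate.AgainstSchema(schema, data, strfmt.Default); err != nil {
		fmt.Println("validation failed:", err) // minLength violation on name
	}
}
```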
@ -57,6 +57,15 @@ func AgainstSchema(schema *spec.Schema, data interface{}, formats strfmt.Registr
//
// Panics if the provided schema is invalid.
func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, options ...Option) *SchemaValidator {
opts := new(SchemaValidatorOptions)
for _, o := range options {
o(opts)
}
return newSchemaValidator(schema, rootSchema, root, formats, opts)
}
func newSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string, formats strfmt.Registry, opts *SchemaValidatorOptions) *SchemaValidator {
if schema == nil {
return nil
}
@ -72,17 +81,26 @@ func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string
panic(msg)
}
}
s := SchemaValidator{
Path: root,
in: "body",
Schema: schema,
Root: rootSchema,
KnownFormats: formats,
Options: SchemaValidatorOptions{}}
for _, o := range options {
o(&s.Options)
if opts == nil {
opts = new(SchemaValidatorOptions)
}
s.validators = []valueValidator{
var s *SchemaValidator
if opts.recycleValidators {
s = pools.poolOfSchemaValidators.BorrowValidator()
} else {
s = new(SchemaValidator)
}
s.Path = root
s.in = "body"
s.Schema = schema
s.Root = rootSchema
s.Options = opts
s.KnownFormats = formats
s.validators = [8]valueValidator{
s.typeValidator(),
s.schemaPropsValidator(),
s.stringValidator(),
@ -92,7 +110,8 @@ func NewSchemaValidator(schema *spec.Schema, rootSchema interface{}, root string
s.commonValidator(),
s.objectValidator(),
}
return &s
return s
}
// SetPath sets the path for this schema validator
@ -101,24 +120,46 @@ func (s *SchemaValidator) SetPath(path string) {
}
// Applies returns true when this schema validator applies
func (s *SchemaValidator) Applies(source interface{}, kind reflect.Kind) bool {
func (s *SchemaValidator) Applies(source interface{}, _ reflect.Kind) bool {
_, ok := source.(*spec.Schema)
return ok
}
// Validate validates the data against the schema
func (s *SchemaValidator) Validate(data interface{}) *Result {
result := &Result{data: data}
if s == nil {
return result
return emptyResult
}
if s.Schema != nil {
if s.Options.recycleValidators {
defer func() {
s.redeemChildren()
s.redeem() // one-time use validator
}()
}
var result *Result
if s.Options.recycleResult {
result = pools.poolOfResults.BorrowResult()
result.data = data
} else {
result = &Result{data: data}
}
if s.Schema != nil && !s.Options.skipSchemataResult {
result.addRootObjectSchemata(s.Schema)
}
if data == nil {
// early exit with minimal validation
result.Merge(s.validators[0].Validate(data)) // type validator
result.Merge(s.validators[6].Validate(data)) // common validator
if s.Options.recycleValidators {
s.validators[0] = nil
s.validators[6] = nil
}
return result
}
@ -147,6 +188,7 @@ func (s *SchemaValidator) Validate(data interface{}) *Result {
if erri != nil {
result.AddErrors(invalidTypeConversionMsg(s.Path, erri))
result.Inc()
return result
}
d = in
@ -155,6 +197,7 @@ func (s *SchemaValidator) Validate(data interface{}) *Result {
if errf != nil {
result.AddErrors(invalidTypeConversionMsg(s.Path, errf))
result.Inc()
return result
}
d = nf
@ -164,14 +207,26 @@ func (s *SchemaValidator) Validate(data interface{}) *Result {
kind = tpe.Kind()
}
for _, v := range s.validators {
for idx, v := range s.validators {
if !v.Applies(s.Schema, kind) {
debugLog("%T does not apply for %v", v, kind)
if s.Options.recycleValidators {
// Validate won't be called, so relinquish this validator
if redeemableChildren, ok := v.(interface{ redeemChildren() }); ok {
redeemableChildren.redeemChildren()
}
if redeemable, ok := v.(interface{ redeem() }); ok {
redeemable.redeem()
}
s.validators[idx] = nil // prevents further (unsafe) usage
}
continue
}
err := v.Validate(d)
result.Merge(err)
result.Merge(v.Validate(d))
if s.Options.recycleValidators {
s.validators[idx] = nil // prevents further (unsafe) usage
}
result.Inc()
}
result.Inc()
@ -180,81 +235,120 @@ func (s *SchemaValidator) Validate(data interface{}) *Result {
}
func (s *SchemaValidator) typeValidator() valueValidator {
return &typeValidator{Type: s.Schema.Type, Nullable: s.Schema.Nullable, Format: s.Schema.Format, In: s.in, Path: s.Path}
return newTypeValidator(
s.Path,
s.in,
s.Schema.Type,
s.Schema.Nullable,
s.Schema.Format,
s.Options,
)
}
func (s *SchemaValidator) commonValidator() valueValidator {
return &basicCommonValidator{
Path: s.Path,
In: s.in,
Enum: s.Schema.Enum,
}
return newBasicCommonValidator(
s.Path,
s.in,
s.Schema.Default,
s.Schema.Enum,
s.Options,
)
}
func (s *SchemaValidator) sliceValidator() valueValidator {
return &schemaSliceValidator{
Path: s.Path,
In: s.in,
MaxItems: s.Schema.MaxItems,
MinItems: s.Schema.MinItems,
UniqueItems: s.Schema.UniqueItems,
AdditionalItems: s.Schema.AdditionalItems,
Items: s.Schema.Items,
Root: s.Root,
KnownFormats: s.KnownFormats,
Options: s.Options,
}
return newSliceValidator(
s.Path,
s.in,
s.Schema.MaxItems,
s.Schema.MinItems,
s.Schema.UniqueItems,
s.Schema.AdditionalItems,
s.Schema.Items,
s.Root,
s.KnownFormats,
s.Options,
)
}
func (s *SchemaValidator) numberValidator() valueValidator {
return &numberValidator{
Path: s.Path,
In: s.in,
Default: s.Schema.Default,
MultipleOf: s.Schema.MultipleOf,
Maximum: s.Schema.Maximum,
ExclusiveMaximum: s.Schema.ExclusiveMaximum,
Minimum: s.Schema.Minimum,
ExclusiveMinimum: s.Schema.ExclusiveMinimum,
}
return newNumberValidator(
s.Path,
s.in,
s.Schema.Default,
s.Schema.MultipleOf,
s.Schema.Maximum,
s.Schema.ExclusiveMaximum,
s.Schema.Minimum,
s.Schema.ExclusiveMinimum,
"",
"",
s.Options,
)
}
func (s *SchemaValidator) stringValidator() valueValidator {
return &stringValidator{
Path: s.Path,
In: s.in,
MaxLength: s.Schema.MaxLength,
MinLength: s.Schema.MinLength,
Pattern: s.Schema.Pattern,
}
return newStringValidator(
s.Path,
s.in,
nil,
false,
false,
s.Schema.MaxLength,
s.Schema.MinLength,
s.Schema.Pattern,
s.Options,
)
}
func (s *SchemaValidator) formatValidator() valueValidator {
return &formatValidator{
Path: s.Path,
In: s.in,
Format: s.Schema.Format,
KnownFormats: s.KnownFormats,
}
return newFormatValidator(
s.Path,
s.in,
s.Schema.Format,
s.KnownFormats,
s.Options,
)
}
func (s *SchemaValidator) schemaPropsValidator() valueValidator {
sch := s.Schema
return newSchemaPropsValidator(s.Path, s.in, sch.AllOf, sch.OneOf, sch.AnyOf, sch.Not, sch.Dependencies, s.Root, s.KnownFormats, s.Options.Options()...)
return newSchemaPropsValidator(
s.Path, s.in, sch.AllOf, sch.OneOf, sch.AnyOf, sch.Not, sch.Dependencies, s.Root, s.KnownFormats,
s.Options,
)
}
func (s *SchemaValidator) objectValidator() valueValidator {
return &objectValidator{
Path: s.Path,
In: s.in,
MaxProperties: s.Schema.MaxProperties,
MinProperties: s.Schema.MinProperties,
Required: s.Schema.Required,
Properties: s.Schema.Properties,
AdditionalProperties: s.Schema.AdditionalProperties,
PatternProperties: s.Schema.PatternProperties,
Root: s.Root,
KnownFormats: s.KnownFormats,
Options: s.Options,
return newObjectValidator(
s.Path,
s.in,
s.Schema.MaxProperties,
s.Schema.MinProperties,
s.Schema.Required,
s.Schema.Properties,
s.Schema.AdditionalProperties,
s.Schema.PatternProperties,
s.Root,
s.KnownFormats,
s.Options,
)
}
func (s *SchemaValidator) redeem() {
pools.poolOfSchemaValidators.RedeemValidator(s)
}
func (s *SchemaValidator) redeemChildren() {
for i, validator := range s.validators {
if validator == nil {
continue
}
if redeemableChildren, ok := validator.(interface{ redeemChildren() }); ok {
redeemableChildren.redeemChildren()
}
if redeemable, ok := validator.(interface{ redeem() }); ok {
redeemable.redeem()
}
s.validators[i] = nil // free up allocated children if not in pool
}
}

View File

@ -18,6 +18,9 @@ package validate
type SchemaValidatorOptions struct {
EnableObjectArrayTypeCheck bool
EnableArrayMustHaveItemsCheck bool
recycleValidators bool
recycleResult bool
skipSchemataResult bool
}
// Option sets optional rules for schema validation
@ -45,10 +48,36 @@ func SwaggerSchema(enable bool) Option {
}
}
// Options returns current options
// WithRecycleValidators saves memory allocations and makes validators
// available for a single use of Validate() only.
//
// When a validator is recycled, callers MUST NOT call the Validate() method twice.
func WithRecycleValidators(enable bool) Option {
return func(svo *SchemaValidatorOptions) {
svo.recycleValidators = enable
}
}
func withRecycleResults(enable bool) Option {
return func(svo *SchemaValidatorOptions) {
svo.recycleResult = enable
}
}
// WithSkipSchemataResult skips the deep audit payload stored in validation Result
func WithSkipSchemataResult(enable bool) Option {
return func(svo *SchemaValidatorOptions) {
svo.skipSchemataResult = enable
}
}
// Options returns the current set of options
func (svo SchemaValidatorOptions) Options() []Option {
return []Option{
EnableObjectArrayTypeCheck(svo.EnableObjectArrayTypeCheck),
EnableArrayMustHaveItemsCheck(svo.EnableArrayMustHaveItemsCheck),
WithRecycleValidators(svo.recycleValidators),
withRecycleResults(svo.recycleResult),
WithSkipSchemataResult(svo.skipSchemataResult),
}
}
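
A sketch of passing these options to NewSchemaValidator; the schema and data are placeholders. Note that a validator built with WithRecycleValidators(true) becomes single-use, since its children are redeemed to the pools after Validate:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	schema := new(spec.Schema)
	if err := json.Unmarshal([]byte(`{"type": "string", "format": "date-time"}`), schema); err != nil {
		log.Fatal(err)
	}

	v := validate.NewSchemaValidator(schema, nil, "", strfmt.Default,
		validate.SwaggerSchema(true),          // validate in a swagger 2.0 context
		validate.WithSkipSchemataResult(true), // skip the deep schemata audit trail in the Result
		// validate.WithRecycleValidators(true) would make v single-use:
		// its internal validators are returned to the pool after Validate.
	)

	res := v.Validate("not-a-date")
	fmt.Println(res.IsValid()) // false: not a valid date-time
}
```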

View File

@ -30,211 +30,327 @@ type schemaPropsValidator struct {
AnyOf []spec.Schema
Not *spec.Schema
Dependencies spec.Dependencies
anyOfValidators []SchemaValidator
allOfValidators []SchemaValidator
oneOfValidators []SchemaValidator
anyOfValidators []*SchemaValidator
allOfValidators []*SchemaValidator
oneOfValidators []*SchemaValidator
notValidator *SchemaValidator
Root interface{}
KnownFormats strfmt.Registry
Options SchemaValidatorOptions
Options *SchemaValidatorOptions
}
func (s *schemaPropsValidator) SetPath(path string) {
s.Path = path
}
func newSchemaPropsValidator(path string, in string, allOf, oneOf, anyOf []spec.Schema, not *spec.Schema, deps spec.Dependencies, root interface{}, formats strfmt.Registry, options ...Option) *schemaPropsValidator {
anyValidators := make([]SchemaValidator, 0, len(anyOf))
for _, v := range anyOf {
v := v
anyValidators = append(anyValidators, *NewSchemaValidator(&v, root, path, formats, options...))
func newSchemaPropsValidator(
path string, in string, allOf, oneOf, anyOf []spec.Schema, not *spec.Schema, deps spec.Dependencies, root interface{}, formats strfmt.Registry,
opts *SchemaValidatorOptions) *schemaPropsValidator {
if opts == nil {
opts = new(SchemaValidatorOptions)
}
allValidators := make([]SchemaValidator, 0, len(allOf))
for _, v := range allOf {
v := v
allValidators = append(allValidators, *NewSchemaValidator(&v, root, path, formats, options...))
anyValidators := make([]*SchemaValidator, 0, len(anyOf))
for i := range anyOf {
anyValidators = append(anyValidators, newSchemaValidator(&anyOf[i], root, path, formats, opts))
}
oneValidators := make([]SchemaValidator, 0, len(oneOf))
for _, v := range oneOf {
v := v
oneValidators = append(oneValidators, *NewSchemaValidator(&v, root, path, formats, options...))
allValidators := make([]*SchemaValidator, 0, len(allOf))
for i := range allOf {
allValidators = append(allValidators, newSchemaValidator(&allOf[i], root, path, formats, opts))
}
oneValidators := make([]*SchemaValidator, 0, len(oneOf))
for i := range oneOf {
oneValidators = append(oneValidators, newSchemaValidator(&oneOf[i], root, path, formats, opts))
}
var notValidator *SchemaValidator
if not != nil {
notValidator = NewSchemaValidator(not, root, path, formats, options...)
notValidator = newSchemaValidator(not, root, path, formats, opts)
}
schOptions := &SchemaValidatorOptions{}
for _, o := range options {
o(schOptions)
}
return &schemaPropsValidator{
Path: path,
In: in,
AllOf: allOf,
OneOf: oneOf,
AnyOf: anyOf,
Not: not,
Dependencies: deps,
anyOfValidators: anyValidators,
allOfValidators: allValidators,
oneOfValidators: oneValidators,
notValidator: notValidator,
Root: root,
KnownFormats: formats,
Options: *schOptions,
var s *schemaPropsValidator
if opts.recycleValidators {
s = pools.poolOfSchemaPropsValidators.BorrowValidator()
} else {
s = new(schemaPropsValidator)
}
s.Path = path
s.In = in
s.AllOf = allOf
s.OneOf = oneOf
s.AnyOf = anyOf
s.Not = not
s.Dependencies = deps
s.anyOfValidators = anyValidators
s.allOfValidators = allValidators
s.oneOfValidators = oneValidators
s.notValidator = notValidator
s.Root = root
s.KnownFormats = formats
s.Options = opts
return s
}
func (s *schemaPropsValidator) Applies(source interface{}, kind reflect.Kind) bool {
r := reflect.TypeOf(source) == specSchemaType
debugLog("schema props validator for %q applies %t for %T (kind: %v)\n", s.Path, r, source, kind)
return r
func (s *schemaPropsValidator) Applies(source interface{}, _ reflect.Kind) bool {
_, isSchema := source.(*spec.Schema)
return isSchema
}
func (s *schemaPropsValidator) Validate(data interface{}) *Result {
mainResult := new(Result)
var mainResult *Result
if s.Options.recycleResult {
mainResult = pools.poolOfResults.BorrowResult()
} else {
mainResult = new(Result)
}
// Intermediary error results
// IMPORTANT! messages from underlying validators
keepResultAnyOf := new(Result)
keepResultOneOf := new(Result)
keepResultAllOf := new(Result)
var keepResultAnyOf, keepResultOneOf, keepResultAllOf *Result
if s.Options.recycleValidators {
defer func() {
s.redeemChildren()
s.redeem()
// results are redeemed when merged
}()
}
// Validates at least one in anyOf schemas
var firstSuccess *Result
if len(s.anyOfValidators) > 0 {
var bestFailures *Result
succeededOnce := false
for _, anyOfSchema := range s.anyOfValidators {
result := anyOfSchema.Validate(data)
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
keepResultAnyOf.Merge(result.keepRelevantErrors())
if result.IsValid() {
bestFailures = nil
succeededOnce = true
if firstSuccess == nil {
firstSuccess = result
}
keepResultAnyOf = new(Result)
break
}
// MatchCount is used to select errors from the schema with most positive checks
if bestFailures == nil || result.MatchCount > bestFailures.MatchCount {
bestFailures = result
}
}
if !succeededOnce {
mainResult.AddErrors(mustValidateAtLeastOneSchemaMsg(s.Path))
}
if bestFailures != nil {
mainResult.Merge(bestFailures)
} else if firstSuccess != nil {
mainResult.Merge(firstSuccess)
}
keepResultAnyOf = pools.poolOfResults.BorrowResult()
s.validateAnyOf(data, mainResult, keepResultAnyOf)
}
// Validates exactly one in oneOf schemas
if len(s.oneOfValidators) > 0 {
var bestFailures *Result
var firstSuccess *Result
validated := 0
for _, oneOfSchema := range s.oneOfValidators {
result := oneOfSchema.Validate(data)
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
keepResultOneOf.Merge(result.keepRelevantErrors())
if result.IsValid() {
validated++
bestFailures = nil
if firstSuccess == nil {
firstSuccess = result
}
keepResultOneOf = new(Result)
continue
}
// MatchCount is used to select errors from the schema with most positive checks
if validated == 0 && (bestFailures == nil || result.MatchCount > bestFailures.MatchCount) {
bestFailures = result
}
}
if validated != 1 {
var additionalMsg string
if validated == 0 {
additionalMsg = "Found none valid"
} else {
additionalMsg = fmt.Sprintf("Found %d valid alternatives", validated)
}
mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, additionalMsg))
if bestFailures != nil {
mainResult.Merge(bestFailures)
}
} else if firstSuccess != nil {
mainResult.Merge(firstSuccess)
}
keepResultOneOf = pools.poolOfResults.BorrowResult()
s.validateOneOf(data, mainResult, keepResultOneOf)
}
// Validates all of allOf schemas
if len(s.allOfValidators) > 0 {
validated := 0
for _, allOfSchema := range s.allOfValidators {
result := allOfSchema.Validate(data)
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
keepResultAllOf.Merge(result.keepRelevantErrors())
// keepResultAllOf.Merge(result)
if result.IsValid() {
validated++
}
mainResult.Merge(result)
}
if validated != len(s.allOfValidators) {
additionalMsg := ""
if validated == 0 {
additionalMsg = ". None validated"
}
mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, additionalMsg))
}
keepResultAllOf = pools.poolOfResults.BorrowResult()
s.validateAllOf(data, mainResult, keepResultAllOf)
}
if s.notValidator != nil {
result := s.notValidator.Validate(data)
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
if result.IsValid() {
mainResult.AddErrors(mustNotValidatechemaMsg(s.Path))
}
s.validateNot(data, mainResult)
}
if s.Dependencies != nil && len(s.Dependencies) > 0 && reflect.TypeOf(data).Kind() == reflect.Map {
val := data.(map[string]interface{})
for key := range val {
if dep, ok := s.Dependencies[key]; ok {
if dep.Schema != nil {
mainResult.Merge(NewSchemaValidator(dep.Schema, s.Root, s.Path+"."+key, s.KnownFormats, s.Options.Options()...).Validate(data))
continue
}
if len(dep.Property) > 0 {
for _, depKey := range dep.Property {
if _, ok := val[depKey]; !ok {
mainResult.AddErrors(hasADependencyMsg(s.Path, depKey))
}
}
}
}
}
s.validateDependencies(data, mainResult)
}
mainResult.Inc()
// In the end we retain best failures for schema validation
// plus, if any, composite errors which may explain special cases (tagged as IMPORTANT!).
return mainResult.Merge(keepResultAllOf, keepResultOneOf, keepResultAnyOf)
}
func (s *schemaPropsValidator) validateAnyOf(data interface{}, mainResult, keepResultAnyOf *Result) {
// Validates at least one in anyOf schemas
var bestFailures *Result
for i, anyOfSchema := range s.anyOfValidators {
result := anyOfSchema.Validate(data)
if s.Options.recycleValidators {
s.anyOfValidators[i] = nil
}
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
keepResultAnyOf.Merge(result.keepRelevantErrors()) // merges (and redeems) a new instance of Result
if result.IsValid() {
if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(bestFailures)
}
_ = keepResultAnyOf.cleared()
mainResult.Merge(result)
return
}
// MatchCount is used to select errors from the schema with most positive checks
if bestFailures == nil || result.MatchCount > bestFailures.MatchCount {
if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(bestFailures)
}
bestFailures = result
continue
}
if result.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(result) // this result is ditched
}
}
mainResult.AddErrors(mustValidateAtLeastOneSchemaMsg(s.Path))
mainResult.Merge(bestFailures)
}
func (s *schemaPropsValidator) validateOneOf(data interface{}, mainResult, keepResultOneOf *Result) {
// Validates exactly one in oneOf schemas
var (
firstSuccess, bestFailures *Result
validated int
)
for i, oneOfSchema := range s.oneOfValidators {
result := oneOfSchema.Validate(data)
if s.Options.recycleValidators {
s.oneOfValidators[i] = nil
}
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
keepResultOneOf.Merge(result.keepRelevantErrors()) // merges (and redeems) a new instance of Result
if result.IsValid() {
validated++
_ = keepResultOneOf.cleared()
if firstSuccess == nil {
firstSuccess = result
} else if result.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(result) // this result is ditched
}
continue
}
// MatchCount is used to select errors from the schema with most positive checks
if validated == 0 && (bestFailures == nil || result.MatchCount > bestFailures.MatchCount) {
if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(bestFailures)
}
bestFailures = result
} else if result.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(result) // this result is ditched
}
}
switch validated {
case 0:
mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, "Found none valid"))
mainResult.Merge(bestFailures)
// firstSuccess is necessarily nil
case 1:
mainResult.Merge(firstSuccess)
if bestFailures != nil && bestFailures.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(bestFailures)
}
default:
mainResult.AddErrors(mustValidateOnlyOneSchemaMsg(s.Path, fmt.Sprintf("Found %d valid alternatives", validated)))
mainResult.Merge(bestFailures)
if firstSuccess != nil && firstSuccess.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(firstSuccess)
}
}
}
func (s *schemaPropsValidator) validateAllOf(data interface{}, mainResult, keepResultAllOf *Result) {
// Validates all of allOf schemas
var validated int
for i, allOfSchema := range s.allOfValidators {
result := allOfSchema.Validate(data)
if s.Options.recycleValidators {
s.allOfValidators[i] = nil
}
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
keepResultAllOf.Merge(result.keepRelevantErrors())
if result.IsValid() {
validated++
}
mainResult.Merge(result)
}
switch validated {
case 0:
mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, ". None validated"))
case len(s.allOfValidators):
default:
mainResult.AddErrors(mustValidateAllSchemasMsg(s.Path, ""))
}
}
func (s *schemaPropsValidator) validateNot(data interface{}, mainResult *Result) {
result := s.notValidator.Validate(data)
if s.Options.recycleValidators {
s.notValidator = nil
}
// We keep inner IMPORTANT! errors no matter what MatchCount tells us
if result.IsValid() {
mainResult.AddErrors(mustNotValidatechemaMsg(s.Path))
}
if result.wantsRedeemOnMerge {
pools.poolOfResults.RedeemResult(result) // this result is ditched
}
}
func (s *schemaPropsValidator) validateDependencies(data interface{}, mainResult *Result) {
val := data.(map[string]interface{})
for key := range val {
dep, ok := s.Dependencies[key]
if !ok {
continue
}
if dep.Schema != nil {
mainResult.Merge(
newSchemaValidator(dep.Schema, s.Root, s.Path+"."+key, s.KnownFormats, s.Options).Validate(data),
)
continue
}
if len(dep.Property) > 0 {
for _, depKey := range dep.Property {
if _, ok := val[depKey]; !ok {
mainResult.AddErrors(hasADependencyMsg(s.Path, depKey))
}
}
}
}
}
func (s *schemaPropsValidator) redeem() {
pools.poolOfSchemaPropsValidators.RedeemValidator(s)
}
func (s *schemaPropsValidator) redeemChildren() {
for _, v := range s.anyOfValidators {
if v == nil {
continue
}
v.redeemChildren()
v.redeem()
}
s.anyOfValidators = nil
for _, v := range s.allOfValidators {
if v == nil {
continue
}
v.redeemChildren()
v.redeem()
}
s.allOfValidators = nil
for _, v := range s.oneOfValidators {
if v == nil {
continue
}
v.redeemChildren()
v.redeem()
}
s.oneOfValidators = nil
if s.notValidator != nil {
s.notValidator.redeemChildren()
s.notValidator.redeem()
s.notValidator = nil
}
}
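
The validators above implement the composite keywords (anyOf, oneOf, allOf, not). A sketch of the oneOf semantics through the public entry point, with an illustrative schema:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	schema := new(spec.Schema)
	raw := []byte(`{
		"oneOf": [
			{"type": "integer"},
			{"type": "string", "maxLength": 3}
		]
	}`)
	if err := json.Unmarshal(raw, schema); err != nil {
		log.Fatal(err)
	}

	// Exactly one alternative matches: valid.
	fmt.Println(validate.AgainstSchema(schema, "abc", strfmt.Default) == nil) // true

	// No alternative matches: the best failure is reported.
	err := validate.AgainstSchema(schema, "abcd", strfmt.Default)
	fmt.Println(err != nil) // true
}
```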

View File

@ -32,7 +32,36 @@ type schemaSliceValidator struct {
Items *spec.SchemaOrArray
Root interface{}
KnownFormats strfmt.Registry
Options SchemaValidatorOptions
Options *SchemaValidatorOptions
}
func newSliceValidator(path, in string,
maxItems, minItems *int64, uniqueItems bool,
additionalItems *spec.SchemaOrBool, items *spec.SchemaOrArray,
root interface{}, formats strfmt.Registry, opts *SchemaValidatorOptions) *schemaSliceValidator {
if opts == nil {
opts = new(SchemaValidatorOptions)
}
var v *schemaSliceValidator
if opts.recycleValidators {
v = pools.poolOfSliceValidators.BorrowValidator()
} else {
v = new(schemaSliceValidator)
}
v.Path = path
v.In = in
v.MaxItems = maxItems
v.MinItems = minItems
v.UniqueItems = uniqueItems
v.AdditionalItems = additionalItems
v.Items = items
v.Root = root
v.KnownFormats = formats
v.Options = opts
return v
}
func (s *schemaSliceValidator) SetPath(path string) {
@ -46,7 +75,18 @@ func (s *schemaSliceValidator) Applies(source interface{}, kind reflect.Kind) bo
}
func (s *schemaSliceValidator) Validate(data interface{}) *Result {
result := new(Result)
if s.Options.recycleValidators {
defer func() {
s.redeem()
}()
}
var result *Result
if s.Options.recycleResult {
result = pools.poolOfResults.BorrowResult()
} else {
result = new(Result)
}
if data == nil {
return result
}
@ -54,8 +94,8 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result {
size := val.Len()
if s.Items != nil && s.Items.Schema != nil {
validator := NewSchemaValidator(s.Items.Schema, s.Root, s.Path, s.KnownFormats, s.Options.Options()...)
for i := 0; i < size; i++ {
validator := newSchemaValidator(s.Items.Schema, s.Root, s.Path, s.KnownFormats, s.Options)
validator.SetPath(fmt.Sprintf("%s.%d", s.Path, i))
value := val.Index(i)
result.mergeForSlice(val, i, validator.Validate(value.Interface()))
@ -66,10 +106,11 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result {
if s.Items != nil && len(s.Items.Schemas) > 0 {
itemsSize = len(s.Items.Schemas)
for i := 0; i < itemsSize; i++ {
validator := NewSchemaValidator(&s.Items.Schemas[i], s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options.Options()...)
if val.Len() <= i {
if size <= i {
break
}
validator := newSchemaValidator(&s.Items.Schemas[i], s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options)
result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface()))
}
}
@ -79,7 +120,7 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result {
}
if s.AdditionalItems.Schema != nil {
for i := itemsSize; i < size-itemsSize+1; i++ {
validator := NewSchemaValidator(s.AdditionalItems.Schema, s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options.Options()...)
validator := newSchemaValidator(s.AdditionalItems.Schema, s.Root, fmt.Sprintf("%s.%d", s.Path, i), s.KnownFormats, s.Options)
result.mergeForSlice(val, i, validator.Validate(val.Index(i).Interface()))
}
}
@ -103,3 +144,7 @@ func (s *schemaSliceValidator) Validate(data interface{}) *Result {
result.Inc()
return result
}
func (s *schemaSliceValidator) redeem() {
pools.poolOfSliceValidators.RedeemValidator(s)
}
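
A sketch of the slice constraints handled above (per-item schema, maxItems, uniqueItems), again through the public entry point and with an illustrative schema:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/go-openapi/spec"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	schema := new(spec.Schema)
	raw := []byte(`{
		"type": "array",
		"maxItems": 2,
		"uniqueItems": true,
		"items": {"type": "integer"}
	}`)
	if err := json.Unmarshal(raw, schema); err != nil {
		log.Fatal(err)
	}

	fmt.Println(validate.AgainstSchema(schema, []interface{}{1, 2}, strfmt.Default) == nil) // true

	// Three items, with a duplicate: violates maxItems and uniqueItems.
	err := validate.AgainstSchema(schema, []interface{}{1, 1, 3}, strfmt.Default)
	fmt.Println(err != nil) // true
}
```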

View File

@ -15,6 +15,8 @@
package validate
import (
"bytes"
"encoding/gob"
"encoding/json"
"fmt"
"sort"
@ -26,23 +28,23 @@ import (
"github.com/go-openapi/loads"
"github.com/go-openapi/spec"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// Spec validates an OpenAPI 2.0 specification document.
//
// Returns an error flattening in a single standard error, all validation messages.
//
// - TODO: $ref should not have siblings
// - TODO: make sure documentation reflects all checks and warnings
// - TODO: check on discriminators
// - TODO: explicit message on unsupported keywords (better than "forbidden property"...)
// - TODO: full list of unresolved refs
// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples
// - TODO: option to determine if we validate for go-swagger or in a more general context
// - TODO: check on required properties to support anyOf, allOf, oneOf
// - TODO: $ref should not have siblings
// - TODO: make sure documentation reflects all checks and warnings
// - TODO: check on discriminators
// - TODO: explicit message on unsupported keywords (better than "forbidden property"...)
// - TODO: full list of unresolved refs
// - TODO: validate numeric constraints (issue#581): this should be handled like defaults and examples
// - TODO: option to determine if we validate for go-swagger or in a more general context
// - TODO: check on required properties to support anyOf, allOf, oneOf
//
// NOTE: SecurityScopes are maps: no need to check uniqueness
//
func Spec(doc *loads.Document, formats strfmt.Registry) error {
errs, _ /*warns*/ := NewSpecValidator(doc.Schema(), formats).Validate(doc)
if errs.HasErrors() {
@ -53,25 +55,38 @@ func Spec(doc *loads.Document, formats strfmt.Registry) error {
// SpecValidator validates a swagger 2.0 spec
type SpecValidator struct {
schema *spec.Schema // swagger 2.0 schema
spec *loads.Document
analyzer *analysis.Spec
expanded *loads.Document
KnownFormats strfmt.Registry
Options Opts // validation options
schema *spec.Schema // swagger 2.0 schema
spec *loads.Document
analyzer *analysis.Spec
expanded *loads.Document
KnownFormats strfmt.Registry
Options Opts // validation options
schemaOptions *SchemaValidatorOptions
}
// NewSpecValidator creates a new swagger spec validator instance
func NewSpecValidator(schema *spec.Schema, formats strfmt.Registry) *SpecValidator {
// schema options that apply to all called validators
schemaOptions := new(SchemaValidatorOptions)
for _, o := range []Option{
SwaggerSchema(true),
WithRecycleValidators(true),
// withRecycleResults(true),
} {
o(schemaOptions)
}
return &SpecValidator{
schema: schema,
KnownFormats: formats,
Options: defaultOpts,
schema: schema,
KnownFormats: formats,
Options: defaultOpts,
schemaOptions: schemaOptions,
}
}
// Validate validates the swagger spec
func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
s.schemaOptions.skipSchemataResult = s.Options.SkipSchemataResult
var sd *loads.Document
errs, warnings := new(Result), new(Result)
@ -85,11 +100,8 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
s.spec = sd
s.analyzer = analysis.New(sd.Spec())
// Swagger schema validator
schv := NewSchemaValidator(s.schema, nil, "", s.KnownFormats, SwaggerSchema(true))
var obj interface{}
// Raw spec unmarshalling errors
var obj interface{}
if err := json.Unmarshal(sd.Raw(), &obj); err != nil {
// NOTE: under normal conditions, the *load.Document has been already unmarshalled
// So this one is just a paranoid check on the behavior of the spec package
@ -103,6 +115,8 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
warnings.AddErrors(errs.Warnings...)
}()
// Swagger schema validator
schv := newSchemaValidator(s.schema, nil, "", s.KnownFormats, s.schemaOptions)
errs.Merge(schv.Validate(obj)) // error -
// There may be a point in continuing to try and determine more accurate errors
if !s.Options.ContinueOnErrors && errs.HasErrors() {
@ -130,13 +144,13 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
}
// Values provided as default MUST validate their schema
df := &defaultValidator{SpecValidator: s}
df := &defaultValidator{SpecValidator: s, schemaOptions: s.schemaOptions}
errs.Merge(df.Validate())
// Values provided as examples MUST validate their schema
// Value provided as examples in a response without schema generate a warning
// Known limitations: examples in responses for mime type not application/json are ignored (warning)
ex := &exampleValidator{SpecValidator: s}
ex := &exampleValidator{SpecValidator: s, schemaOptions: s.schemaOptions}
errs.Merge(ex.Validate())
errs.Merge(s.validateNonEmptyPathParamNames())
@ -148,22 +162,27 @@ func (s *SpecValidator) Validate(data interface{}) (*Result, *Result) {
}
func (s *SpecValidator) validateNonEmptyPathParamNames() *Result {
res := new(Result)
res := pools.poolOfResults.BorrowResult()
if s.spec.Spec().Paths == nil {
// There is no Paths object: error
res.AddErrors(noValidPathMsg())
} else {
if s.spec.Spec().Paths.Paths == nil {
// Paths may be empty: warning
res.AddWarnings(noValidPathMsg())
} else {
for k := range s.spec.Spec().Paths.Paths {
if strings.Contains(k, "{}") {
res.AddErrors(emptyPathParameterMsg(k))
}
}
return res
}
if s.spec.Spec().Paths.Paths == nil {
// Paths may be empty: warning
res.AddWarnings(noValidPathMsg())
return res
}
for k := range s.spec.Spec().Paths.Paths {
if strings.Contains(k, "{}") {
res.AddErrors(emptyPathParameterMsg(k))
}
}
return res
}
@ -177,7 +196,7 @@ func (s *SpecValidator) validateDuplicateOperationIDs() *Result {
// fallback on possible incomplete picture because of previous errors
analyzer = s.analyzer
}
res := new(Result)
res := pools.poolOfResults.BorrowResult()
known := make(map[string]int)
for _, v := range analyzer.OperationIDs() {
if v != "" {
@ -199,7 +218,7 @@ type dupProp struct {
func (s *SpecValidator) validateDuplicatePropertyNames() *Result {
// definition can't declare a property that's already defined by one of its ancestors
res := new(Result)
res := pools.poolOfResults.BorrowResult()
for k, sch := range s.spec.Spec().Definitions {
if len(sch.AllOf) == 0 {
continue
@ -248,7 +267,7 @@ func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema,
schn := nm
schc := &sch
res := new(Result)
res := pools.poolOfResults.BorrowResult()
for schc.Ref.String() != "" {
// gather property names
@ -285,7 +304,7 @@ func (s *SpecValidator) validateSchemaPropertyNames(nm string, sch spec.Schema,
}
func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, knowns map[string]struct{}) ([]string, *Result) {
res := new(Result)
res := pools.poolOfResults.BorrowResult()
if sch.Ref.String() == "" && len(sch.AllOf) == 0 { // Safeguard. We should not be able to actually get there
return nil, res
@ -335,7 +354,7 @@ func (s *SpecValidator) validateCircularAncestry(nm string, sch spec.Schema, kno
func (s *SpecValidator) validateItems() *Result {
// validate parameter, items, schema and response objects for presence of item if type is array
res := new(Result)
res := pools.poolOfResults.BorrowResult()
for method, pi := range s.analyzer.Operations() {
for path, op := range pi {
@ -394,7 +413,7 @@ func (s *SpecValidator) validateItems() *Result {
// Verifies constraints on array type
func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID string) *Result {
res := new(Result)
res := pools.poolOfResults.BorrowResult()
if !schema.Type.Contains(arrayType) {
return res
}
@ -418,7 +437,7 @@ func (s *SpecValidator) validateSchemaItems(schema spec.Schema, prefix, opID str
func (s *SpecValidator) validatePathParamPresence(path string, fromPath, fromOperation []string) *Result {
// Each defined operation path parameters must correspond to a named element in the API's path pattern.
// (For example, you cannot have a path parameter named id for the following path /pets/{petId} but you must have a path parameter named petId.)
res := new(Result)
res := pools.poolOfResults.BorrowResult()
for _, l := range fromPath {
var matched bool
for _, r := range fromOperation {
@ -456,7 +475,6 @@ func (s *SpecValidator) validateReferenced() *Result {
return &res
}
// nolint: dupl
func (s *SpecValidator) validateReferencedParameters() *Result {
// Each referenceable definition should have references.
params := s.spec.Spec().Parameters
@ -475,14 +493,13 @@ func (s *SpecValidator) validateReferencedParameters() *Result {
if len(expected) == 0 {
return nil
}
result := new(Result)
result := pools.poolOfResults.BorrowResult()
for k := range expected {
result.AddWarnings(unusedParamMsg(k))
}
return result
}
// nolint: dupl
func (s *SpecValidator) validateReferencedResponses() *Result {
// Each referenceable definition should have references.
responses := s.spec.Spec().Responses
@ -501,14 +518,13 @@ func (s *SpecValidator) validateReferencedResponses() *Result {
if len(expected) == 0 {
return nil
}
result := new(Result)
result := pools.poolOfResults.BorrowResult()
for k := range expected {
result.AddWarnings(unusedResponseMsg(k))
}
return result
}
// nolint: dupl
func (s *SpecValidator) validateReferencedDefinitions() *Result {
// Each referenceable definition must have references.
defs := s.spec.Spec().Definitions
@ -537,7 +553,7 @@ func (s *SpecValidator) validateReferencedDefinitions() *Result {
func (s *SpecValidator) validateRequiredDefinitions() *Result {
// Each property listed in the required array must be defined in the properties of the model
res := new(Result)
res := pools.poolOfResults.BorrowResult()
DEFINITIONS:
for d, schema := range s.spec.Spec().Definitions {
@ -556,7 +572,7 @@ DEFINITIONS:
func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Schema) *Result {
// Takes care of recursive property definitions, which may be nested in additionalProperties schemas
res := new(Result)
res := pools.poolOfResults.BorrowResult()
propertyMatch := false
patternMatch := false
additionalPropertiesMatch := false
@ -615,40 +631,42 @@ func (s *SpecValidator) validateRequiredProperties(path, in string, v *spec.Sche
func (s *SpecValidator) validateParameters() *Result {
// - for each method, path is unique, regardless of path parameters
// e.g. GET:/petstore/{id}, GET:/petstore/{pet}, GET:/petstore are
// considered duplicate paths
// considered duplicate paths, if StrictPathParamUniqueness is enabled.
// - each parameter should have a unique `name` and `type` combination
// - each operation should have only 1 parameter of type body
// - there must be at most 1 parameter in body
// - parameters with pattern property must specify valid patterns
// - $ref in parameters must resolve
// - path param must be required
res := new(Result)
res := pools.poolOfResults.BorrowResult()
rexGarbledPathSegment := mustCompileRegexp(`.*[{}\s]+.*`)
for method, pi := range s.expandedAnalyzer().Operations() {
methodPaths := make(map[string]map[string]string)
for path, op := range pi {
pathToAdd := pathHelp.stripParametersInPath(path)
if s.Options.StrictPathParamUniqueness {
pathToAdd := pathHelp.stripParametersInPath(path)
// Warn on garbled path after param stripping
if rexGarbledPathSegment.MatchString(pathToAdd) {
res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd))
}
// Warn on garbled path after param stripping
if rexGarbledPathSegment.MatchString(pathToAdd) {
res.AddWarnings(pathStrippedParamGarbledMsg(pathToAdd))
}
// Check uniqueness of stripped paths
if _, found := methodPaths[method][pathToAdd]; found {
// Check uniqueness of stripped paths
if _, found := methodPaths[method][pathToAdd]; found {
// Sort names for stable, testable output
if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 {
res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd]))
// Sort names for stable, testable output
if strings.Compare(path, methodPaths[method][pathToAdd]) < 0 {
res.AddErrors(pathOverlapMsg(path, methodPaths[method][pathToAdd]))
} else {
res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path))
}
} else {
res.AddErrors(pathOverlapMsg(methodPaths[method][pathToAdd], path))
}
} else {
if _, found := methodPaths[method]; !found {
methodPaths[method] = map[string]string{}
}
methodPaths[method][pathToAdd] = path // Original non stripped path
if _, found := methodPaths[method]; !found {
methodPaths[method] = map[string]string{}
}
methodPaths[method][pathToAdd] = path // Original non stripped path
}
}
var bodyParams []string
@ -659,7 +677,23 @@ func (s *SpecValidator) validateParameters() *Result {
// TODO: should be done after param expansion
res.Merge(s.checkUniqueParams(path, method, op))
// pick the root schema from the swagger specification which describes a parameter
origSchema, ok := s.schema.Definitions["parameter"]
if !ok {
panic("unexpected swagger schema: missing #/definitions/parameter")
}
// clone it once to avoid expanding a global schema (e.g. swagger spec)
paramSchema, err := deepCloneSchema(origSchema)
if err != nil {
panic(fmt.Errorf("can't clone schema: %v", err))
}
for _, pr := range paramHelp.safeExpandedParamsFor(path, method, op.ID, res, s) {
// An expanded parameter must validate the Parameter schema (an unexpanded $ref always passes high-level schema validation)
schv := newSchemaValidator(&paramSchema, s.schema, fmt.Sprintf("%s.%s.parameters.%s", path, method, pr.Name), s.KnownFormats, s.schemaOptions)
obj := swag.ToDynamicJSON(pr)
res.Merge(schv.Validate(obj))
// Validate pattern regexp for parameters with a Pattern property
if _, err := compileRegexp(pr.Pattern); err != nil {
res.AddErrors(invalidPatternInParamMsg(op.ID, pr.Name, pr.Pattern))
@ -741,7 +775,7 @@ func (s *SpecValidator) validateParameters() *Result {
func (s *SpecValidator) validateReferencesValid() *Result {
// each reference must point to a valid object
res := new(Result)
res := pools.poolOfResults.BorrowResult()
for _, r := range s.analyzer.AllRefs() {
if !r.IsValidURI(s.spec.SpecFilePath()) { // Safeguard - spec should always yield a valid URI
res.AddErrors(invalidRefMsg(r.String()))
@ -767,7 +801,7 @@ func (s *SpecValidator) checkUniqueParams(path, method string, op *spec.Operatio
// However, there are some issues with such a factorization:
// - analysis does not seem to fully expand params
// - param keys may be altered by x-go-name
res := new(Result)
res := pools.poolOfResults.BorrowResult()
pnames := make(map[string]struct{})
if op.Parameters != nil { // Safeguard
@ -802,3 +836,17 @@ func (s *SpecValidator) expandedAnalyzer() *analysis.Spec {
}
return s.analyzer
}
func deepCloneSchema(src spec.Schema) (spec.Schema, error) {
var b bytes.Buffer
if err := gob.NewEncoder(&b).Encode(src); err != nil {
return spec.Schema{}, err
}
var dst spec.Schema
if err := gob.NewDecoder(&b).Decode(&dst); err != nil {
return spec.Schema{}, err
}
return dst, nil
}
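
The spec validator is normally reached through the package-level Spec function. A sketch validating a minimal, illustrative Swagger 2.0 document loaded with loads.Analyzed:

```go
package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
	"github.com/go-openapi/strfmt"
	"github.com/go-openapi/validate"
)

func main() {
	raw := []byte(`{
		"swagger": "2.0",
		"info": {"title": "minimal", "version": "1.0.0"},
		"paths": {}
	}`)

	doc, err := loads.Analyzed(raw, "2.0")
	if err != nil {
		log.Fatal(err)
	}

	// Spec flattens all validation errors into a single composite error.
	if err := validate.Spec(doc, strfmt.Default); err != nil {
		fmt.Println("invalid spec:", err)
		return
	}
	fmt.Println("spec is valid")
}
```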

View File

@ -187,6 +187,8 @@ const (
// UnusedResponseWarning ...
UnusedResponseWarning = "response %q is not used anywhere"
InvalidObject = "expected an object in %q.%s"
)
// Additional error codes
@ -347,11 +349,15 @@ func invalidParameterDefinitionAsSchemaMsg(path, method, operationID string) err
func parameterValidationTypeMismatchMsg(param, path, typ string) errors.Error {
return errors.New(errors.CompositeErrorCode, ParamValidationTypeMismatch, param, path, typ)
}
func invalidObjectMsg(path, in string) errors.Error {
return errors.New(errors.CompositeErrorCode, InvalidObject, path, in)
}
// disabled
// func invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error {
// return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method)
// }
//
// func invalidResponseDefinitionAsSchemaMsg(path, method string) errors.Error {
// return errors.New(errors.CompositeErrorCode, InvalidResponseDefinitionAsSchemaError, path, method)
// }
func someParametersBrokenMsg(path, method, operationID string) errors.Error {
return errors.New(errors.CompositeErrorCode, SomeParametersBrokenError, path, method, operationID)
}

View File

@ -25,11 +25,34 @@ import (
)
type typeValidator struct {
Path string
In string
Type spec.StringOrArray
Nullable bool
Format string
In string
Path string
Options *SchemaValidatorOptions
}
func newTypeValidator(path, in string, typ spec.StringOrArray, nullable bool, format string, opts *SchemaValidatorOptions) *typeValidator {
if opts == nil {
opts = new(SchemaValidatorOptions)
}
var t *typeValidator
if opts.recycleValidators {
t = pools.poolOfTypeValidators.BorrowValidator()
} else {
t = new(typeValidator)
}
t.Path = path
t.In = in
t.Type = typ
t.Nullable = nullable
t.Format = format
t.Options = opts
return t
}
func (t *typeValidator) schemaInfoForType(data interface{}) (string, string) {
@ -90,7 +113,7 @@ func (t *typeValidator) schemaInfoForType(data interface{}) (string, string) {
default:
val := reflect.ValueOf(data)
tpe := val.Type()
switch tpe.Kind() {
switch tpe.Kind() { //nolint:exhaustive
case reflect.Bool:
return booleanType, ""
case reflect.String:
@ -125,23 +148,33 @@ func (t *typeValidator) SetPath(path string) {
t.Path = path
}
func (t *typeValidator) Applies(source interface{}, kind reflect.Kind) bool {
func (t *typeValidator) Applies(source interface{}, _ reflect.Kind) bool {
// typeValidator applies to Schema, Parameter and Header objects
stpe := reflect.TypeOf(source)
r := (len(t.Type) > 0 || t.Format != "") && (stpe == specSchemaType || stpe == specParameterType || stpe == specHeaderType)
debugLog("type validator for %q applies %t for %T (kind: %v)\n", t.Path, r, source, kind)
return r
switch source.(type) {
case *spec.Schema:
case *spec.Parameter:
case *spec.Header:
default:
return false
}
return (len(t.Type) > 0 || t.Format != "")
}
func (t *typeValidator) Validate(data interface{}) *Result {
result := new(Result)
result.Inc()
if t.Options.recycleValidators {
defer func() {
t.redeem()
}()
}
if data == nil {
// nil or zero value for the passed structure require Type: null
if len(t.Type) > 0 && !t.Type.Contains(nullType) && !t.Nullable { // TODO: if a property is not required it also passes this
return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), nullType))
return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), nullType), t.Options.recycleResult)
}
return result
return emptyResult
}
// check if the type matches, should be used in every validator chain as first item
@ -151,8 +184,6 @@ func (t *typeValidator) Validate(data interface{}) *Result {
// infer schema type (JSON) and format from passed data type
schType, format := t.schemaInfoForType(data)
debugLog("path: %s, schType: %s, format: %s, expType: %s, expFmt: %s, kind: %s", t.Path, schType, format, t.Type, t.Format, val.Kind().String())
// check numerical types
// TODO: check unsigned ints
// TODO: check json.Number (see schema.go)
@ -163,15 +194,20 @@ func (t *typeValidator) Validate(data interface{}) *Result {
if kind != reflect.String && kind != reflect.Slice && t.Format != "" && !(t.Type.Contains(schType) || format == t.Format || isFloatInt || isIntFloat || isLowerInt || isLowerFloat) {
// TODO: test case
return errorHelp.sErr(errors.InvalidType(t.Path, t.In, t.Format, format))
return errorHelp.sErr(errors.InvalidType(t.Path, t.In, t.Format, format), t.Options.recycleResult)
}
if !(t.Type.Contains(numberType) || t.Type.Contains(integerType)) && t.Format != "" && (kind == reflect.String || kind == reflect.Slice) {
return result
return emptyResult
}
if !(t.Type.Contains(schType) || isFloatInt || isIntFloat) {
return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), schType))
return errorHelp.sErr(errors.InvalidType(t.Path, t.In, strings.Join(t.Type, ","), schType), t.Options.recycleResult)
}
return result
return emptyResult
}
func (t *typeValidator) redeem() {
pools.poolOfTypeValidators.RedeemValidator(t)
}

File diff suppressed because it is too large

View File

@ -120,7 +120,7 @@ func UniqueItems(path, in string, data interface{}) *errors.Validation {
// MinLength validates a string for minimum length
func MinLength(path, in, data string, minLength int64) *errors.Validation {
strLen := int64(utf8.RuneCount([]byte(data)))
strLen := int64(utf8.RuneCountInString(data))
if strLen < minLength {
return errors.TooShort(path, in, minLength, data)
}
@ -129,7 +129,7 @@ func MinLength(path, in, data string, minLength int64) *errors.Validation {
// MaxLength validates a string for maximum length
func MaxLength(path, in, data string, maxLength int64) *errors.Validation {
strLen := int64(utf8.RuneCount([]byte(data)))
strLen := int64(utf8.RuneCountInString(data))
if strLen > maxLength {
return errors.TooLong(path, in, maxLength, data)
}
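
MinLength and MaxLength now count runes with utf8.RuneCountInString instead of converting the string to a byte slice first; the character-based semantics are unchanged. A quick illustration:

```go
package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	s := "héllo" // 5 characters, 6 bytes (é is 2 bytes in UTF-8)

	fmt.Println(len(s))                    // 6: byte length
	fmt.Println(utf8.RuneCountInString(s)) // 5: rune count used by MinLength/MaxLength

	// So a schema with maxLength: 5 accepts "héllo", even though it is 6 bytes long.
}
```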
@ -315,7 +315,7 @@ func FormatOf(path, in, format, data string, registry strfmt.Registry) *errors.V
// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
func MaximumNativeType(path, in string, val interface{}, max float64, exclusive bool) *errors.Validation {
kind := reflect.ValueOf(val).Type().Kind()
switch kind {
switch kind { //nolint:exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
value := valueHelp.asInt64(val)
return MaximumInt(path, in, value, int64(max), exclusive)
@ -345,7 +345,7 @@ func MaximumNativeType(path, in string, val interface{}, max float64, exclusive
// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
func MinimumNativeType(path, in string, val interface{}, min float64, exclusive bool) *errors.Validation {
kind := reflect.ValueOf(val).Type().Kind()
switch kind {
switch kind { //nolint:exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
value := valueHelp.asInt64(val)
return MinimumInt(path, in, value, int64(min), exclusive)
@ -375,7 +375,7 @@ func MinimumNativeType(path, in string, val interface{}, min float64, exclusive
// TODO: Normally, a JSON MAX_SAFE_INTEGER check would ensure conversion remains loss-free
func MultipleOfNativeType(path, in string, val interface{}, multipleOf float64) *errors.Validation {
kind := reflect.ValueOf(val).Type().Kind()
switch kind {
switch kind { //nolint:exhaustive
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
value := valueHelp.asInt64(val)
return MultipleOfInt(path, in, value, int64(multipleOf))
@ -399,7 +399,7 @@ func IsValueValidAgainstRange(val interface{}, typeName, format, prefix, path st
// What is the string representation of val
var stringRep string
switch kind {
switch kind { //nolint:exhaustive
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
stringRep = swag.FormatUint64(valueHelp.asUint64(val))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: