Diffstat (limited to 'vendor/github.com/go-openapi')
-rw-r--r--  vendor/github.com/go-openapi/analysis/.gitignore | 3
-rw-r--r--  vendor/github.com/go-openapi/analysis/.travis.yml | 18
-rw-r--r--  vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/analysis/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/analysis/README.md | 6
-rw-r--r--  vendor/github.com/go-openapi/analysis/analyzer.go | 785
-rw-r--r--  vendor/github.com/go-openapi/analysis/analyzer_test.go | 284
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten.go | 800
-rw-r--r--  vendor/github.com/go-openapi/analysis/flatten_test.go | 826
-rw-r--r--  vendor/github.com/go-openapi/analysis/mixin.go | 199
-rw-r--r--  vendor/github.com/go-openapi/analysis/mixin_test.go | 64
-rw-r--r--  vendor/github.com/go-openapi/analysis/schema.go | 234
-rw-r--r--  vendor/github.com/go-openapi/analysis/schema_test.go | 266
-rw-r--r--  vendor/github.com/go-openapi/errors/.gitignore | 2
-rw-r--r--  vendor/github.com/go-openapi/errors/.travis.yml | 12
-rw-r--r--  vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/errors/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/errors/README.md | 5
-rw-r--r--  vendor/github.com/go-openapi/errors/api.go | 150
-rw-r--r--  vendor/github.com/go-openapi/errors/api_test.go | 97
-rw-r--r--  vendor/github.com/go-openapi/errors/auth.go | 20
-rw-r--r--  vendor/github.com/go-openapi/errors/auth_test.go | 27
-rw-r--r--  vendor/github.com/go-openapi/errors/headers.go | 85
-rw-r--r--  vendor/github.com/go-openapi/errors/middleware.go | 51
-rw-r--r--  vendor/github.com/go-openapi/errors/middleware_test.go | 33
-rw-r--r--  vendor/github.com/go-openapi/errors/parsing.go | 59
-rw-r--r--  vendor/github.com/go-openapi/errors/parsing_test.go | 32
-rw-r--r--  vendor/github.com/go-openapi/errors/schema.go | 548
-rw-r--r--  vendor/github.com/go-openapi/errors/schema_test.go | 284
-rw-r--r--  vendor/github.com/go-openapi/loads/.drone.sec | 1
-rw-r--r--  vendor/github.com/go-openapi/loads/.drone.yml | 39
-rw-r--r--  vendor/github.com/go-openapi/loads/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/loads/.gitignore | 4
-rw-r--r--  vendor/github.com/go-openapi/loads/.travis.yml | 16
-rw-r--r--  vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/loads/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/loads/README.md | 5
-rw-r--r--  vendor/github.com/go-openapi/loads/json_test.go | 318
-rw-r--r--  vendor/github.com/go-openapi/loads/spec.go | 279
-rw-r--r--  vendor/github.com/go-openapi/loads/spec_test.go | 717
-rw-r--r--  vendor/github.com/go-openapi/spec/expander.go | 25
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.editorconfig | 26
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.gitignore | 2
-rw-r--r--  vendor/github.com/go-openapi/strfmt/.travis.yml | 18
-rw-r--r--  vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md | 74
-rw-r--r--  vendor/github.com/go-openapi/strfmt/LICENSE | 202
-rw-r--r--  vendor/github.com/go-openapi/strfmt/README.md | 5
-rw-r--r--  vendor/github.com/go-openapi/strfmt/bson.go | 124
-rw-r--r--  vendor/github.com/go-openapi/strfmt/bson_test.go | 53
-rw-r--r--  vendor/github.com/go-openapi/strfmt/date.go | 152
-rw-r--r--  vendor/github.com/go-openapi/strfmt/date_test.go | 83
-rw-r--r--  vendor/github.com/go-openapi/strfmt/default.go | 1697
-rw-r--r--  vendor/github.com/go-openapi/strfmt/default_test.go | 691
-rw-r--r--  vendor/github.com/go-openapi/strfmt/doc.go | 18
-rw-r--r--  vendor/github.com/go-openapi/strfmt/duration.go | 194
-rw-r--r--  vendor/github.com/go-openapi/strfmt/duration_test.go | 143
-rw-r--r--  vendor/github.com/go-openapi/strfmt/format.go | 298
-rw-r--r--  vendor/github.com/go-openapi/strfmt/format_test.go | 223
-rw-r--r--  vendor/github.com/go-openapi/strfmt/time.go | 188
-rw-r--r--  vendor/github.com/go-openapi/strfmt/time_test.go | 154
60 files changed, 16 insertions, 11477 deletions
diff --git a/vendor/github.com/go-openapi/analysis/.gitignore b/vendor/github.com/go-openapi/analysis/.gitignore
deleted file mode 100644
index c96f0b231..000000000
--- a/vendor/github.com/go-openapi/analysis/.gitignore
+++ /dev/null
@@ -1,3 +0,0 @@
-secrets.yml
-coverage.out
-.idea
diff --git a/vendor/github.com/go-openapi/analysis/.travis.yml b/vendor/github.com/go-openapi/analysis/.travis.yml
deleted file mode 100644
index 3aa42ab3a..000000000
--- a/vendor/github.com/go-openapi/analysis/.travis.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-language: go
-go:
-- 1.7
-install:
-- go get -u github.com/stretchr/testify/assert
-- go get -u gopkg.in/yaml.v2
-- go get -u github.com/go-openapi/swag
-- go get -u github.com/go-openapi/jsonpointer
-- go get -u github.com/go-openapi/spec
-- go get -u github.com/go-openapi/strfmt
-- go get -u github.com/go-openapi/loads/fmts
-script:
-- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./...
-after_success:
-- bash <(curl -s https://codecov.io/bash)
-notifications:
- slack:
- secure: Sf7kZf7ZGbnwWUMpffHwMu5A0cHkLK2MYY32LNTPj4+/3qC3Ghl7+9v4TSLOqOlCwdRNjOGblAq7s+GDJed6/xgRQl1JtCi1klzZNrYX4q01pgTPvvGcwbBkIYgeMaPeIRcK9OZnud7sRXdttozgTOpytps2U6Js32ip7uj5mHSg2ub0FwoSJwlS6dbezZ8+eDhoha0F/guY99BEwx8Bd+zROrT2TFGsSGOFGN6wFc7moCqTHO/YkWib13a2QNXqOxCCVBy/lt76Wp+JkeFppjHlzs/2lP3EAk13RIUAaesdEUHvIHrzCyNJEd3/+KO2DzsWOYfpktd+KBCvgaYOsoo7ubdT3IROeAegZdCgo/6xgCEsmFc9ZcqCfN5yNx2A+BZ2Vwmpws+bQ1E1+B5HDzzaiLcYfG4X2O210QVGVDLWsv1jqD+uPYeHY2WRfh5ZsIUFvaqgUEnwHwrK44/8REAhQavt1QAj5uJpsRd7CkRVPWRNK+yIky+wgbVUFEchRNmS55E7QWf+W4+4QZkQi7vUTMc9nbTUu2Es9NfvfudOpM2wZbn98fjpb/qq/nRv6Bk+ca+7XD5/IgNLMbWp2ouDdzbiHLCOfDUiHiDJhLfFZx9Bwo7ZwfzeOlbrQX66bx7xRKYmOe4DLrXhNcpbsMa8qbfxlZRCmYbubB/Y8h4=
diff --git a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e..000000000
--- a/vendor/github.com/go-openapi/analysis/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/analysis/LICENSE b/vendor/github.com/go-openapi/analysis/LICENSE
deleted file mode 100644
index d64569567..000000000
--- a/vendor/github.com/go-openapi/analysis/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/analysis/README.md b/vendor/github.com/go-openapi/analysis/README.md
deleted file mode 100644
index d32c30d45..000000000
--- a/vendor/github.com/go-openapi/analysis/README.md
+++ /dev/null
@@ -1,6 +0,0 @@
-# OpenAPI initiative analysis [![Build Status](https://travis-ci.org/go-openapi/analysis.svg?branch=master)](https://travis-ci.org/go-openapi/analysis) [![codecov](https://codecov.io/gh/go-openapi/analysis/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/analysis) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
-
-[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/analysis/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/analysis?status.svg)](http://godoc.org/github.com/go-openapi/analysis)
-
-
-A foundational library to analyze an OAI specification document for easier reasoning about the content.
\ No newline at end of file
diff --git a/vendor/github.com/go-openapi/analysis/analyzer.go b/vendor/github.com/go-openapi/analysis/analyzer.go
deleted file mode 100644
index 77323a58e..000000000
--- a/vendor/github.com/go-openapi/analysis/analyzer.go
+++ /dev/null
@@ -1,785 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import (
- "fmt"
- slashpath "path"
- "strconv"
- "strings"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-type referenceAnalysis struct {
- schemas map[string]spec.Ref
- responses map[string]spec.Ref
- parameters map[string]spec.Ref
- items map[string]spec.Ref
- headerItems map[string]spec.Ref
- parameterItems map[string]spec.Ref
- allRefs map[string]spec.Ref
- pathItems map[string]spec.Ref
-}
-
-func (r *referenceAnalysis) addRef(key string, ref spec.Ref) {
- r.allRefs["#"+key] = ref
-}
-
-func (r *referenceAnalysis) addItemsRef(key string, items *spec.Items, location string) {
- r.items["#"+key] = items.Ref
- r.addRef(key, items.Ref)
- if location == "header" {
- r.headerItems["#"+key] = items.Ref
- } else {
- r.parameterItems["#"+key] = items.Ref
- }
-}
-
-func (r *referenceAnalysis) addSchemaRef(key string, ref SchemaRef) {
- r.schemas["#"+key] = ref.Schema.Ref
- r.addRef(key, ref.Schema.Ref)
-}
-
-func (r *referenceAnalysis) addResponseRef(key string, resp *spec.Response) {
- r.responses["#"+key] = resp.Ref
- r.addRef(key, resp.Ref)
-}
-
-func (r *referenceAnalysis) addParamRef(key string, param *spec.Parameter) {
- r.parameters["#"+key] = param.Ref
- r.addRef(key, param.Ref)
-}
-
-func (r *referenceAnalysis) addPathItemRef(key string, pathItem *spec.PathItem) {
- r.pathItems["#"+key] = pathItem.Ref
- r.addRef(key, pathItem.Ref)
-}
-
-type patternAnalysis struct {
- parameters map[string]string
- headers map[string]string
- items map[string]string
- schemas map[string]string
- allPatterns map[string]string
-}
-
-func (p *patternAnalysis) addPattern(key, pattern string) {
- p.allPatterns["#"+key] = pattern
-}
-
-func (p *patternAnalysis) addParameterPattern(key, pattern string) {
- p.parameters["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-func (p *patternAnalysis) addHeaderPattern(key, pattern string) {
- p.headers["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-func (p *patternAnalysis) addItemsPattern(key, pattern string) {
- p.items["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-func (p *patternAnalysis) addSchemaPattern(key, pattern string) {
- p.schemas["#"+key] = pattern
- p.addPattern(key, pattern)
-}
-
-// New takes a swagger spec object and returns an analyzed spec document.
-// The analyzed document contains a number of indices that make it easier to
-// reason about semantics of a swagger specification for use in code generation
-// or validation etc.
-func New(doc *spec.Swagger) *Spec {
- a := &Spec{
- spec: doc,
- consumes: make(map[string]struct{}, 150),
- produces: make(map[string]struct{}, 150),
- authSchemes: make(map[string]struct{}, 150),
- operations: make(map[string]map[string]*spec.Operation, 150),
- allSchemas: make(map[string]SchemaRef, 150),
- allOfs: make(map[string]SchemaRef, 150),
- references: referenceAnalysis{
- schemas: make(map[string]spec.Ref, 150),
- pathItems: make(map[string]spec.Ref, 150),
- responses: make(map[string]spec.Ref, 150),
- parameters: make(map[string]spec.Ref, 150),
- items: make(map[string]spec.Ref, 150),
- headerItems: make(map[string]spec.Ref, 150),
- parameterItems: make(map[string]spec.Ref, 150),
- allRefs: make(map[string]spec.Ref, 150),
- },
- patterns: patternAnalysis{
- parameters: make(map[string]string, 150),
- headers: make(map[string]string, 150),
- items: make(map[string]string, 150),
- schemas: make(map[string]string, 150),
- allPatterns: make(map[string]string, 150),
- },
- }
- a.initialize()
- return a
-}
-
-// Spec takes a swagger spec object and turns it into a registry
-// with a bunch of utility methods to act on the information in the spec
-type Spec struct {
- spec *spec.Swagger
- consumes map[string]struct{}
- produces map[string]struct{}
- authSchemes map[string]struct{}
- operations map[string]map[string]*spec.Operation
- references referenceAnalysis
- patterns patternAnalysis
- allSchemas map[string]SchemaRef
- allOfs map[string]SchemaRef
-}
-
-func (s *Spec) reset() {
- s.consumes = make(map[string]struct{}, 150)
- s.produces = make(map[string]struct{}, 150)
- s.authSchemes = make(map[string]struct{}, 150)
- s.operations = make(map[string]map[string]*spec.Operation, 150)
- s.allSchemas = make(map[string]SchemaRef, 150)
- s.allOfs = make(map[string]SchemaRef, 150)
- s.references.schemas = make(map[string]spec.Ref, 150)
- s.references.pathItems = make(map[string]spec.Ref, 150)
- s.references.responses = make(map[string]spec.Ref, 150)
- s.references.parameters = make(map[string]spec.Ref, 150)
- s.references.items = make(map[string]spec.Ref, 150)
- s.references.headerItems = make(map[string]spec.Ref, 150)
- s.references.parameterItems = make(map[string]spec.Ref, 150)
- s.references.allRefs = make(map[string]spec.Ref, 150)
- s.patterns.parameters = make(map[string]string, 150)
- s.patterns.headers = make(map[string]string, 150)
- s.patterns.items = make(map[string]string, 150)
- s.patterns.schemas = make(map[string]string, 150)
- s.patterns.allPatterns = make(map[string]string, 150)
-}
-
-func (s *Spec) reload() {
- s.reset()
- s.initialize()
-}
-
-func (s *Spec) initialize() {
- for _, c := range s.spec.Consumes {
- s.consumes[c] = struct{}{}
- }
- for _, c := range s.spec.Produces {
- s.produces[c] = struct{}{}
- }
- for _, ss := range s.spec.Security {
- for k := range ss {
- s.authSchemes[k] = struct{}{}
- }
- }
- for path, pathItem := range s.AllPaths() {
- s.analyzeOperations(path, &pathItem)
- }
-
- for name, parameter := range s.spec.Parameters {
- refPref := slashpath.Join("/parameters", jsonpointer.Escape(name))
- if parameter.Items != nil {
- s.analyzeItems("items", parameter.Items, refPref, "parameter")
- }
- if parameter.In == "body" && parameter.Schema != nil {
- s.analyzeSchema("schema", *parameter.Schema, refPref)
- }
- if parameter.Pattern != "" {
- s.patterns.addParameterPattern(refPref, parameter.Pattern)
- }
- }
-
- for name, response := range s.spec.Responses {
- refPref := slashpath.Join("/responses", jsonpointer.Escape(name))
- for k, v := range response.Headers {
- hRefPref := slashpath.Join(refPref, "headers", k)
- if v.Items != nil {
- s.analyzeItems("items", v.Items, hRefPref, "header")
- }
- if v.Pattern != "" {
- s.patterns.addHeaderPattern(hRefPref, v.Pattern)
- }
- }
- if response.Schema != nil {
- s.analyzeSchema("schema", *response.Schema, refPref)
- }
- }
-
- for name, schema := range s.spec.Definitions {
- s.analyzeSchema(name, schema, "/definitions")
- }
- // TODO: after analyzing all things and flattening schemas etc
- // resolve all the collected references to their final representations
- // best put in a separate method because this could get expensive
-}
-
-func (s *Spec) analyzeOperations(path string, pi *spec.PathItem) {
- // TODO: resolve refs here?
- op := pi
- if pi.Ref.String() != "" {
- key := slashpath.Join("/paths", jsonpointer.Escape(path))
- s.references.addPathItemRef(key, pi)
- }
- s.analyzeOperation("GET", path, op.Get)
- s.analyzeOperation("PUT", path, op.Put)
- s.analyzeOperation("POST", path, op.Post)
- s.analyzeOperation("PATCH", path, op.Patch)
- s.analyzeOperation("DELETE", path, op.Delete)
- s.analyzeOperation("HEAD", path, op.Head)
- s.analyzeOperation("OPTIONS", path, op.Options)
- for i, param := range op.Parameters {
- refPref := slashpath.Join("/paths", jsonpointer.Escape(path), "parameters", strconv.Itoa(i))
- if param.Ref.String() != "" {
- s.references.addParamRef(refPref, &param)
- }
- if param.Pattern != "" {
- s.patterns.addParameterPattern(refPref, param.Pattern)
- }
- if param.Items != nil {
- s.analyzeItems("items", param.Items, refPref, "parameter")
- }
- if param.Schema != nil {
- s.analyzeSchema("schema", *param.Schema, refPref)
- }
- }
-}
-
-func (s *Spec) analyzeItems(name string, items *spec.Items, prefix, location string) {
- if items == nil {
- return
- }
- refPref := slashpath.Join(prefix, name)
- s.analyzeItems(name, items.Items, refPref, location)
- if items.Ref.String() != "" {
- s.references.addItemsRef(refPref, items, location)
- }
- if items.Pattern != "" {
- s.patterns.addItemsPattern(refPref, items.Pattern)
- }
-}
-
-func (s *Spec) analyzeOperation(method, path string, op *spec.Operation) {
- if op == nil {
- return
- }
-
- for _, c := range op.Consumes {
- s.consumes[c] = struct{}{}
- }
- for _, c := range op.Produces {
- s.produces[c] = struct{}{}
- }
- for _, ss := range op.Security {
- for k := range ss {
- s.authSchemes[k] = struct{}{}
- }
- }
- if _, ok := s.operations[method]; !ok {
- s.operations[method] = make(map[string]*spec.Operation)
- }
- s.operations[method][path] = op
- prefix := slashpath.Join("/paths", jsonpointer.Escape(path), strings.ToLower(method))
- for i, param := range op.Parameters {
- refPref := slashpath.Join(prefix, "parameters", strconv.Itoa(i))
- if param.Ref.String() != "" {
- s.references.addParamRef(refPref, &param)
- }
- if param.Pattern != "" {
- s.patterns.addParameterPattern(refPref, param.Pattern)
- }
- s.analyzeItems("items", param.Items, refPref, "parameter")
- if param.In == "body" && param.Schema != nil {
- s.analyzeSchema("schema", *param.Schema, refPref)
- }
- }
- if op.Responses != nil {
- if op.Responses.Default != nil {
- refPref := slashpath.Join(prefix, "responses", "default")
- if op.Responses.Default.Ref.String() != "" {
- s.references.addResponseRef(refPref, op.Responses.Default)
- }
- for k, v := range op.Responses.Default.Headers {
- hRefPref := slashpath.Join(refPref, "headers", k)
- s.analyzeItems("items", v.Items, hRefPref, "header")
- if v.Pattern != "" {
- s.patterns.addHeaderPattern(hRefPref, v.Pattern)
- }
- }
- if op.Responses.Default.Schema != nil {
- s.analyzeSchema("schema", *op.Responses.Default.Schema, refPref)
- }
- }
- for k, res := range op.Responses.StatusCodeResponses {
- refPref := slashpath.Join(prefix, "responses", strconv.Itoa(k))
- if res.Ref.String() != "" {
- s.references.addResponseRef(refPref, &res)
- }
- for k, v := range res.Headers {
- hRefPref := slashpath.Join(refPref, "headers", k)
- s.analyzeItems("items", v.Items, hRefPref, "header")
- if v.Pattern != "" {
- s.patterns.addHeaderPattern(hRefPref, v.Pattern)
- }
- }
- if res.Schema != nil {
- s.analyzeSchema("schema", *res.Schema, refPref)
- }
- }
- }
-}
-
-func (s *Spec) analyzeSchema(name string, schema spec.Schema, prefix string) {
- refURI := slashpath.Join(prefix, jsonpointer.Escape(name))
- schRef := SchemaRef{
- Name: name,
- Schema: &schema,
- Ref: spec.MustCreateRef("#" + refURI),
- TopLevel: prefix == "/definitions",
- }
-
- s.allSchemas["#"+refURI] = schRef
-
- if schema.Ref.String() != "" {
- s.references.addSchemaRef(refURI, schRef)
- }
- if schema.Pattern != "" {
- s.patterns.addSchemaPattern(refURI, schema.Pattern)
- }
-
- for k, v := range schema.Definitions {
- s.analyzeSchema(k, v, slashpath.Join(refURI, "definitions"))
- }
- for k, v := range schema.Properties {
- s.analyzeSchema(k, v, slashpath.Join(refURI, "properties"))
- }
- for k, v := range schema.PatternProperties {
- s.analyzeSchema(k, v, slashpath.Join(refURI, "patternProperties"))
- }
- for i, v := range schema.AllOf {
- s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "allOf"))
- }
- if len(schema.AllOf) > 0 {
- s.allOfs["#"+refURI] = schRef
- }
- for i, v := range schema.AnyOf {
- s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "anyOf"))
- }
- for i, v := range schema.OneOf {
- s.analyzeSchema(strconv.Itoa(i), v, slashpath.Join(refURI, "oneOf"))
- }
- if schema.Not != nil {
- s.analyzeSchema("not", *schema.Not, refURI)
- }
- if schema.AdditionalProperties != nil && schema.AdditionalProperties.Schema != nil {
- s.analyzeSchema("additionalProperties", *schema.AdditionalProperties.Schema, refURI)
- }
- if schema.AdditionalItems != nil && schema.AdditionalItems.Schema != nil {
- s.analyzeSchema("additionalItems", *schema.AdditionalItems.Schema, refURI)
- }
- if schema.Items != nil {
- if schema.Items.Schema != nil {
- s.analyzeSchema("items", *schema.Items.Schema, refURI)
- }
- for i, sch := range schema.Items.Schemas {
- s.analyzeSchema(strconv.Itoa(i), sch, slashpath.Join(refURI, "items"))
- }
- }
-}
-
-// SecurityRequirement is a representation of a security requirement for an operation
-type SecurityRequirement struct {
- Name string
- Scopes []string
-}
-
-// SecurityRequirementsFor gets the security requirements for the operation
-func (s *Spec) SecurityRequirementsFor(operation *spec.Operation) []SecurityRequirement {
- if s.spec.Security == nil && operation.Security == nil {
- return nil
- }
-
- schemes := s.spec.Security
- if operation.Security != nil {
- schemes = operation.Security
- }
-
- unique := make(map[string]SecurityRequirement)
- for _, scheme := range schemes {
- for k, v := range scheme {
- if _, ok := unique[k]; !ok {
- unique[k] = SecurityRequirement{Name: k, Scopes: v}
- }
- }
- }
-
- var result []SecurityRequirement
- for _, v := range unique {
- result = append(result, v)
- }
- return result
-}
-
-// SecurityDefinitionsFor gets the matching security definitions for a set of requirements
-func (s *Spec) SecurityDefinitionsFor(operation *spec.Operation) map[string]spec.SecurityScheme {
- requirements := s.SecurityRequirementsFor(operation)
- if len(requirements) == 0 {
- return nil
- }
- result := make(map[string]spec.SecurityScheme)
- for _, v := range requirements {
- if definition, ok := s.spec.SecurityDefinitions[v.Name]; ok {
- if definition != nil {
- result[v.Name] = *definition
- }
- }
- }
- return result
-}
-
-// ConsumesFor gets the mediatypes for the operation
-func (s *Spec) ConsumesFor(operation *spec.Operation) []string {
-
- if len(operation.Consumes) == 0 {
- cons := make(map[string]struct{}, len(s.spec.Consumes))
- for _, k := range s.spec.Consumes {
- cons[k] = struct{}{}
- }
- return s.structMapKeys(cons)
- }
-
- cons := make(map[string]struct{}, len(operation.Consumes))
- for _, c := range operation.Consumes {
- cons[c] = struct{}{}
- }
- return s.structMapKeys(cons)
-}
-
-// ProducesFor gets the mediatypes for the operation
-func (s *Spec) ProducesFor(operation *spec.Operation) []string {
- if len(operation.Produces) == 0 {
- prod := make(map[string]struct{}, len(s.spec.Produces))
- for _, k := range s.spec.Produces {
- prod[k] = struct{}{}
- }
- return s.structMapKeys(prod)
- }
-
- prod := make(map[string]struct{}, len(operation.Produces))
- for _, c := range operation.Produces {
- prod[c] = struct{}{}
- }
- return s.structMapKeys(prod)
-}
-
-func mapKeyFromParam(param *spec.Parameter) string {
- return fmt.Sprintf("%s#%s", param.In, fieldNameFromParam(param))
-}
-
-func fieldNameFromParam(param *spec.Parameter) string {
- if nm, ok := param.Extensions.GetString("go-name"); ok {
- return nm
- }
- return swag.ToGoName(param.Name)
-}
-
-func (s *Spec) paramsAsMap(parameters []spec.Parameter, res map[string]spec.Parameter) {
- for _, param := range parameters {
- pr := param
- if pr.Ref.String() != "" {
- obj, _, err := pr.Ref.GetPointer().Get(s.spec)
- if err != nil {
- panic(err)
- }
- pr = obj.(spec.Parameter)
- }
- res[mapKeyFromParam(&pr)] = pr
- }
-}
-
-// ParametersFor the specified operation id
-func (s *Spec) ParametersFor(operationID string) []spec.Parameter {
- gatherParams := func(pi *spec.PathItem, op *spec.Operation) []spec.Parameter {
- bag := make(map[string]spec.Parameter)
- s.paramsAsMap(pi.Parameters, bag)
- s.paramsAsMap(op.Parameters, bag)
-
- var res []spec.Parameter
- for _, v := range bag {
- res = append(res, v)
- }
- return res
- }
- for _, pi := range s.spec.Paths.Paths {
- if pi.Get != nil && pi.Get.ID == operationID {
- return gatherParams(&pi, pi.Get)
- }
- if pi.Head != nil && pi.Head.ID == operationID {
- return gatherParams(&pi, pi.Head)
- }
- if pi.Options != nil && pi.Options.ID == operationID {
- return gatherParams(&pi, pi.Options)
- }
- if pi.Post != nil && pi.Post.ID == operationID {
- return gatherParams(&pi, pi.Post)
- }
- if pi.Patch != nil && pi.Patch.ID == operationID {
- return gatherParams(&pi, pi.Patch)
- }
- if pi.Put != nil && pi.Put.ID == operationID {
- return gatherParams(&pi, pi.Put)
- }
- if pi.Delete != nil && pi.Delete.ID == operationID {
- return gatherParams(&pi, pi.Delete)
- }
- }
- return nil
-}
-
-// ParamsFor the specified method and path. Aggregates them with the defaults etc, so it's all the params that
-// apply for the method and path.
-func (s *Spec) ParamsFor(method, path string) map[string]spec.Parameter {
- res := make(map[string]spec.Parameter)
- if pi, ok := s.spec.Paths.Paths[path]; ok {
- s.paramsAsMap(pi.Parameters, res)
- s.paramsAsMap(s.operations[strings.ToUpper(method)][path].Parameters, res)
- }
- return res
-}
-
-// OperationForName gets the operation for the given id
-func (s *Spec) OperationForName(operationID string) (string, string, *spec.Operation, bool) {
- for method, pathItem := range s.operations {
- for path, op := range pathItem {
- if operationID == op.ID {
- return method, path, op, true
- }
- }
- }
- return "", "", nil, false
-}
-
-// OperationFor the given method and path
-func (s *Spec) OperationFor(method, path string) (*spec.Operation, bool) {
- if mp, ok := s.operations[strings.ToUpper(method)]; ok {
- op, fn := mp[path]
- return op, fn
- }
- return nil, false
-}
-
-// Operations gathers all the operations specified in the spec document
-func (s *Spec) Operations() map[string]map[string]*spec.Operation {
- return s.operations
-}
-
-func (s *Spec) structMapKeys(mp map[string]struct{}) []string {
- if len(mp) == 0 {
- return nil
- }
-
- result := make([]string, 0, len(mp))
- for k := range mp {
- result = append(result, k)
- }
- return result
-}
-
-// AllPaths returns all the paths in the swagger spec
-func (s *Spec) AllPaths() map[string]spec.PathItem {
- if s.spec == nil || s.spec.Paths == nil {
- return nil
- }
- return s.spec.Paths.Paths
-}
-
-// OperationIDs gets all the operation ids based on method an dpath
-func (s *Spec) OperationIDs() []string {
- if len(s.operations) == 0 {
- return nil
- }
- result := make([]string, 0, len(s.operations))
- for method, v := range s.operations {
- for p, o := range v {
- if o.ID != "" {
- result = append(result, o.ID)
- } else {
- result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
- }
- }
- }
- return result
-}
-
-// OperationMethodPaths gets all the operation ids based on method an dpath
-func (s *Spec) OperationMethodPaths() []string {
- if len(s.operations) == 0 {
- return nil
- }
- result := make([]string, 0, len(s.operations))
- for method, v := range s.operations {
- for p := range v {
- result = append(result, fmt.Sprintf("%s %s", strings.ToUpper(method), p))
- }
- }
- return result
-}
-
-// RequiredConsumes gets all the distinct consumes that are specified in the specification document
-func (s *Spec) RequiredConsumes() []string {
- return s.structMapKeys(s.consumes)
-}
-
-// RequiredProduces gets all the distinct produces that are specified in the specification document
-func (s *Spec) RequiredProduces() []string {
- return s.structMapKeys(s.produces)
-}
-
-// RequiredSecuritySchemes gets all the distinct security schemes that are specified in the swagger spec
-func (s *Spec) RequiredSecuritySchemes() []string {
- return s.structMapKeys(s.authSchemes)
-}
-
-// SchemaRef is a reference to a schema
-type SchemaRef struct {
- Name string
- Ref spec.Ref
- Schema *spec.Schema
- TopLevel bool
-}
-
-// SchemasWithAllOf returns schema references to all schemas that are defined
-// with an allOf key
-func (s *Spec) SchemasWithAllOf() (result []SchemaRef) {
- for _, v := range s.allOfs {
- result = append(result, v)
- }
- return
-}
-
-// AllDefinitions returns schema references for all the definitions that were discovered
-func (s *Spec) AllDefinitions() (result []SchemaRef) {
- for _, v := range s.allSchemas {
- result = append(result, v)
- }
- return
-}
-
-// AllDefinitionReferences returns json refs for all the discovered schemas
-func (s *Spec) AllDefinitionReferences() (result []string) {
- for _, v := range s.references.schemas {
- result = append(result, v.String())
- }
- return
-}
-
-// AllParameterReferences returns json refs for all the discovered parameters
-func (s *Spec) AllParameterReferences() (result []string) {
- for _, v := range s.references.parameters {
- result = append(result, v.String())
- }
- return
-}
-
-// AllResponseReferences returns json refs for all the discovered responses
-func (s *Spec) AllResponseReferences() (result []string) {
- for _, v := range s.references.responses {
- result = append(result, v.String())
- }
- return
-}
-
-// AllPathItemReferences returns the references for all the items
-func (s *Spec) AllPathItemReferences() (result []string) {
- for _, v := range s.references.pathItems {
- result = append(result, v.String())
- }
- return
-}
-
-// AllItemsReferences returns the references for all the items
-func (s *Spec) AllItemsReferences() (result []string) {
- for _, v := range s.references.items {
- result = append(result, v.String())
- }
- return
-}
-
-// AllReferences returns all the references found in the document
-func (s *Spec) AllReferences() (result []string) {
- for _, v := range s.references.allRefs {
- result = append(result, v.String())
- }
- return
-}
-
-// AllRefs returns all the unique references found in the document
-func (s *Spec) AllRefs() (result []spec.Ref) {
- set := make(map[string]struct{})
- for _, v := range s.references.allRefs {
- a := v.String()
- if a == "" {
- continue
- }
- if _, ok := set[a]; !ok {
- set[a] = struct{}{}
- result = append(result, v)
- }
- }
- return
-}
-
-func cloneStringMap(source map[string]string) map[string]string {
- res := make(map[string]string, len(source))
- for k, v := range source {
- res[k] = v
- }
- return res
-}
-
-// ParameterPatterns returns all the patterns found in parameters
-// the map is cloned to avoid accidental changes
-func (s *Spec) ParameterPatterns() map[string]string {
- return cloneStringMap(s.patterns.parameters)
-}
-
-// HeaderPatterns returns all the patterns found in response headers
-// the map is cloned to avoid accidental changes
-func (s *Spec) HeaderPatterns() map[string]string {
- return cloneStringMap(s.patterns.headers)
-}
-
-// ItemsPatterns returns all the patterns found in simple array items
-// the map is cloned to avoid accidental changes
-func (s *Spec) ItemsPatterns() map[string]string {
- return cloneStringMap(s.patterns.items)
-}
-
-// SchemaPatterns returns all the patterns found in schemas
-// the map is cloned to avoid accidental changes
-func (s *Spec) SchemaPatterns() map[string]string {
- return cloneStringMap(s.patterns.schemas)
-}
-
-// AllPatterns returns all the patterns found in the spec
-// the map is cloned to avoid accidental changes
-func (s *Spec) AllPatterns() map[string]string {
- return cloneStringMap(s.patterns.allPatterns)
-}
diff --git a/vendor/github.com/go-openapi/analysis/analyzer_test.go b/vendor/github.com/go-openapi/analysis/analyzer_test.go
deleted file mode 100644
index 70b80d7f9..000000000
--- a/vendor/github.com/go-openapi/analysis/analyzer_test.go
+++ /dev/null
@@ -1,284 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package analysis
-
-import (
- "encoding/json"
- "fmt"
- "path/filepath"
- "sort"
- "testing"
-
- "github.com/go-openapi/loads/fmts"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
- "github.com/stretchr/testify/assert"
-)
-
-func schemeNames(schemes []SecurityRequirement) []string {
- var names []string
- for _, v := range schemes {
- names = append(names, v.Name)
- }
- sort.Sort(sort.StringSlice(names))
- return names
-}
-
-func TestAnalyzer(t *testing.T) {
- formatParam := spec.QueryParam("format").Typed("string", "")
-
- limitParam := spec.QueryParam("limit").Typed("integer", "int32")
- limitParam.Extensions = spec.Extensions(map[string]interface{}{})
- limitParam.Extensions.Add("go-name", "Limit")
-
- skipParam := spec.QueryParam("skip").Typed("integer", "int32")
- pi := spec.PathItem{}
- pi.Parameters = []spec.Parameter{*limitParam}
-
- op := &spec.Operation{}
- op.Consumes = []string{"application/x-yaml"}
- op.Produces = []string{"application/x-yaml"}
- op.Security = []map[string][]string{
- map[string][]string{"oauth2": []string{}},
- map[string][]string{"basic": nil},
- }
- op.ID = "someOperation"
- op.Parameters = []spec.Parameter{*skipParam}
- pi.Get = op
-
- pi2 := spec.PathItem{}
- pi2.Parameters = []spec.Parameter{*limitParam}
- op2 := &spec.Operation{}
- op2.ID = "anotherOperation"
- op2.Parameters = []spec.Parameter{*skipParam}
- pi2.Get = op2
-
- spec := &spec.Swagger{
- SwaggerProps: spec.SwaggerProps{
- Consumes: []string{"application/json"},
- Produces: []string{"application/json"},
- Security: []map[string][]string{
- map[string][]string{"apikey": nil},
- },
- SecurityDefinitions: map[string]*spec.SecurityScheme{
- "basic": spec.BasicAuth(),
- "apiKey": spec.APIKeyAuth("api_key", "query"),
- "oauth2": spec.OAuth2AccessToken("http://authorize.com", "http://token.com"),
- },
- Parameters: map[string]spec.Parameter{"format": *formatParam},
- Paths: &spec.Paths{
- Paths: map[string]spec.PathItem{
- "/": pi,
- "/items": pi2,
- },
- },
- },
- }
- analyzer := New(spec)
-
- assert.Len(t, analyzer.consumes, 2)
- assert.Len(t, analyzer.produces, 2)
- assert.Len(t, analyzer.operations, 1)
- assert.Equal(t, analyzer.operations["GET"]["/"], spec.Paths.Paths["/"].Get)
-
- expected := []string{"application/x-yaml"}
- sort.Sort(sort.StringSlice(expected))
- consumes := analyzer.ConsumesFor(spec.Paths.Paths["/"].Get)
- sort.Sort(sort.StringSlice(consumes))
- assert.Equal(t, expected, consumes)
-
- produces := analyzer.ProducesFor(spec.Paths.Paths["/"].Get)
- sort.Sort(sort.StringSlice(produces))
- assert.Equal(t, expected, produces)
-
- expected = []string{"application/json"}
- sort.Sort(sort.StringSlice(expected))
- consumes = analyzer.ConsumesFor(spec.Paths.Paths["/items"].Get)
- sort.Sort(sort.StringSlice(consumes))
- assert.Equal(t, expected, consumes)
-
- produces = analyzer.ProducesFor(spec.Paths.Paths["/items"].Get)
- sort.Sort(sort.StringSlice(produces))
- assert.Equal(t, expected, produces)
-
- expectedSchemes := []SecurityRequirement{SecurityRequirement{"oauth2", []string{}}, SecurityRequirement{"basic", nil}}
- schemes := analyzer.SecurityRequirementsFor(spec.Paths.Paths["/"].Get)
- assert.Equal(t, schemeNames(expectedSchemes), schemeNames(schemes))
-
- securityDefinitions := analyzer.SecurityDefinitionsFor(spec.Paths.Paths["/"].Get)
- assert.Equal(t, securityDefinitions["basic"], *spec.SecurityDefinitions["basic"])
- assert.Equal(t, securityDefinitions["oauth2"], *spec.SecurityDefinitions["oauth2"])
-
- parameters := analyzer.ParamsFor("GET", "/")
- assert.Len(t, parameters, 2)
-
- operations := analyzer.OperationIDs()
- assert.Len(t, operations, 2)
-
- producers := analyzer.RequiredProduces()
- assert.Len(t, producers, 2)
- consumers := analyzer.RequiredConsumes()
- assert.Len(t, consumers, 2)
- authSchemes := analyzer.RequiredSecuritySchemes()
- assert.Len(t, authSchemes, 3)
-
- ops := analyzer.Operations()
- assert.Len(t, ops, 1)
- assert.Len(t, ops["GET"], 2)
-
- op, ok := analyzer.OperationFor("get", "/")
- assert.True(t, ok)
- assert.NotNil(t, op)
-
- op, ok = analyzer.OperationFor("delete", "/")
- assert.False(t, ok)
- assert.Nil(t, op)
-}
-
-func TestDefinitionAnalysis(t *testing.T) {
- doc, err := loadSpec(filepath.Join("fixtures", "definitions.yml"))
- if assert.NoError(t, err) {
- analyzer := New(doc)
- definitions := analyzer.allSchemas
- // parameters
- assertSchemaRefExists(t, definitions, "#/parameters/someParam/schema")
- assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/parameters/1/schema")
- assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/parameters/1/schema")
- // responses
- assertSchemaRefExists(t, definitions, "#/responses/someResponse/schema")
- assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/default/schema")
- assertSchemaRefExists(t, definitions, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
- // definitions
- assertSchemaRefExists(t, definitions, "#/definitions/tag")
- assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/id")
- assertSchemaRefExists(t, definitions, "#/definitions/tag/properties/value")
- assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category")
- assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/id")
- assertSchemaRefExists(t, definitions, "#/definitions/tag/definitions/category/properties/value")
- assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps")
- assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalProps/additionalProperties")
- assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems")
- assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/0")
- assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/items/1")
- assertSchemaRefExists(t, definitions, "#/definitions/withAdditionalItems/additionalItems")
- assertSchemaRefExists(t, definitions, "#/definitions/withNot")
- assertSchemaRefExists(t, definitions, "#/definitions/withNot/not")
- assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf")
- assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/0")
- assertSchemaRefExists(t, definitions, "#/definitions/withAnyOf/anyOf/1")
- assertSchemaRefExists(t, definitions, "#/definitions/withAllOf")
- assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/0")
- assertSchemaRefExists(t, definitions, "#/definitions/withAllOf/allOf/1")
- allOfs := analyzer.allOfs
- assert.Len(t, allOfs, 1)
- assert.Contains(t, allOfs, "#/definitions/withAllOf")
- }
-}
-
-func loadSpec(path string) (*spec.Swagger, error) {
- spec.PathLoader = func(path string) (json.RawMessage, error) {
- ext := filepath.Ext(path)
- if ext == ".yml" || ext == ".yaml" {
- return fmts.YAMLDoc(path)
- }
- data, err := swag.LoadFromFileOrHTTP(path)
- if err != nil {
- return nil, err
- }
- return json.RawMessage(data), nil
- }
- data, err := fmts.YAMLDoc(path)
- if err != nil {
- return nil, err
- }
-
- var sw spec.Swagger
- if err := json.Unmarshal(data, &sw); err != nil {
- return nil, err
- }
- return &sw, nil
-}
-
-func TestReferenceAnalysis(t *testing.T) {
- doc, err := loadSpec(filepath.Join("fixtures", "references.yml"))
- if assert.NoError(t, err) {
- definitions := New(doc).references
-
- // parameters
- assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/parameters/0")
- assertRefExists(t, definitions.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0")
-
- // path items
- assertRefExists(t, definitions.pathItems, "#/paths/~1other~1place")
-
- // responses
- assertRefExists(t, definitions.responses, "#/paths/~1some~1where~1{id}/get/responses/404")
-
- // definitions
- assertRefExists(t, definitions.schemas, "#/responses/notFound/schema")
- assertRefExists(t, definitions.schemas, "#/paths/~1some~1where~1{id}/get/responses/200/schema")
- assertRefExists(t, definitions.schemas, "#/definitions/tag/properties/audit")
-
- // items
- assertRefExists(t, definitions.allRefs, "#/paths/~1some~1where~1{id}/get/parameters/1/items")
- }
-}
-
-func assertRefExists(t testing.TB, data map[string]spec.Ref, key string) bool {
- if _, ok := data[key]; !ok {
- return assert.Fail(t, fmt.Sprintf("expected %q to exist in the ref bag", key))
- }
- return true
-}
-
-func assertSchemaRefExists(t testing.TB, data map[string]SchemaRef, key string) bool {
- if _, ok := data[key]; !ok {
- return assert.Fail(t, fmt.Sprintf("expected %q to exist in schema ref bag", key))
- }
- return true
-}
-
-func TestPatternAnalysis(t *testing.T) {
- doc, err := loadSpec(filepath.Join("fixtures", "patterns.yml"))
- if assert.NoError(t, err) {
- pt := New(doc).patterns
-
- // parameters
- assertPattern(t, pt.parameters, "#/parameters/idParam", "a[A-Za-Z0-9]+")
- assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/parameters/1", "b[A-Za-z0-9]+")
- assertPattern(t, pt.parameters, "#/paths/~1some~1where~1{id}/get/parameters/0", "[abc][0-9]+")
-
- // responses
- assertPattern(t, pt.headers, "#/responses/notFound/headers/ContentLength", "[0-9]+")
- assertPattern(t, pt.headers, "#/paths/~1some~1where~1{id}/get/responses/200/headers/X-Request-Id", "d[A-Za-z0-9]+")
-
- // definitions
- assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/parameters/0/schema/properties/value", "e[A-Za-z0-9]+")
- assertPattern(t, pt.schemas, "#/paths/~1other~1place/post/responses/200/schema/properties/data", "[0-9]+[abd]")
- assertPattern(t, pt.schemas, "#/definitions/named", "f[A-Za-z0-9]+")
- assertPattern(t, pt.schemas, "#/definitions/tag/properties/value", "g[A-Za-z0-9]+")
-
- // items
- assertPattern(t, pt.items, "#/paths/~1some~1where~1{id}/get/parameters/1/items", "c[A-Za-z0-9]+")
- assertPattern(t, pt.items, "#/paths/~1other~1place/post/responses/default/headers/Via/items", "[A-Za-z]+")
- }
-}
-
-func assertPattern(t testing.TB, data map[string]string, key, pattern string) bool {
- if assert.Contains(t, data, key) {
- return assert.Equal(t, pattern, data[key])
- }
- return false
-}
diff --git a/vendor/github.com/go-openapi/analysis/flatten.go b/vendor/github.com/go-openapi/analysis/flatten.go
deleted file mode 100644
index 703a0aa89..000000000
--- a/vendor/github.com/go-openapi/analysis/flatten.go
+++ /dev/null
@@ -1,800 +0,0 @@
-package analysis
-
-import (
- "fmt"
- "log"
- "net/http"
- "os"
- "path"
- "path/filepath"
- "sort"
- "strings"
-
- "strconv"
-
- "github.com/go-openapi/jsonpointer"
- swspec "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-// FlattenOpts configuration for flattening a swagger specification.
-type FlattenOpts struct {
- // If Expand is true, we skip flattening the spec and expand it instead
- Expand bool
- Spec *Spec
- BasePath string
-
- _ struct{} // require keys
-}
-
-// ExpandOpts creates a spec.ExpandOptions to configure expanding a specification document.
-func (f *FlattenOpts) ExpandOpts(skipSchemas bool) *swspec.ExpandOptions {
- return &swspec.ExpandOptions{RelativeBase: f.BasePath, SkipSchemas: skipSchemas}
-}
-
-// Swagger gets the swagger specification for this flatten operation
-func (f *FlattenOpts) Swagger() *swspec.Swagger {
- return f.Spec.spec
-}
-
-// Flatten an analyzed spec.
-//
-// To flatten a spec means:
-//
-// Expand the parameters, responses, path items, parameter items and header items.
-// Import external (http, file) references so they become internal to the document.
-// Move every inline schema to be a definition with an auto-generated name in a depth-first fashion.
-// Rewritten schemas get a vendor extension x-go-gen-location so we know in which package they need to be rendered.
-func Flatten(opts FlattenOpts) error {
- // Make sure opts.BasePath is an absolute path
- if !filepath.IsAbs(opts.BasePath) {
- cwd, _ := os.Getwd()
- opts.BasePath = filepath.Join(cwd, opts.BasePath)
- }
- // recursively expand responses, parameters, path items and items
- err := swspec.ExpandSpec(opts.Swagger(), &swspec.ExpandOptions{
- RelativeBase: opts.BasePath,
- SkipSchemas: !opts.Expand,
- })
- if err != nil {
- return err
- }
- opts.Spec.reload() // re-analyze
-
- // at this point there are no other references left but schemas
- if err := importExternalReferences(&opts); err != nil {
- return err
- }
- opts.Spec.reload() // re-analyze
-
- // rewrite the inline schemas (schemas that aren't simple types or arrays of simple types)
- if err := nameInlinedSchemas(&opts); err != nil {
- return err
- }
- opts.Spec.reload() // re-analyze
-
-	// TODO: simplify known schema patterns to flat objects with properties?
- return nil
-}
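-
-// flattenUsageSketch is an illustrative example added for clarity; it is not
-// part of the upstream package API. It shows the typical call pattern for
-// Flatten, assuming the caller already holds a *swspec.Swagger and the path
-// of the file it was loaded from (Flatten makes the path absolute if needed).
-func flattenUsageSketch(sw *swspec.Swagger, basePath string) error {
-	// New(sw) builds the analyzed Spec; BasePath anchors the resolution of
-	// relative $ref values during expansion and import of external refs.
-	return Flatten(FlattenOpts{
-		Spec:     New(sw),
-		BasePath: basePath,
-	})
-}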
-
-func nameInlinedSchemas(opts *FlattenOpts) error {
- namer := &inlineSchemaNamer{Spec: opts.Swagger(), Operations: opRefsByRef(gatherOperations(opts.Spec, nil))}
- depthFirst := sortDepthFirst(opts.Spec.allSchemas)
- for _, key := range depthFirst {
- sch := opts.Spec.allSchemas[key]
- if sch.Schema != nil && sch.Schema.Ref.String() == "" && !sch.TopLevel { // inline schema
- asch, err := Schema(SchemaOpts{Schema: sch.Schema, Root: opts.Swagger(), BasePath: opts.BasePath})
- if err != nil {
- return fmt.Errorf("schema analysis [%s]: %v", sch.Ref.String(), err)
- }
-
- if !asch.IsSimpleSchema { // complex schemas get moved
- if err := namer.Name(key, sch.Schema, asch); err != nil {
- return err
- }
- }
- }
- }
- return nil
-}
-
-var depthGroupOrder = []string{"sharedOpParam", "opParam", "codeResponse", "defaultResponse", "definition"}
-
-func sortDepthFirst(data map[string]SchemaRef) (sorted []string) {
-	// group by category (shared params, op param, status code response, default response, definitions)
- // sort groups internally by number of parts in the key and lexical names
- // flatten groups into a single list of keys
- grouped := make(map[string]keys, len(data))
- for k := range data {
- split := keyParts(k)
- var pk string
- if split.IsSharedOperationParam() {
- pk = "sharedOpParam"
- }
- if split.IsOperationParam() {
- pk = "opParam"
- }
- if split.IsStatusCodeResponse() {
- pk = "codeResponse"
- }
- if split.IsDefaultResponse() {
- pk = "defaultResponse"
- }
- if split.IsDefinition() {
- pk = "definition"
- }
- grouped[pk] = append(grouped[pk], key{len(split), k})
- }
-
- for _, pk := range depthGroupOrder {
- res := grouped[pk]
- sort.Sort(res)
- for _, v := range res {
- sorted = append(sorted, v.Key)
- }
- }
-
- return
-}
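-
-// depthFirstUsageSketch is an illustrative example added for clarity; it is
-// not part of the upstream package. Keys are grouped by category and, within
-// each group, deeper keys come out before their parents, so nested inline
-// schemas get named before the schemas that contain them.
-func depthFirstUsageSketch(a *Spec) []string {
-	ordered := sortDepthFirst(a.allSchemas)
-	// e.g. "#/definitions/thing/properties/name" sorts before "#/definitions/thing"
-	return ordered
-}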
-
-type key struct {
- Segments int
- Key string
-}
-type keys []key
-
-func (k keys) Len() int { return len(k) }
-func (k keys) Swap(i, j int) { k[i], k[j] = k[j], k[i] }
-func (k keys) Less(i, j int) bool {
- return k[i].Segments > k[j].Segments || (k[i].Segments == k[j].Segments && k[i].Key < k[j].Key)
-}
-
-type inlineSchemaNamer struct {
- Spec *swspec.Swagger
- Operations map[string]opRef
-}
-
-func opRefsByRef(oprefs map[string]opRef) map[string]opRef {
- result := make(map[string]opRef, len(oprefs))
- for _, v := range oprefs {
- result[v.Ref.String()] = v
- }
- return result
-}
-
-func (isn *inlineSchemaNamer) Name(key string, schema *swspec.Schema, aschema *AnalyzedSchema) error {
- if swspec.Debug {
- log.Printf("naming inlined schema at %s", key)
- }
-
- parts := keyParts(key)
- for _, name := range namesFromKey(parts, aschema, isn.Operations) {
- if name != "" {
- // create unique name
- newName := uniqifyName(isn.Spec.Definitions, swag.ToJSONName(name))
-
- // clone schema
- sch, err := cloneSchema(schema)
- if err != nil {
- return err
- }
-
- // replace values on schema
- if err := rewriteSchemaToRef(isn.Spec, key, swspec.MustCreateRef("#/definitions/"+newName)); err != nil {
- return fmt.Errorf("name inlined schema: %v", err)
- }
-
- sch.AddExtension("x-go-gen-location", genLocation(parts))
- // fmt.Printf("{\n %q,\n \"\",\n spec.MustCreateRef(%q),\n \"\",\n},\n", key, "#/definitions/"+newName)
- // save cloned schema to definitions
- saveSchema(isn.Spec, newName, sch)
- }
- }
- return nil
-}
-
-func genLocation(parts splitKey) string {
- if parts.IsOperation() {
- return "operations"
- }
- if parts.IsDefinition() {
- return "models"
- }
- return ""
-}
-
-func uniqifyName(definitions swspec.Definitions, name string) string {
- if name == "" {
- name = "oaiGen"
- }
- if len(definitions) == 0 {
- return name
- }
-
- unq := true
- for k := range definitions {
- if strings.ToLower(k) == strings.ToLower(name) {
- unq = false
- break
- }
- }
-
- if unq {
- return name
- }
-
- name += "OAIGen"
- var idx int
- unique := name
- _, known := definitions[unique]
- for known {
- idx++
- unique = fmt.Sprintf("%s%d", name, idx)
- _, known = definitions[unique]
- }
- return unique
-}
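-
-// uniqifyNameUsageSketch is an illustrative example added for clarity; it is
-// not part of the upstream package. With an existing "errorModel" definition,
-// the proposed name collides (case-insensitively) and gets suffixed.
-func uniqifyNameUsageSketch(defs swspec.Definitions) string {
-	// e.g. with defs = {"errorModel": ...}, this returns "errorModelOAIGen".
-	return uniqifyName(defs, "errorModel")
-}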
-
-func namesFromKey(parts splitKey, aschema *AnalyzedSchema, operations map[string]opRef) []string {
- var baseNames [][]string
- var startIndex int
- if parts.IsOperation() {
- // params
- if parts.IsOperationParam() || parts.IsSharedOperationParam() {
- piref := parts.PathItemRef()
- if piref.String() != "" && parts.IsOperationParam() {
- if op, ok := operations[piref.String()]; ok {
- startIndex = 5
- baseNames = append(baseNames, []string{op.ID, "params", "body"})
- }
- } else if parts.IsSharedOperationParam() {
- pref := parts.PathRef()
- for k, v := range operations {
- if strings.HasPrefix(k, pref.String()) {
- startIndex = 4
- baseNames = append(baseNames, []string{v.ID, "params", "body"})
- }
- }
- }
- }
- // responses
- if parts.IsOperationResponse() {
- piref := parts.PathItemRef()
- if piref.String() != "" {
- if op, ok := operations[piref.String()]; ok {
- startIndex = 6
- baseNames = append(baseNames, []string{op.ID, parts.ResponseName(), "body"})
- }
- }
- }
- }
-
- // definitions
- if parts.IsDefinition() {
- nm := parts.DefinitionName()
- if nm != "" {
- startIndex = 2
- baseNames = append(baseNames, []string{parts.DefinitionName()})
- }
- }
-
- var result []string
- for _, segments := range baseNames {
- nm := parts.BuildName(segments, startIndex, aschema)
- if nm != "" {
- result = append(result, nm)
- }
- }
- sort.Strings(result)
- return result
-}
-
-const (
- pths = "paths"
- responses = "responses"
- parameters = "parameters"
- definitions = "definitions"
-)
-
-var ignoredKeys map[string]struct{}
-
-func init() {
- ignoredKeys = map[string]struct{}{
- "schema": {},
- "properties": {},
- "not": {},
- "anyOf": {},
- "oneOf": {},
- }
-}
-
-type splitKey []string
-
-func (s splitKey) IsDefinition() bool {
- return len(s) > 1 && s[0] == definitions
-}
-
-func (s splitKey) DefinitionName() string {
- if !s.IsDefinition() {
- return ""
- }
- return s[1]
-}
-
-func (s splitKey) isKeyName(i int) bool {
- if i <= 0 {
- return false
- }
- count := 0
- for idx := i - 1; idx > 0; idx-- {
- if s[idx] != "properties" {
- break
- }
- count++
- }
-
-	return count%2 != 0
-}
-
-func (s splitKey) BuildName(segments []string, startIndex int, aschema *AnalyzedSchema) string {
- for i, part := range s[startIndex:] {
- if _, ignored := ignoredKeys[part]; !ignored || s.isKeyName(startIndex+i) {
- if part == "items" || part == "additionalItems" {
- if aschema.IsTuple || aschema.IsTupleWithExtra {
- segments = append(segments, "tuple")
- } else {
- segments = append(segments, "items")
- }
- if part == "additionalItems" {
- segments = append(segments, part)
- }
- continue
- }
- segments = append(segments, part)
- }
- }
- return strings.Join(segments, " ")
-}
-
-func (s splitKey) IsOperation() bool {
- return len(s) > 1 && s[0] == pths
-}
-
-func (s splitKey) IsSharedOperationParam() bool {
- return len(s) > 2 && s[0] == pths && s[2] == parameters
-}
-
-func (s splitKey) IsOperationParam() bool {
- return len(s) > 3 && s[0] == pths && s[3] == parameters
-}
-
-func (s splitKey) IsOperationResponse() bool {
- return len(s) > 3 && s[0] == pths && s[3] == responses
-}
-
-func (s splitKey) IsDefaultResponse() bool {
- return len(s) > 4 && s[0] == pths && s[3] == responses && s[4] == "default"
-}
-
-func (s splitKey) IsStatusCodeResponse() bool {
- isInt := func() bool {
- _, err := strconv.Atoi(s[4])
- return err == nil
- }
- return len(s) > 4 && s[0] == pths && s[3] == responses && isInt()
-}
-
-func (s splitKey) ResponseName() string {
- if s.IsStatusCodeResponse() {
- code, _ := strconv.Atoi(s[4])
- return http.StatusText(code)
- }
- if s.IsDefaultResponse() {
- return "Default"
- }
- return ""
-}
-
-var validMethods map[string]struct{}
-
-func init() {
- validMethods = map[string]struct{}{
- "GET": {},
- "HEAD": {},
- "OPTIONS": {},
- "PATCH": {},
- "POST": {},
- "PUT": {},
- "DELETE": {},
- }
-}
-
-func (s splitKey) PathItemRef() swspec.Ref {
- if len(s) < 3 {
- return swspec.Ref{}
- }
- pth, method := s[1], s[2]
- if _, validMethod := validMethods[strings.ToUpper(method)]; !validMethod && !strings.HasPrefix(method, "x-") {
- return swspec.Ref{}
- }
- return swspec.MustCreateRef("#" + path.Join("/", pths, jsonpointer.Escape(pth), strings.ToUpper(method)))
-}
-
-func (s splitKey) PathRef() swspec.Ref {
- if !s.IsOperation() {
- return swspec.Ref{}
- }
- return swspec.MustCreateRef("#" + path.Join("/", pths, jsonpointer.Escape(s[1])))
-}
-
-func keyParts(key string) splitKey {
- var res []string
- for _, part := range strings.Split(key[1:], "/") {
- if part != "" {
- res = append(res, jsonpointer.Unescape(part))
- }
- }
- return res
-}
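-
-// keyPartsUsageSketch is an illustrative example added for clarity; it is not
-// part of the upstream package. It shows how an analyzed key (a JSON pointer
-// into the spec) is split into parts and then classified.
-func keyPartsUsageSketch() bool {
-	parts := keyParts("#/paths/~1some~1where~1{id}/get/responses/200/schema")
-	// parts is ["paths", "/some/where/{id}", "get", "responses", "200", "schema"]
-	return parts.IsOperation() &&
-		parts.IsOperationResponse() &&
-		parts.IsStatusCodeResponse() &&
-		parts.ResponseName() == "OK"
-}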
-
-func rewriteSchemaToRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
- if swspec.Debug {
- log.Printf("rewriting schema to ref for %s with %s", key, ref.String())
- }
- pth := key[1:]
- ptr, err := jsonpointer.New(pth)
- if err != nil {
- return err
- }
-
- value, _, err := ptr.Get(spec)
- if err != nil {
- return err
- }
-
- switch refable := value.(type) {
- case *swspec.Schema:
- return rewriteParentRef(spec, key, ref)
- case *swspec.SchemaOrBool:
- if refable.Schema != nil {
- refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- }
- case *swspec.SchemaOrArray:
- if refable.Schema != nil {
- refable.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- }
- case swspec.Schema:
- return rewriteParentRef(spec, key, ref)
- default:
- return fmt.Errorf("no schema with ref found at %s for %T", key, value)
- }
-
- return nil
-}
-
-func rewriteParentRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
- pth := key[1:]
- parent, entry := path.Dir(pth), path.Base(pth)
- if swspec.Debug {
- log.Println("getting schema holder at:", parent)
- }
-
- pptr, err := jsonpointer.New(parent)
- if err != nil {
- return err
- }
- pvalue, _, err := pptr.Get(spec)
- if err != nil {
- return fmt.Errorf("can't get parent for %s: %v", parent, err)
- }
- if swspec.Debug {
- log.Printf("rewriting holder for %T", pvalue)
- }
-
- switch container := pvalue.(type) {
- case swspec.Response:
- if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
- return err
- }
-
- case *swspec.Response:
- container.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case *swspec.Responses:
- statusCode, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %v", pth, err)
- }
- resp := container.StatusCodeResponses[statusCode]
- resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- container.StatusCodeResponses[statusCode] = resp
-
- case map[string]swspec.Response:
- resp := container[entry]
- resp.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- container[entry] = resp
-
- case swspec.Parameter:
- if err := rewriteParentRef(spec, "#"+parent, ref); err != nil {
- return err
- }
-
- case map[string]swspec.Parameter:
- param := container[entry]
- param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- container[entry] = param
-
- case []swspec.Parameter:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %v", pth, err)
- }
- param := container[idx]
- param.Schema = &swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- container[idx] = param
-
- case swspec.Definitions:
- container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case map[string]swspec.Schema:
- container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case []swspec.Schema:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %v", pth, err)
- }
- container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case *swspec.SchemaOrArray:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %v", pth, err)
- }
- container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
- default:
- return fmt.Errorf("unhandled parent schema rewrite %s (%T)", key, pvalue)
- }
- return nil
-}
-
-func cloneSchema(schema *swspec.Schema) (*swspec.Schema, error) {
- var sch swspec.Schema
- if err := swag.FromDynamicJSON(schema, &sch); err != nil {
- return nil, fmt.Errorf("name inlined schema: %v", err)
- }
- return &sch, nil
-}
-
-func importExternalReferences(opts *FlattenOpts) error {
- groupedRefs := reverseIndexForSchemaRefs(opts)
-
- for refStr, entry := range groupedRefs {
- if !entry.Ref.HasFragmentOnly {
- if swspec.Debug {
- log.Printf("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
- }
- // resolve to actual schema
- sch := new(swspec.Schema)
- sch.Ref = entry.Ref
- expandOpts := swspec.ExpandOptions{
- RelativeBase: opts.BasePath,
- SkipSchemas: false,
- }
- err := swspec.ExpandSchemaWithBasePath(sch, nil, &expandOpts)
- if err != nil {
- return err
- }
- if sch == nil {
- return fmt.Errorf("no schema found at %s for [%s]", refStr, strings.Join(entry.Keys, ", "))
- }
- if swspec.Debug {
- log.Printf("importing external schema for [%s] from %s", strings.Join(entry.Keys, ", "), refStr)
- }
-
- // generate a unique name
- newName := uniqifyName(opts.Swagger().Definitions, nameFromRef(entry.Ref))
- if swspec.Debug {
- log.Printf("new name for [%s]: %s", strings.Join(entry.Keys, ", "), newName)
- }
-
- // rewrite the external refs to local ones
- for _, key := range entry.Keys {
- if err := updateRef(opts.Swagger(), key, swspec.MustCreateRef("#"+path.Join("/definitions", newName))); err != nil {
- return err
- }
- }
-
- // add the resolved schema to the definitions
- saveSchema(opts.Swagger(), newName, sch)
- }
- }
- return nil
-}
-
-type refRevIdx struct {
- Ref swspec.Ref
- Keys []string
-}
-
-func reverseIndexForSchemaRefs(opts *FlattenOpts) map[string]refRevIdx {
- collected := make(map[string]refRevIdx)
- for key, schRef := range opts.Spec.references.schemas {
- if entry, ok := collected[schRef.String()]; ok {
- entry.Keys = append(entry.Keys, key)
- collected[schRef.String()] = entry
- } else {
- collected[schRef.String()] = refRevIdx{
- Ref: schRef,
- Keys: []string{key},
- }
- }
- }
- return collected
-}
-
-func nameFromRef(ref swspec.Ref) string {
- u := ref.GetURL()
- if u.Fragment != "" {
- return swag.ToJSONName(path.Base(u.Fragment))
- }
- if u.Path != "" {
- bn := path.Base(u.Path)
- if bn != "" && bn != "/" {
- ext := path.Ext(bn)
- if ext != "" {
- return swag.ToJSONName(bn[:len(bn)-len(ext)])
- }
- return swag.ToJSONName(bn)
- }
- }
- return swag.ToJSONName(strings.Replace(u.Host, ".", " ", -1))
-}
-
-func saveSchema(spec *swspec.Swagger, name string, schema *swspec.Schema) {
- if schema == nil {
- return
- }
- if spec.Definitions == nil {
- spec.Definitions = make(map[string]swspec.Schema, 150)
- }
- spec.Definitions[name] = *schema
-}
-
-func updateRef(spec *swspec.Swagger, key string, ref swspec.Ref) error {
- if swspec.Debug {
- log.Printf("updating ref for %s with %s", key, ref.String())
- }
- pth := key[1:]
- ptr, err := jsonpointer.New(pth)
- if err != nil {
- return err
- }
-
- value, _, err := ptr.Get(spec)
- if err != nil {
- return err
- }
-
- switch refable := value.(type) {
- case *swspec.Schema:
- refable.Ref = ref
- case *swspec.SchemaOrBool:
- if refable.Schema != nil {
- refable.Schema.Ref = ref
- }
- case *swspec.SchemaOrArray:
- if refable.Schema != nil {
- refable.Schema.Ref = ref
- }
- case swspec.Schema:
- parent, entry := path.Dir(pth), path.Base(pth)
- if swspec.Debug {
- log.Println("getting schema holder at:", parent)
- }
-
- pptr, err := jsonpointer.New(parent)
- if err != nil {
- return err
- }
- pvalue, _, err := pptr.Get(spec)
- if err != nil {
- return fmt.Errorf("can't get parent for %s: %v", parent, err)
- }
-
- switch container := pvalue.(type) {
- case swspec.Definitions:
- container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case map[string]swspec.Schema:
- container[entry] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case []swspec.Schema:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %v", pth, err)
- }
- container[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- case *swspec.SchemaOrArray:
- idx, err := strconv.Atoi(entry)
- if err != nil {
- return fmt.Errorf("%s not a number: %v", pth, err)
- }
- container.Schemas[idx] = swspec.Schema{SchemaProps: swspec.SchemaProps{Ref: ref}}
-
- }
-
- default:
- return fmt.Errorf("no schema with ref found at %s for %T", key, value)
- }
-
- return nil
-}
-
-func containsString(names []string, name string) bool {
- for _, nm := range names {
- if nm == name {
- return true
- }
- }
- return false
-}
-
-type opRef struct {
- Method string
- Path string
- Key string
- ID string
- Op *swspec.Operation
- Ref swspec.Ref
-}
-
-type opRefs []opRef
-
-func (o opRefs) Len() int { return len(o) }
-func (o opRefs) Swap(i, j int) { o[i], o[j] = o[j], o[i] }
-func (o opRefs) Less(i, j int) bool { return o[i].Key < o[j].Key }
-
-func gatherOperations(specDoc *Spec, operationIDs []string) map[string]opRef {
- var oprefs opRefs
-
- for method, pathItem := range specDoc.Operations() {
- for pth, operation := range pathItem {
- vv := *operation
- oprefs = append(oprefs, opRef{
- Key: swag.ToGoName(strings.ToLower(method) + " " + pth),
- Method: method,
- Path: pth,
- ID: vv.ID,
- Op: &vv,
- Ref: swspec.MustCreateRef("#" + path.Join("/paths", jsonpointer.Escape(pth), method)),
- })
- }
- }
-
- sort.Sort(oprefs)
-
- operations := make(map[string]opRef)
- for _, opr := range oprefs {
- nm := opr.ID
- if nm == "" {
- nm = opr.Key
- }
-
- oo, found := operations[nm]
- if found && oo.Method != opr.Method && oo.Path != opr.Path {
- nm = opr.Key
- }
- if len(operationIDs) == 0 || containsString(operationIDs, opr.ID) || containsString(operationIDs, nm) {
- opr.ID = nm
- opr.Op.ID = nm
- operations[nm] = opr
- }
- }
-
- return operations
-}
diff --git a/vendor/github.com/go-openapi/analysis/flatten_test.go b/vendor/github.com/go-openapi/analysis/flatten_test.go
deleted file mode 100644
index dc7bd4934..000000000
--- a/vendor/github.com/go-openapi/analysis/flatten_test.go
+++ /dev/null
@@ -1,826 +0,0 @@
-package analysis
-
-import (
- "os"
- "path/filepath"
- "strings"
- "testing"
-
- "github.com/go-openapi/jsonpointer"
- "github.com/go-openapi/spec"
- "github.com/stretchr/testify/assert"
-)
-
-func TestSaveDefinition(t *testing.T) {
- sp := &spec.Swagger{}
- saveSchema(sp, "theName", spec.StringProperty())
- assert.Contains(t, sp.Definitions, "theName")
-}
-
-func TestNameFromRef(t *testing.T) {
- values := []struct{ Source, Expected string }{
- {"#/definitions/errorModel", "errorModel"},
- {"http://somewhere.com/definitions/errorModel", "errorModel"},
- {"http://somewhere.com/definitions/errorModel.json", "errorModel"},
- {"/definitions/errorModel", "errorModel"},
- {"/definitions/errorModel.json", "errorModel"},
- {"http://somewhere.com", "somewhereCom"},
- {"#", ""},
- }
-
- for _, v := range values {
- assert.Equal(t, v.Expected, nameFromRef(spec.MustCreateRef(v.Source)))
- }
-}
-
-func TestDefinitionName(t *testing.T) {
- values := []struct {
- Source, Expected string
- Definitions spec.Definitions
- }{
- {"#/definitions/errorModel", "errorModel", map[string]spec.Schema(nil)},
- {"http://somewhere.com/definitions/errorModel", "errorModel", map[string]spec.Schema(nil)},
- {"#/definitions/errorModel", "errorModel", map[string]spec.Schema{"apples": *spec.StringProperty()}},
- {"#/definitions/errorModel", "errorModelOAIGen", map[string]spec.Schema{"errorModel": *spec.StringProperty()}},
- {"#/definitions/errorModel", "errorModelOAIGen1", map[string]spec.Schema{"errorModel": *spec.StringProperty(), "errorModelOAIGen": *spec.StringProperty()}},
- {"#", "oaiGen", nil},
- }
-
- for _, v := range values {
- assert.Equal(t, v.Expected, uniqifyName(v.Definitions, nameFromRef(spec.MustCreateRef(v.Source))))
- }
-}
-
-func TestUpdateRef(t *testing.T) {
- bp := filepath.Join("fixtures", "external_definitions.yml")
- sp, err := loadSpec(bp)
- if assert.NoError(t, err) {
-
- values := []struct {
- Key string
- Ref spec.Ref
- }{
- {"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
- {"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
- }
-
- for _, v := range values {
- err := updateRef(sp, v.Key, v.Ref)
- if assert.NoError(t, err) {
- ptr, err := jsonpointer.New(v.Key[1:])
- if assert.NoError(t, err) {
- vv, _, err := ptr.Get(sp)
-
- if assert.NoError(t, err) {
- switch tv := vv.(type) {
- case *spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String())
- case spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String())
- case *spec.SchemaOrBool:
- assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String())
- case *spec.SchemaOrArray:
- assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String())
- default:
- assert.Fail(t, "unknown type", "got %T", vv)
- }
- }
- }
- }
- }
- }
-}
-
-func TestImportExternalReferences(t *testing.T) {
- bp := filepath.Join(".", "fixtures", "external_definitions.yml")
- sp, err := loadSpec(bp)
- if assert.NoError(t, err) {
-
- values := []struct {
- Key string
- Ref spec.Ref
- }{
- {"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
- {"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
- }
- for _, v := range values {
-			// Technically it isn't necessary to run the import for each value, but
-			// it is expected to be idempotent, so repeating it shouldn't matter;
-			// running it inside the loop validates that behavior.
- err := importExternalReferences(&FlattenOpts{
- Spec: New(sp),
- BasePath: bp,
- })
-
- if assert.NoError(t, err) {
-
- ptr, err := jsonpointer.New(v.Key[1:])
- if assert.NoError(t, err) {
- vv, _, err := ptr.Get(sp)
-
- if assert.NoError(t, err) {
- switch tv := vv.(type) {
- case *spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String(), "for %s", v.Key)
- case spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String(), "for %s", v.Key)
- case *spec.SchemaOrBool:
- assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "for %s", v.Key)
- case *spec.SchemaOrArray:
- assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "for %s", v.Key)
- default:
- assert.Fail(t, "unknown type", "got %T", vv)
- }
- }
- }
- }
- }
- assert.Len(t, sp.Definitions, 11)
- assert.Contains(t, sp.Definitions, "tag")
- assert.Contains(t, sp.Definitions, "named")
- assert.Contains(t, sp.Definitions, "record")
- }
-}
-
-func TestRewriteSchemaRef(t *testing.T) {
- bp := filepath.Join("fixtures", "inline_schemas.yml")
- sp, err := loadSpec(bp)
- if assert.NoError(t, err) {
-
- values := []struct {
- Key string
- Ref spec.Ref
- }{
- {"#/parameters/someParam/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/responses/someResponse/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/namedAgain", spec.MustCreateRef("#/definitions/named")},
- {"#/definitions/datedTag/allOf/1", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/record")},
- {"#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tag")},
- {"#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/named")},
- }
-
- for i, v := range values {
- err := rewriteSchemaToRef(sp, v.Key, v.Ref)
- if assert.NoError(t, err) {
- ptr, err := jsonpointer.New(v.Key[1:])
- if assert.NoError(t, err) {
- vv, _, err := ptr.Get(sp)
-
- if assert.NoError(t, err) {
- switch tv := vv.(type) {
- case *spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
- case spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
- case *spec.SchemaOrBool:
- assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "at %d for %s", i, v.Key)
- case *spec.SchemaOrArray:
- assert.Equal(t, v.Ref.String(), tv.Schema.Ref.String(), "at %d for %s", i, v.Key)
- default:
- assert.Fail(t, "unknown type", "got %T", vv)
- }
- }
- }
- }
- }
- }
-}
-
-func TestSplitKey(t *testing.T) {
-
- type KeyFlag uint64
-
- const (
- isOperation KeyFlag = 1 << iota
- isDefinition
- isSharedOperationParam
- isOperationParam
- isOperationResponse
- isDefaultResponse
- isStatusCodeResponse
- )
-
- values := []struct {
- Key string
- Flags KeyFlag
- PathItemRef spec.Ref
- PathRef spec.Ref
- Name string
- }{
- {
- "#/paths/~1some~1where~1{id}/parameters/1/schema",
- isOperation | isSharedOperationParam,
- spec.Ref{},
- spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
- "",
- },
- {
- "#/paths/~1some~1where~1{id}/get/parameters/2/schema",
- isOperation | isOperationParam,
- spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
- spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
- "",
- },
- {
- "#/paths/~1some~1where~1{id}/get/responses/default/schema",
- isOperation | isOperationResponse | isDefaultResponse,
- spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
- spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
- "Default",
- },
- {
- "#/paths/~1some~1where~1{id}/get/responses/200/schema",
- isOperation | isOperationResponse | isStatusCodeResponse,
- spec.MustCreateRef("#/paths/~1some~1where~1{id}/GET"),
- spec.MustCreateRef("#/paths/~1some~1where~1{id}"),
- "OK",
- },
- {
- "#/definitions/namedAgain",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "namedAgain",
- },
- {
- "#/definitions/datedRecords/items/1",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "datedRecords",
- },
- {
- "#/definitions/datedRecords/items/1",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "datedRecords",
- },
- {
- "#/definitions/datedTaggedRecords/items/1",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "datedTaggedRecords",
- },
- {
- "#/definitions/datedTaggedRecords/additionalItems",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "datedTaggedRecords",
- },
- {
- "#/definitions/otherRecords/items",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "otherRecords",
- },
- {
- "#/definitions/tags/additionalProperties",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "tags",
- },
- {
- "#/definitions/namedThing/properties/name",
- isDefinition,
- spec.Ref{},
- spec.Ref{},
- "namedThing",
- },
- }
-
- for i, v := range values {
- parts := keyParts(v.Key)
- pref := parts.PathRef()
- piref := parts.PathItemRef()
- assert.Equal(t, v.PathRef.String(), pref.String(), "pathRef: %s at %d", v.Key, i)
- assert.Equal(t, v.PathItemRef.String(), piref.String(), "pathItemRef: %s at %d", v.Key, i)
-
- if v.Flags&isOperation != 0 {
- assert.True(t, parts.IsOperation(), "isOperation: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsOperation(), "isOperation: %s at %d", v.Key, i)
- }
- if v.Flags&isDefinition != 0 {
- assert.True(t, parts.IsDefinition(), "isDefinition: %s at %d", v.Key, i)
- assert.Equal(t, v.Name, parts.DefinitionName(), "definition name: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsDefinition(), "isDefinition: %s at %d", v.Key, i)
- if v.Name != "" {
- assert.Equal(t, v.Name, parts.ResponseName(), "response name: %s at %d", v.Key, i)
- }
- }
- if v.Flags&isOperationParam != 0 {
- assert.True(t, parts.IsOperationParam(), "isOperationParam: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsOperationParam(), "isOperationParam: %s at %d", v.Key, i)
- }
- if v.Flags&isSharedOperationParam != 0 {
- assert.True(t, parts.IsSharedOperationParam(), "isSharedOperationParam: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsSharedOperationParam(), "isSharedOperationParam: %s at %d", v.Key, i)
- }
- if v.Flags&isOperationResponse != 0 {
- assert.True(t, parts.IsOperationResponse(), "isOperationResponse: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsOperationResponse(), "isOperationResponse: %s at %d", v.Key, i)
- }
- if v.Flags&isDefaultResponse != 0 {
- assert.True(t, parts.IsDefaultResponse(), "isDefaultResponse: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsDefaultResponse(), "isDefaultResponse: %s at %d", v.Key, i)
- }
- if v.Flags&isStatusCodeResponse != 0 {
- assert.True(t, parts.IsStatusCodeResponse(), "isStatusCodeResponse: %s at %d", v.Key, i)
- } else {
- assert.False(t, parts.IsStatusCodeResponse(), "isStatusCodeResponse: %s at %d", v.Key, i)
- }
- }
-}
-
-func definitionPtr(key string) string {
- if !strings.HasPrefix(key, "#/definitions") {
- return key
- }
- return strings.Join(strings.Split(key, "/")[:3], "/")
-}
-
-func TestNamesFromKey(t *testing.T) {
- bp := filepath.Join("fixtures", "inline_schemas.yml")
- sp, err := loadSpec(bp)
- if assert.NoError(t, err) {
-
- values := []struct {
- Key string
- Names []string
- }{
- {"#/paths/~1some~1where~1{id}/parameters/1/schema", []string{"GetSomeWhereID params body", "PostSomeWhereID params body"}},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema", []string{"GetSomeWhereID params body"}},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema", []string{"GetSomeWhereID Default body"}},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema", []string{"GetSomeWhereID OK body"}},
- {"#/definitions/namedAgain", []string{"namedAgain"}},
- {"#/definitions/datedTag/allOf/1", []string{"datedTag allOf 1"}},
- {"#/definitions/datedRecords/items/1", []string{"datedRecords tuple 1"}},
- {"#/definitions/datedTaggedRecords/items/1", []string{"datedTaggedRecords tuple 1"}},
- {"#/definitions/datedTaggedRecords/additionalItems", []string{"datedTaggedRecords tuple additionalItems"}},
- {"#/definitions/otherRecords/items", []string{"otherRecords items"}},
- {"#/definitions/tags/additionalProperties", []string{"tags additionalProperties"}},
- {"#/definitions/namedThing/properties/name", []string{"namedThing name"}},
- }
-
- for i, v := range values {
- ptr, err := jsonpointer.New(definitionPtr(v.Key)[1:])
- if assert.NoError(t, err) {
- vv, _, err := ptr.Get(sp)
- if assert.NoError(t, err) {
- switch tv := vv.(type) {
- case *spec.Schema:
- aschema, err := Schema(SchemaOpts{Schema: tv, Root: sp, BasePath: bp})
- if assert.NoError(t, err) {
- names := namesFromKey(keyParts(v.Key), aschema, opRefsByRef(gatherOperations(New(sp), nil)))
- assert.Equal(t, v.Names, names, "for %s at %d", v.Key, i)
- }
- case spec.Schema:
- aschema, err := Schema(SchemaOpts{Schema: &tv, Root: sp, BasePath: bp})
- if assert.NoError(t, err) {
- names := namesFromKey(keyParts(v.Key), aschema, opRefsByRef(gatherOperations(New(sp), nil)))
- assert.Equal(t, v.Names, names, "for %s at %d", v.Key, i)
- }
- default:
- assert.Fail(t, "unknown type", "got %T", vv)
- }
- }
- }
- }
- }
-}
-
-func TestDepthFirstSort(t *testing.T) {
- bp := filepath.Join("fixtures", "inline_schemas.yml")
- sp, err := loadSpec(bp)
- values := []string{
- "#/paths/~1some~1where~1{id}/parameters/1/schema/properties/createdAt",
- "#/paths/~1some~1where~1{id}/parameters/1/schema",
- "#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/createdAt",
- "#/paths/~1some~1where~1{id}/get/parameters/2/schema",
- "#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/id",
- "#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/value",
- "#/paths/~1some~1where~1{id}/get/responses/200/schema",
- "#/paths/~1some~1where~1{id}/get/responses/404/schema",
- "#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/createdAt",
- "#/paths/~1some~1where~1{id}/get/responses/default/schema",
- "#/definitions/datedRecords/items/1/properties/createdAt",
- "#/definitions/datedTaggedRecords/items/1/properties/createdAt",
- "#/definitions/namedThing/properties/name/properties/id",
- "#/definitions/records/items/0/properties/createdAt",
- "#/definitions/datedTaggedRecords/additionalItems/properties/id",
- "#/definitions/datedTaggedRecords/additionalItems/properties/value",
- "#/definitions/otherRecords/items/properties/createdAt",
- "#/definitions/tags/additionalProperties/properties/id",
- "#/definitions/tags/additionalProperties/properties/value",
- "#/definitions/datedRecords/items/0",
- "#/definitions/datedRecords/items/1",
- "#/definitions/datedTag/allOf/0",
- "#/definitions/datedTag/allOf/1",
- "#/definitions/datedTag/properties/id",
- "#/definitions/datedTag/properties/value",
- "#/definitions/datedTaggedRecords/items/0",
- "#/definitions/datedTaggedRecords/items/1",
- "#/definitions/namedAgain/properties/id",
- "#/definitions/namedThing/properties/name",
- "#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism/properties/floccinaucinihilipilificationCreatedAt",
- "#/definitions/records/items/0",
- "#/definitions/datedTaggedRecords/additionalItems",
- "#/definitions/otherRecords/items",
- "#/definitions/tags/additionalProperties",
- "#/definitions/datedRecords",
- "#/definitions/datedTag",
- "#/definitions/datedTaggedRecords",
- "#/definitions/namedAgain",
- "#/definitions/namedThing",
- "#/definitions/otherRecords",
- "#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism",
- "#/definitions/records",
- "#/definitions/tags",
- }
- if assert.NoError(t, err) {
- a := New(sp)
- result := sortDepthFirst(a.allSchemas)
- assert.Equal(t, values, result)
- }
-}
-
-func TestBuildNameWithReservedKeyWord(t *testing.T) {
- s := splitKey([]string{"definitions", "fullview", "properties", "properties"})
- startIdx := 2
- segments := []string{"fullview"}
- newName := s.BuildName(segments, startIdx, nil)
- assert.Equal(t, "fullview properties", newName)
- s = splitKey([]string{"definitions", "fullview", "properties", "properties", "properties", "properties", "properties", "properties"})
- newName = s.BuildName(segments, startIdx, nil)
- assert.Equal(t, "fullview properties properties properties", newName)
-}
-
-func TestNameInlinedSchemas(t *testing.T) {
- cwd, _ := os.Getwd()
- bp := filepath.Join(cwd, "fixtures", "nested_inline_schemas.yml")
-	sp, err := loadSpec(bp)
-	assert.NoError(t, err)
-	err = spec.ExpandSpec(sp, &spec.ExpandOptions{
- RelativeBase: bp,
- SkipSchemas: true,
- })
- assert.NoError(t, err)
- values := []struct {
- Key string
- Location string
- Ref spec.Ref
- }{
- {"#/paths/~1some~1where~1{id}/parameters/1/schema/items", "#/definitions/postSomeWhereIdParamsBody/items", spec.MustCreateRef("#/definitions/postSomeWhereIdParamsBodyItems")},
- {"#/paths/~1some~1where~1{id}/parameters/1/schema", "#/paths/~1some~1where~1{id}/parameters/1/schema", spec.MustCreateRef("#/definitions/postSomeWhereIdParamsBody")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdParamsBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2Name")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/1", "#/definitions/getSomeWhereIdParamsBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems1")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record/items/2", "#/definitions/getSomeWhereIdParamsBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/record", "#/definitions/getSomeWhereIdParamsBodyOAIGen/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecord")},
- {"#/paths/~1some~1where~1{id}/get/parameters/2/schema", "#/paths/~1some~1where~1{id}/get/parameters/2/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyOAIGen")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdOKBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems2Name")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/1", "#/definitions/getSomeWhereIdOKBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems1")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record/items/2", "#/definitions/getSomeWhereIdOKBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecordItems2")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/record", "#/definitions/getSomeWhereIdOKBody/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBodyRecord")},
- {"#/paths/~1some~1where~1{id}/get/responses/200/schema", "#/paths/~1some~1where~1{id}/get/responses/200/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdOKBody")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/2/properties/name", "#/definitions/getSomeWhereIdDefaultBodyRecordItems2/properties/name", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems2Name")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/1", "#/definitions/getSomeWhereIdDefaultBodyRecord/items/1", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems1")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record/items/2", "#/definitions/getSomeWhereIdDefaultBodyRecord/items/2", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecordItems2")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/record", "#/definitions/getSomeWhereIdDefaultBody/properties/record", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBodyRecord")},
- {"#/paths/~1some~1where~1{id}/get/responses/default/schema", "#/paths/~1some~1where~1{id}/get/responses/default/schema", spec.MustCreateRef("#/definitions/getSomeWhereIdDefaultBody")},
- {"#/definitions/nestedThing/properties/record/items/2/allOf/1/additionalProperties", "#/definitions/nestedThingRecordItems2AllOf1/additionalProperties", spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1AdditionalProperties")},
- {"#/definitions/nestedThing/properties/record/items/2/allOf/1", "#/definitions/nestedThingRecordItems2/allOf/1", spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1")},
- {"#/definitions/nestedThing/properties/record/items/2/properties/name", "#/definitions/nestedThingRecordItems2/properties/name", spec.MustCreateRef("#/definitions/nestedThingRecordItems2Name")},
- {"#/definitions/nestedThing/properties/record/items/1", "#/definitions/nestedThingRecord/items/1", spec.MustCreateRef("#/definitions/nestedThingRecordItems1")},
- {"#/definitions/nestedThing/properties/record/items/2", "#/definitions/nestedThingRecord/items/2", spec.MustCreateRef("#/definitions/nestedThingRecordItems2")},
- {"#/definitions/datedRecords/items/1", "#/definitions/datedRecords/items/1", spec.MustCreateRef("#/definitions/datedRecordsItems1")},
- {"#/definitions/datedTaggedRecords/items/1", "#/definitions/datedTaggedRecords/items/1", spec.MustCreateRef("#/definitions/datedTaggedRecordsItems1")},
- {"#/definitions/namedThing/properties/name", "#/definitions/namedThing/properties/name", spec.MustCreateRef("#/definitions/namedThingName")},
- {"#/definitions/nestedThing/properties/record", "#/definitions/nestedThing/properties/record", spec.MustCreateRef("#/definitions/nestedThingRecord")},
- {"#/definitions/records/items/0", "#/definitions/records/items/0", spec.MustCreateRef("#/definitions/recordsItems0")},
- {"#/definitions/datedTaggedRecords/additionalItems", "#/definitions/datedTaggedRecords/additionalItems", spec.MustCreateRef("#/definitions/datedTaggedRecordsItemsAdditionalItems")},
- {"#/definitions/otherRecords/items", "#/definitions/otherRecords/items", spec.MustCreateRef("#/definitions/otherRecordsItems")},
- {"#/definitions/tags/additionalProperties", "#/definitions/tags/additionalProperties", spec.MustCreateRef("#/definitions/tagsAdditionalProperties")},
- }
- if assert.NoError(t, err) {
- err := nameInlinedSchemas(&FlattenOpts{
- Spec: New(sp),
- BasePath: bp,
- })
-
- if assert.NoError(t, err) {
- for i, v := range values {
- ptr, err := jsonpointer.New(v.Location[1:])
- if assert.NoError(t, err, "at %d for %s", i, v.Key) {
- vv, _, err := ptr.Get(sp)
-
- if assert.NoError(t, err, "at %d for %s", i, v.Key) {
- switch tv := vv.(type) {
- case *spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
- case spec.Schema:
- assert.Equal(t, v.Ref.String(), tv.Ref.String(), "at %d for %s", i, v.Key)
- case *spec.SchemaOrBool:
- var sRef spec.Ref
- if tv != nil && tv.Schema != nil {
- sRef = tv.Schema.Ref
- }
- assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
- case *spec.SchemaOrArray:
- var sRef spec.Ref
- if tv != nil && tv.Schema != nil {
- sRef = tv.Schema.Ref
- }
- assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
- default:
- assert.Fail(t, "unknown type", "got %T", vv)
- }
- }
- }
- }
- }
-
- for k, rr := range New(sp).allSchemas {
- if !strings.HasPrefix(k, "#/responses") && !strings.HasPrefix(k, "#/parameters") {
- if rr.Schema != nil && rr.Schema.Ref.String() == "" && !rr.TopLevel {
- asch, err := Schema(SchemaOpts{Schema: rr.Schema, Root: sp, BasePath: bp})
- if assert.NoError(t, err, "for key: %s", k) {
- if !asch.IsSimpleSchema {
- assert.Fail(t, "not a top level schema", "for key: %s", k)
- }
- }
- }
- }
- }
- }
-}
-
-func TestFlatten(t *testing.T) {
- cwd, _ := os.Getwd()
- bp := filepath.Join(cwd, "fixtures", "flatten.yml")
- sp, err := loadSpec(bp)
- values := []struct {
- Key string
- Location string
- Ref spec.Ref
- Expected interface{}
- }{
- {
- "#/responses/notFound/schema",
- "#/responses/notFound/schema",
- spec.MustCreateRef("#/definitions/error"),
- nil,
- },
- {
- "#/paths/~1some~1where~1{id}/parameters/0",
- "#/paths/~1some~1where~1{id}/parameters/0/name",
- spec.Ref{},
- "id",
- },
- {
- "#/paths/~1other~1place",
- "#/paths/~1other~1place/get/operationId",
- spec.Ref{},
- "modelOp",
- },
- {
- "#/paths/~1some~1where~1{id}/get/parameters/0",
- "#/paths/~1some~1where~1{id}/get/parameters/0/name",
- spec.Ref{},
- "limit",
- },
- {
- "#/paths/~1some~1where~1{id}/get/parameters/1",
- "#/paths/~1some~1where~1{id}/get/parameters/1/name",
- spec.Ref{},
- "some",
- },
- {
- "#/paths/~1some~1where~1{id}/get/parameters/2",
- "#/paths/~1some~1where~1{id}/get/parameters/2/name",
- spec.Ref{},
- "other",
- },
- {
- "#/paths/~1some~1where~1{id}/get/parameters/3",
- "#/paths/~1some~1where~1{id}/get/parameters/3/schema",
- spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBody"),
- "",
- },
- {
- "#/paths/~1some~1where~1{id}/get/responses/200",
- "#/paths/~1some~1where~1{id}/get/responses/200/schema",
- spec.MustCreateRef("#/definitions/getSomeWhereIdOKBody"),
- "",
- },
- {
- "#/definitions/namedAgain",
- "",
- spec.MustCreateRef("#/definitions/named"),
- "",
- },
- {
- "#/definitions/namedThing/properties/name",
- "",
- spec.MustCreateRef("#/definitions/named"),
- "",
- },
- {
- "#/definitions/namedThing/properties/namedAgain",
- "",
- spec.MustCreateRef("#/definitions/namedAgain"),
- "",
- },
- {
- "#/definitions/datedRecords/items/1",
- "",
- spec.MustCreateRef("#/definitions/record"),
- "",
- },
- {
- "#/definitions/otherRecords/items",
- "",
- spec.MustCreateRef("#/definitions/record"),
- "",
- },
- {
- "#/definitions/tags/additionalProperties",
- "",
- spec.MustCreateRef("#/definitions/tag"),
- "",
- },
- {
- "#/definitions/datedTag/allOf/1",
- "",
- spec.MustCreateRef("#/definitions/tag"),
- "",
- },
- {
- "#/definitions/nestedThingRecordItems2/allOf/1",
- "",
- spec.MustCreateRef("#/definitions/nestedThingRecordItems2AllOf1"),
- "",
- },
- {
- "#/definitions/nestedThingRecord/items/1",
- "",
- spec.MustCreateRef("#/definitions/nestedThingRecordItems1"),
- "",
- },
- {
- "#/definitions/nestedThingRecord/items/2",
- "",
- spec.MustCreateRef("#/definitions/nestedThingRecordItems2"),
- "",
- },
- {
- "#/definitions/nestedThing/properties/record",
- "",
- spec.MustCreateRef("#/definitions/nestedThingRecord"),
- "",
- },
- {
- "#/definitions/named",
- "#/definitions/named/type",
- spec.Ref{},
- spec.StringOrArray{"string"},
- },
- {
- "#/definitions/error",
- "#/definitions/error/properties/id/type",
- spec.Ref{},
- spec.StringOrArray{"integer"},
- },
- {
- "#/definitions/record",
- "#/definitions/record/properties/createdAt/format",
- spec.Ref{},
- "date-time",
- },
- {
- "#/definitions/getSomeWhereIdOKBody",
- "#/definitions/getSomeWhereIdOKBody/properties/record",
- spec.MustCreateRef("#/definitions/nestedThing"),
- nil,
- },
- {
- "#/definitions/getSomeWhereIdParamsBody",
- "#/definitions/getSomeWhereIdParamsBody/properties/record",
- spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecord"),
- nil,
- },
- {
- "#/definitions/getSomeWhereIdParamsBodyRecord",
- "#/definitions/getSomeWhereIdParamsBodyRecord/items/1",
- spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems1"),
- nil,
- },
- {
- "#/definitions/getSomeWhereIdParamsBodyRecord",
- "#/definitions/getSomeWhereIdParamsBodyRecord/items/2",
- spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2"),
- nil,
- },
- {
- "#/definitions/getSomeWhereIdParamsBodyRecordItems2",
- "#/definitions/getSomeWhereIdParamsBodyRecordItems2/allOf/0/format",
- spec.Ref{},
- "date",
- },
- {
- "#/definitions/getSomeWhereIdParamsBodyRecordItems2Name",
- "#/definitions/getSomeWhereIdParamsBodyRecordItems2Name/properties/createdAt/format",
- spec.Ref{},
- "date-time",
- },
- {
- "#/definitions/getSomeWhereIdParamsBodyRecordItems2",
- "#/definitions/getSomeWhereIdParamsBodyRecordItems2/properties/name",
- spec.MustCreateRef("#/definitions/getSomeWhereIdParamsBodyRecordItems2Name"),
- "date",
- },
- }
- if assert.NoError(t, err) {
- err := Flatten(FlattenOpts{Spec: New(sp), BasePath: bp})
- //b, _ := sp.MarshalJSON()
- //panic(string(b))
- if assert.NoError(t, err) {
- for i, v := range values {
- pk := v.Key[1:]
- if v.Location != "" {
- pk = v.Location[1:]
- }
- ptr, err := jsonpointer.New(pk)
- if assert.NoError(t, err, "at %d for %s", i, v.Key) {
- d, _, err := ptr.Get(sp)
- if assert.NoError(t, err) {
- if v.Ref.String() != "" {
- switch s := d.(type) {
- case *spec.Schema:
- assert.Equal(t, v.Ref.String(), s.Ref.String(), "at %d for %s", i, v.Key)
- case spec.Schema:
- assert.Equal(t, v.Ref.String(), s.Ref.String(), "at %d for %s", i, v.Key)
- case *spec.SchemaOrArray:
- var sRef spec.Ref
- if s != nil && s.Schema != nil {
- sRef = s.Schema.Ref
- }
- assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
- case *spec.SchemaOrBool:
- var sRef spec.Ref
- if s != nil && s.Schema != nil {
- sRef = s.Schema.Ref
- }
- assert.Equal(t, v.Ref.String(), sRef.String(), "at %d for %s", i, v.Key)
- default:
- assert.Fail(t, "unknown type", "got %T at %d for %s", d, i, v.Key)
- }
- } else {
- assert.Equal(t, v.Expected, d)
- }
- }
- }
- }
- }
- }
-}
diff --git a/vendor/github.com/go-openapi/analysis/mixin.go b/vendor/github.com/go-openapi/analysis/mixin.go
deleted file mode 100644
index a547433ba..000000000
--- a/vendor/github.com/go-openapi/analysis/mixin.go
+++ /dev/null
@@ -1,199 +0,0 @@
-package analysis
-
-import (
- "fmt"
-
- "github.com/go-openapi/spec"
-)
-
-// Mixin modifies the primary swagger spec by adding the paths and
-// definitions from the mixin specs. Top level parameters and
-// responses from the mixins are also carried over. Operation id
-// collisions are avoided by appending "Mixin<N>" but only if
-// needed. No other parts of primary are modified. Consider calling
-// FixEmptyResponseDescriptions() on the modified primary if the specs
-// were read from storage and were valid to start with.
-//
-// Entries in "paths", "definitions", "parameters" and "responses" are
-// added to the primary in the order of the given mixins. If the entry
-// already exists in primary it is skipped with a warning message.
-//
-// The count of skipped entries (from collisions) is returned so any
-// deviation from the number expected can flag warning in your build
-// scripts. Carefully review the collisions before accepting them;
-// consider renaming things if possible.
-//
-// No normalization of any keys takes place (paths, type defs,
-// etc). Ensure they are canonical if your downstream tools do
-// key normalization of any form.
-func Mixin(primary *spec.Swagger, mixins ...*spec.Swagger) []string {
- var skipped []string
- opIds := getOpIds(primary)
- if primary.Paths == nil {
- primary.Paths = &spec.Paths{Paths: make(map[string]spec.PathItem)}
- }
- if primary.Paths.Paths == nil {
- primary.Paths.Paths = make(map[string]spec.PathItem)
- }
- if primary.Definitions == nil {
- primary.Definitions = make(spec.Definitions)
- }
- if primary.Parameters == nil {
- primary.Parameters = make(map[string]spec.Parameter)
- }
- if primary.Responses == nil {
- primary.Responses = make(map[string]spec.Response)
- }
-
- for i, m := range mixins {
- for k, v := range m.Definitions {
- // assume name collisions represent IDENTICAL type. careful.
- if _, exists := primary.Definitions[k]; exists {
- warn := fmt.Sprintf("definitions entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
- continue
- }
- primary.Definitions[k] = v
- }
- if m.Paths != nil {
- for k, v := range m.Paths.Paths {
- if _, exists := primary.Paths.Paths[k]; exists {
- warn := fmt.Sprintf("paths entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
- continue
- }
-
-				// Swagger requires that operationIds be
-				// unique within a spec. If we find a
-				// collision we append "Mixin<N>" to the
-				// operationId we are adding, where N is the
-				// mixin index. We assume that operationIds
-				// within each of the provided specs are already unique.
- piops := pathItemOps(v)
- for _, piop := range piops {
- if opIds[piop.ID] {
- piop.ID = fmt.Sprintf("%v%v%v", piop.ID, "Mixin", i)
- }
- opIds[piop.ID] = true
- }
- primary.Paths.Paths[k] = v
- }
- }
- for k, v := range m.Parameters {
- // could try to rename on conflict but would
- // have to fix $refs in the mixin. Complain
- // for now
- if _, exists := primary.Parameters[k]; exists {
- warn := fmt.Sprintf("top level parameters entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
- continue
- }
- primary.Parameters[k] = v
- }
- for k, v := range m.Responses {
- // could try to rename on conflict but would
- // have to fix $refs in the mixin. Complain
- // for now
- if _, exists := primary.Responses[k]; exists {
- warn := fmt.Sprintf("top level responses entry '%v' already exists in primary or higher priority mixin, skipping\n", k)
- skipped = append(skipped, warn)
- continue
- }
- primary.Responses[k] = v
- }
- }
- return skipped
-}
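-
-// mixinUsageSketch is an illustrative example added for clarity; it is not
-// part of the upstream package. It merges two mixin specs into a primary one
-// and returns a warning for each entry that was skipped because it collided
-// with an entry already present in primary (or added by an earlier mixin).
-func mixinUsageSketch(primary, m1, m2 *spec.Swagger) []string {
-	skipped := Mixin(primary, m1, m2)
-	// Normalize empty response descriptions so the merged document stays
-	// valid when re-serialized (see FixEmptyResponseDescriptions below).
-	FixEmptyResponseDescriptions(primary)
-	return skipped
-}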
-
-// FixEmptyResponseDescriptions replaces empty ("") response
-// descriptions in the input with "(empty)" to ensure that the
-// resulting Swagger stays valid. The problem arises when reading in
-// valid specs that have an explicit response description of ""
-// (valid, since response.description is required): because zero values
-// are omitted on re-serialization (omitempty), the descriptions are
-// lost unless we put some characters in there.
-func FixEmptyResponseDescriptions(s *spec.Swagger) {
- if s.Paths != nil {
- for _, v := range s.Paths.Paths {
- if v.Get != nil {
- FixEmptyDescs(v.Get.Responses)
- }
- if v.Put != nil {
- FixEmptyDescs(v.Put.Responses)
- }
- if v.Post != nil {
- FixEmptyDescs(v.Post.Responses)
- }
- if v.Delete != nil {
- FixEmptyDescs(v.Delete.Responses)
- }
- if v.Options != nil {
- FixEmptyDescs(v.Options.Responses)
- }
- if v.Head != nil {
- FixEmptyDescs(v.Head.Responses)
- }
- if v.Patch != nil {
- FixEmptyDescs(v.Patch.Responses)
- }
- }
- }
- for k, v := range s.Responses {
- FixEmptyDesc(&v)
- s.Responses[k] = v
- }
-}
-
-// FixEmptyDescs adds "(empty)" as the description for any Response in
-// the given Responses object that doesn't already have one.
-func FixEmptyDescs(rs *spec.Responses) {
- FixEmptyDesc(rs.Default)
- for k, v := range rs.StatusCodeResponses {
- FixEmptyDesc(&v)
- rs.StatusCodeResponses[k] = v
- }
-}
-
-// FixEmptyDesc adds "(empty)" as the description to the given
-// Response object if it doesn't already have one and isn't a
-// ref. No-op on nil input.
-func FixEmptyDesc(rs *spec.Response) {
- if rs == nil || rs.Description != "" || rs.Ref.Ref.GetURL() != nil {
- return
- }
- rs.Description = "(empty)"
-}
-
-// getOpIds extracts all the paths.<path>.operationIds from the given
-// spec and returns them as the keys in a map with 'true' values.
-func getOpIds(s *spec.Swagger) map[string]bool {
- rv := make(map[string]bool)
- if s.Paths == nil {
- return rv
- }
- for _, v := range s.Paths.Paths {
- piops := pathItemOps(v)
- for _, op := range piops {
- rv[op.ID] = true
- }
- }
- return rv
-}
-
-func pathItemOps(p spec.PathItem) []*spec.Operation {
- var rv []*spec.Operation
- rv = appendOp(rv, p.Get)
- rv = appendOp(rv, p.Put)
- rv = appendOp(rv, p.Post)
- rv = appendOp(rv, p.Delete)
- rv = appendOp(rv, p.Head)
- rv = appendOp(rv, p.Patch)
- return rv
-}
-
-func appendOp(ops []*spec.Operation, op *spec.Operation) []*spec.Operation {
- if op == nil {
- return ops
- }
- return append(ops, op)
-}
diff --git a/vendor/github.com/go-openapi/analysis/mixin_test.go b/vendor/github.com/go-openapi/analysis/mixin_test.go
deleted file mode 100644
index 1d8028217..000000000
--- a/vendor/github.com/go-openapi/analysis/mixin_test.go
+++ /dev/null
@@ -1,64 +0,0 @@
-package analysis
-
-import "testing"
-
-const (
- widgetFile = "fixtures/widget-crud.yml"
- fooFile = "fixtures/foo-crud.yml"
- barFile = "fixtures/bar-crud.yml"
- noPathsFile = "fixtures/no-paths.yml"
- emptyPathsFile = "fixtures/empty-paths.json"
-)
-
-func TestMixin(t *testing.T) {
-
- primary, err := loadSpec(widgetFile)
- if err != nil {
- t.Fatalf("Could not load '%v': %v\n", widgetFile, err)
- }
- mixin1, err := loadSpec(fooFile)
- if err != nil {
- t.Fatalf("Could not load '%v': %v\n", fooFile, err)
- }
- mixin2, err := loadSpec(barFile)
- if err != nil {
- t.Fatalf("Could not load '%v': %v\n", barFile, err)
- }
- mixin3, err := loadSpec(noPathsFile)
- if err != nil {
- t.Fatalf("Could not load '%v': %v\n", noPathsFile, err)
- }
-
- collisions := Mixin(primary, mixin1, mixin2, mixin3)
- if len(collisions) != 16 {
- t.Errorf("TestMixin: Expected 16 collisions, got %v\n%v", len(collisions), collisions)
- }
-
- if len(primary.Paths.Paths) != 7 {
- t.Errorf("TestMixin: Expected 7 paths in merged, got %v\n", len(primary.Paths.Paths))
- }
-
- if len(primary.Definitions) != 8 {
- t.Errorf("TestMixin: Expected 8 definitions in merged, got %v\n", len(primary.Definitions))
- }
-
- if len(primary.Parameters) != 4 {
- t.Errorf("TestMixin: Expected 4 top level parameters in merged, got %v\n", len(primary.Parameters))
- }
-
- if len(primary.Responses) != 2 {
- t.Errorf("TestMixin: Expected 2 top level responses in merged, got %v\n", len(primary.Responses))
- }
-
- // test that adding paths to a primary with no paths works (was NPE)
- emptyPaths, err := loadSpec(emptyPathsFile)
- if err != nil {
- t.Fatalf("Could not load '%v': %v\n", emptyPathsFile, err)
- }
-
- collisions = Mixin(emptyPaths, primary)
- if len(collisions) != 0 {
- t.Errorf("TestMixin: Expected 0 collisions, got %v\n%v", len(collisions), collisions)
- }
-
-}
diff --git a/vendor/github.com/go-openapi/analysis/schema.go b/vendor/github.com/go-openapi/analysis/schema.go
deleted file mode 100644
index 3ef64d753..000000000
--- a/vendor/github.com/go-openapi/analysis/schema.go
+++ /dev/null
@@ -1,234 +0,0 @@
-package analysis
-
-import (
- "github.com/go-openapi/spec"
- "github.com/go-openapi/strfmt"
-)
-
-// SchemaOpts configures the schema analyzer
-type SchemaOpts struct {
- Schema *spec.Schema
- Root interface{}
- BasePath string
- _ struct{}
-}
-
-// Schema analyzes the provided schema and classifies it according
-// to known patterns.
-func Schema(opts SchemaOpts) (*AnalyzedSchema, error) {
- a := &AnalyzedSchema{
- schema: opts.Schema,
- root: opts.Root,
- basePath: opts.BasePath,
- }
-
- a.initializeFlags()
- a.inferKnownType()
- a.inferEnum()
- a.inferBaseType()
-
- if err := a.inferMap(); err != nil {
- return nil, err
- }
- if err := a.inferArray(); err != nil {
- return nil, err
- }
-
- if err := a.inferTuple(); err != nil {
- return nil, err
- }
-
- if err := a.inferFromRef(); err != nil {
- return nil, err
- }
-
- a.inferSimpleSchema()
- return a, nil
-}
-
-// AnalyzedSchema indicates what the schema represents
-type AnalyzedSchema struct {
- schema *spec.Schema
- root interface{}
- basePath string
-
- hasProps bool
- hasAllOf bool
- hasItems bool
- hasAdditionalProps bool
- hasAdditionalItems bool
- hasRef bool
-
- IsKnownType bool
- IsSimpleSchema bool
- IsArray bool
- IsSimpleArray bool
- IsMap bool
- IsSimpleMap bool
- IsExtendedObject bool
- IsTuple bool
- IsTupleWithExtra bool
- IsBaseType bool
- IsEnum bool
-}
-
-// inherits copies value fields from other onto this schema
-func (a *AnalyzedSchema) inherits(other *AnalyzedSchema) {
- if other == nil {
- return
- }
- a.hasProps = other.hasProps
- a.hasAllOf = other.hasAllOf
- a.hasItems = other.hasItems
- a.hasAdditionalItems = other.hasAdditionalItems
- a.hasAdditionalProps = other.hasAdditionalProps
- a.hasRef = other.hasRef
-
- a.IsKnownType = other.IsKnownType
- a.IsSimpleSchema = other.IsSimpleSchema
- a.IsArray = other.IsArray
- a.IsSimpleArray = other.IsSimpleArray
- a.IsMap = other.IsMap
- a.IsSimpleMap = other.IsSimpleMap
- a.IsExtendedObject = other.IsExtendedObject
- a.IsTuple = other.IsTuple
- a.IsTupleWithExtra = other.IsTupleWithExtra
- a.IsBaseType = other.IsBaseType
- a.IsEnum = other.IsEnum
-}
-
-func (a *AnalyzedSchema) inferFromRef() error {
- if a.hasRef {
- sch := new(spec.Schema)
- sch.Ref = a.schema.Ref
- err := spec.ExpandSchema(sch, a.root, nil)
- if err != nil {
- return err
- }
- if sch != nil {
- rsch, err := Schema(SchemaOpts{
- Schema: sch,
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- return err
- }
- a.inherits(rsch)
- }
- }
- return nil
-}
-
-func (a *AnalyzedSchema) inferSimpleSchema() {
- a.IsSimpleSchema = a.IsKnownType || a.IsSimpleArray || a.IsSimpleMap
-}
-
-func (a *AnalyzedSchema) inferKnownType() {
- tpe := a.schema.Type
- format := a.schema.Format
- a.IsKnownType = tpe.Contains("boolean") ||
- tpe.Contains("integer") ||
- tpe.Contains("number") ||
- tpe.Contains("string") ||
- (format != "" && strfmt.Default.ContainsName(format)) ||
- (a.isObjectType() && !a.hasProps && !a.hasAllOf && !a.hasAdditionalProps && !a.hasAdditionalItems)
-}
-
-func (a *AnalyzedSchema) inferMap() error {
- if a.isObjectType() {
- hasExtra := a.hasProps || a.hasAllOf
- a.IsMap = a.hasAdditionalProps && !hasExtra
- a.IsExtendedObject = a.hasAdditionalProps && hasExtra
- if a.IsMap {
- if a.schema.AdditionalProperties.Schema != nil {
- msch, err := Schema(SchemaOpts{
- Schema: a.schema.AdditionalProperties.Schema,
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- return err
- }
- a.IsSimpleMap = msch.IsSimpleSchema
- } else if a.schema.AdditionalProperties.Allows {
- a.IsSimpleMap = true
- }
- }
- }
- return nil
-}
-
-func (a *AnalyzedSchema) inferArray() error {
- fromValid := a.isArrayType() && (a.schema.Items == nil || a.schema.Items.Len() < 2)
- a.IsArray = fromValid || (a.hasItems && a.schema.Items.Len() < 2)
- if a.IsArray && a.hasItems {
- if a.schema.Items.Schema != nil {
- itsch, err := Schema(SchemaOpts{
- Schema: a.schema.Items.Schema,
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- return err
- }
- a.IsSimpleArray = itsch.IsSimpleSchema
- }
- if len(a.schema.Items.Schemas) > 0 {
- itsch, err := Schema(SchemaOpts{
- Schema: &a.schema.Items.Schemas[0],
- Root: a.root,
- BasePath: a.basePath,
- })
- if err != nil {
- return err
- }
- a.IsSimpleArray = itsch.IsSimpleSchema
- }
- }
- if a.IsArray && !a.hasItems {
- a.IsSimpleArray = true
- }
- return nil
-}
-
-func (a *AnalyzedSchema) inferTuple() error {
- tuple := a.hasItems && a.schema.Items.Len() > 1
- a.IsTuple = tuple && !a.hasAdditionalItems
- a.IsTupleWithExtra = tuple && a.hasAdditionalItems
- return nil
-}
-
-func (a *AnalyzedSchema) inferBaseType() {
- if a.isObjectType() {
- a.IsBaseType = a.schema.Discriminator != ""
- }
-}
-
-func (a *AnalyzedSchema) inferEnum() {
- a.IsEnum = len(a.schema.Enum) > 0
-}
-
-func (a *AnalyzedSchema) initializeFlags() {
- a.hasProps = len(a.schema.Properties) > 0
- a.hasAllOf = len(a.schema.AllOf) > 0
- a.hasRef = a.schema.Ref.String() != ""
-
- a.hasItems = a.schema.Items != nil &&
- (a.schema.Items.Schema != nil || len(a.schema.Items.Schemas) > 0)
-
- a.hasAdditionalProps = a.schema.AdditionalProperties != nil &&
-		(a.schema.AdditionalProperties.Schema != nil || a.schema.AdditionalProperties.Allows)
-
- a.hasAdditionalItems = a.schema.AdditionalItems != nil &&
- (a.schema.AdditionalItems.Schema != nil || a.schema.AdditionalItems.Allows)
-
-}
-
-func (a *AnalyzedSchema) isObjectType() bool {
- return !a.hasRef && (a.schema.Type == nil || a.schema.Type.Contains("") || a.schema.Type.Contains("object"))
-}
-
-func (a *AnalyzedSchema) isArrayType() bool {
- return !a.hasRef && (a.schema.Type != nil && a.schema.Type.Contains("array"))
-}
diff --git a/vendor/github.com/go-openapi/analysis/schema_test.go b/vendor/github.com/go-openapi/analysis/schema_test.go
deleted file mode 100644
index 0c386b2a4..000000000
--- a/vendor/github.com/go-openapi/analysis/schema_test.go
+++ /dev/null
@@ -1,266 +0,0 @@
-package analysis
-
-import (
- "encoding/json"
- "fmt"
- "path"
- "testing"
-
- "net/http"
- "net/http/httptest"
-
- "github.com/go-openapi/spec"
- "github.com/stretchr/testify/assert"
-)
-
-var knownSchemas = []*spec.Schema{
- spec.BoolProperty(), // 0
- spec.StringProperty(), // 1
- spec.Int8Property(), // 2
- spec.Int16Property(), // 3
- spec.Int32Property(), // 4
- spec.Int64Property(), // 5
- spec.Float32Property(), // 6
- spec.Float64Property(), // 7
- spec.DateProperty(), // 8
- spec.DateTimeProperty(), // 9
- (&spec.Schema{}), // 10
- (&spec.Schema{}).Typed("object", ""), // 11
- (&spec.Schema{}).Typed("", ""), // 12
- (&spec.Schema{}).Typed("", "uuid"), // 13
-}
-
-func newCObj() *spec.Schema {
- return (&spec.Schema{}).Typed("object", "").SetProperty("id", *spec.Int64Property())
-}
-
-var complexObject = newCObj()
-
-var complexSchemas = []*spec.Schema{
- complexObject,
- spec.ArrayProperty(complexObject),
- spec.MapProperty(complexObject),
-}
-
-func knownRefs(base string) []spec.Ref {
- urls := []string{"bool", "string", "integer", "float", "date", "object", "format"}
-
- var result []spec.Ref
- for _, u := range urls {
- result = append(result, spec.MustCreateRef(fmt.Sprintf("%s/%s", base, path.Join("known", u))))
- }
- return result
-}
-
-func complexRefs(base string) []spec.Ref {
- urls := []string{"object", "array", "map"}
-
- var result []spec.Ref
- for _, u := range urls {
- result = append(result, spec.MustCreateRef(fmt.Sprintf("%s/%s", base, path.Join("complex", u))))
- }
- return result
-}
-
-func refServer() *httptest.Server {
- mux := http.NewServeMux()
- mux.Handle("/known/bool", schemaHandler(knownSchemas[0]))
- mux.Handle("/known/string", schemaHandler(knownSchemas[1]))
- mux.Handle("/known/integer", schemaHandler(knownSchemas[5]))
- mux.Handle("/known/float", schemaHandler(knownSchemas[6]))
- mux.Handle("/known/date", schemaHandler(knownSchemas[8]))
- mux.Handle("/known/object", schemaHandler(knownSchemas[11]))
- mux.Handle("/known/format", schemaHandler(knownSchemas[13]))
-
- mux.Handle("/complex/object", schemaHandler(complexSchemas[0]))
- mux.Handle("/complex/array", schemaHandler(complexSchemas[1]))
- mux.Handle("/complex/map", schemaHandler(complexSchemas[2]))
-
- return httptest.NewServer(mux)
-}
-
-func refSchema(ref spec.Ref) *spec.Schema {
- return &spec.Schema{SchemaProps: spec.SchemaProps{Ref: ref}}
-}
-
-func schemaHandler(schema *spec.Schema) http.Handler {
- return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- writeJSON(w, schema)
- })
-}
-
-func writeJSON(w http.ResponseWriter, data interface{}) {
- w.Header().Add("Content-Type", "application/json")
- w.WriteHeader(http.StatusOK)
- enc := json.NewEncoder(w)
- if err := enc.Encode(data); err != nil {
- panic(err)
- }
-}
-
-func TestSchemaAnalysis_KnownTypes(t *testing.T) {
- for i, v := range knownSchemas {
- sch, err := Schema(SchemaOpts{Schema: v})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsKnownType, "item at %d should be a known type", i)
- }
- }
- for i, v := range complexSchemas {
- sch, err := Schema(SchemaOpts{Schema: v})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.False(t, sch.IsKnownType, "item at %d should not be a known type", i)
- }
- }
-
- serv := refServer()
- defer serv.Close()
-
- for i, ref := range knownRefs(serv.URL) {
- sch, err := Schema(SchemaOpts{Schema: refSchema(ref)})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsKnownType, "item at %d should be a known type", i)
- }
- }
- for i, ref := range complexRefs(serv.URL) {
- sch, err := Schema(SchemaOpts{Schema: refSchema(ref)})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.False(t, sch.IsKnownType, "item at %d should not be a known type", i)
- }
- }
-}
-
-func TestSchemaAnalysis_Array(t *testing.T) {
- for i, v := range append(knownSchemas, (&spec.Schema{}).Typed("array", "")) {
- sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(v)})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsArray, "item at %d should be an array type", i)
- assert.True(t, sch.IsSimpleArray, "item at %d should be a simple array type", i)
- }
- }
-
- for i, v := range complexSchemas {
- sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(v)})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsArray, "item at %d should be an array type", i)
- assert.False(t, sch.IsSimpleArray, "item at %d should not be a simple array type", i)
- }
- }
-
- serv := refServer()
- defer serv.Close()
-
- for i, ref := range knownRefs(serv.URL) {
- sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(refSchema(ref))})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsArray, "item at %d should be an array type", i)
- assert.True(t, sch.IsSimpleArray, "item at %d should be a simple array type", i)
- }
- }
- for i, ref := range complexRefs(serv.URL) {
- sch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(refSchema(ref))})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.False(t, sch.IsKnownType, "item at %d should not be a known type", i)
- assert.True(t, sch.IsArray, "item at %d should be an array type", i)
- assert.False(t, sch.IsSimpleArray, "item at %d should not be a simple array type", i)
- }
- }
-
-}
-
-func TestSchemaAnalysis_Map(t *testing.T) {
- for i, v := range append(knownSchemas, spec.MapProperty(nil)) {
- sch, err := Schema(SchemaOpts{Schema: spec.MapProperty(v)})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsMap, "item at %d should be a map type", i)
- assert.True(t, sch.IsSimpleMap, "item at %d should be a simple map type", i)
- }
- }
-
- for i, v := range complexSchemas {
- sch, err := Schema(SchemaOpts{Schema: spec.MapProperty(v)})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsMap, "item at %d should be a map type", i)
- assert.False(t, sch.IsSimpleMap, "item at %d should not be a simple map type", i)
- }
- }
-}
-
-func TestSchemaAnalysis_ExtendedObject(t *testing.T) {
- for i, v := range knownSchemas {
- wex := spec.MapProperty(v).SetProperty("name", *spec.StringProperty())
- sch, err := Schema(SchemaOpts{Schema: wex})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsExtendedObject, "item at %d should be an extended map object type", i)
- assert.False(t, sch.IsMap, "item at %d should not be a map type", i)
- assert.False(t, sch.IsSimpleMap, "item at %d should not be a simple map type", i)
- }
- }
-}
-
-func TestSchemaAnalysis_Tuple(t *testing.T) {
- at := spec.ArrayProperty(nil)
- at.Items = &spec.SchemaOrArray{}
- at.Items.Schemas = append(at.Items.Schemas, *spec.StringProperty(), *spec.Int64Property())
-
- sch, err := Schema(SchemaOpts{Schema: at})
- if assert.NoError(t, err) {
- assert.True(t, sch.IsTuple)
- assert.False(t, sch.IsTupleWithExtra)
- assert.False(t, sch.IsKnownType)
- assert.False(t, sch.IsSimpleSchema)
- }
-}
-
-func TestSchemaAnalysis_TupleWithExtra(t *testing.T) {
- at := spec.ArrayProperty(nil)
- at.Items = &spec.SchemaOrArray{}
- at.Items.Schemas = append(at.Items.Schemas, *spec.StringProperty(), *spec.Int64Property())
- at.AdditionalItems = &spec.SchemaOrBool{Allows: true}
- at.AdditionalItems.Schema = spec.Int32Property()
-
- sch, err := Schema(SchemaOpts{Schema: at})
- if assert.NoError(t, err) {
- assert.False(t, sch.IsTuple)
- assert.True(t, sch.IsTupleWithExtra)
- assert.False(t, sch.IsKnownType)
- assert.False(t, sch.IsSimpleSchema)
- }
-}
-
-func TestSchemaAnalysis_BaseType(t *testing.T) {
- cl := (&spec.Schema{}).Typed("object", "").SetProperty("type", *spec.StringProperty()).WithDiscriminator("type")
-
- sch, err := Schema(SchemaOpts{Schema: cl})
- if assert.NoError(t, err) {
- assert.True(t, sch.IsBaseType)
- assert.False(t, sch.IsKnownType)
- assert.False(t, sch.IsSimpleSchema)
- }
-}
-
-func TestSchemaAnalysis_SimpleSchema(t *testing.T) {
- for i, v := range append(knownSchemas, spec.ArrayProperty(nil), spec.MapProperty(nil)) {
- sch, err := Schema(SchemaOpts{Schema: v})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.True(t, sch.IsSimpleSchema, "item at %d should be a simple schema", i)
- }
-
- asch, err := Schema(SchemaOpts{Schema: spec.ArrayProperty(v)})
- if assert.NoError(t, err, "failed to analyze array schema at %d: %v", i, err) {
- assert.True(t, asch.IsSimpleSchema, "array item at %d should be a simple schema", i)
- }
-
- msch, err := Schema(SchemaOpts{Schema: spec.MapProperty(v)})
- if assert.NoError(t, err, "failed to analyze map schema at %d: %v", i, err) {
- assert.True(t, msch.IsSimpleSchema, "map item at %d should be a simple schema", i)
- }
- }
-
- for i, v := range complexSchemas {
- sch, err := Schema(SchemaOpts{Schema: v})
- if assert.NoError(t, err, "failed to analyze schema at %d: %v", i, err) {
- assert.False(t, sch.IsSimpleSchema, "item at %d should not be a simple schema", i)
- }
- }
-
-}
diff --git a/vendor/github.com/go-openapi/errors/.gitignore b/vendor/github.com/go-openapi/errors/.gitignore
deleted file mode 100644
index dd91ed6a0..000000000
--- a/vendor/github.com/go-openapi/errors/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-secrets.yml
-coverage.out
diff --git a/vendor/github.com/go-openapi/errors/.travis.yml b/vendor/github.com/go-openapi/errors/.travis.yml
deleted file mode 100644
index 8d22a34a9..000000000
--- a/vendor/github.com/go-openapi/errors/.travis.yml
+++ /dev/null
@@ -1,12 +0,0 @@
-language: go
-go:
-- 1.7
-install:
-- go get -u github.com/stretchr/testify/assert
-script:
-- go test -v -race -cover -coverprofile=coverage.txt -covermode=atomic ./...
-after_success:
-- bash <(curl -s https://codecov.io/bash)
-notifications:
- slack:
- secure: gZGp9NaHxi7zawlXJXKY92BGeDR1x0tbIcTyU5nMKLq0fhIaiEBJEeALwZ4VgqsSv3DytSSF5mLH8fevAM3ixE6hxjKQ+lQuf7V/w3btCN1CSWgoua5LOh1kTnqZQtJuRvO4pzoJcT3bJWBsVZ07VGNVzzJEy/zAKCHFqBUCXShw7QemlLBcYWFNqveTlvDIfCzvouoLnPoXwxEpkjxe9uz/ZKZgAnup/fXjC8RFctmgCnkCyvJTk0Y/fZCsufixJrJhshBWTnlrFCzRmgNkz2d+i1Ls3+MJ5EJJ2Tx/A5S63dL49J1f9Kr0AKHADmulSy8JNzIckKwbyFMYUecrsW+Lsu9DhnVMy1jj5pKsJDLRi2iIU3fXTMWbcyQbXjbbnBO2mPdP3Tzme75y4D9fc8hUPeyqVv2BU26NEbQ7EF2pKJ93OXvci7HlwRBgdJa8j6mP2LEDClcPQW00g7N/OZe0cTOMa8L5AwiBlbArwqt9wv6YLJoTG0wpDhzWsFvbCg5bJxe28Yn3fIDD0Lk1I7iSnBbp/5gzF19jmxqvcT8tHRkDL4xfjbENFTZjA5uB4Z4pj4WSyWQILLV/Jwhe3fi9uQwdviFHfj5pnVrmNUiGSOQL672K5wl2c3E9mGwejvsu2dfEz28n7Y/FUnOpY3/cBS0n27JJaerS0zMKNLE=
diff --git a/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e..000000000
--- a/vendor/github.com/go-openapi/errors/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/errors/LICENSE b/vendor/github.com/go-openapi/errors/LICENSE
deleted file mode 100644
index d64569567..000000000
--- a/vendor/github.com/go-openapi/errors/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/errors/README.md b/vendor/github.com/go-openapi/errors/README.md
deleted file mode 100644
index 48c49fb2d..000000000
--- a/vendor/github.com/go-openapi/errors/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# OpenAPI errors [![Build Status](https://travis-ci.org/go-openapi/errors.svg?branch=master)](https://travis-ci.org/go-openapi/errors) [![codecov](https://codecov.io/gh/go-openapi/errors/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/errors) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
-
-[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/errors/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/errors?status.svg)](http://godoc.org/github.com/go-openapi/errors)
-
-Shared errors used throughout the various libraries for the go-openapi toolkit \ No newline at end of file
diff --git a/vendor/github.com/go-openapi/errors/api.go b/vendor/github.com/go-openapi/errors/api.go
deleted file mode 100644
index 64e0e7300..000000000
--- a/vendor/github.com/go-openapi/errors/api.go
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "encoding/json"
- "fmt"
- "net/http"
- "strings"
-)
-
-// Error represents an error interface that all swagger framework errors implement
-type Error interface {
- error
- Code() int32
-}
-
-type apiError struct {
- code int32
- message string
-}
-
-func (a *apiError) Error() string {
- return a.message
-}
-
-func (a *apiError) Code() int32 {
- return a.code
-}
-
-// New creates a new API error with a code and a message
-func New(code int32, message string, args ...interface{}) Error {
- if len(args) > 0 {
- return &apiError{code, fmt.Sprintf(message, args...)}
- }
- return &apiError{code, message}
-}
-
-// NotFound creates a new not found error
-func NotFound(message string, args ...interface{}) Error {
- if message == "" {
- message = "Not found"
- }
- return New(http.StatusNotFound, fmt.Sprintf(message, args...))
-}
-
-// NotImplemented creates a new not implemented error
-func NotImplemented(message string) Error {
- return New(http.StatusNotImplemented, message)
-}
-
-// MethodNotAllowedError represents an error for when the path matches but the method doesn't
-type MethodNotAllowedError struct {
- code int32
- Allowed []string
- message string
-}
-
-func (m *MethodNotAllowedError) Error() string {
- return m.message
-}
-
-// Code the error code
-func (m *MethodNotAllowedError) Code() int32 {
- return m.code
-}
-
-func errorAsJSON(err Error) []byte {
- b, _ := json.Marshal(struct {
- Code int32 `json:"code"`
- Message string `json:"message"`
- }{err.Code(), err.Error()})
- return b
-}
-
-func flattenComposite(errs *CompositeError) *CompositeError {
- var res []error
- for _, er := range errs.Errors {
- switch e := er.(type) {
- case *CompositeError:
- if len(e.Errors) > 0 {
- flat := flattenComposite(e)
- if len(flat.Errors) > 0 {
- res = append(res, flat.Errors...)
- }
- }
- default:
- if e != nil {
- res = append(res, e)
- }
- }
- }
- return CompositeValidationError(res...)
-}
-
-// MethodNotAllowed creates a new method not allowed error
-func MethodNotAllowed(requested string, allow []string) Error {
- msg := fmt.Sprintf("method %s is not allowed, but [%s] are", requested, strings.Join(allow, ","))
- return &MethodNotAllowedError{code: http.StatusMethodNotAllowed, Allowed: allow, message: msg}
-}
-
-// ServeError implements the error handler interface
-func ServeError(rw http.ResponseWriter, r *http.Request, err error) {
- rw.Header().Set("Content-Type", "application/json")
- switch e := err.(type) {
- case *CompositeError:
- er := flattenComposite(e)
- ServeError(rw, r, er.Errors[0])
- case *MethodNotAllowedError:
-		rw.Header().Add("Allow", strings.Join(e.Allowed, ","))
- rw.WriteHeader(asHTTPCode(int(e.Code())))
- if r == nil || r.Method != "HEAD" {
- rw.Write(errorAsJSON(e))
- }
- case Error:
- if e == nil {
- rw.WriteHeader(http.StatusInternalServerError)
- rw.Write(errorAsJSON(New(http.StatusInternalServerError, "Unknown error")))
- return
- }
- rw.WriteHeader(asHTTPCode(int(e.Code())))
- if r == nil || r.Method != "HEAD" {
- rw.Write(errorAsJSON(e))
- }
- default:
- rw.WriteHeader(http.StatusInternalServerError)
- if r == nil || r.Method != "HEAD" {
- rw.Write(errorAsJSON(New(http.StatusInternalServerError, err.Error())))
- }
- }
-}
-
-func asHTTPCode(input int) int {
- if input >= 600 {
- return 422
- }
- return input
-}
diff --git a/vendor/github.com/go-openapi/errors/api_test.go b/vendor/github.com/go-openapi/errors/api_test.go
deleted file mode 100644
index 4c928172d..000000000
--- a/vendor/github.com/go-openapi/errors/api_test.go
+++ /dev/null
@@ -1,97 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "fmt"
- "net/http"
- "net/http/httptest"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestServeError(t *testing.T) {
- // method not allowed wins
- var err error
- err = MethodNotAllowed("GET", []string{"POST", "PUT"})
- recorder := httptest.NewRecorder()
- ServeError(recorder, nil, err)
- assert.Equal(t, http.StatusMethodNotAllowed, recorder.Code)
- assert.Equal(t, "POST,PUT", recorder.Header().Get("Allow"))
- // assert.Equal(t, "application/json", recorder.Header().Get("content-type"))
- assert.Equal(t, `{"code":405,"message":"method GET is not allowed, but [POST,PUT] are"}`, recorder.Body.String())
-
- // renders status code from error when present
- err = NotFound("")
- recorder = httptest.NewRecorder()
- ServeError(recorder, nil, err)
- assert.Equal(t, http.StatusNotFound, recorder.Code)
- // assert.Equal(t, "application/json", recorder.Header().Get("content-type"))
- assert.Equal(t, `{"code":404,"message":"Not found"}`, recorder.Body.String())
-
- // renders mapped status code from error when present
- err = InvalidTypeName("someType")
- recorder = httptest.NewRecorder()
- ServeError(recorder, nil, err)
- assert.Equal(t, http.StatusUnprocessableEntity, recorder.Code)
- // assert.Equal(t, "application/json", recorder.Header().Get("content-type"))
- assert.Equal(t, `{"code":601,"message":"someType is an invalid type name"}`, recorder.Body.String())
-
- // defaults to internal server error
- err = fmt.Errorf("some error")
- recorder = httptest.NewRecorder()
- ServeError(recorder, nil, err)
- assert.Equal(t, http.StatusInternalServerError, recorder.Code)
- // assert.Equal(t, "application/json", recorder.Header().Get("content-type"))
- assert.Equal(t, `{"code":500,"message":"some error"}`, recorder.Body.String())
-}
-
-func TestAPIErrors(t *testing.T) {
- err := New(402, "this failed %s", "yada")
- assert.Error(t, err)
- assert.EqualValues(t, 402, err.Code())
- assert.EqualValues(t, "this failed yada", err.Error())
-
- err = NotFound("this failed %d", 1)
- assert.Error(t, err)
- assert.EqualValues(t, http.StatusNotFound, err.Code())
- assert.EqualValues(t, "this failed 1", err.Error())
-
- err = NotFound("")
- assert.Error(t, err)
- assert.EqualValues(t, http.StatusNotFound, err.Code())
- assert.EqualValues(t, "Not found", err.Error())
-
- err = NotImplemented("not implemented")
- assert.Error(t, err)
- assert.EqualValues(t, http.StatusNotImplemented, err.Code())
- assert.EqualValues(t, "not implemented", err.Error())
-
- err = MethodNotAllowed("GET", []string{"POST", "PUT"})
- assert.Error(t, err)
- assert.EqualValues(t, http.StatusMethodNotAllowed, err.Code())
- assert.EqualValues(t, "method GET is not allowed, but [POST,PUT] are", err.Error())
-
- err = InvalidContentType("application/saml", []string{"application/json", "application/x-yaml"})
- assert.Error(t, err)
- assert.EqualValues(t, http.StatusUnsupportedMediaType, err.Code())
- assert.EqualValues(t, "unsupported media type \"application/saml\", only [application/json application/x-yaml] are allowed", err.Error())
-
- err = InvalidResponseFormat("application/saml", []string{"application/json", "application/x-yaml"})
- assert.Error(t, err)
- assert.EqualValues(t, http.StatusNotAcceptable, err.Code())
- assert.EqualValues(t, "unsupported media type requested, only [application/json application/x-yaml] are available", err.Error())
-}
diff --git a/vendor/github.com/go-openapi/errors/auth.go b/vendor/github.com/go-openapi/errors/auth.go
deleted file mode 100644
index 70eb960b1..000000000
--- a/vendor/github.com/go-openapi/errors/auth.go
+++ /dev/null
@@ -1,20 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-// Unauthenticated returns an unauthenticated error
-func Unauthenticated(scheme string) Error {
- return New(401, "unauthenticated for %s", scheme)
-}
diff --git a/vendor/github.com/go-openapi/errors/auth_test.go b/vendor/github.com/go-openapi/errors/auth_test.go
deleted file mode 100644
index eee7a5c7e..000000000
--- a/vendor/github.com/go-openapi/errors/auth_test.go
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestUnauthenticated(t *testing.T) {
- err := Unauthenticated("basic")
- assert.EqualValues(t, 401, err.Code())
- assert.Equal(t, "unauthenticated for basic", err.Error())
-}
diff --git a/vendor/github.com/go-openapi/errors/headers.go b/vendor/github.com/go-openapi/errors/headers.go
deleted file mode 100644
index 66a1f4bc8..000000000
--- a/vendor/github.com/go-openapi/errors/headers.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "fmt"
- "net/http"
-)
-
-// Validation represents a failure of a precondition
-type Validation struct {
- code int32
- Name string
- In string
- Value interface{}
- message string
- Values []interface{}
-}
-
-func (e *Validation) Error() string {
- return e.message
-}
-
-// Code the error code
-func (e *Validation) Code() int32 {
- return e.code
-}
-
-const (
- contentTypeFail = `unsupported media type %q, only %v are allowed`
- responseFormatFail = `unsupported media type requested, only %v are available`
-)
-
-// InvalidContentType error for an invalid content type
-func InvalidContentType(value string, allowed []string) *Validation {
- var values []interface{}
- for _, v := range allowed {
- values = append(values, v)
- }
- return &Validation{
- code: http.StatusUnsupportedMediaType,
- Name: "Content-Type",
- In: "header",
- Value: value,
- Values: values,
- message: fmt.Sprintf(contentTypeFail, value, allowed),
- }
-}
-
-// InvalidResponseFormat error for an unacceptable response format request
-func InvalidResponseFormat(value string, allowed []string) *Validation {
- var values []interface{}
- for _, v := range allowed {
- values = append(values, v)
- }
- return &Validation{
- code: http.StatusNotAcceptable,
- Name: "Accept",
- In: "header",
- Value: value,
- Values: values,
- message: fmt.Sprintf(responseFormatFail, allowed),
- }
-}
-
-// ValidateName sets the error message name for an aliased property
-func (e *Validation) ValidateName(name string) *Validation {
- if e.Name == "" && name != "" {
- e.Name = name
-		e.message = name + e.message
- }
- return e
-}
diff --git a/vendor/github.com/go-openapi/errors/middleware.go b/vendor/github.com/go-openapi/errors/middleware.go
deleted file mode 100644
index 6390d4636..000000000
--- a/vendor/github.com/go-openapi/errors/middleware.go
+++ /dev/null
@@ -1,51 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "bytes"
- "fmt"
- "strings"
-)
-
-// APIVerificationFailed is an error that contains all the missing info for a mismatched section
-// between the api registrations and the api spec
-type APIVerificationFailed struct {
- Section string
- MissingSpecification []string
- MissingRegistration []string
-}
-
-// Error renders the verification failure as a human-readable message
-func (v *APIVerificationFailed) Error() string {
- buf := bytes.NewBuffer(nil)
-
- hasRegMissing := len(v.MissingRegistration) > 0
- hasSpecMissing := len(v.MissingSpecification) > 0
-
- if hasRegMissing {
- buf.WriteString(fmt.Sprintf("missing [%s] %s registrations", strings.Join(v.MissingRegistration, ", "), v.Section))
- }
-
- if hasRegMissing && hasSpecMissing {
- buf.WriteString("\n")
- }
-
- if hasSpecMissing {
- buf.WriteString(fmt.Sprintf("missing from spec file [%s] %s", strings.Join(v.MissingSpecification, ", "), v.Section))
- }
-
- return buf.String()
-}
diff --git a/vendor/github.com/go-openapi/errors/middleware_test.go b/vendor/github.com/go-openapi/errors/middleware_test.go
deleted file mode 100644
index feff2074d..000000000
--- a/vendor/github.com/go-openapi/errors/middleware_test.go
+++ /dev/null
@@ -1,33 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestAPIVerificationFailed(t *testing.T) {
- err := &APIVerificationFailed{
- Section: "consumer",
- MissingSpecification: []string{"application/json", "application/x-yaml"},
- MissingRegistration: []string{"text/html", "application/xml"},
- }
-
- expected := `missing [text/html, application/xml] consumer registrations
-missing from spec file [application/json, application/x-yaml] consumer`
- assert.Equal(t, expected, err.Error())
-}
diff --git a/vendor/github.com/go-openapi/errors/parsing.go b/vendor/github.com/go-openapi/errors/parsing.go
deleted file mode 100644
index 1bae87302..000000000
--- a/vendor/github.com/go-openapi/errors/parsing.go
+++ /dev/null
@@ -1,59 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import "fmt"
-
-// ParseError represents a parsing error
-type ParseError struct {
- code int32
- Name string
- In string
- Value string
- Reason error
- message string
-}
-
-func (e *ParseError) Error() string {
- return e.message
-}
-
-// Code returns the http status code for this error
-func (e *ParseError) Code() int32 {
- return e.code
-}
-
-const (
- parseErrorTemplContent = `parsing %s %s from %q failed, because %s`
- parseErrorTemplContentNoIn = `parsing %s from %q failed, because %s`
-)
-
-// NewParseError creates a new parse error
-func NewParseError(name, in, value string, reason error) *ParseError {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(parseErrorTemplContentNoIn, name, value, reason)
- } else {
- msg = fmt.Sprintf(parseErrorTemplContent, name, in, value, reason)
- }
- return &ParseError{
- code: 400,
- Name: name,
- In: in,
- Value: value,
- Reason: reason,
- message: msg,
- }
-}
diff --git a/vendor/github.com/go-openapi/errors/parsing_test.go b/vendor/github.com/go-openapi/errors/parsing_test.go
deleted file mode 100644
index a51f773c9..000000000
--- a/vendor/github.com/go-openapi/errors/parsing_test.go
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "errors"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestParseError(t *testing.T) {
- err := NewParseError("Content-Type", "header", "application(", errors.New("unable to parse"))
- assert.EqualValues(t, 400, err.Code())
- assert.Equal(t, "parsing Content-Type header from \"application(\" failed, because unable to parse", err.Error())
-
- err = NewParseError("Content-Type", "", "application(", errors.New("unable to parse"))
- assert.EqualValues(t, 400, err.Code())
- assert.Equal(t, "parsing Content-Type from \"application(\" failed, because unable to parse", err.Error())
-}
diff --git a/vendor/github.com/go-openapi/errors/schema.go b/vendor/github.com/go-openapi/errors/schema.go
deleted file mode 100644
index c52f5c314..000000000
--- a/vendor/github.com/go-openapi/errors/schema.go
+++ /dev/null
@@ -1,548 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "fmt"
- "strings"
-)
-
-const (
- invalidType = "%s is an invalid type name"
- typeFail = "%s in %s must be of type %s"
- typeFailWithData = "%s in %s must be of type %s: %q"
- typeFailWithError = "%s in %s must be of type %s, because: %s"
- requiredFail = "%s in %s is required"
- tooLongMessage = "%s in %s should be at most %d chars long"
- tooShortMessage = "%s in %s should be at least %d chars long"
- patternFail = "%s in %s should match '%s'"
- enumFail = "%s in %s should be one of %v"
- multipleOfFail = "%s in %s should be a multiple of %v"
- maxIncFail = "%s in %s should be less than or equal to %v"
- maxExcFail = "%s in %s should be less than %v"
- minIncFail = "%s in %s should be greater than or equal to %v"
- minExcFail = "%s in %s should be greater than %v"
- uniqueFail = "%s in %s shouldn't contain duplicates"
- maxItemsFail = "%s in %s should have at most %d items"
- minItemsFail = "%s in %s should have at least %d items"
- typeFailNoIn = "%s must be of type %s"
- typeFailWithDataNoIn = "%s must be of type %s: %q"
- typeFailWithErrorNoIn = "%s must be of type %s, because: %s"
- requiredFailNoIn = "%s is required"
- tooLongMessageNoIn = "%s should be at most %d chars long"
- tooShortMessageNoIn = "%s should be at least %d chars long"
- patternFailNoIn = "%s should match '%s'"
- enumFailNoIn = "%s should be one of %v"
- multipleOfFailNoIn = "%s should be a multiple of %v"
- maxIncFailNoIn = "%s should be less than or equal to %v"
- maxExcFailNoIn = "%s should be less than %v"
- minIncFailNoIn = "%s should be greater than or equal to %v"
- minExcFailNoIn = "%s should be greater than %v"
- uniqueFailNoIn = "%s shouldn't contain duplicates"
- maxItemsFailNoIn = "%s should have at most %d items"
- minItemsFailNoIn = "%s should have at least %d items"
- noAdditionalItems = "%s in %s can't have additional items"
- noAdditionalItemsNoIn = "%s can't have additional items"
- tooFewProperties = "%s in %s should have at least %d properties"
- tooFewPropertiesNoIn = "%s should have at least %d properties"
- tooManyProperties = "%s in %s should have at most %d properties"
- tooManyPropertiesNoIn = "%s should have at most %d properties"
- unallowedProperty = "%s.%s in %s is a forbidden property"
- unallowedPropertyNoIn = "%s.%s is a forbidden property"
- failedAllPatternProps = "%s.%s in %s failed all pattern properties"
- failedAllPatternPropsNoIn = "%s.%s failed all pattern properties"
-)
-
-// All code responses can be used to differentiate errors for different handling
-// by the consuming program
-const (
- // CompositeErrorCode remains 422 for backwards-compatibility
- // and to separate it from validation errors with cause
- CompositeErrorCode = 422
- // InvalidTypeCode is used for any subclass of invalid types
- InvalidTypeCode = 600 + iota
- RequiredFailCode
- TooLongFailCode
- TooShortFailCode
- PatternFailCode
- EnumFailCode
- MultipleOfFailCode
- MaxFailCode
- MinFailCode
- UniqueFailCode
- MaxItemsFailCode
- MinItemsFailCode
- NoAdditionalItemsCode
- TooFewPropertiesCode
- TooManyPropertiesCode
- UnallowedPropertyCode
- FailedAllPatternPropsCode
-)
-
-// CompositeError is an error that groups several errors together
-type CompositeError struct {
- Errors []error
- code int32
- message string
-}
-
-// Code for this error
-func (c *CompositeError) Code() int32 {
- return c.code
-}
-
-func (c *CompositeError) Error() string {
- if len(c.Errors) > 0 {
- msgs := []string{c.message + ":"}
- for _, e := range c.Errors {
- msgs = append(msgs, e.Error())
- }
- return strings.Join(msgs, "\n")
- }
- return c.message
-}
-
-// CompositeValidationError an error that wraps a list of other errors
-func CompositeValidationError(errors ...error) *CompositeError {
- return &CompositeError{
- code: CompositeErrorCode,
- Errors: append([]error{}, errors...),
- message: "validation failure list",
- }
-}
-
-// FailedAllPatternProperties an error for when a property fails to match any of the declared pattern properties
-func FailedAllPatternProperties(name, in, key string) *Validation {
- msg := fmt.Sprintf(failedAllPatternProps, name, key, in)
- if in == "" {
- msg = fmt.Sprintf(failedAllPatternPropsNoIn, name, key)
- }
- return &Validation{
- code: FailedAllPatternPropsCode,
- Name: name,
- In: in,
- Value: key,
- message: msg,
- }
-}
-
-// PropertyNotAllowed an error for when a property is present but not allowed
-func PropertyNotAllowed(name, in, key string) *Validation {
- msg := fmt.Sprintf(unallowedProperty, name, key, in)
- if in == "" {
- msg = fmt.Sprintf(unallowedPropertyNoIn, name, key)
- }
- return &Validation{
- code: UnallowedPropertyCode,
- Name: name,
- In: in,
- Value: key,
- message: msg,
- }
-}
-
-// TooFewProperties an error for an object with too few properties
-func TooFewProperties(name, in string, n int64) *Validation {
- msg := fmt.Sprintf(tooFewProperties, name, in, n)
- if in == "" {
- msg = fmt.Sprintf(tooFewPropertiesNoIn, name, n)
- }
- return &Validation{
- code: TooFewPropertiesCode,
- Name: name,
- In: in,
- Value: n,
- message: msg,
- }
-}
-
-// TooManyProperties an error for an object with too many properties
-func TooManyProperties(name, in string, n int64) *Validation {
- msg := fmt.Sprintf(tooManyProperties, name, in, n)
- if in == "" {
- msg = fmt.Sprintf(tooManyPropertiesNoIn, name, n)
- }
- return &Validation{
- code: TooManyPropertiesCode,
- Name: name,
- In: in,
- Value: n,
- message: msg,
- }
-}
-
-// AdditionalItemsNotAllowed an error for invalid additional items
-func AdditionalItemsNotAllowed(name, in string) *Validation {
- msg := fmt.Sprintf(noAdditionalItems, name, in)
- if in == "" {
- msg = fmt.Sprintf(noAdditionalItemsNoIn, name)
- }
- return &Validation{
- code: NoAdditionalItemsCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// InvalidCollectionFormat another flavor of invalid type error
-func InvalidCollectionFormat(name, in, format string) *Validation {
- return &Validation{
- code: InvalidTypeCode,
- Name: name,
- In: in,
- Value: format,
- message: fmt.Sprintf("the collection format %q is not supported for the %s param %q", format, in, name),
- }
-}
-
-// InvalidTypeName an error for when the type is invalid
-func InvalidTypeName(typeName string) *Validation {
- return &Validation{
- code: InvalidTypeCode,
- Value: typeName,
- message: fmt.Sprintf(invalidType, typeName),
- }
-}
-
-// InvalidType creates an error for when the type is invalid
-func InvalidType(name, in, typeName string, value interface{}) *Validation {
- var message string
-
- if in != "" {
- switch value.(type) {
- case string:
- message = fmt.Sprintf(typeFailWithData, name, in, typeName, value)
- case error:
- message = fmt.Sprintf(typeFailWithError, name, in, typeName, value)
- default:
- message = fmt.Sprintf(typeFail, name, in, typeName)
- }
- } else {
- switch value.(type) {
- case string:
- message = fmt.Sprintf(typeFailWithDataNoIn, name, typeName, value)
- case error:
- message = fmt.Sprintf(typeFailWithErrorNoIn, name, typeName, value)
- default:
- message = fmt.Sprintf(typeFailNoIn, name, typeName)
- }
- }
-
- return &Validation{
- code: InvalidTypeCode,
- Name: name,
- In: in,
- Value: value,
- message: message,
- }
-
-}
-
-// DuplicateItems error for when an array contains duplicates
-func DuplicateItems(name, in string) *Validation {
- msg := fmt.Sprintf(uniqueFail, name, in)
- if in == "" {
- msg = fmt.Sprintf(uniqueFailNoIn, name)
- }
- return &Validation{
- code: UniqueFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// TooManyItems error for when an array contains too many items
-func TooManyItems(name, in string, max int64) *Validation {
- msg := fmt.Sprintf(maxItemsFail, name, in, max)
- if in == "" {
- msg = fmt.Sprintf(maxItemsFailNoIn, name, max)
- }
-
- return &Validation{
- code: MaxItemsFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// TooFewItems error for when an array contains too few items
-func TooFewItems(name, in string, min int64) *Validation {
- msg := fmt.Sprintf(minItemsFail, name, in, min)
- if in == "" {
- msg = fmt.Sprintf(minItemsFailNoIn, name, min)
- }
- return &Validation{
- code: MinItemsFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// ExceedsMaximumInt error for when maximum validation fails
-func ExceedsMaximumInt(name, in string, max int64, exclusive bool) *Validation {
- var message string
- if in == "" {
- m := maxIncFailNoIn
- if exclusive {
- m = maxExcFailNoIn
- }
- message = fmt.Sprintf(m, name, max)
- } else {
- m := maxIncFail
- if exclusive {
- m = maxExcFail
- }
- message = fmt.Sprintf(m, name, in, max)
- }
- return &Validation{
- code: MaxFailCode,
- Name: name,
- In: in,
- Value: max,
- message: message,
- }
-}
-
-// ExceedsMaximumUint error for when maximum validation fails
-func ExceedsMaximumUint(name, in string, max uint64, exclusive bool) *Validation {
- var message string
- if in == "" {
- m := maxIncFailNoIn
- if exclusive {
- m = maxExcFailNoIn
- }
- message = fmt.Sprintf(m, name, max)
- } else {
- m := maxIncFail
- if exclusive {
- m = maxExcFail
- }
- message = fmt.Sprintf(m, name, in, max)
- }
- return &Validation{
- code: MaxFailCode,
- Name: name,
- In: in,
- Value: max,
- message: message,
- }
-}
-
-// ExceedsMaximum error for when maximum validation fails
-func ExceedsMaximum(name, in string, max float64, exclusive bool) *Validation {
- var message string
- if in == "" {
- m := maxIncFailNoIn
- if exclusive {
- m = maxExcFailNoIn
- }
- message = fmt.Sprintf(m, name, max)
- } else {
- m := maxIncFail
- if exclusive {
- m = maxExcFail
- }
- message = fmt.Sprintf(m, name, in, max)
- }
- return &Validation{
- code: MaxFailCode,
- Name: name,
- In: in,
- Value: max,
- message: message,
- }
-}
-
-// ExceedsMinimumInt error for when minimum validation fails
-func ExceedsMinimumInt(name, in string, min int64, exclusive bool) *Validation {
- var message string
- if in == "" {
- m := minIncFailNoIn
- if exclusive {
- m = minExcFailNoIn
- }
- message = fmt.Sprintf(m, name, min)
- } else {
- m := minIncFail
- if exclusive {
- m = minExcFail
- }
- message = fmt.Sprintf(m, name, in, min)
- }
- return &Validation{
- code: MinFailCode,
- Name: name,
- In: in,
- Value: min,
- message: message,
- }
-}
-
-// ExceedsMinimumUint error for when minimum validation fails
-func ExceedsMinimumUint(name, in string, min uint64, exclusive bool) *Validation {
- var message string
- if in == "" {
- m := minIncFailNoIn
- if exclusive {
- m = minExcFailNoIn
- }
- message = fmt.Sprintf(m, name, min)
- } else {
- m := minIncFail
- if exclusive {
- m = minExcFail
- }
- message = fmt.Sprintf(m, name, in, min)
- }
- return &Validation{
- code: MinFailCode,
- Name: name,
- In: in,
- Value: min,
- message: message,
- }
-}
-
-// ExceedsMinimum error for when minimum validation fails
-func ExceedsMinimum(name, in string, min float64, exclusive bool) *Validation {
- var message string
- if in == "" {
- m := minIncFailNoIn
- if exclusive {
- m = minExcFailNoIn
- }
- message = fmt.Sprintf(m, name, min)
- } else {
- m := minIncFail
- if exclusive {
- m = minExcFail
- }
- message = fmt.Sprintf(m, name, in, min)
- }
- return &Validation{
- code: MinFailCode,
- Name: name,
- In: in,
- Value: min,
- message: message,
- }
-}
-
-// NotMultipleOf error for when multiple of validation fails
-func NotMultipleOf(name, in string, multiple float64) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(multipleOfFailNoIn, name, multiple)
- } else {
- msg = fmt.Sprintf(multipleOfFail, name, in, multiple)
- }
- return &Validation{
- code: MultipleOfFailCode,
- Name: name,
- In: in,
- Value: multiple,
- message: msg,
- }
-}
-
-// EnumFail error for when an enum validation fails
-func EnumFail(name, in string, value interface{}, values []interface{}) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(enumFailNoIn, name, values)
- } else {
- msg = fmt.Sprintf(enumFail, name, in, values)
- }
-
- return &Validation{
- code: EnumFailCode,
- Name: name,
- In: in,
- Value: value,
- Values: values,
- message: msg,
- }
-}
-
-// Required error for when a value is missing
-func Required(name, in string) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(requiredFailNoIn, name)
- } else {
- msg = fmt.Sprintf(requiredFail, name, in)
- }
- return &Validation{
- code: RequiredFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// TooLong error for when a string is too long
-func TooLong(name, in string, max int64) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(tooLongMessageNoIn, name, max)
- } else {
- msg = fmt.Sprintf(tooLongMessage, name, in, max)
- }
- return &Validation{
- code: TooLongFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// TooShort error for when a string is too short
-func TooShort(name, in string, min int64) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(tooShortMessageNoIn, name, min)
- } else {
- msg = fmt.Sprintf(tooShortMessage, name, in, min)
- }
-
- return &Validation{
- code: TooShortFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
-
-// FailedPattern error for when a string fails a regex pattern match
-// The pattern that is returned is the ECMA syntax version of the pattern, not the Go version.
-func FailedPattern(name, in, pattern string) *Validation {
- var msg string
- if in == "" {
- msg = fmt.Sprintf(patternFailNoIn, name, pattern)
- } else {
- msg = fmt.Sprintf(patternFail, name, in, pattern)
- }
-
- return &Validation{
- code: PatternFailCode,
- Name: name,
- In: in,
- message: msg,
- }
-}
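
For orientation, a minimal sketch of how the validation constructors and the CompositeError removed above are typically combined by a consumer. This is a hedged example, not part of the vendored sources; the field names and values are illustrative only.

package main

import (
	"fmt"

	"github.com/go-openapi/errors"
)

func main() {
	// Collect individual validation failures using the constructors above.
	var errs []error
	errs = append(errs, errors.Required("name", "body"))
	errs = append(errs, errors.TooShort("password", "body", 8))
	errs = append(errs, errors.InvalidType("age", "query", "integer", "abc"))

	// Group them into a single error; the composite carries CompositeErrorCode (422)
	// and joins the individual messages under "validation failure list".
	composite := errors.CompositeValidationError(errs...)
	fmt.Println(composite.Code())  // 422
	fmt.Println(composite.Error()) // one line per wrapped error
}
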
diff --git a/vendor/github.com/go-openapi/errors/schema_test.go b/vendor/github.com/go-openapi/errors/schema_test.go
deleted file mode 100644
index 3a6487237..000000000
--- a/vendor/github.com/go-openapi/errors/schema_test.go
+++ /dev/null
@@ -1,284 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package errors
-
-import (
- "errors"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestSchemaErrors(t *testing.T) {
- err := InvalidType("confirmed", "query", "boolean", nil)
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "confirmed in query must be of type boolean", err.Error())
-
- err = InvalidType("confirmed", "", "boolean", nil)
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "confirmed must be of type boolean", err.Error())
-
- err = InvalidType("confirmed", "query", "boolean", "hello")
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "confirmed in query must be of type boolean: \"hello\"", err.Error())
-
- err = InvalidType("confirmed", "query", "boolean", errors.New("hello"))
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "confirmed in query must be of type boolean, because: hello", err.Error())
-
- err = InvalidType("confirmed", "", "boolean", "hello")
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "confirmed must be of type boolean: \"hello\"", err.Error())
-
- err = InvalidType("confirmed", "", "boolean", errors.New("hello"))
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "confirmed must be of type boolean, because: hello", err.Error())
-
- err = DuplicateItems("uniques", "query")
- assert.Error(t, err)
- assert.EqualValues(t, UniqueFailCode, err.Code())
- assert.Equal(t, "uniques in query shouldn't contain duplicates", err.Error())
-
- err = DuplicateItems("uniques", "")
- assert.Error(t, err)
- assert.EqualValues(t, UniqueFailCode, err.Code())
- assert.Equal(t, "uniques shouldn't contain duplicates", err.Error())
-
- err = TooManyItems("something", "query", 5)
- assert.Error(t, err)
- assert.EqualValues(t, MaxItemsFailCode, err.Code())
- assert.Equal(t, "something in query should have at most 5 items", err.Error())
-
- err = TooManyItems("something", "", 5)
- assert.Error(t, err)
- assert.EqualValues(t, MaxItemsFailCode, err.Code())
- assert.Equal(t, "something should have at most 5 items", err.Error())
-
- err = TooFewItems("something", "", 5)
- assert.Error(t, err)
- assert.EqualValues(t, MinItemsFailCode, err.Code())
- assert.Equal(t, "something should have at least 5 items", err.Error())
-
- err = ExceedsMaximumInt("something", "query", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something in query should be less than or equal to 5", err.Error())
-
- err = ExceedsMaximumInt("something", "", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something should be less than or equal to 5", err.Error())
-
- err = ExceedsMaximumInt("something", "query", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something in query should be less than 5", err.Error())
-
- err = ExceedsMaximumInt("something", "", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something should be less than 5", err.Error())
-
- err = ExceedsMaximumUint("something", "query", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something in query should be less than or equal to 5", err.Error())
-
- err = ExceedsMaximumUint("something", "", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something should be less than or equal to 5", err.Error())
-
- err = ExceedsMaximumUint("something", "query", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something in query should be less than 5", err.Error())
-
- err = ExceedsMaximumUint("something", "", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something should be less than 5", err.Error())
-
- err = ExceedsMaximum("something", "query", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something in query should be less than or equal to 5", err.Error())
-
- err = ExceedsMaximum("something", "", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something should be less than or equal to 5", err.Error())
-
- err = ExceedsMaximum("something", "query", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something in query should be less than 5", err.Error())
-
- err = ExceedsMaximum("something", "", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MaxFailCode, err.Code())
- assert.Equal(t, "something should be less than 5", err.Error())
-
- err = ExceedsMinimumInt("something", "query", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something in query should be greater than or equal to 5", err.Error())
-
- err = ExceedsMinimumInt("something", "", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something should be greater than or equal to 5", err.Error())
-
- err = ExceedsMinimumInt("something", "query", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something in query should be greater than 5", err.Error())
-
- err = ExceedsMinimumInt("something", "", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something should be greater than 5", err.Error())
-
- err = ExceedsMinimumUint("something", "query", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something in query should be greater than or equal to 5", err.Error())
-
- err = ExceedsMinimumUint("something", "", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something should be greater than or equal to 5", err.Error())
-
- err = ExceedsMinimumUint("something", "query", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something in query should be greater than 5", err.Error())
-
- err = ExceedsMinimumUint("something", "", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something should be greater than 5", err.Error())
-
- err = ExceedsMinimum("something", "query", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something in query should be greater than or equal to 5", err.Error())
-
- err = ExceedsMinimum("something", "", 5, false)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something should be greater than or equal to 5", err.Error())
-
- err = ExceedsMinimum("something", "query", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something in query should be greater than 5", err.Error())
-
- err = ExceedsMinimum("something", "", 5, true)
- assert.Error(t, err)
- assert.EqualValues(t, MinFailCode, err.Code())
- assert.Equal(t, "something should be greater than 5", err.Error())
-
- err = NotMultipleOf("something", "query", 5)
- assert.Error(t, err)
- assert.EqualValues(t, MultipleOfFailCode, err.Code())
- assert.Equal(t, "something in query should be a multiple of 5", err.Error())
-
- err = NotMultipleOf("something", "", 5)
- assert.Error(t, err)
- assert.EqualValues(t, MultipleOfFailCode, err.Code())
- assert.Equal(t, "something should be a multiple of 5", err.Error())
-
- err = EnumFail("something", "query", "yada", []interface{}{"hello", "world"})
- assert.Error(t, err)
- assert.EqualValues(t, EnumFailCode, err.Code())
- assert.Equal(t, "something in query should be one of [hello world]", err.Error())
-
- err = EnumFail("something", "", "yada", []interface{}{"hello", "world"})
- assert.Error(t, err)
- assert.EqualValues(t, EnumFailCode, err.Code())
- assert.Equal(t, "something should be one of [hello world]", err.Error())
-
- err = Required("something", "query")
- assert.Error(t, err)
- assert.EqualValues(t, RequiredFailCode, err.Code())
- assert.Equal(t, "something in query is required", err.Error())
-
- err = Required("something", "")
- assert.Error(t, err)
- assert.EqualValues(t, RequiredFailCode, err.Code())
- assert.Equal(t, "something is required", err.Error())
-
- err = TooLong("something", "query", 5)
- assert.Error(t, err)
- assert.EqualValues(t, TooLongFailCode, err.Code())
- assert.Equal(t, "something in query should be at most 5 chars long", err.Error())
-
- err = TooLong("something", "", 5)
- assert.Error(t, err)
- assert.EqualValues(t, TooLongFailCode, err.Code())
- assert.Equal(t, "something should be at most 5 chars long", err.Error())
-
- err = TooShort("something", "query", 5)
- assert.Error(t, err)
- assert.EqualValues(t, TooShortFailCode, err.Code())
- assert.Equal(t, "something in query should be at least 5 chars long", err.Error())
-
- err = TooShort("something", "", 5)
- assert.Error(t, err)
- assert.EqualValues(t, TooShortFailCode, err.Code())
- assert.Equal(t, "something should be at least 5 chars long", err.Error())
-
- err = FailedPattern("something", "query", "\\d+")
- assert.Error(t, err)
- assert.EqualValues(t, PatternFailCode, err.Code())
- assert.Equal(t, "something in query should match '\\d+'", err.Error())
-
- err = FailedPattern("something", "", "\\d+")
- assert.Error(t, err)
- assert.EqualValues(t, PatternFailCode, err.Code())
- assert.Equal(t, "something should match '\\d+'", err.Error())
-
- err = InvalidTypeName("something")
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "something is an invalid type name", err.Error())
-
- err = AdditionalItemsNotAllowed("something", "query")
- assert.Error(t, err)
- assert.EqualValues(t, NoAdditionalItemsCode, err.Code())
- assert.Equal(t, "something in query can't have additional items", err.Error())
-
- err = AdditionalItemsNotAllowed("something", "")
- assert.Error(t, err)
- assert.EqualValues(t, NoAdditionalItemsCode, err.Code())
- assert.Equal(t, "something can't have additional items", err.Error())
-
- err = InvalidCollectionFormat("something", "query", "yada")
- assert.Error(t, err)
- assert.EqualValues(t, InvalidTypeCode, err.Code())
- assert.Equal(t, "the collection format \"yada\" is not supported for the query param \"something\"", err.Error())
-
- err2 := CompositeValidationError()
- assert.Error(t, err2)
- assert.EqualValues(t, CompositeErrorCode, err2.Code())
- assert.Equal(t, "validation failure list", err2.Error())
-}
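
The Code()/Error() pair exercised by the test above is what callers typically map onto an HTTP response. A hedged sketch follows; the validateStatus helper and the clamping rule are hypothetical and not part of the vendored package.

package main

import (
	"net/http"

	"github.com/go-openapi/errors"
)

// validateStatus is a hypothetical check returning one of the Validation
// errors built by the constructors in schema.go above.
func validateStatus(r *http.Request) *errors.Validation {
	if r.URL.Query().Get("status") == "" {
		return errors.Required("status", "query")
	}
	return nil
}

func handler(rw http.ResponseWriter, r *http.Request) {
	if err := validateStatus(r); err != nil {
		// Code() returns the validation code (a 6xx value such as RequiredFailCode);
		// this sketch folds anything above the HTTP range back to 422.
		status := int(err.Code())
		if status >= 600 {
			status = 422
		}
		http.Error(rw, err.Error(), status)
		return
	}
	rw.WriteHeader(http.StatusNoContent)
}

func main() {
	_ = http.ListenAndServe(":8080", http.HandlerFunc(handler))
}
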
diff --git a/vendor/github.com/go-openapi/loads/.drone.sec b/vendor/github.com/go-openapi/loads/.drone.sec
deleted file mode 100644
index 6d3e84399..000000000
--- a/vendor/github.com/go-openapi/loads/.drone.sec
+++ /dev/null
@@ -1 +0,0 @@
-eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.xUjixvmMMeampw0Doyr_XLvcV5ICmDgDFmlcWqgmO84O3Hwn6dqbMkwOjpKMOyEJW_98b5Om5ED59IFt2S0T_OarlrsJL8jOK5fqxSMNXy2w8LfI-e5l1URverW41ofAVK8m9wK05q2BSJM_M6PyyODaQeDBiCVK1HreMZBlXpuUDVtBMPILQoga0eSZOsTR3DYEpZIS0A0Rsa5yIhMYR5d5-JMYqbqOCB7tNJ-BM83OzYgL7Hrz0J15kqaJmhQ-GJoMJDzOemSO9KxLCOfSPp11R_G3Mfd48xYnuiRuPOTakbOCLxuYviH6uoGVIOhnMyY9qKiDKbOn4BQUi1-igA.6qjQzq9nzAxRRKV_.z79R5cMFAEuEaAh6U9ykiL8oIqzMbs_I2C-hSFRh3HYRJ4fTB-9LrcbF0uASIOq7bBn4OQzW-0QFwYOs1uaawmrByGngV5d0afiZf_LBKcmTF2vtxRi_A_nxD-EHoPmh3lKBU5WNDe_8kLjEeS89HeyyFPuv5iQbqhzdqPFohHKVigwVqVYYLjB8GWQ4t7tC4c8l5rHanaXf71W0e3op2m8bebpZL0JPGhnULVA1oU27TYeLsO112JkIYtBwZxzvAs--bBFoKeGJWVMFzrKN68UACGZ9RFw0uGJbBmVC4-jRuIc6XpqeEqw3KG-rjFzkeEor3575qW-8kiXYqpub9SFUc3SSZkxJ8hB3SrnMBOuDUSenrXNpAbltmV3KAALzN3_bMBQuihwSRIn0Hg7-Dpni8BieMe44RMDvRu6p_71aeU_KW4V7Umy_h8gpIvQFuKGdTQH2ahsyCXL0ojqjMbVMdoWpDQTQ2_Fy8Qt_p2kJ8BgDo-1Akd4a6BNU2NGqsdnrJmtVKcTqLBadf9ylCwxHdGVrtNYORALSms2T6Q1s-poQnMjIwN8lnUD8ABUBpt4uVtrYkiWPVwrwywLQeiHhR-pboe_53kWDAx4Hy4rpbKsaxanYhy_bEbAYKb3aIUA.75GD4kRBCQdcGFYP1QYdCg \ No newline at end of file
diff --git a/vendor/github.com/go-openapi/loads/.drone.yml b/vendor/github.com/go-openapi/loads/.drone.yml
deleted file mode 100644
index 982291035..000000000
--- a/vendor/github.com/go-openapi/loads/.drone.yml
+++ /dev/null
@@ -1,39 +0,0 @@
-clone:
- path: github.com/go-openapi/loads
-
-matrix:
- GO_VERSION:
- - "1.6"
-
-build:
- integration:
- image: golang:$$GO_VERSION
- pull: true
- environment:
- GOCOVMODE: "count"
- commands:
- - go get -u github.com/axw/gocov/gocov
- - go get -u gopkg.in/matm/v1/gocov-html
- - go get -u github.com/cee-dub/go-junit-report
- - go get -u github.com/stretchr/testify/assert
- - go get -u gopkg.in/yaml.v2
- - go get -u github.com/go-openapi/swag
- - go get -u github.com/go-openapi/analysis
- - go get -u github.com/go-openapi/spec
- - ./hack/build-drone.sh
-
-notify:
- slack:
- channel: bots
- webhook_url: $$SLACK_URL
- username: drone
-
-publish:
- coverage:
- server: https://coverage.vmware.run
- token: $$GITHUB_TOKEN
- # threshold: 70
- # must_increase: true
- when:
- matrix:
- GO_VERSION: "1.6"
diff --git a/vendor/github.com/go-openapi/loads/.editorconfig b/vendor/github.com/go-openapi/loads/.editorconfig
deleted file mode 100644
index 3152da69a..000000000
--- a/vendor/github.com/go-openapi/loads/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/loads/.gitignore b/vendor/github.com/go-openapi/loads/.gitignore
deleted file mode 100644
index e4f15f17b..000000000
--- a/vendor/github.com/go-openapi/loads/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-secrets.yml
-coverage.out
-profile.cov
-profile.out
diff --git a/vendor/github.com/go-openapi/loads/.travis.yml b/vendor/github.com/go-openapi/loads/.travis.yml
deleted file mode 100644
index b0d357e65..000000000
--- a/vendor/github.com/go-openapi/loads/.travis.yml
+++ /dev/null
@@ -1,16 +0,0 @@
-language: go
-go:
-- 1.8
-install:
-- go get -u github.com/stretchr/testify
-- go get -u github.com/go-openapi/analysis
-- go get -u github.com/go-openapi/spec
-- go get -u github.com/go-openapi/swag
-- go get -u gopkg.in/yaml.v2
-script:
-- ./hack/coverage
-after_success:
-- bash <(curl -s https://codecov.io/bash)
-notifications:
- slack:
- secure: OxkPwVp35qBTUilgWC8xykSj+sGMcj0h8IIOKD+Rflx2schZVlFfdYdyVBM+s9OqeOfvtuvnR9v1Ye2rPKAvcjWdC4LpRGUsgmItZaI6Um8Aj6+K9udCw5qrtZVfOVmRu8LieH//XznWWKdOultUuniW0MLqw5+II87Gd00RWbCGi0hk0PykHe7uK+PDA2BEbqyZ2WKKYCvfB3j+0nrFOHScXqnh0V05l2E83J4+Sgy1fsPy+1WdX58ZlNBG333ibaC1FS79XvKSmTgKRkx3+YBo97u6ZtUmJa5WZjf2OdLG3KIckGWAv6R5xgxeU31N0Ng8L332w/Edpp2O/M2bZwdnKJ8hJQikXIAQbICbr+lTDzsoNzMdEIYcHpJ5hjPbiUl3Bmd+Jnsjf5McgAZDiWIfpCKZ29tPCEkVwRsOCqkyPRMNMzHHmoja495P5jR+ODS7+J8RFg5xgcnOgpP9D4Wlhztlf5WyZMpkLxTUD+bZq2SRf50HfHFXTkfq22zPl3d1eq0yrLwh/Z/fWKkfb6SyysROL8y6s8u3dpFX1YHSg0BR6i913h4aoZw9B2BG27cafLLTwKYsp2dFo1PWl4O6u9giFJIeqwloZHLKKrwh0cBFhB7RH0I58asxkZpCH6uWjJierahmHe7iS+E6i+9oCHkOZ59hmCYNimIs3hM=
diff --git a/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e..000000000
--- a/vendor/github.com/go-openapi/loads/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/loads/LICENSE b/vendor/github.com/go-openapi/loads/LICENSE
deleted file mode 100644
index d64569567..000000000
--- a/vendor/github.com/go-openapi/loads/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/loads/README.md b/vendor/github.com/go-openapi/loads/README.md
deleted file mode 100644
index 6dbb8342e..000000000
--- a/vendor/github.com/go-openapi/loads/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Loads OAI specs [![Build Status](https://travis-ci.org/go-openapi/loads.svg?branch=master)](https://travis-ci.org/go-openapi/loads) [![codecov](https://codecov.io/gh/go-openapi/loads/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/loads) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
-
-[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/loads/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/loads?status.svg)](http://godoc.org/github.com/go-openapi/loads)
-
-Loading of OAI specification documents from local or remote locations.
diff --git a/vendor/github.com/go-openapi/loads/json_test.go b/vendor/github.com/go-openapi/loads/json_test.go
deleted file mode 100644
index 8b60eb19f..000000000
--- a/vendor/github.com/go-openapi/loads/json_test.go
+++ /dev/null
@@ -1,318 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package loads
-
-import (
- "net/http"
- "net/http/httptest"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestLoadJSON(t *testing.T) {
- serv := httptest.NewServer(http.HandlerFunc(jsonPestoreServer))
- defer serv.Close()
-
- s, err := JSONSpec(serv.URL)
- assert.NoError(t, err)
- assert.NotNil(t, s)
-
- ts2 := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
- rw.WriteHeader(http.StatusNotFound)
- rw.Write([]byte("{}"))
- }))
- defer ts2.Close()
- _, err = JSONSpec(ts2.URL)
- assert.Error(t, err)
-}
-
-var jsonPestoreServer = func(rw http.ResponseWriter, r *http.Request) {
- rw.WriteHeader(http.StatusOK)
- rw.Write([]byte(petstoreJSON))
-}
-
-const petstoreJSON = `{
- "swagger": "2.0",
- "info": {
- "version": "1.0.0",
- "title": "Swagger Petstore",
- "contact": {
- "name": "Wordnik API Team",
- "url": "http://developer.wordnik.com"
- },
- "license": {
- "name": "Creative Commons 4.0 International",
- "url": "http://creativecommons.org/licenses/by/4.0/"
- }
- },
- "host": "petstore.swagger.wordnik.com",
- "basePath": "/api",
- "schemes": [
- "http"
- ],
- "paths": {
- "/pets": {
- "get": {
- "security": [
- {
- "oauth2": ["read"]
- }
- ],
- "tags": [ "Pet Operations" ],
- "operationId": "getAllPets",
- "parameters": [
- {
- "name": "status",
- "in": "query",
- "description": "The status to filter by",
- "type": "string"
- }
- ],
- "summary": "Finds all pets in the system",
- "responses": {
- "200": {
- "description": "Pet response",
- "schema": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/Pet"
- }
- }
- },
- "default": {
- "description": "Unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- },
- "post": {
- "security": [
- {
- "oauth2": ["write"]
- }
- ],
- "tags": [ "Pet Operations" ],
- "operationId": "createPet",
- "summary": "Creates a new pet",
- "parameters": [
- {
- "name": "pet",
- "in": "body",
- "description": "The Pet to create",
- "required": true,
- "schema": {
- "$ref": "#/definitions/newPet"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "Created Pet response",
- "schema": {
- "$ref": "#/definitions/Pet"
- }
- },
- "default": {
- "description": "Unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- }
- },
- "/pets/{id}": {
- "delete": {
- "security": [
- {
- "oauth2": ["write"]
- }
- ],
- "description": "Deletes the Pet by id",
- "operationId": "deletePet",
- "parameters": [
- {
- "name": "id",
- "in": "path",
- "description": "ID of pet to delete",
- "required": true,
- "type": "integer",
- "format": "int64"
- }
- ],
- "responses": {
- "204": {
- "description": "pet deleted"
- },
- "default": {
- "description": "unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- },
- "get": {
- "security": [
- {
- "oauth2": ["read"]
- }
- ],
- "tags": [ "Pet Operations" ],
- "operationId": "getPetById",
- "summary": "Finds the pet by id",
- "responses": {
- "200": {
- "description": "Pet response",
- "schema": {
- "$ref": "#/definitions/Pet"
- }
- },
- "default": {
- "description": "Unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- },
- "parameters": [
- {
- "name": "id",
- "in": "path",
- "description": "ID of pet",
- "required": true,
- "type": "integer",
- "format": "int64"
- }
- ]
- }
- },
- "definitions": {
- "Category": {
- "id": "Category",
- "properties": {
- "id": {
- "format": "int64",
- "type": "integer"
- },
- "name": {
- "type": "string"
- }
- }
- },
- "Pet": {
- "id": "Pet",
- "properties": {
- "category": {
- "$ref": "#/definitions/Category"
- },
- "id": {
- "description": "unique identifier for the pet",
- "format": "int64",
- "maximum": 100.0,
- "minimum": 0.0,
- "type": "integer"
- },
- "name": {
- "type": "string"
- },
- "photoUrls": {
- "items": {
- "type": "string"
- },
- "type": "array"
- },
- "status": {
- "description": "pet status in the store",
- "enum": [
- "available",
- "pending",
- "sold"
- ],
- "type": "string"
- },
- "tags": {
- "items": {
- "$ref": "#/definitions/Tag"
- },
- "type": "array"
- }
- },
- "required": [
- "id",
- "name"
- ]
- },
- "newPet": {
- "allOf": [
- {
- "$ref": "#/definitions/Pet"
- }
- ],
- "required": [
- "name"
- ]
- },
- "Tag": {
- "id": "Tag",
- "properties": {
- "id": {
- "format": "int64",
- "type": "integer"
- },
- "name": {
- "type": "string"
- }
- }
- },
- "Error": {
- "required": [
- "code",
- "message"
- ],
- "properties": {
- "code": {
- "type": "integer",
- "format": "int32"
- },
- "message": {
- "type": "string"
- }
- }
- }
- },
- "produces": [
- "application/json",
- "application/xml",
- "text/plain",
- "text/html"
- ],
- "securityDefinitions": {
- "oauth2": {
- "type": "oauth2",
- "scopes": {
- "read": "Read access.",
- "write": "Write access"
- },
- "flow": "accessCode",
- "authorizationUrl": "http://petstore.swagger.wordnik.com/oauth/authorize",
- "tokenUrl": "http://petstore.swagger.wordnik.com/oauth/token"
- }
- }
-}`
diff --git a/vendor/github.com/go-openapi/loads/spec.go b/vendor/github.com/go-openapi/loads/spec.go
deleted file mode 100644
index 649ca06e6..000000000
--- a/vendor/github.com/go-openapi/loads/spec.go
+++ /dev/null
@@ -1,279 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package loads
-
-import (
- "bytes"
- "encoding/json"
- "fmt"
- "net/url"
-
- "github.com/go-openapi/analysis"
- "github.com/go-openapi/spec"
- "github.com/go-openapi/swag"
-)
-
-// JSONDoc loads a json document from either a file or a remote url
-func JSONDoc(path string) (json.RawMessage, error) {
- data, err := swag.LoadFromFileOrHTTP(path)
- if err != nil {
- return nil, err
- }
- return json.RawMessage(data), nil
-}
-
-// DocLoader represents a doc loader type
-type DocLoader func(string) (json.RawMessage, error)
-
-// DocMatcher represents a predicate to check if a loader matches
-type DocMatcher func(string) bool
-
-var (
- loaders *loader
- defaultLoader *loader
-)
-
-func init() {
- defaultLoader = &loader{Match: func(_ string) bool { return true }, Fn: JSONDoc}
- loaders = defaultLoader
- spec.PathLoader = loaders.Fn
- AddLoader(swag.YAMLMatcher, swag.YAMLDoc)
-}
-
-// AddLoader for a document
-func AddLoader(predicate DocMatcher, load DocLoader) {
- prev := loaders
- loaders = &loader{
- Match: predicate,
- Fn: load,
- Next: prev,
- }
- spec.PathLoader = loaders.Fn
-}
-
-type loader struct {
- Fn DocLoader
- Match DocMatcher
- Next *loader
-}
-
-// JSONSpec loads a spec from a json document
-func JSONSpec(path string) (*Document, error) {
- data, err := JSONDoc(path)
- if err != nil {
- return nil, err
- }
- // convert to json
- return Analyzed(json.RawMessage(data), "")
-}
-
-// Document represents a swagger spec document
-type Document struct {
- // specAnalyzer
- Analyzer *analysis.Spec
- spec *spec.Swagger
- specFilePath string
- origSpec *spec.Swagger
- schema *spec.Schema
- raw json.RawMessage
-}
-
-// Embedded returns a Document based on embedded specs. No analysis is required
-func Embedded(orig, flat json.RawMessage) (*Document, error) {
- var origSpec, flatSpec spec.Swagger
- if err := json.Unmarshal(orig, &origSpec); err != nil {
- return nil, err
- }
- if err := json.Unmarshal(flat, &flatSpec); err != nil {
- return nil, err
- }
- return &Document{
- raw: orig,
- origSpec: &origSpec,
- spec: &flatSpec,
- }, nil
-}
-
-// Spec loads a new spec document
-func Spec(path string) (*Document, error) {
- specURL, err := url.Parse(path)
- if err != nil {
- return nil, err
- }
- var lastErr error
- for l := loaders.Next; l != nil; l = l.Next {
- if loaders.Match(specURL.Path) {
- b, err2 := loaders.Fn(path)
- if err2 != nil {
- lastErr = err2
- continue
- }
- doc, err := Analyzed(b, "")
- if err != nil {
- return nil, err
- }
- if doc != nil {
- doc.specFilePath = path
- }
- return doc, nil
- }
- }
- if lastErr != nil {
- return nil, lastErr
- }
- b, err := defaultLoader.Fn(path)
- if err != nil {
- return nil, err
- }
-
- document, err := Analyzed(b, "")
- if document != nil {
- document.specFilePath = path
- }
-
- return document, err
-}
-
-// Analyzed creates a new analyzed spec document
-func Analyzed(data json.RawMessage, version string) (*Document, error) {
- if version == "" {
- version = "2.0"
- }
- if version != "2.0" {
- return nil, fmt.Errorf("spec version %q is not supported", version)
- }
-
- raw := data
- trimmed := bytes.TrimSpace(data)
- if len(trimmed) > 0 {
- if trimmed[0] != '{' && trimmed[0] != '[' {
- yml, err := swag.BytesToYAMLDoc(trimmed)
- if err != nil {
- return nil, fmt.Errorf("analyzed: %v", err)
- }
- d, err := swag.YAMLToJSON(yml)
- if err != nil {
- return nil, fmt.Errorf("analyzed: %v", err)
- }
- raw = d
- }
- }
-
- swspec := new(spec.Swagger)
- if err := json.Unmarshal(raw, swspec); err != nil {
- return nil, err
- }
-
- origsqspec := new(spec.Swagger)
- if err := json.Unmarshal(raw, origsqspec); err != nil {
- return nil, err
- }
-
- d := &Document{
- Analyzer: analysis.New(swspec),
- schema: spec.MustLoadSwagger20Schema(),
- spec: swspec,
- raw: raw,
- origSpec: origsqspec,
- }
- return d, nil
-}
-
-// Expanded expands the ref fields in the spec document and returns a new spec document
-func (d *Document) Expanded(options ...*spec.ExpandOptions) (*Document, error) {
- swspec := new(spec.Swagger)
- if err := json.Unmarshal(d.raw, swspec); err != nil {
- return nil, err
- }
-
- var expandOptions *spec.ExpandOptions
- if len(options) > 0 {
- expandOptions = options[0]
- } else {
- expandOptions = &spec.ExpandOptions{
- RelativeBase: d.specFilePath,
- }
- }
-
- if err := spec.ExpandSpec(swspec, expandOptions); err != nil {
- return nil, err
- }
-
- dd := &Document{
- Analyzer: analysis.New(swspec),
- spec: swspec,
- specFilePath: d.specFilePath,
- schema: spec.MustLoadSwagger20Schema(),
- raw: d.raw,
- origSpec: d.origSpec,
- }
- return dd, nil
-}
-
-// BasePath the base path for this spec
-func (d *Document) BasePath() string {
- return d.spec.BasePath
-}
-
-// Version returns the version of this spec
-func (d *Document) Version() string {
- return d.spec.Swagger
-}
-
-// Schema returns the swagger 2.0 schema
-func (d *Document) Schema() *spec.Schema {
- return d.schema
-}
-
-// Spec returns the swagger spec object model
-func (d *Document) Spec() *spec.Swagger {
- return d.spec
-}
-
-// Host returns the host for the API
-func (d *Document) Host() string {
- return d.spec.Host
-}
-
-// Raw returns the raw swagger spec as json bytes
-func (d *Document) Raw() json.RawMessage {
- return d.raw
-}
-
-func (d *Document) OrigSpec() *spec.Swagger {
- return d.origSpec
-}
-
-// ResetDefinitions gives a shallow copy with the models reset
-func (d *Document) ResetDefinitions() *Document {
- defs := make(map[string]spec.Schema, len(d.origSpec.Definitions))
- for k, v := range d.origSpec.Definitions {
- defs[k] = v
- }
-
- d.spec.Definitions = defs
- return d
-}
-
-// Pristine creates a new pristine document instance based on the input data
-func (d *Document) Pristine() *Document {
- dd, _ := Analyzed(d.Raw(), d.Version())
- return dd
-}
-
-// SpecFilePath returns the file path of the spec if one is defined
-func (d *Document) SpecFilePath() string {
- return d.specFilePath
-}
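
For context, a minimal sketch of how the Document API deleted above is typically driven; the spec path is a placeholder and error handling is abbreviated.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/loads"
)

func main() {
	// Spec picks a loader based on the path (JSON by default, YAML through the
	// matcher registered in init) and returns an analyzed Document.
	doc, err := loads.Spec("./swagger.yml") // placeholder path
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(doc.Version(), doc.Host(), doc.BasePath())

	// Expanded resolves $ref entries, by default relative to the spec file path.
	expanded, err := doc.Expanded()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(len(expanded.Spec().Definitions))
}
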
diff --git a/vendor/github.com/go-openapi/loads/spec_test.go b/vendor/github.com/go-openapi/loads/spec_test.go
deleted file mode 100644
index a07694d33..000000000
--- a/vendor/github.com/go-openapi/loads/spec_test.go
+++ /dev/null
@@ -1,717 +0,0 @@
-package loads
-
-import (
- "encoding/json"
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestUnknownSpecVersion(t *testing.T) {
- _, err := Analyzed([]byte{}, "0.9")
- assert.Error(t, err)
-}
-
-func TestDefaultsTo20(t *testing.T) {
- d, err := Analyzed(PetStoreJSONMessage, "")
-
- assert.NoError(t, err)
- assert.NotNil(t, d)
- assert.Equal(t, "2.0", d.Version())
- // assert.Equal(t, "2.0", d.data["swagger"].(string))
- assert.Equal(t, "/api", d.BasePath())
-}
-
-func TestLoadsYAMLContent(t *testing.T) {
- d, err := Analyzed(json.RawMessage([]byte(YAMLSpec)), "")
- if assert.NoError(t, err) {
- if assert.NotNil(t, d) {
- sw := d.Spec()
- assert.Equal(t, "1.0.0", sw.Info.Version)
- }
- }
-}
-
-// for issue 11
-func TestRegressionExpand(t *testing.T) {
- swaggerFile := "fixtures/yaml/swagger/1/2/3/4/swagger.yaml"
- document, err := Spec(swaggerFile)
- assert.NoError(t, err)
- assert.NotNil(t, document)
- d, err := document.Expanded()
- assert.NoError(t, err)
- assert.NotNil(t, d)
- b, _ := d.Spec().MarshalJSON()
- assert.JSONEq(t, expectedExpanded, string(b))
-}
-
-func TestCascadingRefExpand(t *testing.T) {
- swaggerFile := "fixtures/yaml/swagger/spec.yml"
- document, err := Spec(swaggerFile)
- assert.NoError(t, err)
- assert.NotNil(t, document)
- d, err := document.Expanded()
- assert.NoError(t, err)
- assert.NotNil(t, d)
- b, _ := d.Spec().MarshalJSON()
- assert.JSONEq(t, cascadeRefExpanded, string(b))
-}
-
-func TestFailsInvalidJSON(t *testing.T) {
- _, err := Analyzed(json.RawMessage([]byte("{]")), "")
-
- assert.Error(t, err)
-}
-
-var YAMLSpec = `swagger: '2.0'
-
-info:
- version: "1.0.0"
- title: Simple Search API
- description: |
- A very simple api description that makes a x-www-form-urlencoded only API to submit searches.
-
-produces:
- - application/json
-
-consumes:
- - application/json
-
-paths:
- /search:
- post:
- operationId: search
- summary: searches tasks
- description: searches the task titles and descriptions for a match
- consumes:
- - application/x-www-form-urlencoded
- parameters:
- - name: q
- in: formData
- type: string
- description: the search string
- required: true
- /tasks:
- get:
- operationId: getTasks
- summary: Gets Task objects.
- description: |
- Optional query param of **size** determines
- size of returned array
- tags:
- - tasks
- parameters:
- - name: size
- in: query
- description: Size of task list
- type: integer
- format: int32
- default: 20
- - name: completed
- in: query
- description: when true shows completed tasks
- type: boolean
-
- responses:
- default:
- description: Generic Error
- 200:
- description: Successful response
- headers:
- X-Rate-Limit:
- type: integer
- format: int32
- X-Rate-Limit-Remaining:
- type: integer
- format: int32
- default: 42
- X-Rate-Limit-Reset:
- type: integer
- format: int32
- default: "1449875311"
- X-Rate-Limit-Reset-Human:
- type: string
- default: 3 days
- X-Rate-Limit-Reset-Human-Number:
- type: string
- default: 3
- Access-Control-Allow-Origin:
- type: string
- default: "*"
- schema:
- type: array
- items:
- $ref: "#/definitions/Task"
- post:
- operationId: createTask
- summary: Creates a 'Task' object.
- description: |
- Validates the content property for length etc.
- parameters:
- - name: body
- in: body
- schema:
- $ref: "#/definitions/Task"
- tags:
- - tasks
- responses:
- default:
- description: Generic Error
- 201:
- description: Task Created
-
- /tasks/{id}:
- parameters:
- - name: id
- in: path
- type: integer
- format: int32
- description: The id of the task
- required: true
- minimum: 1
- put:
- operationId: updateTask
- summary: updates a task.
- description: |
- Validates the content property for length etc.
- tags:
- - tasks
- parameters:
- - name: body
- in: body
- description: the updated task
- schema:
- $ref: "#/definitions/Task"
- responses:
- default:
- description: Generic Error
- 200:
- description: Task updated
- schema:
- $ref: "#/definitions/Task"
- delete:
- operationId: deleteTask
- summary: deletes a task
- description: |
- Deleting a task is irrevocable.
- tags:
- - tasks
- responses:
- default:
- description: Generic Error
- 204:
- description: Task Deleted
-
-
-definitions:
- Task:
- title: A Task object
- description: |
- This describes a task. Tasks require a content property to be set.
- required:
- - content
- type: object
- properties:
- id:
- title: the unique id of the task
- description: |
- This id property is autogenerated when a task is created.
- type: integer
- format: int64
- readOnly: true
- content:
- title: The content of the task
- description: |
- Task content can contain [GFM](https://help.github.com/articles/github-flavored-markdown/).
- type: string
- minLength: 5
- completed:
- title: when true this task is completed
- type: boolean
- creditcard:
- title: the credit card format usage
- type: string
- format: creditcard
- createdAt:
- title: task creation time
- type: string
- format: date-time
- readOnly: true
-`
-
-// PetStoreJSONMessage json raw message for Petstore20
-var PetStoreJSONMessage = json.RawMessage([]byte(PetStore20))
-
-// PetStore20 json doc for swagger 2.0 pet store
-const PetStore20 = `{
- "swagger": "2.0",
- "info": {
- "version": "1.0.0",
- "title": "Swagger Petstore",
- "contact": {
- "name": "Wordnik API Team",
- "url": "http://developer.wordnik.com"
- },
- "license": {
- "name": "Creative Commons 4.0 International",
- "url": "http://creativecommons.org/licenses/by/4.0/"
- }
- },
- "host": "petstore.swagger.wordnik.com",
- "basePath": "/api",
- "schemes": [
- "http"
- ],
- "paths": {
- "/pets": {
- "get": {
- "security": [
- {
- "basic": []
- }
- ],
- "tags": [ "Pet Operations" ],
- "operationId": "getAllPets",
- "parameters": [
- {
- "name": "status",
- "in": "query",
- "description": "The status to filter by",
- "type": "string"
- },
- {
- "name": "limit",
- "in": "query",
- "description": "The maximum number of results to return",
- "type": "integer",
- "format": "int64"
- }
- ],
- "summary": "Finds all pets in the system",
- "responses": {
- "200": {
- "description": "Pet response",
- "schema": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/Pet"
- }
- }
- },
- "default": {
- "description": "Unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- },
- "post": {
- "security": [
- {
- "basic": []
- }
- ],
- "tags": [ "Pet Operations" ],
- "operationId": "createPet",
- "summary": "Creates a new pet",
- "consumes": ["application/x-yaml"],
- "produces": ["application/x-yaml"],
- "parameters": [
- {
- "name": "pet",
- "in": "body",
- "description": "The Pet to create",
- "required": true,
- "schema": {
- "$ref": "#/definitions/newPet"
- }
- }
- ],
- "responses": {
- "200": {
- "description": "Created Pet response",
- "schema": {
- "$ref": "#/definitions/Pet"
- }
- },
- "default": {
- "description": "Unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- }
- },
- "/pets/{id}": {
- "delete": {
- "security": [
- {
- "apiKey": []
- }
- ],
- "description": "Deletes the Pet by id",
- "operationId": "deletePet",
- "parameters": [
- {
- "name": "id",
- "in": "path",
- "description": "ID of pet to delete",
- "required": true,
- "type": "integer",
- "format": "int64"
- }
- ],
- "responses": {
- "204": {
- "description": "pet deleted"
- },
- "default": {
- "description": "unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- },
- "get": {
- "tags": [ "Pet Operations" ],
- "operationId": "getPetById",
- "summary": "Finds the pet by id",
- "responses": {
- "200": {
- "description": "Pet response",
- "schema": {
- "$ref": "#/definitions/Pet"
- }
- },
- "default": {
- "description": "Unexpected error",
- "schema": {
- "$ref": "#/definitions/Error"
- }
- }
- }
- },
- "parameters": [
- {
- "name": "id",
- "in": "path",
- "description": "ID of pet",
- "required": true,
- "type": "integer",
- "format": "int64"
- }
- ]
- }
- },
- "definitions": {
- "Category": {
- "id": "Category",
- "properties": {
- "id": {
- "format": "int64",
- "type": "integer"
- },
- "name": {
- "type": "string"
- }
- }
- },
- "Pet": {
- "id": "Pet",
- "properties": {
- "category": {
- "$ref": "#/definitions/Category"
- },
- "id": {
- "description": "unique identifier for the pet",
- "format": "int64",
- "maximum": 100.0,
- "minimum": 0.0,
- "type": "integer"
- },
- "name": {
- "type": "string"
- },
- "photoUrls": {
- "items": {
- "type": "string"
- },
- "type": "array"
- },
- "status": {
- "description": "pet status in the store",
- "enum": [
- "available",
- "pending",
- "sold"
- ],
- "type": "string"
- },
- "tags": {
- "items": {
- "$ref": "#/definitions/Tag"
- },
- "type": "array"
- }
- },
- "required": [
- "id",
- "name"
- ]
- },
- "newPet": {
- "anyOf": [
- {
- "$ref": "#/definitions/Pet"
- },
- {
- "required": [
- "name"
- ]
- }
- ]
- },
- "Tag": {
- "id": "Tag",
- "properties": {
- "id": {
- "format": "int64",
- "type": "integer"
- },
- "name": {
- "type": "string"
- }
- }
- },
- "Error": {
- "required": [
- "code",
- "message"
- ],
- "properties": {
- "code": {
- "type": "integer",
- "format": "int32"
- },
- "message": {
- "type": "string"
- }
- }
- }
- },
- "consumes": [
- "application/json",
- "application/xml"
- ],
- "produces": [
- "application/json",
- "application/xml",
- "text/plain",
- "text/html"
- ],
- "securityDefinitions": {
- "basic": {
- "type": "basic"
- },
- "apiKey": {
- "type": "apiKey",
- "in": "header",
- "name": "X-API-KEY"
- }
- }
-}
-`
-
-const expectedExpanded = `
-{
- "produces":[
- "application/json",
- "plain/text"
- ],
- "schemes":[
- "https",
- "http"
- ],
- "swagger":"2.0",
- "info":{
- "description":"Something",
- "title":"Something",
- "contact":{
- "name":"Somebody",
- "url":"https://url.com",
- "email":"email@url.com"
- },
- "version":"v1"
- },
- "host":"security.sonusnet.com",
- "basePath":"/api",
- "paths":{
- "/whatnot":{
- "get":{
- "description":"Get something",
- "responses":{
- "200":{
- "description":"The something",
- "schema":{
- "description":"A collection of service events",
- "type":"object",
- "properties":{
- "page":{
- "description":"A description of a paged result",
- "type":"object",
- "properties":{
- "page":{
- "description":"the page that was requested",
- "type":"integer"
- },
- "page_items":{
- "description":"the number of items per page requested",
- "type":"integer"
- },
- "pages":{
- "description":"the total number of pages available",
- "type":"integer"
- },
- "total_items":{
- "description":"the total number of items available",
- "type":"integer",
- "format":"int64"
- }
- }
- },
- "something":{
- "description":"Something",
- "type":"object",
- "properties":{
- "p1":{
- "description":"A string",
- "type":"string"
- },
- "p2":{
- "description":"An integer",
- "type":"integer"
- }
- }
- }
- }
- }
- },
- "500":{
- "description":"Oops"
- }
- }
- }
- }
- },
- "definitions":{
- "Something":{
- "description":"A collection of service events",
- "type":"object",
- "properties":{
- "page":{
- "description":"A description of a paged result",
- "type":"object",
- "properties":{
- "page":{
- "description":"the page that was requested",
- "type":"integer"
- },
- "page_items":{
- "description":"the number of items per page requested",
- "type":"integer"
- },
- "pages":{
- "description":"the total number of pages available",
- "type":"integer"
- },
- "total_items":{
- "description":"the total number of items available",
- "type":"integer",
- "format":"int64"
- }
- }
- },
- "something":{
- "description":"Something",
- "type":"object",
- "properties":{
- "p1":{
- "description":"A string",
- "type":"string"
- },
- "p2":{
- "description":"An integer",
- "type":"integer"
- }
- }
- }
- }
- }
- }
-}
-`
-
-const cascadeRefExpanded = `
-{
- "swagger": "2.0",
- "consumes":[
- "application/json"
- ],
- "produces":[
- "application/json"
- ],
- "schemes":[
- "http"
- ],
- "info":{
- "description":"recursively following JSON references",
- "title":"test 1",
- "contact":{
- "name":"Fred"
- },
- "version":"0.1.1"
- },
- "paths":{
- "/getAll":{
- "get":{
- "operationId":"getAll",
- "parameters":[
- {
- "description":"max number of results",
- "name":"a",
- "in":"body",
- "schema":{
- "type":"string"
- }
- }
- ],
- "responses":{
- "200":{
- "description":"Success",
- "schema":{
- "type":"array",
- "items":{
- "type":"string"
- }
- }
- }
- }
- }
- }
- },
- "definitions":{
- "a":{
- "type":"string"
- },
- "b":{
- "type":"array",
- "items":{
- "type":"string"
- }
- }
- }
-}
-`
diff --git a/vendor/github.com/go-openapi/spec/expander.go b/vendor/github.com/go-openapi/spec/expander.go
index 140daf6e8..59a227059 100644
--- a/vendor/github.com/go-openapi/spec/expander.go
+++ b/vendor/github.com/go-openapi/spec/expander.go
@@ -91,7 +91,7 @@ func ResolveRefWithBase(root interface{}, ref *Ref, opts *ExpandOptions) (*Schem
return nil, err
}
specBasePath := ""
- if opts != nil {
+ if opts != nil && opts.RelativeBase != "" {
specBasePath, _ = absPath(opts.RelativeBase)
}
@@ -466,7 +466,7 @@ func ExpandSpec(spec *Swagger, options *ExpandOptions) error {
// getting the base path of the spec to adjust all subsequent reference resolutions
specBasePath := ""
- if options != nil {
+ if options != nil && options.RelativeBase != "" {
specBasePath, _ = absPath(options.RelativeBase)
}
@@ -535,6 +535,7 @@ func ExpandSchema(schema *Schema, root interface{}, cache ResolutionCache) error
if err != nil {
return err
}
+ defer os.Remove(file.Name())
switch r := root.(type) {
case *Schema:
@@ -561,8 +562,12 @@ func ExpandSchemaWithBasePath(schema *Schema, cache ResolutionCache, opts *Expan
}
if opts == nil {
- return errors.New("cannot expand schema without a basPath")
+ return errors.New("cannot expand schema without a base path")
}
+ if opts.RelativeBase == "" {
+ return errors.New("cannot expand schema with empty base path")
+ }
+
basePath, _ := absPath(opts.RelativeBase)
resolver, err := defaultSchemaLoader(nil, opts, cache)
@@ -647,14 +652,16 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, ba
/* Ref also changes the resolution scope of children expandSchema */
if target.Ref.String() != "" {
/* Here the resolution scope is changed because a $ref was encountered */
- newRef := normalizeFileRef(&target.Ref, basePath)
- newBasePath := newRef.RemoteURI()
+ normalizedRef := normalizeFileRef(&target.Ref, basePath)
+ normalizedBasePath := normalizedRef.RemoteURI()
+
/* this means there is a circle in the recursion tree */
/* return the Ref */
- if swag.ContainsStringsCI(parentRefs, newRef.String()) {
- target.Ref = *newRef
+ if basePath != "" && swag.ContainsStringsCI(parentRefs, normalizedRef.String()) {
+ target.Ref = *normalizedRef
return &target, nil
}
+
debugLog("\nbasePath: %s", basePath)
b, _ := json.Marshal(target)
debugLog("calling Resolve with target: %s", string(b))
@@ -663,8 +670,8 @@ func expandSchema(target Schema, parentRefs []string, resolver *schemaLoader, ba
}
if t != nil {
- parentRefs = append(parentRefs, newRef.String())
- return expandSchema(*t, parentRefs, resolver, newBasePath)
+ parentRefs = append(parentRefs, normalizedRef.String())
+ return expandSchema(*t, parentRefs, resolver, normalizedBasePath)
}
}
diff --git a/vendor/github.com/go-openapi/strfmt/.editorconfig b/vendor/github.com/go-openapi/strfmt/.editorconfig
deleted file mode 100644
index 3152da69a..000000000
--- a/vendor/github.com/go-openapi/strfmt/.editorconfig
+++ /dev/null
@@ -1,26 +0,0 @@
-# top-most EditorConfig file
-root = true
-
-# Unix-style newlines with a newline ending every file
-[*]
-end_of_line = lf
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-trim_trailing_whitespace = true
-
-# Set default charset
-[*.{js,py,go,scala,rb,java,html,css,less,sass,md}]
-charset = utf-8
-
-# Tab indentation (no size specified)
-[*.go]
-indent_style = tab
-
-[*.md]
-trim_trailing_whitespace = false
-
-# Matches the exact files either package.json or .travis.yml
-[{package.json,.travis.yml}]
-indent_style = space
-indent_size = 2
diff --git a/vendor/github.com/go-openapi/strfmt/.gitignore b/vendor/github.com/go-openapi/strfmt/.gitignore
deleted file mode 100644
index dd91ed6a0..000000000
--- a/vendor/github.com/go-openapi/strfmt/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-secrets.yml
-coverage.out
diff --git a/vendor/github.com/go-openapi/strfmt/.travis.yml b/vendor/github.com/go-openapi/strfmt/.travis.yml
deleted file mode 100644
index 88a3a6eae..000000000
--- a/vendor/github.com/go-openapi/strfmt/.travis.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-language: go
-go:
-- 1.7.1
-install:
-- go get -u github.com/stretchr/testify/assert
-- go get -u github.com/pborman/uuid
-- go get -u github.com/asaskevich/govalidator
-- go get -u github.com/mailru/easyjson
-- go get -u github.com/go-openapi/errors
-- go get -u github.com/mitchellh/mapstructure
-- go get -u gopkg.in/mgo.v2/bson
-script:
-- ./hack/coverage
-after_success:
-- bash <(curl -s https://codecov.io/bash)
-notifications:
- slack:
- secure: zE5AtIYTpYfQPnTzP+EaQPN7JKtfFAGv6PrJqoIZLOXa8B6zGb6+J1JRNNxWi7faWbyJOxa4FSSsuPsKZMycUK6wlLFIdhDxwqeo7Ew8r6rdZKdfUHQggfNS9wO79ARoNYUDHtmnaBUS+eWSM1YqSc4i99QxyyfuURLOeAaA/q14YbdlTlaw3lrZ0qT92ot1FnVGNOx064zuHtFeUf+jAVRMZ6Q3rvqllwIlPszE6rmHGXBt2VoJxRaBetdwd7FgkcYw9FPXKHhadwC7/75ZAdmxIukhxNMw4Tr5NuPcqNcnbYLenDP7B3lssGVIrP4BRSqekS1d/tqvdvnnFWHMwrNCkSnSc065G5+qWTlXKAemIclgiXXqE2furBNLm05MDdG8fn5epS0UNarkjD+zX336RiqwBlOX4KbF+vPyqcO98CsN0lnd+H6loc9reiTHs37orFFpQ+309av9be2GGsHUsRB9ssIyrewmhAccOmkRtr2dVTZJNFQwa5Kph5TNJuTjnZEwG/xUkEX2YSfwShOsb062JWiflV6PJdnl80pc9Tn7D5sO5Bf9DbijGRJwwP+YiiJtwtr+vsvS+n4sM0b5eqm4UoRo+JJO8ffoJtHS7ItuyRbVQCwEPJ4221WLcf5PquEEDdAPwR+K4Gj8qTXqTDdxOiES1xFUKVgmzhI=
diff --git a/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md b/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md
deleted file mode 100644
index 9322b065e..000000000
--- a/vendor/github.com/go-openapi/strfmt/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,74 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of age, body
-size, disability, ethnicity, gender identity and expression, level of experience,
-nationality, personal appearance, race, religion, or sexual identity and
-orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic
- address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
- professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project e-mail
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team at ivan+abuse@flanders.co.nz. All
-complaints will be reviewed and investigated and will result in a response that
-is deemed necessary and appropriate to the circumstances. The project team is
-obligated to maintain confidentiality with regard to the reporter of an incident.
-Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/vendor/github.com/go-openapi/strfmt/LICENSE b/vendor/github.com/go-openapi/strfmt/LICENSE
deleted file mode 100644
index d64569567..000000000
--- a/vendor/github.com/go-openapi/strfmt/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/vendor/github.com/go-openapi/strfmt/README.md b/vendor/github.com/go-openapi/strfmt/README.md
deleted file mode 100644
index 9d1300e8e..000000000
--- a/vendor/github.com/go-openapi/strfmt/README.md
+++ /dev/null
@@ -1,5 +0,0 @@
-# Strfmt [![Build Status](https://travis-ci.org/go-openapi/strfmt.svg?branch=master)](https://travis-ci.org/go-openapi/strfmt) [![codecov](https://codecov.io/gh/go-openapi/strfmt/branch/master/graph/badge.svg)](https://codecov.io/gh/go-openapi/strfmt) [![Slack Status](https://slackin.goswagger.io/badge.svg)](https://slackin.goswagger.io)
-
-[![license](http://img.shields.io/badge/license-Apache%20v2-orange.svg)](https://raw.githubusercontent.com/go-openapi/strfmt/master/LICENSE) [![GoDoc](https://godoc.org/github.com/go-openapi/strfmt?status.svg)](http://godoc.org/github.com/go-openapi/strfmt)
-
-strfmt represents a well-known string format such as credit card or email. The go toolkit for OpenAPI specifications knows how to deal with those.
diff --git a/vendor/github.com/go-openapi/strfmt/bson.go b/vendor/github.com/go-openapi/strfmt/bson.go
deleted file mode 100644
index 39c6d8f7e..000000000
--- a/vendor/github.com/go-openapi/strfmt/bson.go
+++ /dev/null
@@ -1,124 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "errors"
- "fmt"
-
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
-
- "gopkg.in/mgo.v2/bson"
-)
-
-func init() {
- var id ObjectId
- Default.Add("bsonobjectid", &id, IsBSONObjectID)
-}
-
-// IsBSONObjectID returns true when the string is a valid BSON.ObjectId
-func IsBSONObjectID(str string) bool {
- var id bson.ObjectId
- return id.UnmarshalText([]byte(str)) == nil
-}
-
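-// ObjectId represents a BSON object ID, registered with Default as the "bsonobjectid" format
-//
-// swagger:strfmt bsonobjectid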
-type ObjectId bson.ObjectId
-
-// NewObjectId creates an ObjectId from a hex string
-func NewObjectId(hex string) ObjectId {
- return ObjectId(bson.ObjectIdHex(hex))
-}
-
-// MarshalText turns this instance into text
-func (id *ObjectId) MarshalText() ([]byte, error) {
- return []byte(bson.ObjectId(*id).Hex()), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (id *ObjectId) UnmarshalText(data []byte) error { // validation is performed later on
- var rawID bson.ObjectId
- if err := rawID.UnmarshalText(data); err != nil {
- return err
- }
-
- *id = ObjectId(rawID)
- return nil
-}
-
-// Scan reads a value from a database driver
-func (id *ObjectId) Scan(raw interface{}) error {
- var data []byte
- switch v := raw.(type) {
- case []byte:
- data = v
- case string:
- data = []byte(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ObjectId from: %#v", v)
- }
-
- return id.UnmarshalText(data)
-}
-
-// Value converts a value to a database driver value
-func (id *ObjectId) Value() (driver.Value, error) {
- return driver.Value(string(*id)), nil
-}
-
-func (id *ObjectId) String() string {
- return string(*id)
-}
-
-func (id *ObjectId) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- id.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (id *ObjectId) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(bson.ObjectId(*id).Hex())
-}
-
-func (id *ObjectId) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- id.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (id *ObjectId) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *id = NewObjectId(data)
- }
-}
-
-func (id *ObjectId) GetBSON() (interface{}, error) {
- return bson.M{"data": bson.ObjectId(*id).Hex()}, nil
-}
-
-func (id *ObjectId) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *id = NewObjectId(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as ObjectId")
-}
diff --git a/vendor/github.com/go-openapi/strfmt/bson_test.go b/vendor/github.com/go-openapi/strfmt/bson_test.go
deleted file mode 100644
index 32ba962b1..000000000
--- a/vendor/github.com/go-openapi/strfmt/bson_test.go
+++ /dev/null
@@ -1,53 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "testing"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestBSONObjectId_fullCycle(t *testing.T) {
- id := NewObjectId("507f1f77bcf86cd799439011")
- bytes, err := id.MarshalText()
- assert.NoError(t, err)
-
- var idCopy ObjectId
-
- err = idCopy.Scan(bytes)
- assert.NoError(t, err)
- assert.Equal(t, id, idCopy)
-
- err = idCopy.UnmarshalText(bytes)
- assert.NoError(t, err)
- assert.Equal(t, id, idCopy)
-
- jsonBytes, err := id.MarshalJSON()
- assert.NoError(t, err)
-
- err = idCopy.UnmarshalJSON(jsonBytes)
- assert.NoError(t, err)
- assert.Equal(t, id, idCopy)
-
- bsonBytes, err := bson.Marshal(&id)
- assert.NoError(t, err)
-
- err = bson.Unmarshal(bsonBytes, &idCopy)
- assert.NoError(t, err)
- assert.Equal(t, id, idCopy)
-}
diff --git a/vendor/github.com/go-openapi/strfmt/date.go b/vendor/github.com/go-openapi/strfmt/date.go
deleted file mode 100644
index 24a11db26..000000000
--- a/vendor/github.com/go-openapi/strfmt/date.go
+++ /dev/null
@@ -1,152 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "errors"
- "fmt"
- "regexp"
- "time"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
-)
-
-func init() {
- d := Date{}
- Default.Add("date", &d, IsDate)
-}
-
-// IsDate returns true when the string is a valid date
-func IsDate(str string) bool {
- matches := rxDate.FindAllStringSubmatch(str, -1)
- if len(matches) == 0 || len(matches[0]) == 0 {
- return false
- }
- m := matches[0]
- return !(m[2] < "01" || m[2] > "12" || m[3] < "01" || m[3] > "31")
-}
-
-const (
- // RFC3339FullDate represents a full-date as specified by RFC3339
- // See: http://goo.gl/xXOvVd
- RFC3339FullDate = "2006-01-02"
- // DatePattern pattern to match for the date format from http://tools.ietf.org/html/rfc3339#section-5.6
- DatePattern = `^([0-9]{4})-([0-9]{2})-([0-9]{2})`
-)
-
-var (
- rxDate = regexp.MustCompile(DatePattern)
-)
-
-// Date represents a date from the API
-//
-// swagger:strfmt date
-type Date time.Time
-
-// String converts this date into a string
-func (d Date) String() string {
- return time.Time(d).Format(RFC3339FullDate)
-}
-
-// UnmarshalText parses a text representation into a date type
-func (d *Date) UnmarshalText(text []byte) error {
- if len(text) == 0 {
- return nil
- }
- dd, err := time.Parse(RFC3339FullDate, string(text))
- if err != nil {
- return err
- }
- *d = Date(dd)
- return nil
-}
-
-// MarshalText serializes this date type to string
-func (d Date) MarshalText() ([]byte, error) {
- return []byte(d.String()), nil
-}
-
-// Scan scans a Date value from a database driver type.
-func (d *Date) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- return d.UnmarshalText(v)
- case string:
- return d.UnmarshalText([]byte(v))
- case time.Time:
- *d = Date(v)
- return nil
- case nil:
- *d = Date{}
- return nil
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Date from: %#v", v)
- }
-}
-
-// Value converts Date to a primitive value ready to be written to a database.
-func (d Date) Value() (driver.Value, error) {
- return driver.Value(d.String()), nil
-}
-
-func (t Date) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- t.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (t Date) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(time.Time(t).Format(RFC3339FullDate))
-}
-
-func (t *Date) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- t.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (t *Date) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- tt, err := time.Parse(RFC3339FullDate, data)
- if err != nil {
- in.AddError(err)
- return
- }
- *t = Date(tt)
- }
-}
-
-func (t *Date) GetBSON() (interface{}, error) {
- return bson.M{"data": t.String()}, nil
-}
-
-func (t *Date) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- rd, err := time.Parse(RFC3339FullDate, data)
- *t = Date(rd)
- return err
- }
-
- return errors.New("couldn't unmarshal bson raw value as Date")
-}
diff --git a/vendor/github.com/go-openapi/strfmt/date_test.go b/vendor/github.com/go-openapi/strfmt/date_test.go
deleted file mode 100644
index 87e561117..000000000
--- a/vendor/github.com/go-openapi/strfmt/date_test.go
+++ /dev/null
@@ -1,83 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql"
- "database/sql/driver"
- "testing"
- "time"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/stretchr/testify/assert"
-)
-
-var _ sql.Scanner = &Date{}
-var _ driver.Valuer = Date{}
-
-func TestDate(t *testing.T) {
- pp := Date{}
- err := pp.UnmarshalText([]byte{})
- assert.NoError(t, err)
- err = pp.UnmarshalText([]byte("yada"))
- assert.Error(t, err)
- orig := "2014-12-15"
- b := []byte(orig)
- bj := []byte("\"" + orig + "\"")
- err = pp.UnmarshalText([]byte(orig))
- assert.NoError(t, err)
- txt, err := pp.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, orig, string(txt))
-
- err = pp.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, orig, pp.String())
-
- b, err = pp.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- dateOriginal := Date(time.Date(2014, 10, 10, 0, 0, 0, 0, time.UTC))
-
- bsonData, err := bson.Marshal(&dateOriginal)
- assert.NoError(t, err)
-
- var dateCopy Date
- err = bson.Unmarshal(bsonData, &dateCopy)
- assert.NoError(t, err)
- assert.Equal(t, dateOriginal, dateCopy)
-}
-
-func TestDate_Scan(t *testing.T) {
- ref := time.Now().Truncate(24 * time.Hour).UTC()
- date, str := Date(ref), ref.Format(RFC3339FullDate)
-
- values := []interface{}{str, []byte(str), ref}
- for _, value := range values {
- result := Date{}
- (&result).Scan(value)
- assert.Equal(t, date, result, "value: %#v", value)
- }
-}
-
-func TestDate_Value(t *testing.T) {
- ref := time.Now().Truncate(24 * time.Hour).UTC()
- date := Date(ref)
- dbv, err := date.Value()
- assert.NoError(t, err)
- assert.EqualValues(t, dbv, ref.Format("2006-01-02"))
-}
diff --git a/vendor/github.com/go-openapi/strfmt/default.go b/vendor/github.com/go-openapi/strfmt/default.go
deleted file mode 100644
index 437142a30..000000000
--- a/vendor/github.com/go-openapi/strfmt/default.go
+++ /dev/null
@@ -1,1697 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "encoding/base64"
- "errors"
- "fmt"
- "regexp"
- "strings"
-
- "github.com/asaskevich/govalidator"
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
-
- "gopkg.in/mgo.v2/bson"
-)
-
-const (
- // HostnamePattern http://json-schema.org/latest/json-schema-validation.html#anchor114
- // A string instance is valid against this attribute if it is a valid
- // representation for an Internet host name, as defined by RFC 1034, section 3.1 [RFC1034].
- // http://tools.ietf.org/html/rfc1034#section-3.5
- // <digit> ::= any one of the ten digits 0 through 9
- // var digit = /[0-9]/;
- // <letter> ::= any one of the 52 alphabetic characters A through Z in upper case and a through z in lower case
- // var letter = /[a-zA-Z]/;
- // <let-dig> ::= <letter> | <digit>
- // var letDig = /[0-9a-zA-Z]/;
- // <let-dig-hyp> ::= <let-dig> | "-"
- // var letDigHyp = /[-0-9a-zA-Z]/;
- // <ldh-str> ::= <let-dig-hyp> | <let-dig-hyp> <ldh-str>
- // var ldhStr = /[-0-9a-zA-Z]+/;
- // <label> ::= <letter> [ [ <ldh-str> ] <let-dig> ]
- // var label = /[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?/;
- // <subdomain> ::= <label> | <subdomain> "." <label>
- // var subdomain = /^[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?(\.[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?)*$/;
- // <domain> ::= <subdomain> | " "
- HostnamePattern = `^[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?(\.[a-zA-Z](([-0-9a-zA-Z]+)?[0-9a-zA-Z])?)*$`
- // UUIDPattern Regex for UUID that allows uppercase
- UUIDPattern = `(?i)^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$`
- // UUID3Pattern Regex for UUID3 that allows uppercase
- UUID3Pattern = `(?i)^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$`
- // UUID4Pattern Regex for UUID4 that allows uppercase
- UUID4Pattern = `(?i)^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$`
- // UUID5Pattern Regex for UUID5 that allows uppercase
- UUID5Pattern = `(?i)^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$`
-)
-
-var (
- rxHostname = regexp.MustCompile(HostnamePattern)
- rxUUID = regexp.MustCompile(UUIDPattern)
- rxUUID3 = regexp.MustCompile(UUID3Pattern)
- rxUUID4 = regexp.MustCompile(UUID4Pattern)
- rxUUID5 = regexp.MustCompile(UUID5Pattern)
-)
-
-// IsHostname returns true when the string is a valid hostname
-func IsHostname(str string) bool {
- if !rxHostname.MatchString(str) {
- return false
- }
-
- // the sum of all label octets and label lengths is limited to 255.
- if len(str) > 255 {
- return false
- }
-
- // Each node has a label, which is zero to 63 octets in length
- parts := strings.Split(str, ".")
- valid := true
- for _, p := range parts {
- if len(p) > 63 {
- valid = false
- }
- }
- return valid
-}
-
-// IsUUID returns true if the string matches a UUID; upper case is allowed
-func IsUUID(str string) bool {
- return rxUUID.MatchString(str)
-}
-
-// IsUUID3 returns true if the string matches a UUID v3; upper case is allowed
-func IsUUID3(str string) bool {
- return rxUUID3.MatchString(str)
-}
-
-// IsUUID4 returns true if the string matches a UUID v4; upper case is allowed
-func IsUUID4(str string) bool {
- return rxUUID4.MatchString(str)
-}
-
-// IsUUID5 returns true if the string matches a UUID v5; upper case is allowed
-func IsUUID5(str string) bool {
- return rxUUID5.MatchString(str)
-}
-
-func init() {
- u := URI("")
- Default.Add("uri", &u, govalidator.IsRequestURI)
-
- eml := Email("")
- Default.Add("email", &eml, govalidator.IsEmail)
-
- hn := Hostname("")
- Default.Add("hostname", &hn, IsHostname)
-
- ip4 := IPv4("")
- Default.Add("ipv4", &ip4, govalidator.IsIPv4)
-
- ip6 := IPv6("")
- Default.Add("ipv6", &ip6, govalidator.IsIPv6)
-
- mac := MAC("")
- Default.Add("mac", &mac, govalidator.IsMAC)
-
- uid := UUID("")
- Default.Add("uuid", &uid, IsUUID)
-
- uid3 := UUID3("")
- Default.Add("uuid3", &uid3, IsUUID3)
-
- uid4 := UUID4("")
- Default.Add("uuid4", &uid4, IsUUID4)
-
- uid5 := UUID5("")
- Default.Add("uuid5", &uid5, IsUUID5)
-
- isbn := ISBN("")
- Default.Add("isbn", &isbn, func(str string) bool { return govalidator.IsISBN10(str) || govalidator.IsISBN13(str) })
-
- isbn10 := ISBN10("")
- Default.Add("isbn10", &isbn10, govalidator.IsISBN10)
-
- isbn13 := ISBN13("")
- Default.Add("isbn13", &isbn13, govalidator.IsISBN13)
-
- cc := CreditCard("")
- Default.Add("creditcard", &cc, govalidator.IsCreditCard)
-
- ssn := SSN("")
- Default.Add("ssn", &ssn, govalidator.IsSSN)
-
- hc := HexColor("")
- Default.Add("hexcolor", &hc, govalidator.IsHexcolor)
-
- rc := RGBColor("")
- Default.Add("rgbcolor", &rc, govalidator.IsRGBcolor)
-
- b64 := Base64([]byte(nil))
- Default.Add("byte", &b64, govalidator.IsBase64)
-
- pw := Password("")
- Default.Add("password", &pw, func(_ string) bool { return true })
-}
-
-var formatCheckers = map[string]Validator{
- "byte": govalidator.IsBase64,
-}
-
-// Base64 represents a base64 encoded string
-//
-// swagger:strfmt byte
-type Base64 []byte
-
-// MarshalText turns this instance into text
-func (b Base64) MarshalText() ([]byte, error) {
- enc := base64.URLEncoding
- src := []byte(b)
- buf := make([]byte, enc.EncodedLen(len(src)))
- enc.Encode(buf, src)
- return buf, nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (b *Base64) UnmarshalText(data []byte) error { // validation is performed later on
- enc := base64.URLEncoding
- dbuf := make([]byte, enc.DecodedLen(len(data)))
-
- n, err := enc.Decode(dbuf, data)
- if err != nil {
- return err
- }
-
- *b = dbuf[:n]
- return nil
-}
-
-// Scan reads a value from a database driver
-func (b *Base64) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *b = Base64(string(v))
- case string:
- *b = Base64(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Base64 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (b Base64) Value() (driver.Value, error) {
- return driver.Value(string(b)), nil
-}
-
-func (b Base64) String() string {
- return string(b)
-}
-
-func (b Base64) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- b.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (b Base64) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(base64.StdEncoding.EncodeToString([]byte(b)))
-}
-
-func (b *Base64) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- b.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (b *Base64) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- enc := base64.StdEncoding
- dbuf := make([]byte, enc.DecodedLen(len(data)))
-
- n, err := enc.Decode(dbuf, []byte(data))
- if err != nil {
- in.AddError(err)
- return
- }
-
- *b = dbuf[:n]
- }
-}
-
-func (b *Base64) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*b)}, nil
-}
-
-func (b *Base64) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *b = Base64(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as Base64")
-}
-
-// URI represents the uri string format as specified by the json schema spec
-//
-// swagger:strfmt uri
-type URI string
-
-// MarshalText turns this instance into text
-func (u URI) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *URI) UnmarshalText(data []byte) error { // validation is performed later on
- *u = URI(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *URI) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = URI(string(v))
- case string:
- *u = URI(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.URI from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u URI) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u URI) String() string {
- return string(u)
-}
-
-func (u URI) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u URI) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *URI) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *URI) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = URI(data)
- }
-}
-
-func (u *URI) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *URI) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = URI(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as URI")
-}
-
-// Email represents the email string format as specified by the json schema spec
-//
-// swagger:strfmt email
-type Email string
-
-// MarshalText turns this instance into text
-func (e Email) MarshalText() ([]byte, error) {
- return []byte(string(e)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (e *Email) UnmarshalText(data []byte) error { // validation is performed later on
- *e = Email(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (e *Email) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *e = Email(string(v))
- case string:
- *e = Email(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Email from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (e Email) Value() (driver.Value, error) {
- return driver.Value(string(e)), nil
-}
-
-func (e Email) String() string {
- return string(e)
-}
-
-func (e Email) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- e.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (e Email) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(e))
-}
-
-func (e *Email) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- e.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (e *Email) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *e = Email(data)
- }
-}
-
-func (e *Email) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*e)}, nil
-}
-
-func (e *Email) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *e = Email(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as Email")
-}
-
-// Hostname represents the hostname string format as specified by the json schema spec
-//
-// swagger:strfmt hostname
-type Hostname string
-
-// MarshalText turns this instance into text
-func (h Hostname) MarshalText() ([]byte, error) {
- return []byte(string(h)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (h *Hostname) UnmarshalText(data []byte) error { // validation is performed later on
- *h = Hostname(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (h *Hostname) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *h = Hostname(string(v))
- case string:
- *h = Hostname(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Hostname from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (h Hostname) Value() (driver.Value, error) {
- return driver.Value(string(h)), nil
-}
-
-func (h Hostname) String() string {
- return string(h)
-}
-
-func (h Hostname) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- h.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (h Hostname) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(h))
-}
-
-func (h *Hostname) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- h.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (h *Hostname) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *h = Hostname(data)
- }
-}
-
-func (h *Hostname) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*h)}, nil
-}
-
-func (h *Hostname) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *h = Hostname(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as Hostname")
-}
-
-// IPv4 represents an IP v4 address
-//
-// swagger:strfmt ipv4
-type IPv4 string
-
-// MarshalText turns this instance into text
-func (u IPv4) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *IPv4) UnmarshalText(data []byte) error { // validation is performed later on
- *u = IPv4(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *IPv4) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = IPv4(string(v))
- case string:
- *u = IPv4(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.IPv4 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u IPv4) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u IPv4) String() string {
- return string(u)
-}
-
-func (u IPv4) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u IPv4) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *IPv4) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *IPv4) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = IPv4(data)
- }
-}
-
-func (u *IPv4) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *IPv4) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = IPv4(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as IPv4")
-}
-
-// IPv6 represents an IP v6 address
-//
-// swagger:strfmt ipv6
-type IPv6 string
-
-// MarshalText turns this instance into text
-func (u IPv6) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *IPv6) UnmarshalText(data []byte) error { // validation is performed later on
- *u = IPv6(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *IPv6) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = IPv6(string(v))
- case string:
- *u = IPv6(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.IPv6 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u IPv6) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u IPv6) String() string {
- return string(u)
-}
-
-func (u IPv6) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u IPv6) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *IPv6) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *IPv6) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = IPv6(data)
- }
-}
-
-func (u *IPv6) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *IPv6) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = IPv6(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as IPv6")
-}
-
-// MAC represents a 48 bit MAC address
-//
-// swagger:strfmt mac
-type MAC string
-
-// MarshalText turns this instance into text
-func (u MAC) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *MAC) UnmarshalText(data []byte) error { // validation is performed later on
- *u = MAC(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *MAC) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = MAC(string(v))
- case string:
- *u = MAC(v)
- default:
-		return fmt.Errorf("cannot sql.Scan() strfmt.MAC from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u MAC) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u MAC) String() string {
- return string(u)
-}
-
-func (u MAC) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u MAC) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *MAC) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *MAC) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = MAC(data)
- }
-}
-
-func (u *MAC) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *MAC) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = MAC(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as MAC")
-}
-
-// UUID represents a uuid string format
-//
-// swagger:strfmt uuid
-type UUID string
-
-// MarshalText turns this instance into text
-func (u UUID) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *UUID) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID(string(v))
- case string:
- *u = UUID(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID) String() string {
- return string(u)
-}
-
-func (u UUID) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u UUID) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *UUID) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *UUID) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = UUID(data)
- }
-}
-
-func (u *UUID) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *UUID) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = UUID(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as UUID")
-}
-
-// UUID3 represents a uuid3 string format
-//
-// swagger:strfmt uuid3
-type UUID3 string
-
-// MarshalText turns this instance into text
-func (u UUID3) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID3) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID3(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *UUID3) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID3(string(v))
- case string:
- *u = UUID3(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID3 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID3) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID3) String() string {
- return string(u)
-}
-
-func (u UUID3) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u UUID3) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *UUID3) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *UUID3) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = UUID3(data)
- }
-}
-
-func (u *UUID3) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *UUID3) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = UUID3(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as UUID3")
-}
-
-// UUID4 represents a uuid4 string format
-//
-// swagger:strfmt uuid4
-type UUID4 string
-
-// MarshalText turns this instance into text
-func (u UUID4) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID4) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID4(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *UUID4) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID4(string(v))
- case string:
- *u = UUID4(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID4 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID4) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID4) String() string {
- return string(u)
-}
-
-func (u UUID4) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u UUID4) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *UUID4) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *UUID4) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = UUID4(data)
- }
-}
-
-func (u *UUID4) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *UUID4) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = UUID4(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as UUID4")
-}
-
-// UUID5 represents a uuid5 string format
-//
-// swagger:strfmt uuid5
-type UUID5 string
-
-// MarshalText turns this instance into text
-func (u UUID5) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *UUID5) UnmarshalText(data []byte) error { // validation is performed later on
- *u = UUID5(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *UUID5) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = UUID5(string(v))
- case string:
- *u = UUID5(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.UUID5 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u UUID5) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u UUID5) String() string {
- return string(u)
-}
-
-func (u UUID5) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u UUID5) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *UUID5) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *UUID5) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = UUID5(data)
- }
-}
-
-func (u *UUID5) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *UUID5) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = UUID5(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as UUID5")
-}
-
-// ISBN represents an isbn string format
-//
-// swagger:strfmt isbn
-type ISBN string
-
-// MarshalText turns this instance into text
-func (u ISBN) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ISBN) UnmarshalText(data []byte) error { // validation is performed later on
- *u = ISBN(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *ISBN) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = ISBN(string(v))
- case string:
- *u = ISBN(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ISBN from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u ISBN) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u ISBN) String() string {
- return string(u)
-}
-
-func (u ISBN) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u ISBN) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *ISBN) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *ISBN) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = ISBN(data)
- }
-}
-
-func (u *ISBN) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *ISBN) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = ISBN(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as ISBN")
-}
-
-// ISBN10 represents an isbn 10 string format
-//
-// swagger:strfmt isbn10
-type ISBN10 string
-
-// MarshalText turns this instance into text
-func (u ISBN10) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ISBN10) UnmarshalText(data []byte) error { // validation is performed later on
- *u = ISBN10(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *ISBN10) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = ISBN10(string(v))
- case string:
- *u = ISBN10(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ISBN10 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u ISBN10) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u ISBN10) String() string {
- return string(u)
-}
-
-func (u ISBN10) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u ISBN10) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *ISBN10) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *ISBN10) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = ISBN10(data)
- }
-}
-
-func (u *ISBN10) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *ISBN10) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = ISBN10(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as ISBN10")
-}
-
-// ISBN13 represents an isbn 13 string format
-//
-// swagger:strfmt isbn13
-type ISBN13 string
-
-// MarshalText turns this instance into text
-func (u ISBN13) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *ISBN13) UnmarshalText(data []byte) error { // validation is performed later on
- *u = ISBN13(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *ISBN13) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = ISBN13(string(v))
- case string:
- *u = ISBN13(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.ISBN13 from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u ISBN13) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u ISBN13) String() string {
- return string(u)
-}
-
-func (u ISBN13) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u ISBN13) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *ISBN13) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *ISBN13) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = ISBN13(data)
- }
-}
-
-func (u *ISBN13) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *ISBN13) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = ISBN13(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as ISBN13")
-}
-
-// CreditCard represents a credit card string format
-//
-// swagger:strfmt creditcard
-type CreditCard string
-
-// MarshalText turns this instance into text
-func (u CreditCard) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *CreditCard) UnmarshalText(data []byte) error { // validation is performed later on
- *u = CreditCard(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *CreditCard) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = CreditCard(string(v))
- case string:
- *u = CreditCard(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.CreditCard from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u CreditCard) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u CreditCard) String() string {
- return string(u)
-}
-
-func (u CreditCard) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u CreditCard) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *CreditCard) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *CreditCard) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = CreditCard(data)
- }
-}
-
-func (u *CreditCard) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *CreditCard) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = CreditCard(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as CreditCard")
-}
-
-// SSN represents a social security number string format
-//
-// swagger:strfmt ssn
-type SSN string
-
-// MarshalText turns this instance into text
-func (u SSN) MarshalText() ([]byte, error) {
- return []byte(string(u)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (u *SSN) UnmarshalText(data []byte) error { // validation is performed later on
- *u = SSN(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (u *SSN) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *u = SSN(string(v))
- case string:
- *u = SSN(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.SSN from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (u SSN) Value() (driver.Value, error) {
- return driver.Value(string(u)), nil
-}
-
-func (u SSN) String() string {
- return string(u)
-}
-
-func (u SSN) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- u.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (u SSN) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(u))
-}
-
-func (u *SSN) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- u.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (u *SSN) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *u = SSN(data)
- }
-}
-
-func (u *SSN) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*u)}, nil
-}
-
-func (u *SSN) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *u = SSN(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as SSN")
-}
-
-// HexColor represents a hex color string format
-//
-// swagger:strfmt hexcolor
-type HexColor string
-
-// MarshalText turns this instance into text
-func (h HexColor) MarshalText() ([]byte, error) {
- return []byte(string(h)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (h *HexColor) UnmarshalText(data []byte) error { // validation is performed later on
- *h = HexColor(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (h *HexColor) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *h = HexColor(string(v))
- case string:
- *h = HexColor(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.HexColor from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (h HexColor) Value() (driver.Value, error) {
- return driver.Value(string(h)), nil
-}
-
-func (h HexColor) String() string {
- return string(h)
-}
-
-func (h HexColor) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- h.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (h HexColor) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(h))
-}
-
-func (h *HexColor) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- h.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (h *HexColor) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *h = HexColor(data)
- }
-}
-
-func (h *HexColor) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*h)}, nil
-}
-
-func (h *HexColor) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *h = HexColor(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as HexColor")
-}
-
-// RGBColor represents an RGB color string format
-//
-// swagger:strfmt rgbcolor
-type RGBColor string
-
-// MarshalText turns this instance into text
-func (r RGBColor) MarshalText() ([]byte, error) {
- return []byte(string(r)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (r *RGBColor) UnmarshalText(data []byte) error { // validation is performed later on
- *r = RGBColor(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (r *RGBColor) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *r = RGBColor(string(v))
- case string:
- *r = RGBColor(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.RGBColor from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (r RGBColor) Value() (driver.Value, error) {
- return driver.Value(string(r)), nil
-}
-
-func (r RGBColor) String() string {
- return string(r)
-}
-
-func (r RGBColor) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- r.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (r RGBColor) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(r))
-}
-
-func (r *RGBColor) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- r.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (r *RGBColor) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *r = RGBColor(data)
- }
-}
-
-func (r *RGBColor) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*r)}, nil
-}
-
-func (r *RGBColor) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *r = RGBColor(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as RGBColor")
-}
-
-// Password represents a password.
-// This has no validations and is mainly used as a marker for UI components.
-//
-// swagger:strfmt password
-type Password string
-
-// MarshalText turns this instance into text
-func (r Password) MarshalText() ([]byte, error) {
- return []byte(string(r)), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (r *Password) UnmarshalText(data []byte) error { // validation is performed later on
- *r = Password(string(data))
- return nil
-}
-
-// Scan reads a value from a database driver
-func (r *Password) Scan(raw interface{}) error {
- switch v := raw.(type) {
- case []byte:
- *r = Password(string(v))
- case string:
- *r = Password(v)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Password from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts a value to a database driver value
-func (r Password) Value() (driver.Value, error) {
- return driver.Value(string(r)), nil
-}
-
-func (r Password) String() string {
- return string(r)
-}
-
-func (r Password) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- r.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (r Password) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(string(r))
-}
-
-func (r *Password) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- r.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (r *Password) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- *r = Password(data)
- }
-}
-
-func (r *Password) GetBSON() (interface{}, error) {
- return bson.M{"data": string(*r)}, nil
-}
-
-func (r *Password) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- *r = Password(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as Password")
-}
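
Every format removed from default.go above follows the same shape: a string-backed type with text, easyjson and BSON marshalling plus database/sql Scan/Value, while actual format checking lives in the Default registry. A minimal usage sketch, assuming the upstream import path github.com/go-openapi/strfmt (not part of this diff):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/go-openapi/strfmt"
)

func main() {
	// UnmarshalText only stores the raw string; it does not validate.
	var e strfmt.Email
	_ = e.UnmarshalText([]byte("somebody@somewhere.com"))

	// Format checking goes through the registry, keyed by format name.
	fmt.Println(strfmt.Default.Validates("email", e.String())) // true

	// MarshalJSON is backed by easyjson and emits a quoted string.
	b, _ := json.Marshal(e)
	fmt.Println(string(b)) // "somebody@somewhere.com"
}
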
diff --git a/vendor/github.com/go-openapi/strfmt/default_test.go b/vendor/github.com/go-openapi/strfmt/default_test.go
deleted file mode 100644
index 5c445aadb..000000000
--- a/vendor/github.com/go-openapi/strfmt/default_test.go
+++ /dev/null
@@ -1,691 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "testing"
-
- "github.com/pborman/uuid"
- "github.com/stretchr/testify/assert"
- "gopkg.in/mgo.v2/bson"
-)
-
-func testValid(t *testing.T, name, value string) {
- ok := Default.Validates(name, value)
- if !ok {
- t.Errorf("expected %s of type %s to be valid", value, name)
- }
-}
-
-func testInvalid(t *testing.T, name, value string) {
- ok := Default.Validates(name, value)
- if ok {
- t.Errorf("expected %s of type %s to be invalid", value, name)
- }
-}
-
-func TestFormatURI(t *testing.T) {
- uri := URI("http://somewhere.com")
- str := string("http://somewhereelse.com")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := uri.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, URI("http://somewhereelse.com"), string(b))
-
- b, err = uri.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("http://somewhereelse.com"), b)
-
- err = uri.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, URI("http://somewhereelse.com"), string(b))
-
- b, err = uri.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&uri)
- assert.NoError(t, err)
-
- var uriCopy URI
- err = bson.Unmarshal(bsonData, &uriCopy)
- assert.NoError(t, err)
- assert.Equal(t, uri, uriCopy)
-
- testValid(t, "uri", str)
- testInvalid(t, "uri", "somewhere.com")
-}
-
-func TestFormatEmail(t *testing.T) {
- email := Email("somebody@somewhere.com")
- str := string("somebodyelse@somewhere.com")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := email.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, Email("somebodyelse@somewhere.com"), string(b))
-
- b, err = email.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("somebodyelse@somewhere.com"), b)
-
- err = email.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, Email(str), string(b))
-
- b, err = email.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&email)
- assert.NoError(t, err)
-
- var emailCopy Email
- err = bson.Unmarshal(bsonData, &emailCopy)
- assert.NoError(t, err)
- assert.Equal(t, email, emailCopy)
-
- testValid(t, "email", str)
- testInvalid(t, "email", "somebody@somewhere@com")
-}
-
-func TestFormatHostname(t *testing.T) {
- hostname := Hostname("somewhere.com")
- str := string("somewhere.com")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := hostname.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, Hostname("somewhere.com"), string(b))
-
- b, err = hostname.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("somewhere.com"), b)
-
- err = hostname.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, Hostname(str), string(b))
-
- b, err = hostname.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&hostname)
- assert.NoError(t, err)
-
- var hostnameCopy Hostname
- err = bson.Unmarshal(bsonData, &hostnameCopy)
- assert.NoError(t, err)
- assert.Equal(t, hostname, hostnameCopy)
-
- testValid(t, "hostname", str)
- testInvalid(t, "hostname", "somewhere.com!")
-}
-
-func TestFormatIPv4(t *testing.T) {
- ipv4 := IPv4("192.168.254.1")
- str := string("192.168.254.2")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := ipv4.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, IPv4("192.168.254.2"), string(b))
-
- b, err = ipv4.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("192.168.254.2"), b)
-
- err = ipv4.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, IPv4(str), string(b))
-
- b, err = ipv4.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&ipv4)
- assert.NoError(t, err)
-
- var ipv4Copy IPv4
- err = bson.Unmarshal(bsonData, &ipv4Copy)
- assert.NoError(t, err)
- assert.Equal(t, ipv4, ipv4Copy)
-
- testValid(t, "ipv4", str)
- testInvalid(t, "ipv4", "192.168.254.2.2")
-}
-
-func TestFormatIPv6(t *testing.T) {
- ipv6 := IPv6("::1")
- str := string("::2")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := ipv6.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, IPv6("::2"), string(b))
-
- b, err = ipv6.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("::2"), b)
-
- err = ipv6.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, IPv6(str), string(b))
-
- b, err = ipv6.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&ipv6)
- assert.NoError(t, err)
-
- var ipv6Copy IPv6
- err = bson.Unmarshal(bsonData, &ipv6Copy)
- assert.NoError(t, err)
- assert.Equal(t, ipv6, ipv6Copy)
-
- testValid(t, "ipv6", str)
- testInvalid(t, "ipv6", "127.0.0.1")
-}
-
-func TestFormatMAC(t *testing.T) {
- mac := MAC("01:02:03:04:05:06")
- str := string("06:05:04:03:02:01")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := mac.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, MAC("06:05:04:03:02:01"), string(b))
-
- b, err = mac.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("06:05:04:03:02:01"), b)
-
- err = mac.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, MAC(str), string(b))
-
- b, err = mac.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&mac)
- assert.NoError(t, err)
-
- var macCopy MAC
- err = bson.Unmarshal(bsonData, &macCopy)
- assert.NoError(t, err)
- assert.Equal(t, mac, macCopy)
-
- testValid(t, "mac", str)
- testInvalid(t, "mac", "01:02:03:04:05")
-}
-
-func TestFormatUUID3(t *testing.T) {
- first3 := uuid.NewMD5(uuid.NameSpace_URL, []byte("somewhere.com"))
- other3 := uuid.NewMD5(uuid.NameSpace_URL, []byte("somewhereelse.com"))
- uuid3 := UUID3(first3.String())
- str := string(other3.String())
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := uuid3.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID3(other3.String()), string(b))
-
- b, err = uuid3.MarshalText()
- assert.NoError(t, err)
- assert.EqualValues(t, []byte(other3.String()), b)
-
- err = uuid3.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID3(str), string(b))
-
- b, err = uuid3.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&uuid3)
- assert.NoError(t, err)
-
- var uuid3Copy UUID3
- err = bson.Unmarshal(bsonData, &uuid3Copy)
- assert.NoError(t, err)
- assert.Equal(t, uuid3, uuid3Copy)
-
- testValid(t, "uuid3", str)
- testInvalid(t, "uuid3", "not-a-uuid")
-}
-
-func TestFormatUUID4(t *testing.T) {
- first4 := uuid.NewRandom()
- other4 := uuid.NewRandom()
- uuid4 := UUID4(first4.String())
- str := string(other4.String())
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := uuid4.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID4(other4.String()), string(b))
-
- b, err = uuid4.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte(other4.String()), b)
-
- err = uuid4.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID4(str), string(b))
-
- b, err = uuid4.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&uuid4)
- assert.NoError(t, err)
-
- var uuid4Copy UUID4
- err = bson.Unmarshal(bsonData, &uuid4Copy)
- assert.NoError(t, err)
- assert.Equal(t, uuid4, uuid4Copy)
-
- testValid(t, "uuid4", str)
- testInvalid(t, "uuid4", "not-a-uuid")
-}
-
-func TestFormatUUID5(t *testing.T) {
- first5 := uuid.NewSHA1(uuid.NameSpace_URL, []byte("somewhere.com"))
- other5 := uuid.NewSHA1(uuid.NameSpace_URL, []byte("somewhereelse.com"))
- uuid5 := UUID5(first5.String())
- str := string(other5.String())
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := uuid5.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID5(other5.String()), string(b))
-
- b, err = uuid5.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte(other5.String()), b)
-
- err = uuid5.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID5(str), string(b))
-
- b, err = uuid5.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&uuid5)
- assert.NoError(t, err)
-
- var uuid5Copy UUID5
- err = bson.Unmarshal(bsonData, &uuid5Copy)
- assert.NoError(t, err)
- assert.Equal(t, uuid5, uuid5Copy)
-
- testValid(t, "uuid5", str)
- testInvalid(t, "uuid5", "not-a-uuid")
-}
-
-func TestFormatUUID(t *testing.T) {
- first5 := uuid.NewSHA1(uuid.NameSpace_URL, []byte("somewhere.com"))
- other5 := uuid.NewSHA1(uuid.NameSpace_URL, []byte("somewhereelse.com"))
- uuid := UUID(first5.String())
- str := string(other5.String())
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := uuid.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID(other5.String()), string(b))
-
- b, err = uuid.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte(other5.String()), b)
-
- err = uuid.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, UUID(str), string(b))
-
- b, err = uuid.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&uuid)
- assert.NoError(t, err)
-
- var uuidCopy UUID
- err = bson.Unmarshal(bsonData, &uuidCopy)
- assert.NoError(t, err)
- assert.Equal(t, uuid, uuidCopy)
-
- testValid(t, "uuid", str)
- testInvalid(t, "uuid", "not-a-uuid")
-}
-
-func TestFormatISBN(t *testing.T) {
- isbn := ISBN("0321751043")
- str := string("0321751043")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := isbn.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, ISBN("0321751043"), string(b))
-
- b, err = isbn.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("0321751043"), b)
-
- err = isbn.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, ISBN(str), string(b))
-
- b, err = isbn.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&isbn)
- assert.NoError(t, err)
-
- var isbnCopy ISBN
- err = bson.Unmarshal(bsonData, &isbnCopy)
- assert.NoError(t, err)
- assert.Equal(t, isbn, isbnCopy)
-
- testValid(t, "isbn", str)
- testInvalid(t, "isbn", "836217463") // bad checksum
-}
-
-func TestFormatISBN10(t *testing.T) {
- isbn10 := ISBN10("0321751043")
- str := string("0321751043")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := isbn10.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, ISBN10("0321751043"), string(b))
-
- b, err = isbn10.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("0321751043"), b)
-
- err = isbn10.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, ISBN10(str), string(b))
-
- b, err = isbn10.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&isbn10)
- assert.NoError(t, err)
-
- var isbn10Copy ISBN10
- err = bson.Unmarshal(bsonData, &isbn10Copy)
- assert.NoError(t, err)
- assert.Equal(t, isbn10, isbn10Copy)
-
- testValid(t, "isbn10", str)
- testInvalid(t, "isbn10", "836217463") // bad checksum
-}
-
-func TestFormatISBN13(t *testing.T) {
- isbn13 := ISBN13("978-0321751041")
- str := string("978-0321751041")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := isbn13.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, ISBN13("978-0321751041"), string(b))
-
- b, err = isbn13.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("978-0321751041"), b)
-
- err = isbn13.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, ISBN13(str), string(b))
-
- b, err = isbn13.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&isbn13)
- assert.NoError(t, err)
-
- var isbn13Copy ISBN13
- err = bson.Unmarshal(bsonData, &isbn13Copy)
- assert.NoError(t, err)
- assert.Equal(t, isbn13, isbn13Copy)
-
- testValid(t, "isbn13", str)
- testInvalid(t, "isbn13", "978-0321751042") // bad checksum
-}
-
-func TestFormatHexColor(t *testing.T) {
- hexColor := HexColor("#FFFFFF")
- str := string("#000000")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := hexColor.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, HexColor("#000000"), string(b))
-
- b, err = hexColor.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("#000000"), b)
-
- err = hexColor.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, HexColor(str), string(b))
-
- b, err = hexColor.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&hexColor)
- assert.NoError(t, err)
-
- var hexColorCopy HexColor
- err = bson.Unmarshal(bsonData, &hexColorCopy)
- assert.NoError(t, err)
- assert.Equal(t, hexColor, hexColorCopy)
-
- testValid(t, "hexcolor", str)
- testInvalid(t, "hexcolor", "#fffffffz")
-}
-
-func TestFormatRGBColor(t *testing.T) {
- rgbColor := RGBColor("rgb(255,255,255)")
- str := string("rgb(0,0,0)")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := rgbColor.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, RGBColor("rgb(0,0,0)"), string(b))
-
- b, err = rgbColor.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("rgb(0,0,0)"), b)
-
- err = rgbColor.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, RGBColor(str), string(b))
-
- b, err = rgbColor.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&rgbColor)
- assert.NoError(t, err)
-
- var rgbColorCopy RGBColor
- err = bson.Unmarshal(bsonData, &rgbColorCopy)
- assert.NoError(t, err)
- assert.Equal(t, rgbColor, rgbColorCopy)
-
- testValid(t, "rgbcolor", str)
- testInvalid(t, "rgbcolor", "rgb(300,0,0)")
-}
-
-func TestFormatSSN(t *testing.T) {
- ssn := SSN("111-11-1111")
- str := string("999 99 9999")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := ssn.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, SSN("999 99 9999"), string(b))
-
- b, err = ssn.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("999 99 9999"), b)
-
- err = ssn.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, SSN(str), string(b))
-
- b, err = ssn.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&ssn)
- assert.NoError(t, err)
-
- var ssnCopy SSN
- err = bson.Unmarshal(bsonData, &ssnCopy)
- assert.NoError(t, err)
- assert.Equal(t, ssn, ssnCopy)
-
- testValid(t, "ssn", str)
- testInvalid(t, "ssn", "999 99 999")
-}
-
-func TestFormatCreditCard(t *testing.T) {
- creditCard := CreditCard("4111-1111-1111-1111")
- str := string("4012-8888-8888-1881")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := creditCard.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, CreditCard("4012-8888-8888-1881"), string(b))
-
- b, err = creditCard.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("4012-8888-8888-1881"), b)
-
- err = creditCard.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, CreditCard(str), string(b))
-
- b, err = creditCard.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&creditCard)
- assert.NoError(t, err)
-
- var creditCardCopy CreditCard
- err = bson.Unmarshal(bsonData, &creditCardCopy)
- assert.NoError(t, err)
- assert.Equal(t, creditCard, creditCardCopy)
-
- testValid(t, "creditcard", str)
- testInvalid(t, "creditcard", "9999-9999-9999-999") // bad checksum
-}
-
-func TestFormatPassword(t *testing.T) {
- password := Password("super secret stuff here")
- str := string("even more secret")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := password.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, Password("even more secret"), string(b))
-
- b, err = password.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("even more secret"), b)
-
- err = password.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, Password(str), string(b))
-
- b, err = password.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&password)
- assert.NoError(t, err)
-
- var passwordCopy Password
- err = bson.Unmarshal(bsonData, &passwordCopy)
- assert.NoError(t, err)
- assert.Equal(t, password, passwordCopy)
-
- // everything is valid
- testValid(t, "password", str)
-}
-
-func TestFormatBase64(t *testing.T) {
- b64 := Base64("ZWxpemFiZXRocG9zZXk=")
- str := string("ZWxpemFiZXRocG9zZXk=")
- b := []byte(str)
- bj := []byte("\"" + str + "\"")
-
- err := b64.UnmarshalText(b)
- assert.NoError(t, err)
- assert.EqualValues(t, Base64("ZWxpemFiZXRocG9zZXk="), string(b))
-
- b, err = b64.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte("ZWxpemFiZXRocG9zZXk="), b)
-
- err = b64.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, Base64(str), string(b))
-
- b, err = b64.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- bsonData, err := bson.Marshal(&b64)
- assert.NoError(t, err)
-
- var b64Copy Base64
- err = bson.Unmarshal(bsonData, &b64Copy)
- assert.NoError(t, err)
- assert.Equal(t, b64, b64Copy)
-
- testValid(t, "byte", str)
- testInvalid(t, "byte", "ZWxpemFiZXRocG9zZXk") // missing pad char
-}
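
The removed tests above repeat one marshal/unmarshal/BSON round-trip per format. For the validation half alone, the same checks condense into a table-driven form around Default.Validates, mirroring testValid/testInvalid; a sketch that would sit in package strfmt next to the tests above, with illustrative sample values only:

package strfmt

import "testing"

func TestFormatSamples(t *testing.T) {
	samples := []struct {
		format, valid, invalid string
	}{
		{"email", "somebody@somewhere.com", "somebody@somewhere@com"},
		{"ipv4", "192.168.254.2", "192.168.254.2.2"},
		{"hostname", "somewhere.com", "somewhere.com!"},
	}
	for _, s := range samples {
		if !Default.Validates(s.format, s.valid) {
			t.Errorf("expected %q to be a valid %s", s.valid, s.format)
		}
		if Default.Validates(s.format, s.invalid) {
			t.Errorf("expected %q to be an invalid %s", s.invalid, s.format)
		}
	}
}
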
diff --git a/vendor/github.com/go-openapi/strfmt/doc.go b/vendor/github.com/go-openapi/strfmt/doc.go
deleted file mode 100644
index 41aebe6d5..000000000
--- a/vendor/github.com/go-openapi/strfmt/doc.go
+++ /dev/null
@@ -1,18 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// Package strfmt contains custom string formats
-//
-// TODO: add info on how to define and register a custom format
-package strfmt
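
The doc.go comment above still carries a TODO about documenting custom formats. For context, registration works like the duration init in the next file: implement the Format interface (String plus text marshalling) and hand an instance and a validator to Default.Add. A hedged sketch; the "slug" format, its regexp and the import path are invented for illustration:

package main

import (
	"fmt"
	"regexp"

	"github.com/go-openapi/strfmt"
)

// Slug is a hypothetical custom format: lowercase words separated by dashes.
type Slug string

func (s Slug) String() string                { return string(s) }
func (s Slug) MarshalText() ([]byte, error)  { return []byte(s), nil }
func (s *Slug) UnmarshalText(b []byte) error { *s = Slug(b); return nil }

var slugRx = regexp.MustCompile(`^[a-z0-9]+(-[a-z0-9]+)*$`)

// IsSlug is the validator handed to the registry.
func IsSlug(str string) bool { return slugRx.MatchString(str) }

func main() {
	// Register the format under a name, exactly like the duration init below.
	s := Slug("")
	strfmt.Default.Add("slug", &s, IsSlug)

	fmt.Println(strfmt.Default.Validates("slug", "my-first-post")) // true
	fmt.Println(strfmt.Default.Validates("slug", "Not A Slug"))    // false
}
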
diff --git a/vendor/github.com/go-openapi/strfmt/duration.go b/vendor/github.com/go-openapi/strfmt/duration.go
deleted file mode 100644
index 711842ded..000000000
--- a/vendor/github.com/go-openapi/strfmt/duration.go
+++ /dev/null
@@ -1,194 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "errors"
- "fmt"
- "regexp"
- "strconv"
- "strings"
- "time"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
-)
-
-func init() {
- d := Duration(0)
- Default.Add("duration", &d, IsDuration)
-}
-
-var (
- timeUnits = [][]string{
- {"ns", "nano"},
- {"us", "µs", "micro"},
- {"ms", "milli"},
- {"s", "sec"},
- {"m", "min"},
- {"h", "hr", "hour"},
- {"d", "day"},
- {"w", "wk", "week"},
- }
-
- timeMultiplier = map[string]time.Duration{
- "ns": time.Nanosecond,
- "us": time.Microsecond,
- "ms": time.Millisecond,
- "s": time.Second,
- "m": time.Minute,
- "h": time.Hour,
- "d": 24 * time.Hour,
- "w": 7 * 24 * time.Hour,
- }
-
- durationMatcher = regexp.MustCompile(`((\d+)\s*([A-Za-zµ]+))`)
-)
-
-// IsDuration returns true if the provided string is a valid duration
-func IsDuration(str string) bool {
- _, err := ParseDuration(str)
- return err == nil
-}
-
-// Duration represents a duration
-//
-// swagger:strfmt duration
-type Duration time.Duration
-
-// MarshalText turns this instance into text
-func (d Duration) MarshalText() ([]byte, error) {
- return []byte(time.Duration(d).String()), nil
-}
-
-// UnmarshalText hydrates this instance from text
-func (d *Duration) UnmarshalText(data []byte) error { // validation is performed later on
- dd, err := ParseDuration(string(data))
- if err != nil {
- return err
- }
- *d = Duration(dd)
- return nil
-}
-
-// ParseDuration parses a duration from a string, compatible with Scala duration syntax
-func ParseDuration(cand string) (time.Duration, error) {
- if dur, err := time.ParseDuration(cand); err == nil {
- return dur, nil
- }
-
- var dur time.Duration
- ok := false
- for _, match := range durationMatcher.FindAllStringSubmatch(cand, -1) {
-
- factor, err := strconv.Atoi(match[2]) // converts string to int
- if err != nil {
- return 0, err
- }
- unit := strings.ToLower(strings.TrimSpace(match[3]))
-
- for _, variants := range timeUnits {
- last := len(variants) - 1
- multiplier := timeMultiplier[variants[0]]
-
- for i, variant := range variants {
- if (last == i && strings.HasPrefix(unit, variant)) || strings.EqualFold(variant, unit) {
- ok = true
- dur += (time.Duration(factor) * multiplier)
- }
- }
- }
- }
-
- if ok {
- return dur, nil
- }
-	return 0, fmt.Errorf("unable to parse %s as duration", cand)
-}
-
-// Scan reads a Duration value from a database driver type.
-func (d *Duration) Scan(raw interface{}) error {
- switch v := raw.(type) {
- // TODO: case []byte: // ?
- case int64:
- *d = Duration(v)
- case float64:
- *d = Duration(int64(v))
- case nil:
- *d = Duration(0)
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.Duration from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts Duration to a primitive value ready to be written to a database.
-func (d Duration) Value() (driver.Value, error) {
- return driver.Value(int64(d)), nil
-}
-
-// String converts this duration to a string
-func (d Duration) String() string {
- return time.Duration(d).String()
-}
-
-func (d Duration) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- d.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (d Duration) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(time.Duration(d).String())
-}
-
-func (d *Duration) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- d.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (d *Duration) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- tt, err := ParseDuration(data)
- if err != nil {
- in.AddError(err)
- return
- }
- *d = Duration(tt)
- }
-}
-
-func (d *Duration) GetBSON() (interface{}, error) {
- return bson.M{"data": int64(*d)}, nil
-}
-
-func (d *Duration) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(int64); ok {
- *d = Duration(data)
- return nil
- }
-
- return errors.New("couldn't unmarshal bson raw value as Duration")
-}
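
ParseDuration above tries time.ParseDuration first and only then falls back to the unit table, so both Go-style values and the extended day/week forms parse. A small sketch of the expected results, assuming the upstream import path github.com/go-openapi/strfmt:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
)

func main() {
	d, _ := strfmt.ParseDuration("90m") // handled by time.ParseDuration
	fmt.Println(d)                      // 1h30m0s

	d, _ = strfmt.ParseDuration("2 days") // falls back to the unit table: 2 * 24h
	fmt.Println(d)                        // 48h0m0s

	_, err := strfmt.ParseDuration("yada") // no unit matches
	fmt.Println(err != nil)                // true
}
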
diff --git a/vendor/github.com/go-openapi/strfmt/duration_test.go b/vendor/github.com/go-openapi/strfmt/duration_test.go
deleted file mode 100644
index 7b78b8023..000000000
--- a/vendor/github.com/go-openapi/strfmt/duration_test.go
+++ /dev/null
@@ -1,143 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "testing"
- "time"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/stretchr/testify/assert"
-)
-
-func TestDuration(t *testing.T) {
- pp := Duration(0)
-
- err := pp.UnmarshalText([]byte("0ms"))
- assert.NoError(t, err)
- err = pp.UnmarshalText([]byte("yada"))
- assert.Error(t, err)
-
- orig := "2ms"
- b := []byte(orig)
- bj := []byte("\"" + orig + "\"")
-
- err = pp.UnmarshalText(b)
- assert.NoError(t, err)
-
- txt, err := pp.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, orig, string(txt))
-
- err = pp.UnmarshalJSON(bj)
- assert.NoError(t, err)
- assert.EqualValues(t, orig, pp.String())
-
- b, err = pp.MarshalJSON()
- assert.NoError(t, err)
- assert.Equal(t, bj, b)
-
- dur := Duration(42)
- bsonData, err := bson.Marshal(&dur)
- assert.NoError(t, err)
-
- var durCopy Duration
- err = bson.Unmarshal(bsonData, &durCopy)
- assert.NoError(t, err)
- assert.Equal(t, dur, durCopy)
-}
-
-func testDurationParser(t *testing.T, toParse string, expected time.Duration) {
- r, e := ParseDuration(toParse)
- assert.NoError(t, e)
- assert.Equal(t, expected, r)
-}
-
-func testDurationSQLScanner(t *testing.T, dur time.Duration) {
- values := []interface{}{int64(dur), float64(dur)}
- for _, value := range values {
- var result Duration
- err := result.Scan(value)
- assert.NoError(t, err)
- assert.Equal(t, dur, time.Duration(result))
- }
-}
-
-func TestDurationParser(t *testing.T) {
- testcases := map[string]time.Duration{
-
- // parse the short forms without spaces
- "1ns": 1 * time.Nanosecond,
- "1us": 1 * time.Microsecond,
- "1µs": 1 * time.Microsecond,
- "1ms": 1 * time.Millisecond,
- "1s": 1 * time.Second,
- "1m": 1 * time.Minute,
- "1h": 1 * time.Hour,
- "1hr": 1 * time.Hour,
- "1d": 24 * time.Hour,
- "1w": 7 * 24 * time.Hour,
- "1wk": 7 * 24 * time.Hour,
-
- // parse the long forms without spaces
- "1nanoseconds": 1 * time.Nanosecond,
- "1nanos": 1 * time.Nanosecond,
- "1microseconds": 1 * time.Microsecond,
- "1micros": 1 * time.Microsecond,
- "1millis": 1 * time.Millisecond,
- "1milliseconds": 1 * time.Millisecond,
- "1second": 1 * time.Second,
- "1sec": 1 * time.Second,
- "1min": 1 * time.Minute,
- "1minute": 1 * time.Minute,
- "1hour": 1 * time.Hour,
- "1day": 24 * time.Hour,
- "1week": 7 * 24 * time.Hour,
-
- // parse the short forms with spaces
- "1 ns": 1 * time.Nanosecond,
- "1 us": 1 * time.Microsecond,
- "1 µs": 1 * time.Microsecond,
- "1 ms": 1 * time.Millisecond,
- "1 s": 1 * time.Second,
- "1 m": 1 * time.Minute,
- "1 h": 1 * time.Hour,
- "1 hr": 1 * time.Hour,
- "1 d": 24 * time.Hour,
- "1 w": 7 * 24 * time.Hour,
- "1 wk": 7 * 24 * time.Hour,
-
-		// parse the long forms with spaces
- "1 nanoseconds": 1 * time.Nanosecond,
- "1 nanos": 1 * time.Nanosecond,
- "1 microseconds": 1 * time.Microsecond,
- "1 micros": 1 * time.Microsecond,
- "1 millis": 1 * time.Millisecond,
- "1 milliseconds": 1 * time.Millisecond,
- "1 second": 1 * time.Second,
- "1 sec": 1 * time.Second,
- "1 min": 1 * time.Minute,
- "1 minute": 1 * time.Minute,
- "1 hour": 1 * time.Hour,
- "1 day": 24 * time.Hour,
- "1 week": 7 * 24 * time.Hour,
- }
-
- for str, dur := range testcases {
- testDurationParser(t, str, dur)
- testDurationSQLScanner(t, dur)
- }
-}
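
The registry in format.go below also exposes MapStructureHookFunc, which lets mapstructure decode plain strings into the registered format types. A hedged wiring sketch; the payload struct and sample map are illustrative only:

package main

import (
	"fmt"

	"github.com/go-openapi/strfmt"
	"github.com/mitchellh/mapstructure"
)

type payload struct {
	Contact strfmt.Email `mapstructure:"contact"`
}

func main() {
	var out payload
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		// The hook converts incoming strings into registered format types.
		DecodeHook: strfmt.Default.MapStructureHookFunc(),
		Result:     &out,
	})
	if err != nil {
		panic(err)
	}
	if err := dec.Decode(map[string]interface{}{"contact": "somebody@somewhere.com"}); err != nil {
		panic(err)
	}
	fmt.Println(out.Contact) // somebody@somewhere.com
}
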
diff --git a/vendor/github.com/go-openapi/strfmt/format.go b/vendor/github.com/go-openapi/strfmt/format.go
deleted file mode 100644
index de9401765..000000000
--- a/vendor/github.com/go-openapi/strfmt/format.go
+++ /dev/null
@@ -1,298 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "encoding"
- "reflect"
- "strings"
- "sync"
- "time"
-
- "github.com/go-openapi/errors"
- "github.com/mitchellh/mapstructure"
-)
-
-// Default is the default formats registry
-var Default = NewSeededFormats(nil, nil)
-
-// Validator represents a validator for a string format
-type Validator func(string) bool
-
-// Format represents a string format
-type Format interface {
- String() string
- encoding.TextMarshaler
- encoding.TextUnmarshaler
-}
-
-// Registry is a registry of string formats
-type Registry interface {
- Add(string, Format, Validator) bool
- DelByName(string) bool
- GetType(string) (reflect.Type, bool)
- ContainsName(string) bool
- Validates(string, string) bool
- Parse(string, string) (interface{}, error)
- MapStructureHookFunc() mapstructure.DecodeHookFunc
-}
-
-type knownFormat struct {
- Name string
- OrigName string
- Type reflect.Type
- Validator Validator
-}
-
-// NameNormalizer is a function that normalizes a format name
-type NameNormalizer func(string) string
-
-// DefaultNameNormalizer removes all dashes
-func DefaultNameNormalizer(name string) string {
- return strings.Replace(name, "-", "", -1)
-}
-
-type defaultFormats struct {
- sync.Mutex
- data []knownFormat
- normalizeName NameNormalizer
-}
-
-// NewFormats creates a new formats registry seeded with the formats from the Default registry
-func NewFormats() Registry {
- return NewSeededFormats(Default.(*defaultFormats).data, nil)
-}
-
-// NewSeededFormats creates a new formats registry
-func NewSeededFormats(seeds []knownFormat, normalizer NameNormalizer) Registry {
- if normalizer == nil {
- normalizer = DefaultNameNormalizer
- }
- // copy here, don't modify original
- d := append([]knownFormat(nil), seeds...)
- return &defaultFormats{
- data: d,
- normalizeName: normalizer,
- }
-}
-
-// MapStructureHookFunc is a decode hook function for mapstructure
-func (f *defaultFormats) MapStructureHookFunc() mapstructure.DecodeHookFunc {
- return func(from reflect.Type, to reflect.Type, data interface{}) (interface{}, error) {
- if from.Kind() != reflect.String {
- return data, nil
- }
- for _, v := range f.data {
- tpe, _ := f.GetType(v.Name)
- if to == tpe {
- switch v.Name {
- case "date":
- d, err := time.Parse(RFC3339FullDate, data.(string))
- if err != nil {
- return nil, err
- }
- return Date(d), nil
- case "datetime":
- return ParseDateTime(data.(string))
- case "duration":
- dur, err := ParseDuration(data.(string))
- if err != nil {
- return nil, err
- }
- return Duration(dur), nil
- case "uri":
- return URI(data.(string)), nil
- case "email":
- return Email(data.(string)), nil
- case "uuid":
- return UUID(data.(string)), nil
- case "uuid3":
- return UUID3(data.(string)), nil
- case "uuid4":
- return UUID4(data.(string)), nil
- case "uuid5":
- return UUID5(data.(string)), nil
- case "hostname":
- return Hostname(data.(string)), nil
- case "ipv4":
- return IPv4(data.(string)), nil
- case "ipv6":
- return IPv6(data.(string)), nil
- case "mac":
- return MAC(data.(string)), nil
- case "isbn":
- return ISBN(data.(string)), nil
- case "isbn10":
- return ISBN10(data.(string)), nil
- case "isbn13":
- return ISBN13(data.(string)), nil
- case "creditcard":
- return CreditCard(data.(string)), nil
- case "ssn":
- return SSN(data.(string)), nil
- case "hexcolor":
- return HexColor(data.(string)), nil
- case "rgbcolor":
- return RGBColor(data.(string)), nil
- case "byte":
- return Base64(data.(string)), nil
- case "password":
- return Password(data.(string)), nil
- default:
- return nil, errors.InvalidTypeName(v.Name)
-
- }
- }
- }
- return data, nil
- }
-}
-
-// Add adds a new format, returns true if this was a new item instead of a replacement
-func (f *defaultFormats) Add(name string, strfmt Format, validator Validator) bool {
- f.Lock()
- defer f.Unlock()
-
- nme := f.normalizeName(name)
-
- tpe := reflect.TypeOf(strfmt)
- if tpe.Kind() == reflect.Ptr {
- tpe = tpe.Elem()
- }
-
- for i := range f.data {
- v := &f.data[i]
- if v.Name == nme {
- v.Type = tpe
- v.Validator = validator
- return false
- }
- }
-
- // turns out it's new after all
- f.data = append(f.data, knownFormat{Name: nme, OrigName: name, Type: tpe, Validator: validator})
- return true
-}
-
-// GetType gets the type for the specified name
-func (f *defaultFormats) GetType(name string) (reflect.Type, bool) {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- return v.Type, true
- }
- }
- return nil, false
-}
-
-// DelByName removes the format by the specified name, returns true when an item was actually removed
-func (f *defaultFormats) DelByName(name string) bool {
- f.Lock()
- defer f.Unlock()
-
- nme := f.normalizeName(name)
-
- for i, v := range f.data {
- if v.Name == nme {
- f.data[i] = knownFormat{} // release
- f.data = append(f.data[:i], f.data[i+1:]...)
- return true
- }
- }
- return false
-}
-
-// DelByFormat removes the specified format, returns true when an item was actually removed
-func (f *defaultFormats) DelByFormat(strfmt Format) bool {
- f.Lock()
- defer f.Unlock()
-
- tpe := reflect.TypeOf(strfmt)
- if tpe.Kind() == reflect.Ptr {
- tpe = tpe.Elem()
- }
-
- for i, v := range f.data {
- if v.Type == tpe {
- f.data[i] = knownFormat{} // release
- f.data = append(f.data[:i], f.data[i+1:]...)
- return true
- }
- }
- return false
-}
-
-// ContainsName returns true if this registry contains the specified name
-func (f *defaultFormats) ContainsName(name string) bool {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- return true
- }
- }
- return false
-}
-
-// ContainsFormat returns true if this registry contains the specified format
-func (f *defaultFormats) ContainsFormat(strfmt Format) bool {
- f.Lock()
- defer f.Unlock()
- tpe := reflect.TypeOf(strfmt)
- if tpe.Kind() == reflect.Ptr {
- tpe = tpe.Elem()
- }
-
- for _, v := range f.data {
- if v.Type == tpe {
- return true
- }
- }
- return false
-}
-
-func (f *defaultFormats) Validates(name, data string) bool {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- return v.Validator(data)
- }
- }
- return false
-}
-
-func (f *defaultFormats) Parse(name, data string) (interface{}, error) {
- f.Lock()
- defer f.Unlock()
- nme := f.normalizeName(name)
- for _, v := range f.data {
- if v.Name == nme {
- nw := reflect.New(v.Type).Interface()
- if dec, ok := nw.(encoding.TextUnmarshaler); ok {
- if err := dec.UnmarshalText([]byte(data)); err != nil {
- return nil, err
- }
- return nw, nil
- }
- return nil, errors.InvalidTypeName(name)
- }
- }
- return nil, errors.InvalidTypeName(name)
-}
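A minimal sketch of driving the registry API deleted above: Validates consults the registered Validator for a (normalized) format name, while Parse round-trips the raw string through that format's TextUnmarshaler and hands back a pointer to the concrete type. The "datetime" format is seeded into Default by the package's own init functions (see time.go below).

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"
)

func main() {
	// Validates runs the validator registered for the named format.
	fmt.Println(strfmt.Default.Validates("datetime", "2012-03-02T15:06:05.999999999Z")) // true

	// Parse builds the value via reflect.New + UnmarshalText, so it returns *strfmt.DateTime here.
	v, err := strfmt.Default.Parse("datetime", "2012-03-02T15:06:05.999999999Z")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%T: %v\n", v, v)
}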
diff --git a/vendor/github.com/go-openapi/strfmt/format_test.go b/vendor/github.com/go-openapi/strfmt/format_test.go
deleted file mode 100644
index 9c9fa3fce..000000000
--- a/vendor/github.com/go-openapi/strfmt/format_test.go
+++ /dev/null
@@ -1,223 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "strings"
- "testing"
- "time"
-
- "github.com/mitchellh/mapstructure"
- "github.com/stretchr/testify/assert"
-)
-
-type testFormat string
-
-func (t testFormat) MarshalText() ([]byte, error) {
- return []byte(string(t)), nil
-}
-
-func (t *testFormat) UnmarshalText(b []byte) error {
- *t = testFormat(string(b))
- return nil
-}
-
-func (t testFormat) String() string {
- return string(t)
-}
-
-func isTestFormat(s string) bool {
- return strings.HasPrefix(s, "tf")
-}
-
-type tf2 string
-
-func (t tf2) MarshalText() ([]byte, error) {
- return []byte(string(t)), nil
-}
-
-func (t *tf2) UnmarshalText(b []byte) error {
- *t = tf2(string(b))
- return nil
-}
-
-func istf2(s string) bool {
- return strings.HasPrefix(s, "af")
-}
-
-func (t tf2) String() string {
- return string(t)
-}
-
-type bf string
-
-func (t bf) MarshalText() ([]byte, error) {
- return []byte(string(t)), nil
-}
-
-func (t *bf) UnmarshalText(b []byte) error {
- *t = bf(string(b))
- return nil
-}
-
-func (t bf) String() string {
- return string(t)
-}
-
-func isbf(s string) bool {
- return strings.HasPrefix(s, "bf")
-}
-
-func istf3(s string) bool {
- return strings.HasPrefix(s, "ff")
-}
-
-func init() {
- tf := testFormat("")
- Default.Add("test-format", &tf, isTestFormat)
-}
-
-func TestFormatRegistry(t *testing.T) {
- f2 := tf2("")
- f3 := bf("")
- registry := NewFormats()
-
- assert.True(t, registry.ContainsName("test-format"))
- assert.True(t, registry.ContainsName("testformat"))
- assert.False(t, registry.ContainsName("ttt"))
-
- assert.True(t, registry.Validates("testformat", "tfa"))
- assert.False(t, registry.Validates("testformat", "ffa"))
-
- assert.True(t, registry.Add("tf2", &f2, istf2))
- assert.True(t, registry.ContainsName("tf2"))
- assert.False(t, registry.ContainsName("tfw"))
- assert.True(t, registry.Validates("tf2", "afa"))
-
- assert.False(t, registry.Add("tf2", &f3, isbf))
- assert.True(t, registry.ContainsName("tf2"))
- assert.False(t, registry.ContainsName("tfw"))
- assert.True(t, registry.Validates("tf2", "bfa"))
- assert.False(t, registry.Validates("tf2", "afa"))
-
- assert.False(t, registry.Add("tf2", &f2, istf2))
- assert.True(t, registry.Add("tf3", &f2, istf3))
- assert.True(t, registry.ContainsName("tf3"))
- assert.True(t, registry.ContainsName("tf2"))
- assert.False(t, registry.ContainsName("tfw"))
- assert.True(t, registry.Validates("tf3", "ffa"))
-
- assert.True(t, registry.DelByName("tf3"))
- assert.True(t, registry.Add("tf3", &f2, istf3))
-
- assert.True(t, registry.DelByName("tf3"))
- assert.False(t, registry.DelByName("unknown"))
- assert.False(t, registry.Validates("unknown", ""))
-}
-
-type testStruct struct {
- D Date `json:"d,omitempty"`
- DT DateTime `json:"dt,omitempty"`
- Dur Duration `json:"dur,omitempty"`
- Uri URI `json:"uri,omitempty"`
- Eml Email `json:"eml,omitempty"`
- Uuid UUID `json:"uuid,omitempty"`
- Uuid3 UUID3 `json:"uuid3,omitempty"`
- Uuid4 UUID4 `json:"uuid4,omitempty"`
- Uuid5 UUID5 `json:"uuid5,omitempty"`
- Hn Hostname `json:"hn,omitempty"`
- Ipv4 IPv4 `json:"ipv4,omitempty"`
- Ipv6 IPv6 `json:"ipv6,omitempty"`
- Mac MAC `json:"mac,omitempty"`
- Isbn ISBN `json:"isbn,omitempty"`
- Isbn10 ISBN10 `json:"isbn10,omitempty"`
- Isbn13 ISBN13 `json:"isbn13,omitempty"`
- Creditcard CreditCard `json:"creditcard,omitempty"`
- Ssn SSN `json:"ssn,omitempty"`
- Hexcolor HexColor `json:"hexcolor,omitempty"`
- Rgbcolor RGBColor `json:"rgbcolor,omitempty"`
- B64 Base64 `json:"b64,omitempty"`
- Pw Password `json:"pw,omitempty"`
-}
-
-func TestDecodeHook(t *testing.T) {
- registry := NewFormats()
- m := map[string]interface{}{
- "d": "2014-12-15",
- "dt": "2012-03-02T15:06:05.999999999Z",
- "dur": "5s",
- "uri": "http://www.dummy.com",
- "eml": "dummy@dummy.com",
- "uuid": "a8098c1a-f86e-11da-bd1a-00112444be1e",
- "uuid3": "bcd02e22-68f0-3046-a512-327cca9def8f",
- "uuid4": "025b0d74-00a2-4048-bf57-227c5111bb34",
- "uuid5": "886313e1-3b8a-5372-9b90-0c9aee199e5d",
- "hn": "somewhere.com",
- "ipv4": "192.168.254.1",
- "ipv6": "::1",
- "mac": "01:02:03:04:05:06",
- "isbn": "0321751043",
- "isbn10": "0321751043",
- "isbn13": "978-0321751041",
- "hexcolor": "#FFFFFF",
- "rgbcolor": "rgb(255,255,255)",
- "pw": "super secret stuff here",
- "ssn": "111-11-1111",
- "creditcard": "4111-1111-1111-1111",
- "b64": "ZWxpemFiZXRocG9zZXk=",
- }
-
- date, _ := time.Parse(RFC3339FullDate, "2014-12-15")
- dur, _ := ParseDuration("5s")
- dt, _ := ParseDateTime("2012-03-02T15:06:05.999999999Z")
-
- exp := &testStruct{
- D: Date(date),
- DT: dt,
- Dur: Duration(dur),
- Uri: URI("http://www.dummy.com"),
- Eml: Email("dummy@dummy.com"),
- Uuid: UUID("a8098c1a-f86e-11da-bd1a-00112444be1e"),
- Uuid3: UUID3("bcd02e22-68f0-3046-a512-327cca9def8f"),
- Uuid4: UUID4("025b0d74-00a2-4048-bf57-227c5111bb34"),
- Uuid5: UUID5("886313e1-3b8a-5372-9b90-0c9aee199e5d"),
- Hn: Hostname("somewhere.com"),
- Ipv4: IPv4("192.168.254.1"),
- Ipv6: IPv6("::1"),
- Mac: MAC("01:02:03:04:05:06"),
- Isbn: ISBN("0321751043"),
- Isbn10: ISBN10("0321751043"),
- Isbn13: ISBN13("978-0321751041"),
- Creditcard: CreditCard("4111-1111-1111-1111"),
- Ssn: SSN("111-11-1111"),
- Hexcolor: HexColor("#FFFFFF"),
- Rgbcolor: RGBColor("rgb(255,255,255)"),
- B64: Base64("ZWxpemFiZXRocG9zZXk="),
- Pw: Password("super secret stuff here"),
- }
-
- test := new(testStruct)
- cfg := &mapstructure.DecoderConfig{
- DecodeHook: registry.MapStructureHookFunc(),
- // weakly typed will pass if this passes
- WeaklyTypedInput: false,
- Result: test,
- }
- d, err := mapstructure.NewDecoder(cfg)
- assert.Nil(t, err)
- err = d.Decode(m)
- assert.Nil(t, err)
- assert.Equal(t, exp, test)
-}
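The decode-hook test above exercises the full testStruct; in application code the same wiring is just a DecoderConfig carrying the registry's hook. A trimmed sketch under that assumption (the config struct and field names here are illustrative only):

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"
	"github.com/mitchellh/mapstructure"
)

// config is a hypothetical target struct; mapstructure matches map keys to
// field names case-insensitively by default, so no tags are needed.
type config struct {
	Started strfmt.DateTime
	Contact strfmt.Email
}

func main() {
	raw := map[string]interface{}{
		"started": "2012-03-02T15:06:05.999999999Z",
		"contact": "dummy@dummy.com",
	}

	var out config
	dec, err := mapstructure.NewDecoder(&mapstructure.DecoderConfig{
		// The hook converts incoming strings into the registered strfmt types.
		DecodeHook: strfmt.Default.MapStructureHookFunc(),
		Result:     &out,
	})
	if err != nil {
		log.Fatal(err)
	}
	if err := dec.Decode(raw); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%v %s\n", out.Started, out.Contact)
}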
diff --git a/vendor/github.com/go-openapi/strfmt/time.go b/vendor/github.com/go-openapi/strfmt/time.go
deleted file mode 100644
index c1e3745b9..000000000
--- a/vendor/github.com/go-openapi/strfmt/time.go
+++ /dev/null
@@ -1,188 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "database/sql/driver"
- "errors"
- "fmt"
- "regexp"
- "strings"
- "time"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/mailru/easyjson/jlexer"
- "github.com/mailru/easyjson/jwriter"
-)
-
-func init() {
- dt := DateTime{}
- Default.Add("datetime", &dt, IsDateTime)
-}
-
-// IsDateTime returns true when the string is a valid date-time
-func IsDateTime(str string) bool {
- if len(str) < 4 {
- return false
- }
- s := strings.Split(strings.ToLower(str), "t")
- if len(s) < 2 || !IsDate(s[0]) {
- return false
- }
-
- matches := rxDateTime.FindAllStringSubmatch(s[1], -1)
- if len(matches) == 0 || len(matches[0]) == 0 {
- return false
- }
- m := matches[0]
- res := m[1] <= "23" && m[2] <= "59" && m[3] <= "59"
- return res
-}
-
-const (
-	// RFC3339Millis represents an ISO8601 format to millis instead of to nanos
- RFC3339Millis = "2006-01-02T15:04:05.000Z07:00"
-	// RFC3339Micro represents an ISO8601 format to micros instead of to nanos
- RFC3339Micro = "2006-01-02T15:04:05.000000Z07:00"
- // DateTimePattern pattern to match for the date-time format from http://tools.ietf.org/html/rfc3339#section-5.6
- DateTimePattern = `^([0-9]{2}):([0-9]{2}):([0-9]{2})(.[0-9]+)?(z|([+-][0-9]{2}:[0-9]{2}))$`
-)
-
-var (
- dateTimeFormats = []string{RFC3339Micro, RFC3339Millis, time.RFC3339, time.RFC3339Nano}
- rxDateTime = regexp.MustCompile(DateTimePattern)
- MarshalFormat = RFC3339Millis
-)
-
-// ParseDateTime parses a string that represents an ISO8601 time or a unix epoch
-func ParseDateTime(data string) (DateTime, error) {
- if data == "" {
- return NewDateTime(), nil
- }
- var lastError error
- for _, layout := range dateTimeFormats {
- dd, err := time.Parse(layout, data)
- if err != nil {
- lastError = err
- continue
- }
- lastError = nil
- return DateTime(dd), nil
- }
- return DateTime{}, lastError
-}
-
-// DateTime is a time.Time that serializes to ISO8601 format with millisecond precision.
-// It knows how to read several variations of an RFC3339 date-time.
-// Most APIs we encounter want either millisecond or second precision times.
-// This just tries to make it worry-free.
-//
-// swagger:strfmt date-time
-type DateTime time.Time
-
-// NewDateTime returns the zero value for the DateTime type: the Unix epoch in UTC
-func NewDateTime() DateTime {
- return DateTime(time.Unix(0, 0).UTC())
-}
-
-func (t DateTime) String() string {
- return time.Time(t).Format(MarshalFormat)
-}
-
-// MarshalText implements the text marshaller interface
-func (t DateTime) MarshalText() ([]byte, error) {
- return []byte(t.String()), nil
-}
-
-// UnmarshalText implements the text unmarshaller interface
-func (t *DateTime) UnmarshalText(text []byte) error {
- tt, err := ParseDateTime(string(text))
- if err != nil {
- return err
- }
- *t = tt
- return nil
-}
-
-// Scan scans a DateTime value from a database driver value.
-func (t *DateTime) Scan(raw interface{}) error {
- // TODO: case int64: and case float64: ?
- switch v := raw.(type) {
- case []byte:
- return t.UnmarshalText(v)
- case string:
- return t.UnmarshalText([]byte(v))
- case time.Time:
- *t = DateTime(v)
- case nil:
- *t = DateTime{}
- default:
- return fmt.Errorf("cannot sql.Scan() strfmt.DateTime from: %#v", v)
- }
-
- return nil
-}
-
-// Value converts DateTime to a primitive value ready to be written to a database.
-func (t DateTime) Value() (driver.Value, error) {
- return driver.Value(t.String()), nil
-}
-
-func (t DateTime) MarshalJSON() ([]byte, error) {
- var w jwriter.Writer
- t.MarshalEasyJSON(&w)
- return w.BuildBytes()
-}
-
-func (t DateTime) MarshalEasyJSON(w *jwriter.Writer) {
- w.String(time.Time(t).Format(MarshalFormat))
-}
-
-func (t *DateTime) UnmarshalJSON(data []byte) error {
- l := jlexer.Lexer{Data: data}
- t.UnmarshalEasyJSON(&l)
- return l.Error()
-}
-
-func (t *DateTime) UnmarshalEasyJSON(in *jlexer.Lexer) {
- if data := in.String(); in.Ok() {
- tt, err := ParseDateTime(data)
- if err != nil {
- in.AddError(err)
- return
- }
- *t = tt
- }
-}
-
-func (t *DateTime) GetBSON() (interface{}, error) {
- return bson.M{"data": t.String()}, nil
-}
-
-func (t *DateTime) SetBSON(raw bson.Raw) error {
- var m bson.M
- if err := raw.Unmarshal(&m); err != nil {
- return err
- }
-
- if data, ok := m["data"].(string); ok {
- var err error
- *t, err = ParseDateTime(data)
- return err
- }
-
-	return errors.New("couldn't unmarshal bson raw value as DateTime")
-}
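A minimal sketch of the DateTime surface removed above: parsing accepts any of the layouts in dateTimeFormats, marshalling always emits MarshalFormat (millisecond precision), and Scan mirrors the type switch in the code. It assumes only the vendored go-openapi/strfmt package.

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"
)

func main() {
	dt, err := strfmt.ParseDateTime("2014-12-15T19:30:20Z")
	if err != nil {
		log.Fatal(err)
	}
	// String always uses MarshalFormat, so second precision comes back with millis.
	fmt.Println(dt.String()) // 2014-12-15T19:30:20.000Z

	// Scan accepts []byte, string, time.Time or nil, as in the switch above.
	var scanned strfmt.DateTime
	if err := scanned.Scan("2011-08-18T19:03:37.000+01:00"); err != nil {
		log.Fatal(err)
	}
	fmt.Println(scanned)
}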
diff --git a/vendor/github.com/go-openapi/strfmt/time_test.go b/vendor/github.com/go-openapi/strfmt/time_test.go
deleted file mode 100644
index 74a6f696b..000000000
--- a/vendor/github.com/go-openapi/strfmt/time_test.go
+++ /dev/null
@@ -1,154 +0,0 @@
-// Copyright 2015 go-swagger maintainers
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package strfmt
-
-import (
- "bytes"
- "testing"
- "time"
-
- "gopkg.in/mgo.v2/bson"
-
- "github.com/stretchr/testify/assert"
-)
-
-var (
- p, _ = time.Parse(time.RFC3339Nano, "2011-08-18T19:03:37.000000000+01:00")
-
- testCases = []struct {
- in []byte // externally sourced data -- to be unmarshalled
- time time.Time // its representation in time.Time
- str string // its marshalled representation
- }{
- {[]byte("2014-12-15T08:00:00.000Z"), time.Date(2014, 12, 15, 8, 0, 0, 0, time.UTC), "2014-12-15T08:00:00.000Z"},
- {[]byte("2011-08-18T19:03:37.000000000+01:00"), time.Date(2011, 8, 18, 19, 3, 37, 0, p.Location()), "2011-08-18T19:03:37.000+01:00"},
- {[]byte("2014-12-15T19:30:20Z"), time.Date(2014, 12, 15, 19, 30, 20, 0, time.UTC), "2014-12-15T19:30:20.000Z"},
- {[]byte("0001-01-01T00:00:00Z"), time.Time{}.UTC(), "0001-01-01T00:00:00.000Z"},
- {[]byte(""), time.Unix(0, 0).UTC(), "1970-01-01T00:00:00.000Z"},
- {[]byte(nil), time.Unix(0, 0).UTC(), "1970-01-01T00:00:00.000Z"},
- }
-)
-
-func TestNewDateTime(t *testing.T) {
- assert.EqualValues(t, time.Unix(0, 0).UTC(), NewDateTime())
-}
-
-func TestParseDateTime_errorCases(t *testing.T) {
- _, err := ParseDateTime("yada")
- assert.Error(t, err)
-}
-
-// TestParseDateTime tests the full cycle:
-// parsing -> marshalling -> unmarshalling / scanning
-func TestParseDateTime_fullCycle(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- parsed, err := ParseDateTime(string(example.in))
- assert.NoError(t, err)
- assert.EqualValues(t, example.time, parsed)
- mt, err := parsed.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte(example.str), mt)
-
- pp := NewDateTime()
- err = pp.UnmarshalText(mt)
- assert.NoError(t, err)
- assert.EqualValues(t, example.time, pp)
-
- pp = NewDateTime()
- err = pp.Scan(mt)
- assert.NoError(t, err)
- assert.Equal(t, DateTime(example.time), pp)
- }
-}
-
-func TestDateTime_UnmarshalText_errorCases(t *testing.T) {
- pp := NewDateTime()
- err := pp.UnmarshalText([]byte("yada"))
- assert.Error(t, err)
- err = pp.UnmarshalJSON([]byte("yada"))
- assert.Error(t, err)
-}
-
-func TestDateTime_UnmarshalText(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- pp := NewDateTime()
- err := pp.UnmarshalText(example.in)
- assert.NoError(t, err)
- assert.EqualValues(t, example.time, pp)
- }
-}
-func TestDateTime_UnmarshalJSON(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- pp := NewDateTime()
- err := pp.UnmarshalJSON(esc(example.in))
- assert.NoError(t, err)
- assert.EqualValues(t, example.time, pp)
- }
-}
-
-func esc(v []byte) []byte {
- var buf bytes.Buffer
- buf.WriteByte('"')
- buf.Write(v)
- buf.WriteByte('"')
- return buf.Bytes()
-}
-
-func TestDateTime_MarshalText(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- dt := DateTime(example.time)
- mt, err := dt.MarshalText()
- assert.NoError(t, err)
- assert.Equal(t, []byte(example.str), mt)
- }
-}
-func TestDateTime_MarshalJSON(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- dt := DateTime(example.time)
- bb, err := dt.MarshalJSON()
- assert.NoError(t, err)
- assert.EqualValues(t, esc([]byte(example.str)), bb)
- }
-}
-
-func TestDateTime_Scan(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- pp := NewDateTime()
- err := pp.Scan(example.in)
- assert.NoError(t, err)
- assert.Equal(t, DateTime(example.time), pp)
- }
-}
-
-func TestDateTime_BSON(t *testing.T) {
- for caseNum, example := range testCases {
- t.Logf("Case #%d", caseNum)
- dt := DateTime(example.time)
-
- bsonData, err := bson.Marshal(&dt)
- assert.NoError(t, err)
-
- var dtCopy DateTime
- err = bson.Unmarshal(bsonData, &dtCopy)
- assert.NoError(t, err)
- assert.Equal(t, dt, dtCopy)
- }
-}
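The BSON test above relies on GetBSON/SetBSON, which store the value as a sub-document of the form {"data": "<formatted time>"} rather than as a native BSON datetime. A standalone sketch of that round trip, assuming the vendored go-openapi/strfmt and gopkg.in/mgo.v2/bson packages:

package main

import (
	"fmt"
	"log"

	"github.com/go-openapi/strfmt"
	"gopkg.in/mgo.v2/bson"
)

func main() {
	dt, err := strfmt.ParseDateTime("2014-12-15T08:00:00.000Z")
	if err != nil {
		log.Fatal(err)
	}

	// Marshal goes through (*DateTime).GetBSON, producing {"data": "2014-12-15T08:00:00.000Z"}.
	data, err := bson.Marshal(&dt)
	if err != nil {
		log.Fatal(err)
	}

	// Unmarshal goes through (*DateTime).SetBSON and re-parses the stored string.
	var out strfmt.DateTime
	if err := bson.Unmarshal(data, &out); err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // 2014-12-15T08:00:00.000Z
}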