diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json index 943cc18cfc3..496c68dbfc7 100644 --- a/Godeps/Godeps.json +++ b/Godeps/Godeps.json @@ -181,8 +181,8 @@ }, { "ImportPath": "github.com/asaskevich/govalidator", - "Comment": "v4-12-g593d645", - "Rev": "593d64559f7600f29581a3ee42177f5dbded27a9" + "Comment": "v9-26-gf9ffefc", + "Rev": "f9ffefc3facfbe0caee3fea233cbb6e8208f4541" }, { "ImportPath": "github.com/aws/aws-sdk-go/aws", @@ -1342,157 +1342,157 @@ }, { "ImportPath": "github.com/docker/distribution/digestset", - "Comment": "v2.6.0-rc.1-209-gedc3ab2", + "Comment": "v2.6.0-rc.1-209-gedc3ab29", "Rev": "edc3ab29cdff8694dd6feb85cfeb4b5f1b38ed9c" }, { "ImportPath": "github.com/docker/distribution/reference", - "Comment": "v2.6.0-rc.1-209-gedc3ab2", + "Comment": "v2.6.0-rc.1-209-gedc3ab29", "Rev": "edc3ab29cdff8694dd6feb85cfeb4b5f1b38ed9c" }, { "ImportPath": "github.com/docker/docker/api", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/blkiodev", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/container", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/events", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/filters", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/image", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/mount", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/network", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/registry", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/strslice", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/swarm", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": 
"docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/swarm/runtime", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/time", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/versions", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/api/types/volume", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/client", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/daemon/logger/jsonfilelog/jsonlog", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/jsonmessage", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/mount", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/parsers", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/parsers/operatingsystem", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/stdcopy", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/sysinfo", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/term", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/docker/pkg/term/windows", - "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc", + "Comment": "docs-v1.12.0-rc4-2016-07-15-9510-ga9fbbdc8dd", "Rev": "a9fbbdc8dd8794b20af358382ab780559bca589d" }, { "ImportPath": "github.com/docker/go-connections/nat", - "Comment": "v0.3.0", + "Comment": "v0.2.1-30-g3ede32e", "Rev": "3ede32e2033de7505e6500d6c868c2b9ed9f169d" }, { "ImportPath": 
"github.com/docker/go-connections/sockets", - "Comment": "v0.3.0", + "Comment": "v0.2.1-30-g3ede32e", "Rev": "3ede32e2033de7505e6500d6c868c2b9ed9f169d" }, { "ImportPath": "github.com/docker/go-connections/tlsconfig", - "Comment": "v0.3.0", + "Comment": "v0.2.1-30-g3ede32e", "Rev": "3ede32e2033de7505e6500d6c868c2b9ed9f169d" }, { @@ -1502,7 +1502,7 @@ }, { "ImportPath": "github.com/docker/libnetwork/ipvs", - "Comment": "v0.8.0-dev.2-1265-ga9cd636", + "Comment": "v0.8.0-dev.2-1265-ga9cd636e", "Rev": "a9cd636e37898226332c439363e2ed0ea185ae92" }, { @@ -1565,6 +1565,16 @@ "Comment": "v1.0.0-4-gc7ce166", "Rev": "c7ce16629ff4cd059ed96ed06419dd3856fd3577" }, + { + "ImportPath": "github.com/globalsign/mgo/bson", + "Comment": "r2018.06.15-9-geeefdec", + "Rev": "eeefdecb41b842af6dc652aaea4026e8403e62df" + }, + { + "ImportPath": "github.com/globalsign/mgo/internal/json", + "Comment": "r2018.06.15-9-geeefdec", + "Rev": "eeefdecb41b842af6dc652aaea4026e8403e62df" + }, { "ImportPath": "github.com/go-ini/ini", "Comment": "v1.25.4", @@ -1572,43 +1582,58 @@ }, { "ImportPath": "github.com/go-openapi/analysis", - "Rev": "b44dc874b601d9e4e2f6e19140e794ba24bead3b" + "Comment": "v0.17.2", + "Rev": "c701774f4e604d952e4e8c56dee260be696e33c3" + }, + { + "ImportPath": "github.com/go-openapi/analysis/internal", + "Comment": "v0.17.2", + "Rev": "c701774f4e604d952e4e8c56dee260be696e33c3" }, { "ImportPath": "github.com/go-openapi/errors", - "Rev": "d24ebc2075bad502fac3a8ae27aa6dd58e1952dc" + "Comment": "v0.17.2", + "Rev": "d9664f9fab8994271e573ed69cf2adfc09b7a800" }, { "ImportPath": "github.com/go-openapi/jsonpointer", - "Rev": "46af16f9f7b149af66e5d1bd010e3574dc06de98" + "Comment": "v0.17.2", + "Rev": "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" }, { "ImportPath": "github.com/go-openapi/jsonreference", - "Rev": "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + "Comment": "v0.17.2", + "Rev": "8483a886a90412cd6858df4ea3483dce9c8e35a3" }, { "ImportPath": "github.com/go-openapi/loads", - "Rev": "a80dea3052f00e5f032e860dd7355cd0cc67e24d" + "Comment": "v0.17.2", + "Rev": "150d36912387ec2f607be674c5be309ddccc0eed" }, { "ImportPath": "github.com/go-openapi/runtime", - "Rev": "11e322eeecc1032d5a0a96c566ed53f2b5c26e22" + "Comment": "v0.17.2", + "Rev": "231d7876b7019dbcbfc97a7ba764379497b67c1d" }, { "ImportPath": "github.com/go-openapi/spec", - "Rev": "1de3e0542de65ad8d75452a595886fdd0befb363" + "Comment": "v0.17.1", + "Rev": "5bae59e25b21498baea7f9d46e9c147ec106a42e" }, { "ImportPath": "github.com/go-openapi/strfmt", - "Rev": "d65c7fdb29eca313476e529628176fe17e58c488" + "Comment": "v0.17.0", + "Rev": "35fe47352985e13cc75f13120d70d26fd764ed51" }, { "ImportPath": "github.com/go-openapi/swag", - "Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + "Comment": "v0.17.2", + "Rev": "5899d5c5e619fda5fa86e14795a835f473ca284c" }, { "ImportPath": "github.com/go-openapi/validate", - "Rev": "d509235108fcf6ab4913d2dcb3a2260c0db2108e" + "Comment": "v0.17.1", + "Rev": "d2eab7d93009e9215fc85b2faa2c2f2a98c2af48" }, { "ImportPath": "github.com/go-sql-driver/mysql", @@ -2276,7 +2301,6 @@ }, { "ImportPath": "github.com/inconshreveable/mousetrap", - "Comment": "v1.0", "Rev": "76626ae9c91c4f2a10f34cad8ce83ea42c93bb75" }, { @@ -2418,7 +2442,7 @@ }, { "ImportPath": "github.com/matttproud/golang_protobuf_extensions/pbutil", - "Comment": "v1.0.1", + "Comment": "v1.0.0-2-gc12348c", "Rev": "c12348ce28de40eed0136aa2b644d0ee0650e56c" }, { @@ -2755,82 +2779,82 @@ }, { "ImportPath": "github.com/opencontainers/runc/libcontainer", - "Comment": "v1.0.0-rc5-46-g871ba2e", + 
"Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/apparmor", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups/fs", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/cgroups/systemd", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/configs", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/configs/validate", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/criurpc", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/intelrdt", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/keys", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/mount", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/seccomp", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/stacktrace", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/system", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/user", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { "ImportPath": "github.com/opencontainers/runc/libcontainer/utils", - "Comment": "v1.0.0-rc5-46-g871ba2e", + "Comment": "v1.0.0-rc5-46-g871ba2e5", "Rev": "871ba2e58e24314d1fab4517a80410191ba5ad01" }, { diff --git a/Godeps/LICENSES b/Godeps/LICENSES index 6b2ed80f0b6..633d9324776 100644 --- a/Godeps/LICENSES +++ b/Godeps/LICENSES @@ -45995,6 +45995,72 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
================================================================================ +================================================================================ += vendor/github.com/globalsign/mgo/bson licensed under: = + +mgo - MongoDB driver for Go + +Copyright (c) 2010-2013 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + += vendor/github.com/globalsign/mgo/LICENSE 566e96676859b5704130b80941bc9f1f +================================================================================ + + +================================================================================ += vendor/github.com/globalsign/mgo/internal/json licensed under: = + +mgo - MongoDB driver for Go + +Copyright (c) 2010-2013 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ += vendor/github.com/globalsign/mgo/LICENSE 566e96676859b5704130b80941bc9f1f +================================================================================ + + ================================================================================ = vendor/github.com/go-ini/ini licensed under: = @@ -46404,6 +46470,216 @@ third-party archives. ================================================================================ +================================================================================ += vendor/github.com/go-openapi/analysis/internal licensed under: = + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + += vendor/github.com/go-openapi/analysis/LICENSE 3b83ef96387f14655fc854ddc3c6bd57 +================================================================================ + + ================================================================================ = vendor/github.com/go-openapi/errors licensed under: = diff --git a/staging/src/k8s.io/apiextensions-apiserver/Godeps/Godeps.json b/staging/src/k8s.io/apiextensions-apiserver/Godeps/Godeps.json index fb5f1104f0c..44724d20aa9 100644 --- a/staging/src/k8s.io/apiextensions-apiserver/Godeps/Godeps.json +++ b/staging/src/k8s.io/apiextensions-apiserver/Godeps/Godeps.json @@ -32,7 +32,7 @@ }, { "ImportPath": "github.com/asaskevich/govalidator", - "Rev": "593d64559f7600f29581a3ee42177f5dbded27a9" + "Rev": "f9ffefc3facfbe0caee3fea233cbb6e8208f4541" }, { "ImportPath": "github.com/beorn7/perks/quantile", @@ -390,45 +390,57 @@ "ImportPath": "github.com/ghodss/yaml", "Rev": "c7ce16629ff4cd059ed96ed06419dd3856fd3577" }, + { + "ImportPath": "github.com/globalsign/mgo/bson", + "Rev": "eeefdecb41b842af6dc652aaea4026e8403e62df" + }, + { + "ImportPath": "github.com/globalsign/mgo/internal/json", + "Rev": "eeefdecb41b842af6dc652aaea4026e8403e62df" + }, { "ImportPath": "github.com/go-openapi/analysis", - "Rev": "b44dc874b601d9e4e2f6e19140e794ba24bead3b" + "Rev": "c701774f4e604d952e4e8c56dee260be696e33c3" + }, + { + "ImportPath": "github.com/go-openapi/analysis/internal", + "Rev": "c701774f4e604d952e4e8c56dee260be696e33c3" }, { "ImportPath": "github.com/go-openapi/errors", - "Rev": "d24ebc2075bad502fac3a8ae27aa6dd58e1952dc" + "Rev": "d9664f9fab8994271e573ed69cf2adfc09b7a800" }, { "ImportPath": "github.com/go-openapi/jsonpointer", - "Rev": "46af16f9f7b149af66e5d1bd010e3574dc06de98" + "Rev": "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" }, { "ImportPath": "github.com/go-openapi/jsonreference", - "Rev": "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + "Rev": "8483a886a90412cd6858df4ea3483dce9c8e35a3" }, { "ImportPath": "github.com/go-openapi/loads", - "Rev": "a80dea3052f00e5f032e860dd7355cd0cc67e24d" + "Rev": "150d36912387ec2f607be674c5be309ddccc0eed" }, { "ImportPath": "github.com/go-openapi/runtime", - "Rev": "11e322eeecc1032d5a0a96c566ed53f2b5c26e22" + "Rev": "231d7876b7019dbcbfc97a7ba764379497b67c1d" }, { "ImportPath": "github.com/go-openapi/spec", - "Rev": "1de3e0542de65ad8d75452a595886fdd0befb363" + "Rev": 
"5bae59e25b21498baea7f9d46e9c147ec106a42e" }, { "ImportPath": "github.com/go-openapi/strfmt", - "Rev": "d65c7fdb29eca313476e529628176fe17e58c488" + "Rev": "35fe47352985e13cc75f13120d70d26fd764ed51" }, { "ImportPath": "github.com/go-openapi/swag", - "Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + "Rev": "5899d5c5e619fda5fa86e14795a835f473ca284c" }, { "ImportPath": "github.com/go-openapi/validate", - "Rev": "d509235108fcf6ab4913d2dcb3a2260c0db2108e" + "Rev": "d2eab7d93009e9215fc85b2faa2c2f2a98c2af48" }, { "ImportPath": "github.com/gogo/protobuf/gogoproto", @@ -574,6 +586,10 @@ "ImportPath": "github.com/matttproud/golang_protobuf_extensions/pbutil", "Rev": "c12348ce28de40eed0136aa2b644d0ee0650e56c" }, + { + "ImportPath": "github.com/mitchellh/mapstructure", + "Rev": "53818660ed4955e899c0bcafa97299a388bd7c8e" + }, { "ImportPath": "github.com/modern-go/concurrent", "Rev": "bacd9c7ef1dd9b15be4a9909b8ac7a4e313eec94" diff --git a/staging/src/k8s.io/apiserver/Godeps/Godeps.json b/staging/src/k8s.io/apiserver/Godeps/Godeps.json index 2be37f3daf9..4036defd350 100644 --- a/staging/src/k8s.io/apiserver/Godeps/Godeps.json +++ b/staging/src/k8s.io/apiserver/Godeps/Godeps.json @@ -392,19 +392,19 @@ }, { "ImportPath": "github.com/go-openapi/jsonpointer", - "Rev": "46af16f9f7b149af66e5d1bd010e3574dc06de98" + "Rev": "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" }, { "ImportPath": "github.com/go-openapi/jsonreference", - "Rev": "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + "Rev": "8483a886a90412cd6858df4ea3483dce9c8e35a3" }, { "ImportPath": "github.com/go-openapi/spec", - "Rev": "1de3e0542de65ad8d75452a595886fdd0befb363" + "Rev": "5bae59e25b21498baea7f9d46e9c147ec106a42e" }, { "ImportPath": "github.com/go-openapi/swag", - "Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + "Rev": "5899d5c5e619fda5fa86e14795a835f473ca284c" }, { "ImportPath": "github.com/gogo/protobuf/gogoproto", diff --git a/staging/src/k8s.io/code-generator/Godeps/Godeps.json b/staging/src/k8s.io/code-generator/Godeps/Godeps.json index bdbb440ae2f..8d652919fe2 100644 --- a/staging/src/k8s.io/code-generator/Godeps/Godeps.json +++ b/staging/src/k8s.io/code-generator/Godeps/Godeps.json @@ -24,19 +24,19 @@ }, { "ImportPath": "github.com/go-openapi/jsonpointer", - "Rev": "46af16f9f7b149af66e5d1bd010e3574dc06de98" + "Rev": "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" }, { "ImportPath": "github.com/go-openapi/jsonreference", - "Rev": "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + "Rev": "8483a886a90412cd6858df4ea3483dce9c8e35a3" }, { "ImportPath": "github.com/go-openapi/spec", - "Rev": "1de3e0542de65ad8d75452a595886fdd0befb363" + "Rev": "5bae59e25b21498baea7f9d46e9c147ec106a42e" }, { "ImportPath": "github.com/go-openapi/swag", - "Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + "Rev": "5899d5c5e619fda5fa86e14795a835f473ca284c" }, { "ImportPath": "github.com/gogo/protobuf/gogoproto", diff --git a/staging/src/k8s.io/kube-aggregator/Godeps/Godeps.json b/staging/src/k8s.io/kube-aggregator/Godeps/Godeps.json index b8791b9b70b..42d34776254 100644 --- a/staging/src/k8s.io/kube-aggregator/Godeps/Godeps.json +++ b/staging/src/k8s.io/kube-aggregator/Godeps/Godeps.json @@ -116,19 +116,19 @@ }, { "ImportPath": "github.com/go-openapi/jsonpointer", - "Rev": "46af16f9f7b149af66e5d1bd010e3574dc06de98" + "Rev": "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" }, { "ImportPath": "github.com/go-openapi/jsonreference", - "Rev": "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + "Rev": "8483a886a90412cd6858df4ea3483dce9c8e35a3" }, { "ImportPath": "github.com/go-openapi/spec", - 
"Rev": "1de3e0542de65ad8d75452a595886fdd0befb363" + "Rev": "5bae59e25b21498baea7f9d46e9c147ec106a42e" }, { "ImportPath": "github.com/go-openapi/swag", - "Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + "Rev": "5899d5c5e619fda5fa86e14795a835f473ca284c" }, { "ImportPath": "github.com/gogo/protobuf/gogoproto", diff --git a/staging/src/k8s.io/sample-apiserver/Godeps/Godeps.json b/staging/src/k8s.io/sample-apiserver/Godeps/Godeps.json index 37e2e629e19..529e4e9678e 100644 --- a/staging/src/k8s.io/sample-apiserver/Godeps/Godeps.json +++ b/staging/src/k8s.io/sample-apiserver/Godeps/Godeps.json @@ -108,19 +108,19 @@ }, { "ImportPath": "github.com/go-openapi/jsonpointer", - "Rev": "46af16f9f7b149af66e5d1bd010e3574dc06de98" + "Rev": "ef5f0afec364d3b9396b7b77b43dbe26bf1f8004" }, { "ImportPath": "github.com/go-openapi/jsonreference", - "Rev": "13c6e3589ad90f49bd3e3bbe2c2cb3d7a4142272" + "Rev": "8483a886a90412cd6858df4ea3483dce9c8e35a3" }, { "ImportPath": "github.com/go-openapi/spec", - "Rev": "1de3e0542de65ad8d75452a595886fdd0befb363" + "Rev": "5bae59e25b21498baea7f9d46e9c147ec106a42e" }, { "ImportPath": "github.com/go-openapi/swag", - "Rev": "f3f9494671f93fcff853e3c6e9e948b3eb71e590" + "Rev": "5899d5c5e619fda5fa86e14795a835f473ca284c" }, { "ImportPath": "github.com/gogo/protobuf/gogoproto", diff --git a/vendor/BUILD b/vendor/BUILD index 509bca90f68..03ec272c0fb 100644 --- a/vendor/BUILD +++ b/vendor/BUILD @@ -193,6 +193,8 @@ filegroup( "//vendor/github.com/fatih/camelcase:all-srcs", "//vendor/github.com/fsnotify/fsnotify:all-srcs", "//vendor/github.com/ghodss/yaml:all-srcs", + "//vendor/github.com/globalsign/mgo/bson:all-srcs", + "//vendor/github.com/globalsign/mgo/internal/json:all-srcs", "//vendor/github.com/go-ini/ini:all-srcs", "//vendor/github.com/go-openapi/analysis:all-srcs", "//vendor/github.com/go-openapi/errors:all-srcs", diff --git a/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md new file mode 100644 index 00000000000..f0f7e3a8add --- /dev/null +++ b/vendor/github.com/asaskevich/govalidator/CONTRIBUTING.md @@ -0,0 +1,63 @@ +#### Support +If you do have a contribution to the package, feel free to create a Pull Request or an Issue. 
+ +#### What to contribute +If you don't know what to do, there are some features and functions that need to be done + +- [ ] Refactor code +- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check +- [ ] Create actual list of contributors and projects that currently using this package +- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues) +- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions) +- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new +- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc +- [ ] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224) +- [ ] Implement fuzzing testing +- [ ] Implement some struct/map/array utilities +- [ ] Implement map/array validation +- [ ] Implement benchmarking +- [ ] Implement batch of examples +- [ ] Look at forks for new features and fixes + +#### Advice +Feel free to create what you want, but keep in mind when you implement new features: +- Code must be clear and readable, names of variables/constants clearly describes what they are doing +- Public functions must be documented and described in source file and added to README.md to the list of available functions +- There are must be unit-tests for any new functions and improvements + +## Financial contributions + +We also welcome financial contributions in full transparency on our [open collective](https://opencollective.com/govalidator). +Anyone can file an expense. If the expense makes sense for the development of the community, it will be "merged" in the ledger of our open collective by the core contributors and the person who filed the expense will be reimbursed. + + +## Credits + + +### Contributors + +Thank you to all the people who have already contributed to govalidator! + + + +### Backers + +Thank you to all our backers! [[Become a backer](https://opencollective.com/govalidator#backer)] + + + + +### Sponsors + +Thank you to all our sponsors! 
(please ask your company to also support this open source project by [becoming a sponsor](https://opencollective.com/govalidator#sponsor)) + + + + + + + + + + + \ No newline at end of file diff --git a/vendor/github.com/asaskevich/govalidator/README.md b/vendor/github.com/asaskevich/govalidator/README.md index 57d85fd410c..0e8793f719a 100644 --- a/vendor/github.com/asaskevich/govalidator/README.md +++ b/vendor/github.com/asaskevich/govalidator/README.md @@ -1,7 +1,7 @@ govalidator =========== [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/asaskevich/govalidator?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![GoDoc](https://godoc.org/github.com/asaskevich/govalidator?status.png)](https://godoc.org/github.com/asaskevich/govalidator) [![Coverage Status](https://img.shields.io/coveralls/asaskevich/govalidator.svg)](https://coveralls.io/r/asaskevich/govalidator?branch=master) [![wercker status](https://app.wercker.com/status/1ec990b09ea86c910d5f08b0e02c6043/s "wercker status")](https://app.wercker.com/project/bykey/1ec990b09ea86c910d5f08b0e02c6043) -[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator) +[![Build Status](https://travis-ci.org/asaskevich/govalidator.svg?branch=master)](https://travis-ci.org/asaskevich/govalidator) [![Go Report Card](https://goreportcard.com/badge/github.com/asaskevich/govalidator)](https://goreportcard.com/report/github.com/asaskevich/govalidator) [![GoSearch](http://go-search.org/badge?id=github.com%2Fasaskevich%2Fgovalidator)](http://go-search.org/view?id=github.com%2Fasaskevich%2Fgovalidator) [![Backers on Open Collective](https://opencollective.com/govalidator/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/govalidator/sponsors/badge.svg)](#sponsors) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_shield) A package of validators and sanitizers for strings, structs and collections. Based on [validator.js](https://github.com/chriso/validator.js). @@ -33,6 +33,8 @@ import ( #### Activate behavior to require all fields have a validation tag by default `SetFieldsRequiredByDefault` causes validation to fail when struct fields do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). A good place to activate this is a package init function or the main() function. +`SetNilPtrAllowedByRequired` causes validation to pass when struct fields marked by `required` are set to nil. This is disabled by default for consistency, but some packages that need to be able to determine between `nil` and `zero value` state can use this. If disabled, both `nil` and `zero` values cause validation errors. 
+ ```go import "github.com/asaskevich/govalidator" @@ -96,28 +98,27 @@ govalidator.CustomTypeTagMap.Set("customByteArrayValidator", CustomTypeValidator func Abs(value float64) float64 func BlackList(str, chars string) string func ByteLength(str string, params ...string) bool -func StringLength(str string, params ...string) bool -func StringMatches(s string, params ...string) bool func CamelCaseToUnderscore(str string) string func Contains(str, substring string) bool func Count(array []interface{}, iterator ConditionIterator) int func Each(array []interface{}, iterator Iterator) func ErrorByField(e error, field string) string +func ErrorsByField(e error) map[string]string func Filter(array []interface{}, iterator ConditionIterator) []interface{} func Find(array []interface{}, iterator ConditionIterator) interface{} func GetLine(s string, index int) (string, error) func GetLines(s string) []string -func IsHost(s string) bool func InRange(value, left, right float64) bool func IsASCII(str string) bool func IsAlpha(str string) bool func IsAlphanumeric(str string) bool func IsBase64(str string) bool func IsByteLength(str string, min, max int) bool +func IsCIDR(str string) bool func IsCreditCard(str string) bool +func IsDNSName(str string) bool func IsDataURI(str string) bool func IsDialString(str string) bool -func IsDNSName(str string) bool func IsDivisibleBy(str, num string) bool func IsEmail(str string) bool func IsFilePath(str string) (bool, int) @@ -126,6 +127,7 @@ func IsFullWidth(str string) bool func IsHalfWidth(str string) bool func IsHexadecimal(str string) bool func IsHexcolor(str string) bool +func IsHost(str string) bool func IsIP(str string) bool func IsIPv4(str string) bool func IsIPv6(str string) bool @@ -134,6 +136,10 @@ func IsISBN10(str string) bool func IsISBN13(str string) bool func IsISO3166Alpha2(str string) bool func IsISO3166Alpha3(str string) bool +func IsISO693Alpha2(str string) bool +func IsISO693Alpha3b(str string) bool +func IsISO4217(str string) bool +func IsIn(str string, params ...string) bool func IsInt(str string) bool func IsJSON(str string) bool func IsLatitude(str string) bool @@ -151,11 +157,14 @@ func IsNumeric(str string) bool func IsPort(str string) bool func IsPositive(value float64) bool func IsPrintableASCII(str string) bool +func IsRFC3339(str string) bool +func IsRFC3339WithoutZone(str string) bool func IsRGBcolor(str string) bool func IsRequestURI(rawurl string) bool func IsRequestURL(rawurl string) bool func IsSSN(str string) bool func IsSemver(str string) bool +func IsTime(str string, format string) bool func IsURL(str string) bool func IsUTFDigit(str string) bool func IsUTFLetter(str string) bool @@ -172,12 +181,20 @@ func LeftTrim(str, chars string) string func Map(array []interface{}, iterator ResultIterator) []interface{} func Matches(str, pattern string) bool func NormalizeEmail(str string) (string, error) +func PadBoth(str string, padStr string, padLen int) string +func PadLeft(str string, padStr string, padLen int) string +func PadRight(str string, padStr string, padLen int) string +func Range(str string, params ...string) bool func RemoveTags(s string) string func ReplacePattern(str, pattern, replace string) string func Reverse(s string) string func RightTrim(str, chars string) string +func RuneLength(str string, params ...string) bool func SafeFileName(str string) string +func SetFieldsRequiredByDefault(value bool) func Sign(value float64) float64 +func StringLength(str string, params ...string) bool +func StringMatches(s string, params 
...string) bool func StripLow(str string, keepNewLines bool) string func ToBoolean(str string) (bool, error) func ToFloat(str string) (float64, error) @@ -190,10 +207,12 @@ func UnderscoreToCamelCase(s string) string func ValidateStruct(s interface{}) (bool, error) func WhiteList(str, chars string) string type ConditionIterator +type CustomTypeValidator type Error func (e Error) Error() string type Errors func (es Errors) Error() string +func (es Errors) Errors() []error type ISO3166Entry type Iterator type ParamValidator @@ -253,59 +272,66 @@ For completely custom validators (interface-based), see below. Here is a list of available validators for struct fields (validator - used function): ```go -"alpha": IsAlpha, -"alphanum": IsAlphanumeric, -"ascii": IsASCII, -"base64": IsBase64, -"creditcard": IsCreditCard, -"datauri": IsDataURI, -"dialstring": IsDialString, -"dns": IsDNSName, -"email": IsEmail, -"float": IsFloat, -"fullwidth": IsFullWidth, -"halfwidth": IsHalfWidth, -"hexadecimal": IsHexadecimal, -"hexcolor": IsHexcolor, -"host": IsHost, -"int": IsInt, -"ip": IsIP, -"ipv4": IsIPv4, -"ipv6": IsIPv6, -"isbn10": IsISBN10, -"isbn13": IsISBN13, -"json": IsJSON, -"latitude": IsLatitude, -"longitude": IsLongitude, -"lowercase": IsLowerCase, -"mac": IsMAC, -"multibyte": IsMultibyte, -"null": IsNull, -"numeric": IsNumeric, -"port": IsPort, -"printableascii": IsPrintableASCII, -"requri": IsRequestURI, -"requrl": IsRequestURL, -"rgbcolor": IsRGBcolor, -"ssn": IsSSN, -"semver": IsSemver, -"uppercase": IsUpperCase, -"url": IsURL, -"utfdigit": IsUTFDigit, -"utfletter": IsUTFLetter, -"utfletternum": IsUTFLetterNumeric, -"utfnumeric": IsUTFNumeric, -"uuid": IsUUID, -"uuidv3": IsUUIDv3, -"uuidv4": IsUUIDv4, -"uuidv5": IsUUIDv5, -"variablewidth": IsVariableWidth, +"email": IsEmail, +"url": IsURL, +"dialstring": IsDialString, +"requrl": IsRequestURL, +"requri": IsRequestURI, +"alpha": IsAlpha, +"utfletter": IsUTFLetter, +"alphanum": IsAlphanumeric, +"utfletternum": IsUTFLetterNumeric, +"numeric": IsNumeric, +"utfnumeric": IsUTFNumeric, +"utfdigit": IsUTFDigit, +"hexadecimal": IsHexadecimal, +"hexcolor": IsHexcolor, +"rgbcolor": IsRGBcolor, +"lowercase": IsLowerCase, +"uppercase": IsUpperCase, +"int": IsInt, +"float": IsFloat, +"null": IsNull, +"uuid": IsUUID, +"uuidv3": IsUUIDv3, +"uuidv4": IsUUIDv4, +"uuidv5": IsUUIDv5, +"creditcard": IsCreditCard, +"isbn10": IsISBN10, +"isbn13": IsISBN13, +"json": IsJSON, +"multibyte": IsMultibyte, +"ascii": IsASCII, +"printableascii": IsPrintableASCII, +"fullwidth": IsFullWidth, +"halfwidth": IsHalfWidth, +"variablewidth": IsVariableWidth, +"base64": IsBase64, +"datauri": IsDataURI, +"ip": IsIP, +"port": IsPort, +"ipv4": IsIPv4, +"ipv6": IsIPv6, +"dns": IsDNSName, +"host": IsHost, +"mac": IsMAC, +"latitude": IsLatitude, +"longitude": IsLongitude, +"ssn": IsSSN, +"semver": IsSemver, +"rfc3339": IsRFC3339, +"rfc3339WithoutZone": IsRFC3339WithoutZone, +"ISO3166Alpha2": IsISO3166Alpha2, +"ISO3166Alpha3": IsISO3166Alpha3, ``` Validators with parameters ```go +"range(min|max)": Range, "length(min|max)": ByteLength, +"runelength(min|max)": RuneLength, "matches(pattern)": StringMatches, +"in(string1|string2|...|stringN)": IsIn, ``` And here is small example of usage: @@ -382,12 +408,50 @@ govalidator.CustomTypeTagMap.Set("customMinLengthValidator", CustomTypeValidator })) ``` +###### Custom error messages +Custom error messages are supported via annotations by adding the `~` separator - here's an example of how to use it: +```go +type Ticket struct { + Id int64 `json:"id"` + 
FirstName string `json:"firstname" valid:"required~First name is blank"` +} +``` + #### Notes Documentation is available here: [godoc.org](https://godoc.org/github.com/asaskevich/govalidator). Full information about code coverage is also available here: [govalidator on gocover.io](http://gocover.io/github.com/asaskevich/govalidator). #### Support -If you do have a contribution for the package feel free to put up a Pull Request or open Issue. +If you do have a contribution to the package, feel free to create a Pull Request or an Issue. + +#### What to contribute +If you don't know what to do, there are some features and functions that need to be done + +- [ ] Refactor code +- [ ] Edit docs and [README](https://github.com/asaskevich/govalidator/README.md): spellcheck, grammar and typo check +- [ ] Create actual list of contributors and projects that currently using this package +- [ ] Resolve [issues and bugs](https://github.com/asaskevich/govalidator/issues) +- [ ] Update actual [list of functions](https://github.com/asaskevich/govalidator#list-of-functions) +- [ ] Update [list of validators](https://github.com/asaskevich/govalidator#validatestruct-2) that available for `ValidateStruct` and add new +- [ ] Implement new validators: `IsFQDN`, `IsIMEI`, `IsPostalCode`, `IsISIN`, `IsISRC` etc +- [ ] Implement [validation by maps](https://github.com/asaskevich/govalidator/issues/224) +- [ ] Implement fuzzing testing +- [ ] Implement some struct/map/array utilities +- [ ] Implement map/array validation +- [ ] Implement benchmarking +- [ ] Implement batch of examples +- [ ] Look at forks for new features and fixes + +#### Advice +Feel free to create what you want, but keep in mind when you implement new features: +- Code must be clear and readable, names of variables/constants clearly describes what they are doing +- Public functions must be documented and described in source file and added to README.md to the list of available functions +- There are must be unit-tests for any new functions and improvements + +## Credits +### Contributors + +This project exists thanks to all the people who contribute. [[Contribute](CONTRIBUTING.md)]. #### Special thanks to [contributors](https://github.com/asaskevich/govalidator/graphs/contributors) * [Daniel Lohse](https://github.com/annismckenzie) @@ -399,3 +463,34 @@ If you do have a contribution for the package feel free to put up a Pull Request * [Nathan Davies](https://github.com/nathj07) * [Matt Sanford](https://github.com/mzsanford) * [Simon ccl1115](https://github.com/ccl1115) + + + + +### Backers + +Thank you to all our backers! 🙏 [[Become a backer](https://opencollective.com/govalidator#backer)] + + + + +### Sponsors + +Support this project by becoming a sponsor. Your logo will show up here with a link to your website. 
[[Become a sponsor](https://opencollective.com/govalidator#sponsor)] + + + + + + + + + + + + + + + +## License +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2Fasaskevich%2Fgovalidator?ref=badge_large) \ No newline at end of file diff --git a/vendor/github.com/asaskevich/govalidator/converter.go b/vendor/github.com/asaskevich/govalidator/converter.go index 737a1f1799b..cf1e5d569ba 100644 --- a/vendor/github.com/asaskevich/govalidator/converter.go +++ b/vendor/github.com/asaskevich/govalidator/converter.go @@ -3,6 +3,7 @@ package govalidator import ( "encoding/json" "fmt" + "reflect" "strconv" ) @@ -30,20 +31,34 @@ func ToFloat(str string) (float64, error) { return res, err } -// ToInt convert the input string to an integer, or 0 if the input is not an integer. -func ToInt(str string) (int64, error) { - res, err := strconv.ParseInt(str, 0, 64) - if err != nil { +// ToInt convert the input string or any int type to an integer type 64, or 0 if the input is not an integer. +func ToInt(value interface{}) (res int64, err error) { + val := reflect.ValueOf(value) + + switch value.(type) { + case int, int8, int16, int32, int64: + res = val.Int() + case uint, uint8, uint16, uint32, uint64: + res = int64(val.Uint()) + case string: + if IsInt(val.String()) { + res, err = strconv.ParseInt(val.String(), 0, 64) + if err != nil { + res = 0 + } + } else { + err = fmt.Errorf("math: square root of negative number %g", value) + res = 0 + } + default: + err = fmt.Errorf("math: square root of negative number %g", value) res = 0 } - return res, err + + return } // ToBoolean convert the input string to a boolean. func ToBoolean(str string) (bool, error) { - res, err := strconv.ParseBool(str) - if err != nil { - res = false - } - return res, err + return strconv.ParseBool(str) } diff --git a/vendor/github.com/asaskevich/govalidator/error.go b/vendor/github.com/asaskevich/govalidator/error.go index 280b1c455d1..655b750cb8f 100644 --- a/vendor/github.com/asaskevich/govalidator/error.go +++ b/vendor/github.com/asaskevich/govalidator/error.go @@ -1,5 +1,7 @@ package govalidator +import "strings" + // Errors is an array of multiple errors and conforms to the error interface. type Errors []error @@ -9,11 +11,11 @@ func (es Errors) Errors() []error { } func (es Errors) Error() string { - var err string + var errs []string for _, e := range es { - err += e.Error() + ";" + errs = append(errs, e.Error()) } - return err + return strings.Join(errs, ";") } // Error encapsulates a name, an error and whether there's a custom error message or not. 
@@ -21,11 +23,21 @@ type Error struct { Name string Err error CustomErrorMessageExists bool + + // Validator indicates the name of the validator that failed + Validator string + Path []string } func (e Error) Error() string { if e.CustomErrorMessageExists { return e.Err.Error() } - return e.Name + ": " + e.Err.Error() + + errName := e.Name + if len(e.Path) > 0 { + errName = strings.Join(append(e.Path, e.Name), ".") + } + + return errName + ": " + e.Err.Error() } diff --git a/vendor/github.com/asaskevich/govalidator/numerics.go b/vendor/github.com/asaskevich/govalidator/numerics.go index 737cd47ca69..7e6c652e140 100644 --- a/vendor/github.com/asaskevich/govalidator/numerics.go +++ b/vendor/github.com/asaskevich/govalidator/numerics.go @@ -1,10 +1,13 @@ package govalidator -import "math" +import ( + "math" + "reflect" +) // Abs returns absolute value of number func Abs(value float64) float64 { - return value * Sign(value) + return math.Abs(value) } // Sign returns signum of number: 1 in case of value > 0, -1 in case of value < 0, 0 otherwise @@ -39,16 +42,53 @@ func IsNonPositive(value float64) bool { } // InRange returns true if value lies between left and right border -func InRange(value, left, right float64) bool { +func InRangeInt(value, left, right interface{}) bool { + value64, _ := ToInt(value) + left64, _ := ToInt(left) + right64, _ := ToInt(right) + if left64 > right64 { + left64, right64 = right64, left64 + } + return value64 >= left64 && value64 <= right64 +} + +// InRange returns true if value lies between left and right border +func InRangeFloat32(value, left, right float32) bool { if left > right { left, right = right, left } return value >= left && value <= right } +// InRange returns true if value lies between left and right border +func InRangeFloat64(value, left, right float64) bool { + if left > right { + left, right = right, left + } + return value >= left && value <= right +} + +// InRange returns true if value lies between left and right border, generic type to handle int, float32 or float64, all types must the same type +func InRange(value interface{}, left interface{}, right interface{}) bool { + + reflectValue := reflect.TypeOf(value).Kind() + reflectLeft := reflect.TypeOf(left).Kind() + reflectRight := reflect.TypeOf(right).Kind() + + if reflectValue == reflect.Int && reflectLeft == reflect.Int && reflectRight == reflect.Int { + return InRangeInt(value.(int), left.(int), right.(int)) + } else if reflectValue == reflect.Float32 && reflectLeft == reflect.Float32 && reflectRight == reflect.Float32 { + return InRangeFloat32(value.(float32), left.(float32), right.(float32)) + } else if reflectValue == reflect.Float64 && reflectLeft == reflect.Float64 && reflectRight == reflect.Float64 { + return InRangeFloat64(value.(float64), left.(float64), right.(float64)) + } else { + return false + } +} + // IsWhole returns true if value is whole number func IsWhole(value float64) bool { - return Abs(math.Remainder(value, 1)) == 0 + return math.Remainder(value, 1) == 0 } // IsNatural returns true if value is natural number (positive and whole) diff --git a/vendor/github.com/asaskevich/govalidator/patterns.go b/vendor/github.com/asaskevich/govalidator/patterns.go index 8761224cd34..61a05d438e1 100644 --- a/vendor/github.com/asaskevich/govalidator/patterns.go +++ b/vendor/github.com/asaskevich/govalidator/patterns.go @@ -4,38 +4,49 @@ import "regexp" // Basic regular expressions for validating strings const ( - Email string = 
"^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" - CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$" - ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$" - ISBN13 string = "^(?:[0-9]{13})$" - UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$" - UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" - UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" - UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" - Alpha string = "^[a-zA-Z]+$" - Alphanumeric string = "^[a-zA-Z0-9]+$" - Numeric string = "^[-+]?[0-9]+$" - Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$" - Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$" - Hexadecimal string = "^[0-9a-fA-F]+$" - Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$" - RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$" - ASCII string = "^[\x00-\x7F]+$" - Multibyte string = "[^\x00-\x7F]" - FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]" - HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]" - Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$" - PrintableASCII string = "^[\x20-\x7E]+$" - DataURI string = "^data:.+\\/(.+);base64$" - Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$" - Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$" - DNSName string = `^([a-zA-Z0-9]{1}[a-zA-Z0-9_-]{1,62}){1}(\.[a-zA-Z0-9]{1}[a-zA-Z0-9_-]{1,62})*$` - URL string = `^((ftp|https?):\/\/)?(\S+(:\S*)?@)?((([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))|(([a-zA-Z0-9]([a-zA-Z0-9-]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|((www\.)?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))(:(\d{1,5}))?((\/|\?|#)[^\s]*)?$` - SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$` - WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$` - UnixPath string = 
`^((?:\/[a-zA-Z0-9\.\:]+(?:_[a-zA-Z0-9\:\.]+)*(?:\-[\:a-zA-Z0-9\.]+)*)+\/?)$` - Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$" - tagName string = "valid" + Email string = "^(((([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+(\\.([a-zA-Z]|\\d|[!#\\$%&'\\*\\+\\-\\/=\\?\\^_`{\\|}~]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])+)*)|((\\x22)((((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(([\\x01-\\x08\\x0b\\x0c\\x0e-\\x1f\\x7f]|\\x21|[\\x23-\\x5b]|[\\x5d-\\x7e]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(\\([\\x01-\\x09\\x0b\\x0c\\x0d-\\x7f]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}]))))*(((\\x20|\\x09)*(\\x0d\\x0a))?(\\x20|\\x09)+)?(\\x22)))@((([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|\\.|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|\\d|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.)+(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])|(([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])([a-zA-Z]|\\d|-|_|~|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])*([a-zA-Z]|[\\x{00A0}-\\x{D7FF}\\x{F900}-\\x{FDCF}\\x{FDF0}-\\x{FFEF}])))\\.?$" + CreditCard string = "^(?:4[0-9]{12}(?:[0-9]{3})?|5[1-5][0-9]{14}|6(?:011|5[0-9][0-9])[0-9]{12}|3[47][0-9]{13}|3(?:0[0-5]|[68][0-9])[0-9]{11}|(?:2131|1800|35\\d{3})\\d{11})$" + ISBN10 string = "^(?:[0-9]{9}X|[0-9]{10})$" + ISBN13 string = "^(?:[0-9]{13})$" + UUID3 string = "^[0-9a-f]{8}-[0-9a-f]{4}-3[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$" + UUID4 string = "^[0-9a-f]{8}-[0-9a-f]{4}-4[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" + UUID5 string = "^[0-9a-f]{8}-[0-9a-f]{4}-5[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$" + UUID string = "^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$" + Alpha string = "^[a-zA-Z]+$" + Alphanumeric string = "^[a-zA-Z0-9]+$" + Numeric string = "^[0-9]+$" + Int string = "^(?:[-+]?(?:0|[1-9][0-9]*))$" + Float string = "^(?:[-+]?(?:[0-9]+))?(?:\\.[0-9]*)?(?:[eE][\\+\\-]?(?:[0-9]+))?$" + Hexadecimal string = "^[0-9a-fA-F]+$" + Hexcolor string = "^#?([0-9a-fA-F]{3}|[0-9a-fA-F]{6})$" + RGBcolor string = "^rgb\\(\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*,\\s*(0|[1-9]\\d?|1\\d\\d?|2[0-4]\\d|25[0-5])\\s*\\)$" + ASCII string = "^[\x00-\x7F]+$" + Multibyte string = "[^\x00-\x7F]" + FullWidth string = "[^\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]" + HalfWidth string = "[\u0020-\u007E\uFF61-\uFF9F\uFFA0-\uFFDC\uFFE8-\uFFEE0-9a-zA-Z]" + Base64 string = "^(?:[A-Za-z0-9+\\/]{4})*(?:[A-Za-z0-9+\\/]{2}==|[A-Za-z0-9+\\/]{3}=|[A-Za-z0-9+\\/]{4})$" + PrintableASCII string = "^[\x20-\x7E]+$" + DataURI string = "^data:.+\\/(.+);base64$" + Latitude string = "^[-+]?([1-8]?\\d(\\.\\d+)?|90(\\.0+)?)$" + Longitude string = "^[-+]?(180(\\.0+)?|((1[0-7]\\d)|([1-9]?\\d))(\\.\\d+)?)$" + DNSName string = `^([a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62}){1}(\.[a-zA-Z0-9_]{1}[a-zA-Z0-9_-]{0,62})*[\._]?$` + IP string = 
`(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))` + URLSchema string = `((ftp|tcp|udp|wss?|https?):\/\/)` + URLUsername string = `(\S+(:\S*)?@)` + URLPath string = `((\/|\?|#)[^\s]*)` + URLPort string = `(:(\d{1,5}))` + URLIP string = `([1-9]\d?|1\d\d|2[01]\d|22[0-3])(\.(1?\d{1,2}|2[0-4]\d|25[0-5])){2}(?:\.([0-9]\d?|1\d\d|2[0-4]\d|25[0-4]))` + URLSubdomain string = `((www\.)|([a-zA-Z0-9]+([-_\.]?[a-zA-Z0-9])*[a-zA-Z0-9]\.[a-zA-Z0-9]+))` + URL string = `^` + URLSchema + `?` + URLUsername + `?` + `((` + URLIP + `|(\[` + IP + `\])|(([a-zA-Z0-9]([a-zA-Z0-9-_]+)?[a-zA-Z0-9]([-\.][a-zA-Z0-9]+)*)|(` + URLSubdomain + `?))?(([a-zA-Z\x{00a1}-\x{ffff}0-9]+-?-?)*[a-zA-Z\x{00a1}-\x{ffff}0-9]+)(?:\.([a-zA-Z\x{00a1}-\x{ffff}]{1,}))?))\.?` + URLPort + `?` + URLPath + `?$` + SSN string = `^\d{3}[- ]?\d{2}[- ]?\d{4}$` + WinPath string = `^[a-zA-Z]:\\(?:[^\\/:*?"<>|\r\n]+\\)*[^\\/:*?"<>|\r\n]*$` + UnixPath string = `^(/[^/\x00]*)+/?$` + Semver string = "^v?(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)\\.(?:0|[1-9]\\d*)(-(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*)(\\.(0|[1-9]\\d*|\\d*[a-zA-Z-][0-9a-zA-Z-]*))*)?(\\+[0-9a-zA-Z-]+(\\.[0-9a-zA-Z-]+)*)?$" + tagName string = "valid" + hasLowerCase string = ".*[[:lower:]]" + hasUpperCase string = ".*[[:upper:]]" + hasWhitespace string = ".*[[:space:]]" + hasWhitespaceOnly string = "^[[:space:]]+$" ) // Used by IsFilePath func @@ -49,35 +60,42 @@ const ( ) var ( - rxEmail = regexp.MustCompile(Email) - rxCreditCard = regexp.MustCompile(CreditCard) - rxISBN10 = regexp.MustCompile(ISBN10) - rxISBN13 = regexp.MustCompile(ISBN13) - rxUUID3 = regexp.MustCompile(UUID3) - rxUUID4 = regexp.MustCompile(UUID4) - rxUUID5 = regexp.MustCompile(UUID5) - rxUUID = regexp.MustCompile(UUID) - rxAlpha = regexp.MustCompile(Alpha) - rxAlphanumeric = regexp.MustCompile(Alphanumeric) - rxNumeric = regexp.MustCompile(Numeric) - rxInt = regexp.MustCompile(Int) - rxFloat = regexp.MustCompile(Float) - rxHexadecimal = regexp.MustCompile(Hexadecimal) - rxHexcolor = regexp.MustCompile(Hexcolor) - rxRGBcolor = regexp.MustCompile(RGBcolor) - rxASCII = regexp.MustCompile(ASCII) - rxPrintableASCII = regexp.MustCompile(PrintableASCII) - rxMultibyte = regexp.MustCompile(Multibyte) - rxFullWidth = regexp.MustCompile(FullWidth) - rxHalfWidth = regexp.MustCompile(HalfWidth) - rxBase64 = regexp.MustCompile(Base64) - rxDataURI = regexp.MustCompile(DataURI) - rxLatitude = regexp.MustCompile(Latitude) - rxLongitude = regexp.MustCompile(Longitude) - rxDNSName = regexp.MustCompile(DNSName) - rxURL = regexp.MustCompile(URL) - rxSSN = regexp.MustCompile(SSN) - rxWinPath = regexp.MustCompile(WinPath) - rxUnixPath = regexp.MustCompile(UnixPath) - rxSemver = regexp.MustCompile(Semver) + userRegexp = regexp.MustCompile("^[a-zA-Z0-9!#$%&'*+/=?^_`{|}~.-]+$") + hostRegexp = regexp.MustCompile("^[^\\s]+\\.[^\\s]+$") + userDotRegexp = regexp.MustCompile("(^[.]{1})|([.]{1}$)|([.]{2,})") + rxEmail = regexp.MustCompile(Email) + rxCreditCard 
= regexp.MustCompile(CreditCard) + rxISBN10 = regexp.MustCompile(ISBN10) + rxISBN13 = regexp.MustCompile(ISBN13) + rxUUID3 = regexp.MustCompile(UUID3) + rxUUID4 = regexp.MustCompile(UUID4) + rxUUID5 = regexp.MustCompile(UUID5) + rxUUID = regexp.MustCompile(UUID) + rxAlpha = regexp.MustCompile(Alpha) + rxAlphanumeric = regexp.MustCompile(Alphanumeric) + rxNumeric = regexp.MustCompile(Numeric) + rxInt = regexp.MustCompile(Int) + rxFloat = regexp.MustCompile(Float) + rxHexadecimal = regexp.MustCompile(Hexadecimal) + rxHexcolor = regexp.MustCompile(Hexcolor) + rxRGBcolor = regexp.MustCompile(RGBcolor) + rxASCII = regexp.MustCompile(ASCII) + rxPrintableASCII = regexp.MustCompile(PrintableASCII) + rxMultibyte = regexp.MustCompile(Multibyte) + rxFullWidth = regexp.MustCompile(FullWidth) + rxHalfWidth = regexp.MustCompile(HalfWidth) + rxBase64 = regexp.MustCompile(Base64) + rxDataURI = regexp.MustCompile(DataURI) + rxLatitude = regexp.MustCompile(Latitude) + rxLongitude = regexp.MustCompile(Longitude) + rxDNSName = regexp.MustCompile(DNSName) + rxURL = regexp.MustCompile(URL) + rxSSN = regexp.MustCompile(SSN) + rxWinPath = regexp.MustCompile(WinPath) + rxUnixPath = regexp.MustCompile(UnixPath) + rxSemver = regexp.MustCompile(Semver) + rxHasLowerCase = regexp.MustCompile(hasLowerCase) + rxHasUpperCase = regexp.MustCompile(hasUpperCase) + rxHasWhitespace = regexp.MustCompile(hasWhitespace) + rxHasWhitespaceOnly = regexp.MustCompile(hasWhitespaceOnly) ) diff --git a/vendor/github.com/asaskevich/govalidator/types.go b/vendor/github.com/asaskevich/govalidator/types.go index b7156127623..4f7e9274ade 100644 --- a/vendor/github.com/asaskevich/govalidator/types.go +++ b/vendor/github.com/asaskevich/govalidator/types.go @@ -3,6 +3,7 @@ package govalidator import ( "reflect" "regexp" + "sort" "sync" ) @@ -15,7 +16,26 @@ type CustomTypeValidator func(i interface{}, o interface{}) bool // ParamValidator is a wrapper for validator functions that accepts additional parameters. type ParamValidator func(str string, params ...string) bool -type tagOptionsMap map[string]string +type tagOptionsMap map[string]tagOption + +func (t tagOptionsMap) orderedKeys() []string { + var keys []string + for k := range t { + keys = append(keys, k) + } + + sort.Slice(keys, func(a, b int) bool { + return t[keys[a]].order < t[keys[b]].order + }) + + return keys +} + +type tagOption struct { + name string + customErrorMessage string + order int +} // UnsupportedTypeError is a wrapper for reflect.Type type UnsupportedTypeError struct { @@ -29,15 +49,23 @@ type stringValues []reflect.Value // ParamTagMap is a map of functions accept variants parameters var ParamTagMap = map[string]ParamValidator{ "length": ByteLength, + "range": Range, + "runelength": RuneLength, "stringlength": StringLength, "matches": StringMatches, + "in": isInRaw, + "rsapub": IsRsaPub, } // ParamTagRegexMap maps param tags to their respective regexes. 
var ParamTagRegexMap = map[string]*regexp.Regexp{ + "range": regexp.MustCompile("^range\\((\\d+)\\|(\\d+)\\)$"), "length": regexp.MustCompile("^length\\((\\d+)\\|(\\d+)\\)$"), + "runelength": regexp.MustCompile("^runelength\\((\\d+)\\|(\\d+)\\)$"), "stringlength": regexp.MustCompile("^stringlength\\((\\d+)\\|(\\d+)\\)$"), - "matches": regexp.MustCompile(`matches\(([^)]+)\)`), + "in": regexp.MustCompile(`^in\((.*)\)`), + "matches": regexp.MustCompile(`^matches\((.+)\)$`), + "rsapub": regexp.MustCompile("^rsapub\\((\\d+)\\)$"), } type customTypeTagMap struct { @@ -66,53 +94,58 @@ var CustomTypeTagMap = &customTypeTagMap{validators: make(map[string]CustomTypeV // TagMap is a map of functions, that can be used as tags for ValidateStruct function. var TagMap = map[string]Validator{ - "email": IsEmail, - "url": IsURL, - "dialstring": IsDialString, - "requrl": IsRequestURL, - "requri": IsRequestURI, - "alpha": IsAlpha, - "utfletter": IsUTFLetter, - "alphanum": IsAlphanumeric, - "utfletternum": IsUTFLetterNumeric, - "numeric": IsNumeric, - "utfnumeric": IsUTFNumeric, - "utfdigit": IsUTFDigit, - "hexadecimal": IsHexadecimal, - "hexcolor": IsHexcolor, - "rgbcolor": IsRGBcolor, - "lowercase": IsLowerCase, - "uppercase": IsUpperCase, - "int": IsInt, - "float": IsFloat, - "null": IsNull, - "uuid": IsUUID, - "uuidv3": IsUUIDv3, - "uuidv4": IsUUIDv4, - "uuidv5": IsUUIDv5, - "creditcard": IsCreditCard, - "isbn10": IsISBN10, - "isbn13": IsISBN13, - "json": IsJSON, - "multibyte": IsMultibyte, - "ascii": IsASCII, - "printableascii": IsPrintableASCII, - "fullwidth": IsFullWidth, - "halfwidth": IsHalfWidth, - "variablewidth": IsVariableWidth, - "base64": IsBase64, - "datauri": IsDataURI, - "ip": IsIP, - "port": IsPort, - "ipv4": IsIPv4, - "ipv6": IsIPv6, - "dns": IsDNSName, - "host": IsHost, - "mac": IsMAC, - "latitude": IsLatitude, - "longitude": IsLongitude, - "ssn": IsSSN, - "semver": IsSemver, + "email": IsEmail, + "url": IsURL, + "dialstring": IsDialString, + "requrl": IsRequestURL, + "requri": IsRequestURI, + "alpha": IsAlpha, + "utfletter": IsUTFLetter, + "alphanum": IsAlphanumeric, + "utfletternum": IsUTFLetterNumeric, + "numeric": IsNumeric, + "utfnumeric": IsUTFNumeric, + "utfdigit": IsUTFDigit, + "hexadecimal": IsHexadecimal, + "hexcolor": IsHexcolor, + "rgbcolor": IsRGBcolor, + "lowercase": IsLowerCase, + "uppercase": IsUpperCase, + "int": IsInt, + "float": IsFloat, + "null": IsNull, + "uuid": IsUUID, + "uuidv3": IsUUIDv3, + "uuidv4": IsUUIDv4, + "uuidv5": IsUUIDv5, + "creditcard": IsCreditCard, + "isbn10": IsISBN10, + "isbn13": IsISBN13, + "json": IsJSON, + "multibyte": IsMultibyte, + "ascii": IsASCII, + "printableascii": IsPrintableASCII, + "fullwidth": IsFullWidth, + "halfwidth": IsHalfWidth, + "variablewidth": IsVariableWidth, + "base64": IsBase64, + "datauri": IsDataURI, + "ip": IsIP, + "port": IsPort, + "ipv4": IsIPv4, + "ipv6": IsIPv6, + "dns": IsDNSName, + "host": IsHost, + "mac": IsMAC, + "latitude": IsLatitude, + "longitude": IsLongitude, + "ssn": IsSSN, + "semver": IsSemver, + "rfc3339": IsRFC3339, + "rfc3339WithoutZone": IsRFC3339WithoutZone, + "ISO3166Alpha2": IsISO3166Alpha2, + "ISO3166Alpha3": IsISO3166Alpha3, + "ISO4217": IsISO4217, } // ISO3166Entry stores country codes @@ -376,3 +409,228 @@ var ISO3166List = []ISO3166Entry{ {"Yemen", "Yémen (le)", "YE", "YEM", "887"}, {"Zambia", "Zambie (la)", "ZM", "ZMB", "894"}, } + +// ISO4217List is the list of ISO currency codes +var ISO4217List = []string{ + "AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN", + "BAM", 
"BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BOV", "BRL", "BSD", "BTN", "BWP", "BYN", "BZD", + "CAD", "CDF", "CHE", "CHF", "CHW", "CLF", "CLP", "CNY", "COP", "COU", "CRC", "CUC", "CUP", "CVE", "CZK", + "DJF", "DKK", "DOP", "DZD", + "EGP", "ERN", "ETB", "EUR", + "FJD", "FKP", + "GBP", "GEL", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", + "HKD", "HNL", "HRK", "HTG", "HUF", + "IDR", "ILS", "INR", "IQD", "IRR", "ISK", + "JMD", "JOD", "JPY", + "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT", + "LAK", "LBP", "LKR", "LRD", "LSL", "LYD", + "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MXV", "MYR", "MZN", + "NAD", "NGN", "NIO", "NOK", "NPR", "NZD", + "OMR", + "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG", + "QAR", + "RON", "RSD", "RUB", "RWF", + "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SRD", "SSP", "STD", "SVC", "SYP", "SZL", + "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TWD", "TZS", + "UAH", "UGX", "USD", "USN", "UYI", "UYU", "UZS", + "VEF", "VND", "VUV", + "WST", + "XAF", "XAG", "XAU", "XBA", "XBB", "XBC", "XBD", "XCD", "XDR", "XOF", "XPD", "XPF", "XPT", "XSU", "XTS", "XUA", "XXX", + "YER", + "ZAR", "ZMW", "ZWL", +} + +// ISO693Entry stores ISO language codes +type ISO693Entry struct { + Alpha3bCode string + Alpha2Code string + English string +} + +//ISO693List based on http://data.okfn.org/data/core/language-codes/r/language-codes-3b2.json +var ISO693List = []ISO693Entry{ + {Alpha3bCode: "aar", Alpha2Code: "aa", English: "Afar"}, + {Alpha3bCode: "abk", Alpha2Code: "ab", English: "Abkhazian"}, + {Alpha3bCode: "afr", Alpha2Code: "af", English: "Afrikaans"}, + {Alpha3bCode: "aka", Alpha2Code: "ak", English: "Akan"}, + {Alpha3bCode: "alb", Alpha2Code: "sq", English: "Albanian"}, + {Alpha3bCode: "amh", Alpha2Code: "am", English: "Amharic"}, + {Alpha3bCode: "ara", Alpha2Code: "ar", English: "Arabic"}, + {Alpha3bCode: "arg", Alpha2Code: "an", English: "Aragonese"}, + {Alpha3bCode: "arm", Alpha2Code: "hy", English: "Armenian"}, + {Alpha3bCode: "asm", Alpha2Code: "as", English: "Assamese"}, + {Alpha3bCode: "ava", Alpha2Code: "av", English: "Avaric"}, + {Alpha3bCode: "ave", Alpha2Code: "ae", English: "Avestan"}, + {Alpha3bCode: "aym", Alpha2Code: "ay", English: "Aymara"}, + {Alpha3bCode: "aze", Alpha2Code: "az", English: "Azerbaijani"}, + {Alpha3bCode: "bak", Alpha2Code: "ba", English: "Bashkir"}, + {Alpha3bCode: "bam", Alpha2Code: "bm", English: "Bambara"}, + {Alpha3bCode: "baq", Alpha2Code: "eu", English: "Basque"}, + {Alpha3bCode: "bel", Alpha2Code: "be", English: "Belarusian"}, + {Alpha3bCode: "ben", Alpha2Code: "bn", English: "Bengali"}, + {Alpha3bCode: "bih", Alpha2Code: "bh", English: "Bihari languages"}, + {Alpha3bCode: "bis", Alpha2Code: "bi", English: "Bislama"}, + {Alpha3bCode: "bos", Alpha2Code: "bs", English: "Bosnian"}, + {Alpha3bCode: "bre", Alpha2Code: "br", English: "Breton"}, + {Alpha3bCode: "bul", Alpha2Code: "bg", English: "Bulgarian"}, + {Alpha3bCode: "bur", Alpha2Code: "my", English: "Burmese"}, + {Alpha3bCode: "cat", Alpha2Code: "ca", English: "Catalan; Valencian"}, + {Alpha3bCode: "cha", Alpha2Code: "ch", English: "Chamorro"}, + {Alpha3bCode: "che", Alpha2Code: "ce", English: "Chechen"}, + {Alpha3bCode: "chi", Alpha2Code: "zh", English: "Chinese"}, + {Alpha3bCode: "chu", Alpha2Code: "cu", English: "Church Slavic; Old Slavonic; Church Slavonic; Old Bulgarian; Old Church Slavonic"}, + {Alpha3bCode: "chv", Alpha2Code: "cv", English: "Chuvash"}, + {Alpha3bCode: "cor", Alpha2Code: "kw", English: 
"Cornish"}, + {Alpha3bCode: "cos", Alpha2Code: "co", English: "Corsican"}, + {Alpha3bCode: "cre", Alpha2Code: "cr", English: "Cree"}, + {Alpha3bCode: "cze", Alpha2Code: "cs", English: "Czech"}, + {Alpha3bCode: "dan", Alpha2Code: "da", English: "Danish"}, + {Alpha3bCode: "div", Alpha2Code: "dv", English: "Divehi; Dhivehi; Maldivian"}, + {Alpha3bCode: "dut", Alpha2Code: "nl", English: "Dutch; Flemish"}, + {Alpha3bCode: "dzo", Alpha2Code: "dz", English: "Dzongkha"}, + {Alpha3bCode: "eng", Alpha2Code: "en", English: "English"}, + {Alpha3bCode: "epo", Alpha2Code: "eo", English: "Esperanto"}, + {Alpha3bCode: "est", Alpha2Code: "et", English: "Estonian"}, + {Alpha3bCode: "ewe", Alpha2Code: "ee", English: "Ewe"}, + {Alpha3bCode: "fao", Alpha2Code: "fo", English: "Faroese"}, + {Alpha3bCode: "fij", Alpha2Code: "fj", English: "Fijian"}, + {Alpha3bCode: "fin", Alpha2Code: "fi", English: "Finnish"}, + {Alpha3bCode: "fre", Alpha2Code: "fr", English: "French"}, + {Alpha3bCode: "fry", Alpha2Code: "fy", English: "Western Frisian"}, + {Alpha3bCode: "ful", Alpha2Code: "ff", English: "Fulah"}, + {Alpha3bCode: "geo", Alpha2Code: "ka", English: "Georgian"}, + {Alpha3bCode: "ger", Alpha2Code: "de", English: "German"}, + {Alpha3bCode: "gla", Alpha2Code: "gd", English: "Gaelic; Scottish Gaelic"}, + {Alpha3bCode: "gle", Alpha2Code: "ga", English: "Irish"}, + {Alpha3bCode: "glg", Alpha2Code: "gl", English: "Galician"}, + {Alpha3bCode: "glv", Alpha2Code: "gv", English: "Manx"}, + {Alpha3bCode: "gre", Alpha2Code: "el", English: "Greek, Modern (1453-)"}, + {Alpha3bCode: "grn", Alpha2Code: "gn", English: "Guarani"}, + {Alpha3bCode: "guj", Alpha2Code: "gu", English: "Gujarati"}, + {Alpha3bCode: "hat", Alpha2Code: "ht", English: "Haitian; Haitian Creole"}, + {Alpha3bCode: "hau", Alpha2Code: "ha", English: "Hausa"}, + {Alpha3bCode: "heb", Alpha2Code: "he", English: "Hebrew"}, + {Alpha3bCode: "her", Alpha2Code: "hz", English: "Herero"}, + {Alpha3bCode: "hin", Alpha2Code: "hi", English: "Hindi"}, + {Alpha3bCode: "hmo", Alpha2Code: "ho", English: "Hiri Motu"}, + {Alpha3bCode: "hrv", Alpha2Code: "hr", English: "Croatian"}, + {Alpha3bCode: "hun", Alpha2Code: "hu", English: "Hungarian"}, + {Alpha3bCode: "ibo", Alpha2Code: "ig", English: "Igbo"}, + {Alpha3bCode: "ice", Alpha2Code: "is", English: "Icelandic"}, + {Alpha3bCode: "ido", Alpha2Code: "io", English: "Ido"}, + {Alpha3bCode: "iii", Alpha2Code: "ii", English: "Sichuan Yi; Nuosu"}, + {Alpha3bCode: "iku", Alpha2Code: "iu", English: "Inuktitut"}, + {Alpha3bCode: "ile", Alpha2Code: "ie", English: "Interlingue; Occidental"}, + {Alpha3bCode: "ina", Alpha2Code: "ia", English: "Interlingua (International Auxiliary Language Association)"}, + {Alpha3bCode: "ind", Alpha2Code: "id", English: "Indonesian"}, + {Alpha3bCode: "ipk", Alpha2Code: "ik", English: "Inupiaq"}, + {Alpha3bCode: "ita", Alpha2Code: "it", English: "Italian"}, + {Alpha3bCode: "jav", Alpha2Code: "jv", English: "Javanese"}, + {Alpha3bCode: "jpn", Alpha2Code: "ja", English: "Japanese"}, + {Alpha3bCode: "kal", Alpha2Code: "kl", English: "Kalaallisut; Greenlandic"}, + {Alpha3bCode: "kan", Alpha2Code: "kn", English: "Kannada"}, + {Alpha3bCode: "kas", Alpha2Code: "ks", English: "Kashmiri"}, + {Alpha3bCode: "kau", Alpha2Code: "kr", English: "Kanuri"}, + {Alpha3bCode: "kaz", Alpha2Code: "kk", English: "Kazakh"}, + {Alpha3bCode: "khm", Alpha2Code: "km", English: "Central Khmer"}, + {Alpha3bCode: "kik", Alpha2Code: "ki", English: "Kikuyu; Gikuyu"}, + {Alpha3bCode: "kin", Alpha2Code: "rw", English: "Kinyarwanda"}, + {Alpha3bCode: 
"kir", Alpha2Code: "ky", English: "Kirghiz; Kyrgyz"}, + {Alpha3bCode: "kom", Alpha2Code: "kv", English: "Komi"}, + {Alpha3bCode: "kon", Alpha2Code: "kg", English: "Kongo"}, + {Alpha3bCode: "kor", Alpha2Code: "ko", English: "Korean"}, + {Alpha3bCode: "kua", Alpha2Code: "kj", English: "Kuanyama; Kwanyama"}, + {Alpha3bCode: "kur", Alpha2Code: "ku", English: "Kurdish"}, + {Alpha3bCode: "lao", Alpha2Code: "lo", English: "Lao"}, + {Alpha3bCode: "lat", Alpha2Code: "la", English: "Latin"}, + {Alpha3bCode: "lav", Alpha2Code: "lv", English: "Latvian"}, + {Alpha3bCode: "lim", Alpha2Code: "li", English: "Limburgan; Limburger; Limburgish"}, + {Alpha3bCode: "lin", Alpha2Code: "ln", English: "Lingala"}, + {Alpha3bCode: "lit", Alpha2Code: "lt", English: "Lithuanian"}, + {Alpha3bCode: "ltz", Alpha2Code: "lb", English: "Luxembourgish; Letzeburgesch"}, + {Alpha3bCode: "lub", Alpha2Code: "lu", English: "Luba-Katanga"}, + {Alpha3bCode: "lug", Alpha2Code: "lg", English: "Ganda"}, + {Alpha3bCode: "mac", Alpha2Code: "mk", English: "Macedonian"}, + {Alpha3bCode: "mah", Alpha2Code: "mh", English: "Marshallese"}, + {Alpha3bCode: "mal", Alpha2Code: "ml", English: "Malayalam"}, + {Alpha3bCode: "mao", Alpha2Code: "mi", English: "Maori"}, + {Alpha3bCode: "mar", Alpha2Code: "mr", English: "Marathi"}, + {Alpha3bCode: "may", Alpha2Code: "ms", English: "Malay"}, + {Alpha3bCode: "mlg", Alpha2Code: "mg", English: "Malagasy"}, + {Alpha3bCode: "mlt", Alpha2Code: "mt", English: "Maltese"}, + {Alpha3bCode: "mon", Alpha2Code: "mn", English: "Mongolian"}, + {Alpha3bCode: "nau", Alpha2Code: "na", English: "Nauru"}, + {Alpha3bCode: "nav", Alpha2Code: "nv", English: "Navajo; Navaho"}, + {Alpha3bCode: "nbl", Alpha2Code: "nr", English: "Ndebele, South; South Ndebele"}, + {Alpha3bCode: "nde", Alpha2Code: "nd", English: "Ndebele, North; North Ndebele"}, + {Alpha3bCode: "ndo", Alpha2Code: "ng", English: "Ndonga"}, + {Alpha3bCode: "nep", Alpha2Code: "ne", English: "Nepali"}, + {Alpha3bCode: "nno", Alpha2Code: "nn", English: "Norwegian Nynorsk; Nynorsk, Norwegian"}, + {Alpha3bCode: "nob", Alpha2Code: "nb", English: "Bokmål, Norwegian; Norwegian Bokmål"}, + {Alpha3bCode: "nor", Alpha2Code: "no", English: "Norwegian"}, + {Alpha3bCode: "nya", Alpha2Code: "ny", English: "Chichewa; Chewa; Nyanja"}, + {Alpha3bCode: "oci", Alpha2Code: "oc", English: "Occitan (post 1500); Provençal"}, + {Alpha3bCode: "oji", Alpha2Code: "oj", English: "Ojibwa"}, + {Alpha3bCode: "ori", Alpha2Code: "or", English: "Oriya"}, + {Alpha3bCode: "orm", Alpha2Code: "om", English: "Oromo"}, + {Alpha3bCode: "oss", Alpha2Code: "os", English: "Ossetian; Ossetic"}, + {Alpha3bCode: "pan", Alpha2Code: "pa", English: "Panjabi; Punjabi"}, + {Alpha3bCode: "per", Alpha2Code: "fa", English: "Persian"}, + {Alpha3bCode: "pli", Alpha2Code: "pi", English: "Pali"}, + {Alpha3bCode: "pol", Alpha2Code: "pl", English: "Polish"}, + {Alpha3bCode: "por", Alpha2Code: "pt", English: "Portuguese"}, + {Alpha3bCode: "pus", Alpha2Code: "ps", English: "Pushto; Pashto"}, + {Alpha3bCode: "que", Alpha2Code: "qu", English: "Quechua"}, + {Alpha3bCode: "roh", Alpha2Code: "rm", English: "Romansh"}, + {Alpha3bCode: "rum", Alpha2Code: "ro", English: "Romanian; Moldavian; Moldovan"}, + {Alpha3bCode: "run", Alpha2Code: "rn", English: "Rundi"}, + {Alpha3bCode: "rus", Alpha2Code: "ru", English: "Russian"}, + {Alpha3bCode: "sag", Alpha2Code: "sg", English: "Sango"}, + {Alpha3bCode: "san", Alpha2Code: "sa", English: "Sanskrit"}, + {Alpha3bCode: "sin", Alpha2Code: "si", English: "Sinhala; Sinhalese"}, + {Alpha3bCode: 
"slo", Alpha2Code: "sk", English: "Slovak"}, + {Alpha3bCode: "slv", Alpha2Code: "sl", English: "Slovenian"}, + {Alpha3bCode: "sme", Alpha2Code: "se", English: "Northern Sami"}, + {Alpha3bCode: "smo", Alpha2Code: "sm", English: "Samoan"}, + {Alpha3bCode: "sna", Alpha2Code: "sn", English: "Shona"}, + {Alpha3bCode: "snd", Alpha2Code: "sd", English: "Sindhi"}, + {Alpha3bCode: "som", Alpha2Code: "so", English: "Somali"}, + {Alpha3bCode: "sot", Alpha2Code: "st", English: "Sotho, Southern"}, + {Alpha3bCode: "spa", Alpha2Code: "es", English: "Spanish; Castilian"}, + {Alpha3bCode: "srd", Alpha2Code: "sc", English: "Sardinian"}, + {Alpha3bCode: "srp", Alpha2Code: "sr", English: "Serbian"}, + {Alpha3bCode: "ssw", Alpha2Code: "ss", English: "Swati"}, + {Alpha3bCode: "sun", Alpha2Code: "su", English: "Sundanese"}, + {Alpha3bCode: "swa", Alpha2Code: "sw", English: "Swahili"}, + {Alpha3bCode: "swe", Alpha2Code: "sv", English: "Swedish"}, + {Alpha3bCode: "tah", Alpha2Code: "ty", English: "Tahitian"}, + {Alpha3bCode: "tam", Alpha2Code: "ta", English: "Tamil"}, + {Alpha3bCode: "tat", Alpha2Code: "tt", English: "Tatar"}, + {Alpha3bCode: "tel", Alpha2Code: "te", English: "Telugu"}, + {Alpha3bCode: "tgk", Alpha2Code: "tg", English: "Tajik"}, + {Alpha3bCode: "tgl", Alpha2Code: "tl", English: "Tagalog"}, + {Alpha3bCode: "tha", Alpha2Code: "th", English: "Thai"}, + {Alpha3bCode: "tib", Alpha2Code: "bo", English: "Tibetan"}, + {Alpha3bCode: "tir", Alpha2Code: "ti", English: "Tigrinya"}, + {Alpha3bCode: "ton", Alpha2Code: "to", English: "Tonga (Tonga Islands)"}, + {Alpha3bCode: "tsn", Alpha2Code: "tn", English: "Tswana"}, + {Alpha3bCode: "tso", Alpha2Code: "ts", English: "Tsonga"}, + {Alpha3bCode: "tuk", Alpha2Code: "tk", English: "Turkmen"}, + {Alpha3bCode: "tur", Alpha2Code: "tr", English: "Turkish"}, + {Alpha3bCode: "twi", Alpha2Code: "tw", English: "Twi"}, + {Alpha3bCode: "uig", Alpha2Code: "ug", English: "Uighur; Uyghur"}, + {Alpha3bCode: "ukr", Alpha2Code: "uk", English: "Ukrainian"}, + {Alpha3bCode: "urd", Alpha2Code: "ur", English: "Urdu"}, + {Alpha3bCode: "uzb", Alpha2Code: "uz", English: "Uzbek"}, + {Alpha3bCode: "ven", Alpha2Code: "ve", English: "Venda"}, + {Alpha3bCode: "vie", Alpha2Code: "vi", English: "Vietnamese"}, + {Alpha3bCode: "vol", Alpha2Code: "vo", English: "Volapük"}, + {Alpha3bCode: "wel", Alpha2Code: "cy", English: "Welsh"}, + {Alpha3bCode: "wln", Alpha2Code: "wa", English: "Walloon"}, + {Alpha3bCode: "wol", Alpha2Code: "wo", English: "Wolof"}, + {Alpha3bCode: "xho", Alpha2Code: "xh", English: "Xhosa"}, + {Alpha3bCode: "yid", Alpha2Code: "yi", English: "Yiddish"}, + {Alpha3bCode: "yor", Alpha2Code: "yo", English: "Yoruba"}, + {Alpha3bCode: "zha", Alpha2Code: "za", English: "Zhuang; Chuang"}, + {Alpha3bCode: "zul", Alpha2Code: "zu", English: "Zulu"}, +} diff --git a/vendor/github.com/asaskevich/govalidator/utils.go b/vendor/github.com/asaskevich/govalidator/utils.go index 200b9131def..a0b706a743c 100644 --- a/vendor/github.com/asaskevich/govalidator/utils.go +++ b/vendor/github.com/asaskevich/govalidator/utils.go @@ -4,10 +4,12 @@ import ( "errors" "fmt" "html" + "math" "path" "regexp" "strings" "unicode" + "unicode/utf8" ) // Contains check if the string contains the substring. @@ -25,27 +27,21 @@ func Matches(str, pattern string) bool { // LeftTrim trim characters from the left-side of the input. // If second argument is empty, it's will be remove leading spaces. 
func LeftTrim(str, chars string) string { - pattern := "" if chars == "" { - pattern = "^\\s+" - } else { - pattern = "^[" + chars + "]+" + return strings.TrimLeftFunc(str, unicode.IsSpace) } - r, _ := regexp.Compile(pattern) - return string(r.ReplaceAll([]byte(str), []byte(""))) + r, _ := regexp.Compile("^[" + chars + "]+") + return r.ReplaceAllString(str, "") } // RightTrim trim characters from the right-side of the input. // If second argument is empty, it's will be remove spaces. func RightTrim(str, chars string) string { - pattern := "" if chars == "" { - pattern = "\\s+$" - } else { - pattern = "[" + chars + "]+$" + return strings.TrimRightFunc(str, unicode.IsSpace) } - r, _ := regexp.Compile(pattern) - return string(r.ReplaceAll([]byte(str), []byte(""))) + r, _ := regexp.Compile("[" + chars + "]+$") + return r.ReplaceAllString(str, "") } // Trim trim characters from both sides of the input. @@ -58,14 +54,14 @@ func Trim(str, chars string) string { func WhiteList(str, chars string) string { pattern := "[^" + chars + "]+" r, _ := regexp.Compile(pattern) - return string(r.ReplaceAll([]byte(str), []byte(""))) + return r.ReplaceAllString(str, "") } // BlackList remove characters that appear in the blacklist. func BlackList(str, chars string) string { pattern := "[" + chars + "]+" r, _ := regexp.Compile(pattern) - return string(r.ReplaceAll([]byte(str), []byte(""))) + return r.ReplaceAllString(str, "") } // StripLow remove characters with a numerical value < 32 and 127, mostly control characters. @@ -83,7 +79,7 @@ func StripLow(str string, keepNewLines bool) string { // ReplacePattern replace regular expression pattern in string func ReplacePattern(str, pattern, replace string) string { r, _ := regexp.Compile(pattern) - return string(r.ReplaceAll([]byte(str), []byte(replace))) + return r.ReplaceAllString(str, replace) } // Escape replace <, >, & and " with HTML entities. 
@@ -112,7 +108,9 @@ func CamelCaseToUnderscore(str string) string { var output []rune var segment []rune for _, r := range str { - if !unicode.IsLower(r) { + + // not treat number as separate segment + if !unicode.IsLower(r) && string(r) != "_" && !unicode.IsNumber(r) { output = addSegment(output, segment) segment = nil } @@ -211,3 +209,62 @@ func Truncate(str string, length int, ending string) string { return str } + +// PadLeft pad left side of string if size of string is less then indicated pad length +func PadLeft(str string, padStr string, padLen int) string { + return buildPadStr(str, padStr, padLen, true, false) +} + +// PadRight pad right side of string if size of string is less then indicated pad length +func PadRight(str string, padStr string, padLen int) string { + return buildPadStr(str, padStr, padLen, false, true) +} + +// PadBoth pad sides of string if size of string is less then indicated pad length +func PadBoth(str string, padStr string, padLen int) string { + return buildPadStr(str, padStr, padLen, true, true) +} + +// PadString either left, right or both sides, not the padding string can be unicode and more then one +// character +func buildPadStr(str string, padStr string, padLen int, padLeft bool, padRight bool) string { + + // When padded length is less then the current string size + if padLen < utf8.RuneCountInString(str) { + return str + } + + padLen -= utf8.RuneCountInString(str) + + targetLen := padLen + + targetLenLeft := targetLen + targetLenRight := targetLen + if padLeft && padRight { + targetLenLeft = padLen / 2 + targetLenRight = padLen - targetLenLeft + } + + strToRepeatLen := utf8.RuneCountInString(padStr) + + repeatTimes := int(math.Ceil(float64(targetLen) / float64(strToRepeatLen))) + repeatedString := strings.Repeat(padStr, repeatTimes) + + leftSide := "" + if padLeft { + leftSide = repeatedString[0:targetLenLeft] + } + + rightSide := "" + if padRight { + rightSide = repeatedString[0:targetLenRight] + } + + return leftSide + str + rightSide +} + +// TruncatingErrorf removes extra args from fmt.Errorf if not formatted in the str object +func TruncatingErrorf(str string, args ...interface{}) error { + n := strings.Count(str, "%s") + return fmt.Errorf(str, args[:n]...) +} diff --git a/vendor/github.com/asaskevich/govalidator/validator.go b/vendor/github.com/asaskevich/govalidator/validator.go index a752624fc3b..b18bbcb4c99 100644 --- a/vendor/github.com/asaskevich/govalidator/validator.go +++ b/vendor/github.com/asaskevich/govalidator/validator.go @@ -2,8 +2,14 @@ package govalidator import ( + "bytes" + "crypto/rsa" + "crypto/x509" + "encoding/base64" "encoding/json" + "encoding/pem" "fmt" + "io/ioutil" "net" "net/url" "reflect" @@ -11,12 +17,22 @@ import ( "sort" "strconv" "strings" - + "time" "unicode" "unicode/utf8" ) -var fieldsRequiredByDefault bool +var ( + fieldsRequiredByDefault bool + nilPtrAllowedByRequired = false + notNumberRegexp = regexp.MustCompile("[^0-9]+") + whiteSpacesAndMinus = regexp.MustCompile(`[\s-]+`) + paramsRegexp = regexp.MustCompile(`\(.*\)$`) +) + +const maxURLRuneCount = 2083 +const minURLRuneCount = 3 +const RF3339WithoutZone = "2006-01-02T15:04:05" // SetFieldsRequiredByDefault causes validation to fail when struct fields // do not include validations or are not explicitly marked as exempt (using `valid:"-"` or `valid:"email,optional"`). 
@@ -36,18 +52,66 @@ func SetFieldsRequiredByDefault(value bool) { fieldsRequiredByDefault = value } +// SetNilPtrAllowedByRequired causes validation to pass for nil ptrs when a field is set to required. +// The validation will still reject ptr fields in their zero value state. Example with this enabled: +// type exampleStruct struct { +// Name *string `valid:"required"` +// With `Name` set to "", this will be considered invalid input and will cause a validation error. +// With `Name` set to nil, this will be considered valid by validation. +// By default this is disabled. +func SetNilPtrAllowedByRequired(value bool) { + nilPtrAllowedByRequired = value +} + // IsEmail check if the string is an email. func IsEmail(str string) bool { // TODO uppercase letters are not supported return rxEmail.MatchString(str) } -// IsURL check if the string is an URL. -func IsURL(str string) bool { - if str == "" || len(str) >= 2083 || len(str) <= 3 || strings.HasPrefix(str, ".") { +// IsExistingEmail check if the string is an email of existing domain +func IsExistingEmail(email string) bool { + + if len(email) < 6 || len(email) > 254 { return false } - u, err := url.Parse(str) + at := strings.LastIndex(email, "@") + if at <= 0 || at > len(email)-3 { + return false + } + user := email[:at] + host := email[at+1:] + if len(user) > 64 { + return false + } + if userDotRegexp.MatchString(user) || !userRegexp.MatchString(user) || !hostRegexp.MatchString(host) { + return false + } + switch host { + case "localhost", "example.com": + return true + } + if _, err := net.LookupMX(host); err != nil { + if _, err := net.LookupIP(host); err != nil { + return false + } + } + + return true +} + +// IsURL check if the string is an URL. +func IsURL(str string) bool { + if str == "" || utf8.RuneCountInString(str) >= maxURLRuneCount || len(str) <= minURLRuneCount || strings.HasPrefix(str, ".") { + return false + } + strTemp := str + if strings.Contains(str, ":") && !strings.Contains(str, "://") { + // support no indicated urlscheme but with colon for port number + // http:// is appended so url.Parse will succeed, strTemp used so it does not impact rxURL.MatchString + strTemp = "http://" + str + } + u, err := url.Parse(strTemp) if err != nil { return false } @@ -58,11 +122,10 @@ func IsURL(str string) bool { return false } return rxURL.MatchString(str) - } // IsRequestURL check if the string rawurl, assuming -// it was recieved in an HTTP request, is a valid +// it was received in an HTTP request, is a valid // URL confirm to RFC 3986 func IsRequestURL(rawurl string) bool { url, err := url.ParseRequestURI(rawurl) @@ -76,7 +139,7 @@ func IsRequestURL(rawurl string) bool { } // IsRequestURI check if the string rawurl, assuming -// it was recieved in an HTTP request, is an +// it was received in an HTTP request, is an // absolute URI or an absolute path. func IsRequestURI(rawurl string) bool { _, err := url.ParseRequestURI(rawurl) @@ -151,7 +214,7 @@ func IsUTFNumeric(str string) bool { str = strings.TrimPrefix(str, "+") } for _, c := range str { - if unicode.IsNumber(c) == false { //numbers && minus sign are ok + if !unicode.IsNumber(c) { //numbers && minus sign are ok return false } } @@ -211,6 +274,22 @@ func IsUpperCase(str string) bool { return str == strings.ToUpper(str) } +// HasLowerCase check if the string contains at least 1 lowercase. Empty string is valid. 
+func HasLowerCase(str string) bool { + if IsNull(str) { + return true + } + return rxHasLowerCase.MatchString(str) +} + +// HasUpperCase check if the string contians as least 1 uppercase. Empty string is valid. +func HasUpperCase(str string) bool { + if IsNull(str) { + return true + } + return rxHasUpperCase.MatchString(str) +} + // IsInt check if the string is an integer. Empty string is valid. func IsInt(str string) bool { if IsNull(str) { @@ -242,6 +321,16 @@ func IsNull(str string) bool { return len(str) == 0 } +// HasWhitespaceOnly checks the string only contains whitespace +func HasWhitespaceOnly(str string) bool { + return len(str) > 0 && rxHasWhitespaceOnly.MatchString(str) +} + +// HasWhitespace checks if the string contains any whitespace +func HasWhitespace(str string) bool { + return len(str) > 0 && rxHasWhitespace.MatchString(str) +} + // IsByteLength check if the string's length (in bytes) falls in a range. func IsByteLength(str string, min, max int) bool { return len(str) >= min && len(str) <= max @@ -269,9 +358,8 @@ func IsUUID(str string) bool { // IsCreditCard check if the string is a credit card. func IsCreditCard(str string) bool { - r, _ := regexp.Compile("[^0-9]+") - sanitized := r.ReplaceAll([]byte(str), []byte("")) - if !rxCreditCard.MatchString(string(sanitized)) { + sanitized := notNumberRegexp.ReplaceAllString(str, "") + if !rxCreditCard.MatchString(sanitized) { return false } var sum int64 @@ -279,7 +367,7 @@ func IsCreditCard(str string) bool { var tmpNum int64 var shouldDouble bool for i := len(sanitized) - 1; i >= 0; i-- { - digit = string(sanitized[i:(i + 1)]) + digit = sanitized[i:(i + 1)] tmpNum, _ = ToInt(digit) if shouldDouble { tmpNum *= 2 @@ -294,10 +382,7 @@ func IsCreditCard(str string) bool { shouldDouble = !shouldDouble } - if sum%10 == 0 { - return true - } - return false + return sum%10 == 0 } // IsISBN10 check if the string is an ISBN version 10. @@ -313,12 +398,11 @@ func IsISBN13(str string) bool { // IsISBN check if the string is an ISBN (version 10 or 13). // If version value is not equal to 10 or 13, it will be check both variants. 
func IsISBN(str string, version int) bool { - r, _ := regexp.Compile("[\\s-]+") - sanitized := r.ReplaceAll([]byte(str), []byte("")) + sanitized := whiteSpacesAndMinus.ReplaceAllString(str, "") var checksum int32 var i int32 if version == 10 { - if !rxISBN10.MatchString(string(sanitized)) { + if !rxISBN10.MatchString(sanitized) { return false } for i = 0; i < 9; i++ { @@ -334,17 +418,14 @@ func IsISBN(str string, version int) bool { } return false } else if version == 13 { - if !rxISBN13.MatchString(string(sanitized)) { + if !rxISBN13.MatchString(sanitized) { return false } factor := []int32{1, 3} for i = 0; i < 12; i++ { checksum += factor[i%2] * int32(sanitized[i]-'0') } - if (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0 { - return true - } - return false + return (int32(sanitized[12]-'0'))-((10-(checksum%10))%10) == 0 } return IsISBN(str, 10) || IsISBN(str, 13) } @@ -452,13 +533,60 @@ func IsISO3166Alpha3(str string) bool { return false } +// IsISO693Alpha2 checks if a string is valid two-letter language code +func IsISO693Alpha2(str string) bool { + for _, entry := range ISO693List { + if str == entry.Alpha2Code { + return true + } + } + return false +} + +// IsISO693Alpha3b checks if a string is valid three-letter language code +func IsISO693Alpha3b(str string) bool { + for _, entry := range ISO693List { + if str == entry.Alpha3bCode { + return true + } + } + return false +} + // IsDNSName will validate the given string as a DNS name func IsDNSName(str string) bool { if str == "" || len(strings.Replace(str, ".", "", -1)) > 255 { // constraints already violated return false } - return rxDNSName.MatchString(str) + return !IsIP(str) && rxDNSName.MatchString(str) +} + +// IsHash checks if a string is a hash of type algorithm. +// Algorithm is one of ['md4', 'md5', 'sha1', 'sha256', 'sha384', 'sha512', 'ripemd128', 'ripemd160', 'tiger128', 'tiger160', 'tiger192', 'crc32', 'crc32b'] +func IsHash(str string, algorithm string) bool { + len := "0" + algo := strings.ToLower(algorithm) + + if algo == "crc32" || algo == "crc32b" { + len = "8" + } else if algo == "md5" || algo == "md4" || algo == "ripemd128" || algo == "tiger128" { + len = "32" + } else if algo == "sha1" || algo == "ripemd160" || algo == "tiger160" { + len = "40" + } else if algo == "tiger192" { + len = "48" + } else if algo == "sha256" { + len = "64" + } else if algo == "sha384" { + len = "96" + } else if algo == "sha512" { + len = "128" + } else { + return false + } + + return Matches(str, "^[a-f0-9]{"+len+"}$") } // IsDialString validates the given string for usage with the various Dial() functions @@ -496,6 +624,12 @@ func IsIPv6(str string) bool { return ip != nil && strings.Contains(str, ":") } +// IsCIDR check if the string is an valid CIDR notiation (IPV4 & IPV6) +func IsCIDR(str string) bool { + _, _, err := net.ParseCIDR(str) + return err == nil +} + // IsMAC check if a string is valid MAC address. 
// Possible MAC formats: // 01:23:45:67:89:ab @@ -529,6 +663,76 @@ func IsLongitude(str string) bool { return rxLongitude.MatchString(str) } +// IsRsaPublicKey check if a string is valid public key with provided length +func IsRsaPublicKey(str string, keylen int) bool { + bb := bytes.NewBufferString(str) + pemBytes, err := ioutil.ReadAll(bb) + if err != nil { + return false + } + block, _ := pem.Decode(pemBytes) + if block != nil && block.Type != "PUBLIC KEY" { + return false + } + var der []byte + + if block != nil { + der = block.Bytes + } else { + der, err = base64.StdEncoding.DecodeString(str) + if err != nil { + return false + } + } + + key, err := x509.ParsePKIXPublicKey(der) + if err != nil { + return false + } + pubkey, ok := key.(*rsa.PublicKey) + if !ok { + return false + } + bitlen := len(pubkey.N.Bytes()) * 8 + return bitlen == int(keylen) +} + +func toJSONName(tag string) string { + if tag == "" { + return "" + } + + // JSON name always comes first. If there's no options then split[0] is + // JSON name, if JSON name is not set, then split[0] is an empty string. + split := strings.SplitN(tag, ",", 2) + + name := split[0] + + // However it is possible that the field is skipped when + // (de-)serializing from/to JSON, in which case assume that there is no + // tag name to use + if name == "-" { + return "" + } + return name +} + +func PrependPathToErrors(err error, path string) error { + switch err2 := err.(type) { + case Error: + err2.Path = append([]string{path}, err2.Path...) + return err2 + case Errors: + errors := err2.Errors() + for i, err3 := range errors { + errors[i] = PrependPathToErrors(err3, path) + } + return err2 + } + fmt.Println(err) + return err +} + // ValidateStruct use tags for fields. // result will be equal to `false` if there are any errors. 
func ValidateStruct(s interface{}) (bool, error) { @@ -552,11 +756,46 @@ func ValidateStruct(s interface{}) (bool, error) { if typeField.PkgPath != "" { continue // Private field } - resultField, err2 := typeCheck(valueField, typeField, val) + structResult := true + if valueField.Kind() == reflect.Interface { + valueField = valueField.Elem() + } + if (valueField.Kind() == reflect.Struct || + (valueField.Kind() == reflect.Ptr && valueField.Elem().Kind() == reflect.Struct)) && + typeField.Tag.Get(tagName) != "-" { + var err error + structResult, err = ValidateStruct(valueField.Interface()) + if err != nil { + err = PrependPathToErrors(err, typeField.Name) + errs = append(errs, err) + } + } + resultField, err2 := typeCheck(valueField, typeField, val, nil) if err2 != nil { + + // Replace structure name with JSON name if there is a tag on the variable + jsonTag := toJSONName(typeField.Tag.Get("json")) + if jsonTag != "" { + switch jsonError := err2.(type) { + case Error: + jsonError.Name = jsonTag + err2 = jsonError + case Errors: + for i2, err3 := range jsonError { + switch customErr := err3.(type) { + case Error: + customErr.Name = jsonTag + jsonError[i2] = customErr + } + } + + err2 = jsonError + } + } + errs = append(errs, err2) } - result = result && resultField + result = result && resultField && structResult } if len(errs) > 0 { err = errs @@ -567,16 +806,19 @@ func ValidateStruct(s interface{}) (bool, error) { // parseTagIntoMap parses a struct tag `valid:required~Some error message,length(2|3)` into map[string]string{"required": "Some error message", "length(2|3)": ""} func parseTagIntoMap(tag string) tagOptionsMap { optionsMap := make(tagOptionsMap) - options := strings.SplitN(tag, ",", -1) - for _, option := range options { + options := strings.Split(tag, ",") + + for i, option := range options { + option = strings.TrimSpace(option) + validationOptions := strings.Split(option, "~") if !isValidTag(validationOptions[0]) { continue } if len(validationOptions) == 2 { - optionsMap[validationOptions[0]] = validationOptions[1] + optionsMap[validationOptions[0]] = tagOption{validationOptions[0], validationOptions[1], i} } else { - optionsMap[validationOptions[0]] = "" + optionsMap[validationOptions[0]] = tagOption{validationOptions[0], "", i} } } return optionsMap @@ -588,7 +830,7 @@ func isValidTag(s string) bool { } for _, c := range s { switch { - case strings.ContainsRune("!#$%&()*+-./:<=>?@[]^_{|}~ ", c): + case strings.ContainsRune("\\'\"!#$%&()*+-./:<=>?@[]^_{|}~ ", c): // Backslash and quote chars are reserved, but // otherwise any punctuation chars are allowed // in a tag name. @@ -614,6 +856,33 @@ func IsSemver(str string) bool { return rxSemver.MatchString(str) } +// IsTime check if string is valid according to given format +func IsTime(str string, format string) bool { + _, err := time.Parse(format, str) + return err == nil +} + +// IsRFC3339 check if string is valid timestamp value according to RFC3339 +func IsRFC3339(str string) bool { + return IsTime(str, time.RFC3339) +} + +// IsRFC3339WithoutZone check if string is valid timestamp value according to RFC3339 which excludes the timezone. 
+func IsRFC3339WithoutZone(str string) bool { + return IsTime(str, RF3339WithoutZone) +} + +// IsISO4217 check if string is valid ISO currency code +func IsISO4217(str string) bool { + for _, currency := range ISO4217List { + if str == currency { + return true + } + } + + return false +} + // ByteLength check string's length func ByteLength(str string, params ...string) bool { if len(params) == 2 { @@ -625,6 +894,23 @@ func ByteLength(str string, params ...string) bool { return false } +// RuneLength check string's length +// Alias for StringLength +func RuneLength(str string, params ...string) bool { + return StringLength(str, params...) +} + +// IsRsaPub check whether string is valid RSA key +// Alias for IsRsaPublicKey +func IsRsaPub(str string, params ...string) bool { + if len(params) == 1 { + len, _ := ToInt(params[0]) + return IsRsaPublicKey(str, int(len)) + } + + return false +} + // StringMatches checks if a string matches a given pattern. func StringMatches(s string, params ...string) bool { if len(params) == 1 { @@ -647,20 +933,62 @@ func StringLength(str string, params ...string) bool { return false } -func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) { - if requiredOption, isRequired := options["required"]; isRequired { - if len(requiredOption) > 0 { - return false, Error{t.Name, fmt.Errorf(requiredOption), true} +// Range check string's length +func Range(str string, params ...string) bool { + if len(params) == 2 { + value, _ := ToFloat(str) + min, _ := ToFloat(params[0]) + max, _ := ToFloat(params[1]) + return InRange(value, min, max) + } + + return false +} + +func isInRaw(str string, params ...string) bool { + if len(params) == 1 { + rawParams := params[0] + + parsedParams := strings.Split(rawParams, "|") + + return IsIn(str, parsedParams...) 
+ } + + return false +} + +// IsIn check if string str is a member of the set of strings params +func IsIn(str string, params ...string) bool { + for _, param := range params { + if str == param { + return true } - return false, Error{t.Name, fmt.Errorf("non zero value required"), false} + } + + return false +} + +func checkRequired(v reflect.Value, t reflect.StructField, options tagOptionsMap) (bool, error) { + if nilPtrAllowedByRequired { + k := v.Kind() + if (k == reflect.Ptr || k == reflect.Interface) && v.IsNil() { + return true, nil + } + } + + if requiredOption, isRequired := options["required"]; isRequired { + if len(requiredOption.customErrorMessage) > 0 { + return false, Error{t.Name, fmt.Errorf(requiredOption.customErrorMessage), true, "required", []string{}} + } + return false, Error{t.Name, fmt.Errorf("non zero value required"), false, "required", []string{}} } else if _, isOptional := options["optional"]; fieldsRequiredByDefault && !isOptional { - return false, Error{t.Name, fmt.Errorf("All fields are required to at least have one validation defined"), false} + return false, Error{t.Name, fmt.Errorf("Missing required field"), false, "required", []string{}} } // not required and empty is valid return true, nil } -func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, error) { +func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value, options tagOptionsMap) (isValid bool, resultErr error) { if !v.IsValid() { return false, nil } @@ -670,39 +998,68 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e // Check if the field should be ignored switch tag { case "": - if !fieldsRequiredByDefault { - return true, nil + if v.Kind() != reflect.Slice && v.Kind() != reflect.Map { + if !fieldsRequiredByDefault { + return true, nil + } + return false, Error{t.Name, fmt.Errorf("All fields are required to at least have one validation defined"), false, "required", []string{}} } - return false, Error{t.Name, fmt.Errorf("All fields are required to at least have one validation defined"), false} case "-": return true, nil } - options := parseTagIntoMap(tag) - var customTypeErrors Errors - var customTypeValidatorsExist bool - for validatorName, customErrorMessage := range options { - if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok { - customTypeValidatorsExist = true - if result := validatefunc(v.Interface(), o.Interface()); !result { - if len(customErrorMessage) > 0 { - customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf(customErrorMessage), CustomErrorMessageExists: true}) - continue - } - customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), validatorName), CustomErrorMessageExists: false}) - } - } - } - if customTypeValidatorsExist { - if len(customTypeErrors.Errors()) > 0 { - return false, customTypeErrors - } - return true, nil + isRootType := false + if options == nil { + isRootType = true + options = parseTagIntoMap(tag) } if isEmptyValue(v) { // an empty value is not validated, check only required - return checkRequired(v, t, options) + isValid, resultErr = checkRequired(v, t, options) + for key := range options { + delete(options, key) + } + return isValid, resultErr + } + + var customTypeErrors Errors + optionsOrder := options.orderedKeys() + for _, validatorName := range optionsOrder { + validatorStruct := options[validatorName] + if validatefunc, ok := CustomTypeTagMap.Get(validatorName); ok { + 
delete(options, validatorName) + + if result := validatefunc(v.Interface(), o.Interface()); !result { + if len(validatorStruct.customErrorMessage) > 0 { + customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: TruncatingErrorf(validatorStruct.customErrorMessage, fmt.Sprint(v), validatorName), CustomErrorMessageExists: true, Validator: stripParams(validatorName)}) + continue + } + customTypeErrors = append(customTypeErrors, Error{Name: t.Name, Err: fmt.Errorf("%s does not validate as %s", fmt.Sprint(v), validatorName), CustomErrorMessageExists: false, Validator: stripParams(validatorName)}) + } + } + } + + if len(customTypeErrors.Errors()) > 0 { + return false, customTypeErrors + } + + if isRootType { + // Ensure that we've checked the value by all specified validators before report that the value is valid + defer func() { + delete(options, "optional") + delete(options, "required") + + if isValid && resultErr == nil && len(options) != 0 { + optionsOrder := options.orderedKeys() + for _, validator := range optionsOrder { + isValid = false + resultErr = Error{t.Name, fmt.Errorf( + "The following validator is invalid or can't be applied to the field: %q", validator), false, stripParams(validator), []string{}} + return + } + } + }() } switch v.Kind() { @@ -712,75 +1069,76 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e reflect.Float32, reflect.Float64, reflect.String: // for each tag option check the map of validator functions - for validator, customErrorMessage := range options { + for _, validatorSpec := range optionsOrder { + validatorStruct := options[validatorSpec] var negate bool - customMsgExists := (len(customErrorMessage) > 0) - // Check wether the tag looks like '!something' or 'something' + validator := validatorSpec + customMsgExists := len(validatorStruct.customErrorMessage) > 0 + + // Check whether the tag looks like '!something' or 'something' if validator[0] == '!' 
{ - validator = string(validator[1:]) + validator = validator[1:] negate = true } // Check for param validators for key, value := range ParamTagRegexMap { ps := value.FindStringSubmatch(validator) - if len(ps) > 0 { - if validatefunc, ok := ParamTagMap[key]; ok { - switch v.Kind() { - case reflect.String: - field := fmt.Sprint(v) // make value into string, then validate with regex - if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) { - var err error - if !negate { - if customMsgExists { - err = fmt.Errorf(customErrorMessage) - } else { - err = fmt.Errorf("%s does not validate as %s", field, validator) - } + if len(ps) == 0 { + continue + } - } else { - if customMsgExists { - err = fmt.Errorf(customErrorMessage) - } else { - err = fmt.Errorf("%s does validate as %s", field, validator) - } - } - return false, Error{t.Name, err, customMsgExists} - } - default: - // type not yet supported, fail - return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false} + validatefunc, ok := ParamTagMap[key] + if !ok { + continue + } + + delete(options, validatorSpec) + + switch v.Kind() { + case reflect.String, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Float32, reflect.Float64: + + field := fmt.Sprint(v) // make value into string, then validate with regex + if result := validatefunc(field, ps[1:]...); (!result && !negate) || (result && negate) { + if customMsgExists { + return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}} } + if negate { + return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} } + default: + // type not yet supported, fail + return false, Error{t.Name, fmt.Errorf("Validator %s doesn't support kind %s", validator, v.Kind()), false, stripParams(validatorSpec), []string{}} } } if validatefunc, ok := TagMap[validator]; ok { + delete(options, validatorSpec) + switch v.Kind() { - case reflect.String: + case reflect.String, + reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, + reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, + reflect.Float32, reflect.Float64: field := fmt.Sprint(v) // make value into string, then validate with regex if result := validatefunc(field); !result && !negate || result && negate { - var err error - - if !negate { - if customMsgExists { - err = fmt.Errorf(customErrorMessage) - } else { - err = fmt.Errorf("%s does not validate as %s", field, validator) - } - } else { - if customMsgExists { - err = fmt.Errorf(customErrorMessage) - } else { - err = fmt.Errorf("%s does validate as %s", field, validator) - } + if customMsgExists { + return false, Error{t.Name, TruncatingErrorf(validatorStruct.customErrorMessage, field, validator), customMsgExists, stripParams(validatorSpec), []string{}} } - return false, Error{t.Name, err, customMsgExists} + if negate { + return false, Error{t.Name, fmt.Errorf("%s does validate as %s", field, validator), customMsgExists, stripParams(validatorSpec), []string{}} + } + return false, Error{t.Name, fmt.Errorf("%s does not validate as %s", field, validator), 
customMsgExists, stripParams(validatorSpec), []string{}} } default: //Not Yet Supported Types (Fail here!) err := fmt.Errorf("Validator %s doesn't support kind %s for value %v", validator, v.Kind(), v) - return false, Error{t.Name, err, false} + return false, Error{t.Name, err, false, stripParams(validatorSpec), []string{}} } } } @@ -793,46 +1151,38 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e sv = v.MapKeys() sort.Sort(sv) result := true - for _, k := range sv { - resultItem, err := ValidateStruct(v.MapIndex(k).Interface()) - if err != nil { - return false, err - } - result = result && resultItem - } - return result, nil - case reflect.Slice: - result := true - for i := 0; i < v.Len(); i++ { + for i, k := range sv { var resultItem bool var err error - if v.Index(i).Kind() != reflect.Struct { - resultItem, err = typeCheck(v.Index(i), t, o) + if v.MapIndex(k).Kind() != reflect.Struct { + resultItem, err = typeCheck(v.MapIndex(k), t, o, options) if err != nil { return false, err } } else { - resultItem, err = ValidateStruct(v.Index(i).Interface()) + resultItem, err = ValidateStruct(v.MapIndex(k).Interface()) if err != nil { + err = PrependPathToErrors(err, t.Name+"."+sv[i].Interface().(string)) return false, err } } result = result && resultItem } return result, nil - case reflect.Array: + case reflect.Slice, reflect.Array: result := true for i := 0; i < v.Len(); i++ { var resultItem bool var err error if v.Index(i).Kind() != reflect.Struct { - resultItem, err = typeCheck(v.Index(i), t, o) + resultItem, err = typeCheck(v.Index(i), t, o, options) if err != nil { return false, err } } else { resultItem, err = ValidateStruct(v.Index(i).Interface()) if err != nil { + err = PrependPathToErrors(err, t.Name+"."+strconv.Itoa(i)) return false, err } } @@ -850,7 +1200,7 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e if v.IsNil() { return true, nil } - return typeCheck(v.Elem(), t, o) + return typeCheck(v.Elem(), t, o, options) case reflect.Struct: return ValidateStruct(v.Interface()) default: @@ -858,6 +1208,10 @@ func typeCheck(v reflect.Value, t reflect.StructField, o reflect.Value) (bool, e } } +func stripParams(validatorString string) string { + return paramsRegexp.ReplaceAllString(validatorString, "") +} + func isEmptyValue(v reflect.Value) bool { switch v.Kind() { case reflect.String, reflect.Array: diff --git a/vendor/github.com/asaskevich/govalidator/wercker.yml b/vendor/github.com/asaskevich/govalidator/wercker.yml index 48404490996..cac7a5fcf06 100644 --- a/vendor/github.com/asaskevich/govalidator/wercker.yml +++ b/vendor/github.com/asaskevich/govalidator/wercker.yml @@ -1,4 +1,4 @@ -box: wercker/golang +box: golang build: steps: - setup-go-workspace diff --git a/vendor/github.com/globalsign/mgo/LICENSE b/vendor/github.com/globalsign/mgo/LICENSE new file mode 100644 index 00000000000..770c7672b45 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/LICENSE @@ -0,0 +1,25 @@ +mgo - MongoDB driver for Go + +Copyright (c) 2010-2013 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/globalsign/mgo/bson/BUILD b/vendor/github.com/globalsign/mgo/bson/BUILD new file mode 100644 index 00000000000..6e50a6d65fb --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/BUILD @@ -0,0 +1,32 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = [ + "bson.go", + "compatibility.go", + "decimal.go", + "decode.go", + "encode.go", + "json.go", + "stream.go", + ], + importmap = "k8s.io/kubernetes/vendor/github.com/globalsign/mgo/bson", + importpath = "github.com/globalsign/mgo/bson", + visibility = ["//visibility:public"], + deps = ["//vendor/github.com/globalsign/mgo/internal/json:go_default_library"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/globalsign/mgo/bson/LICENSE b/vendor/github.com/globalsign/mgo/bson/LICENSE new file mode 100644 index 00000000000..890326017b8 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/LICENSE @@ -0,0 +1,25 @@ +BSON library for Go + +Copyright (c) 2010-2012 - Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
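For orientation, a minimal sketch of how the bson package vendored in the hunks below is typically used, assuming only identifiers visible in the added sources (bson.M, bson.ObjectId, bson.NewObjectId, bson.Marshal, bson.Unmarshal); the User type, its tags, and the literal values are illustrative only and are not part of this change:

package main

import (
	"fmt"

	"github.com/globalsign/mgo/bson"
)

// User is an illustrative type; the bson tags follow the tag format
// described in the Marshal/Unmarshal doc comments added below.
type User struct {
	ID   bson.ObjectId `bson:"_id"`
	Name string        `bson:"name"`
	Age  int           `bson:"age,omitempty"` // zero value is omitted
}

func main() {
	u := User{ID: bson.NewObjectId(), Name: "alice"}

	// Marshal serializes a struct or map into BSON bytes.
	data, err := bson.Marshal(u)
	if err != nil {
		panic(err)
	}

	// Unmarshal decodes BSON into a map or a pointer to a struct.
	var doc bson.M
	if err := bson.Unmarshal(data, &doc); err != nil {
		panic(err)
	}
	fmt.Println(doc["_id"].(bson.ObjectId).Hex(), doc["name"])
}
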
diff --git a/vendor/github.com/globalsign/mgo/bson/README.md b/vendor/github.com/globalsign/mgo/bson/README.md new file mode 100644 index 00000000000..5c5819e612b --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/README.md @@ -0,0 +1,12 @@ +[![GoDoc](https://godoc.org/github.com/globalsign/mgo/bson?status.svg)](https://godoc.org/github.com/globalsign/mgo/bson) + +An Implementation of BSON for Go +-------------------------------- + +Package bson is an implementation of the [BSON specification](http://bsonspec.org) for Go. + +While the BSON package implements the BSON spec as faithfully as possible, there +is some MongoDB specific behaviour (such as map keys `$in`, `$all`, etc) in the +`bson` package. The priority is for backwards compatibility for the `mgo` +driver, though fixes for obviously buggy behaviour is welcome (and features, etc +behind feature flags). diff --git a/vendor/github.com/globalsign/mgo/bson/bson.go b/vendor/github.com/globalsign/mgo/bson/bson.go new file mode 100644 index 00000000000..eb87ef6208a --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/bson.go @@ -0,0 +1,836 @@ +// BSON library for Go +// +// Copyright (c) 2010-2012 - Gustavo Niemeyer +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this +// list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Package bson is an implementation of the BSON specification for Go: +// +// http://bsonspec.org +// +// It was created as part of the mgo MongoDB driver for Go, but is standalone +// and may be used on its own without the driver. +package bson + +import ( + "bytes" + "crypto/md5" + "crypto/rand" + "encoding/binary" + "encoding/hex" + "encoding/json" + "errors" + "fmt" + "io" + "math" + "os" + "reflect" + "runtime" + "strings" + "sync" + "sync/atomic" + "time" +) + +//go:generate go run bson_corpus_spec_test_generator.go + +// -------------------------------------------------------------------------- +// The public API. + +// Element types constants from BSON specification. 
+const ( + ElementFloat64 byte = 0x01 + ElementString byte = 0x02 + ElementDocument byte = 0x03 + ElementArray byte = 0x04 + ElementBinary byte = 0x05 + Element06 byte = 0x06 + ElementObjectId byte = 0x07 + ElementBool byte = 0x08 + ElementDatetime byte = 0x09 + ElementNil byte = 0x0A + ElementRegEx byte = 0x0B + ElementDBPointer byte = 0x0C + ElementJavaScriptWithoutScope byte = 0x0D + ElementSymbol byte = 0x0E + ElementJavaScriptWithScope byte = 0x0F + ElementInt32 byte = 0x10 + ElementTimestamp byte = 0x11 + ElementInt64 byte = 0x12 + ElementDecimal128 byte = 0x13 + ElementMinKey byte = 0xFF + ElementMaxKey byte = 0x7F + + BinaryGeneric byte = 0x00 + BinaryFunction byte = 0x01 + BinaryBinaryOld byte = 0x02 + BinaryUUIDOld byte = 0x03 + BinaryUUID byte = 0x04 + BinaryMD5 byte = 0x05 + BinaryUserDefined byte = 0x80 +) + +// Getter interface: a value implementing the bson.Getter interface will have its GetBSON +// method called when the given value has to be marshalled, and the result +// of this method will be marshaled in place of the actual object. +// +// If GetBSON returns return a non-nil error, the marshalling procedure +// will stop and error out with the provided value. +type Getter interface { + GetBSON() (interface{}, error) +} + +// Setter interface: a value implementing the bson.Setter interface will receive the BSON +// value via the SetBSON method during unmarshaling, and the object +// itself will not be changed as usual. +// +// If setting the value works, the method should return nil or alternatively +// bson.ErrSetZero to set the respective field to its zero value (nil for +// pointer types). If SetBSON returns a value of type bson.TypeError, the +// BSON value will be omitted from a map or slice being decoded and the +// unmarshalling will continue. If it returns any other non-nil error, the +// unmarshalling procedure will stop and error out with the provided value. +// +// This interface is generally useful in pointer receivers, since the method +// will want to change the receiver. A type field that implements the Setter +// interface doesn't have to be a pointer, though. +// +// Unlike the usual behavior, unmarshalling onto a value that implements a +// Setter interface will NOT reset the value to its zero state. This allows +// the value to decide by itself how to be unmarshalled. +// +// For example: +// +// type MyString string +// +// func (s *MyString) SetBSON(raw bson.Raw) error { +// return raw.Unmarshal(s) +// } +// +type Setter interface { + SetBSON(raw Raw) error +} + +// ErrSetZero may be returned from a SetBSON method to have the value set to +// its respective zero value. When used in pointer values, this will set the +// field to nil rather than to the pre-allocated value. +var ErrSetZero = errors.New("set to zero") + +// M is a convenient alias for a map[string]interface{} map, useful for +// dealing with BSON in a native way. For instance: +// +// bson.M{"a": 1, "b": true} +// +// There's no special handling for this type in addition to what's done anyway +// for an equivalent map type. Elements in the map will be dumped in an +// undefined ordered. See also the bson.D type for an ordered alternative. +type M map[string]interface{} + +// D represents a BSON document containing ordered elements. For example: +// +// bson.D{{"a", 1}, {"b", true}} +// +// In some situations, such as when creating indexes for MongoDB, the order in +// which the elements are defined is important. 
If the order is not important, +// using a map is generally more comfortable. See bson.M and bson.RawD. +type D []DocElem + +// DocElem is an element of the bson.D document representation. +type DocElem struct { + Name string + Value interface{} +} + +// Map returns a map out of the ordered element name/value pairs in d. +func (d D) Map() (m M) { + m = make(M, len(d)) + for _, item := range d { + m[item.Name] = item.Value + } + return m +} + +// The Raw type represents raw unprocessed BSON documents and elements. +// Kind is the kind of element as defined per the BSON specification, and +// Data is the raw unprocessed data for the respective element. +// Using this type it is possible to unmarshal or marshal values partially. +// +// Relevant documentation: +// +// http://bsonspec.org/#/specification +// +type Raw struct { + Kind byte + Data []byte +} + +// RawD represents a BSON document containing raw unprocessed elements. +// This low-level representation may be useful when lazily processing +// documents of uncertain content, or when manipulating the raw content +// documents in general. +type RawD []RawDocElem + +// RawDocElem elements of RawD type. +type RawDocElem struct { + Name string + Value Raw +} + +// ObjectId is a unique ID identifying a BSON value. It must be exactly 12 bytes +// long. MongoDB objects by default have such a property set in their "_id" +// property. +// +// http://www.mongodb.org/display/DOCS/Object+Ids +type ObjectId string + +// ObjectIdHex returns an ObjectId from the provided hex representation. +// Calling this function with an invalid hex representation will +// cause a runtime panic. See the IsObjectIdHex function. +func ObjectIdHex(s string) ObjectId { + d, err := hex.DecodeString(s) + if err != nil || len(d) != 12 { + panic(fmt.Sprintf("invalid input to ObjectIdHex: %q", s)) + } + return ObjectId(d) +} + +// IsObjectIdHex returns whether s is a valid hex representation of +// an ObjectId. See the ObjectIdHex function. +func IsObjectIdHex(s string) bool { + if len(s) != 24 { + return false + } + _, err := hex.DecodeString(s) + return err == nil +} + +// objectIdCounter is atomically incremented when generating a new ObjectId +// using NewObjectId() function. It's used as a counter part of an id. +var objectIdCounter = readRandomUint32() + +// readRandomUint32 returns a random objectIdCounter. +func readRandomUint32() uint32 { + var b [4]byte + _, err := io.ReadFull(rand.Reader, b[:]) + if err != nil { + panic(fmt.Errorf("cannot read random object id: %v", err)) + } + return uint32((uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24)) +} + +// machineId stores machine id generated once and used in subsequent calls +// to NewObjectId function. +var machineId = readMachineId() +var processId = os.Getpid() + +// readMachineId generates and returns a machine id. +// If this function fails to get the hostname it will cause a runtime error. +func readMachineId() []byte { + var sum [3]byte + id := sum[:] + hostname, err1 := os.Hostname() + if err1 != nil { + _, err2 := io.ReadFull(rand.Reader, id) + if err2 != nil { + panic(fmt.Errorf("cannot get hostname: %v; %v", err1, err2)) + } + return id + } + hw := md5.New() + hw.Write([]byte(hostname)) + copy(id, hw.Sum(nil)) + return id +} + +// NewObjectId returns a new unique ObjectId. 
+func NewObjectId() ObjectId { + var b [12]byte + // Timestamp, 4 bytes, big endian + binary.BigEndian.PutUint32(b[:], uint32(time.Now().Unix())) + // Machine, first 3 bytes of md5(hostname) + b[4] = machineId[0] + b[5] = machineId[1] + b[6] = machineId[2] + // Pid, 2 bytes, specs don't specify endianness, but we use big endian. + b[7] = byte(processId >> 8) + b[8] = byte(processId) + // Increment, 3 bytes, big endian + i := atomic.AddUint32(&objectIdCounter, 1) + b[9] = byte(i >> 16) + b[10] = byte(i >> 8) + b[11] = byte(i) + return ObjectId(b[:]) +} + +// NewObjectIdWithTime returns a dummy ObjectId with the timestamp part filled +// with the provided number of seconds from epoch UTC, and all other parts +// filled with zeroes. It's not safe to insert a document with an id generated +// by this method, it is useful only for queries to find documents with ids +// generated before or after the specified timestamp. +func NewObjectIdWithTime(t time.Time) ObjectId { + var b [12]byte + binary.BigEndian.PutUint32(b[:4], uint32(t.Unix())) + return ObjectId(string(b[:])) +} + +// String returns a hex string representation of the id. +// Example: ObjectIdHex("4d88e15b60f486e428412dc9"). +func (id ObjectId) String() string { + return fmt.Sprintf(`ObjectIdHex("%x")`, string(id)) +} + +// Hex returns a hex representation of the ObjectId. +func (id ObjectId) Hex() string { + return hex.EncodeToString([]byte(id)) +} + +// MarshalJSON turns a bson.ObjectId into a json.Marshaller. +func (id ObjectId) MarshalJSON() ([]byte, error) { + return []byte(fmt.Sprintf(`"%x"`, string(id))), nil +} + +var nullBytes = []byte("null") + +// UnmarshalJSON turns *bson.ObjectId into a json.Unmarshaller. +func (id *ObjectId) UnmarshalJSON(data []byte) error { + if len(data) > 0 && (data[0] == '{' || data[0] == 'O') { + var v struct { + Id json.RawMessage `json:"$oid"` + Func struct { + Id json.RawMessage + } `json:"$oidFunc"` + } + err := jdec(data, &v) + if err == nil { + if len(v.Id) > 0 { + data = []byte(v.Id) + } else { + data = []byte(v.Func.Id) + } + } + } + if len(data) == 2 && data[0] == '"' && data[1] == '"' || bytes.Equal(data, nullBytes) { + *id = "" + return nil + } + if len(data) != 26 || data[0] != '"' || data[25] != '"' { + return fmt.Errorf("invalid ObjectId in JSON: %s", string(data)) + } + var buf [12]byte + _, err := hex.Decode(buf[:], data[1:25]) + if err != nil { + return fmt.Errorf("invalid ObjectId in JSON: %s (%s)", string(data), err) + } + *id = ObjectId(string(buf[:])) + return nil +} + +// MarshalText turns bson.ObjectId into an encoding.TextMarshaler. +func (id ObjectId) MarshalText() ([]byte, error) { + return []byte(fmt.Sprintf("%x", string(id))), nil +} + +// UnmarshalText turns *bson.ObjectId into an encoding.TextUnmarshaler. +func (id *ObjectId) UnmarshalText(data []byte) error { + if len(data) == 1 && data[0] == ' ' || len(data) == 0 { + *id = "" + return nil + } + if len(data) != 24 { + return fmt.Errorf("invalid ObjectId: %s", data) + } + var buf [12]byte + _, err := hex.Decode(buf[:], data[:]) + if err != nil { + return fmt.Errorf("invalid ObjectId: %s (%s)", data, err) + } + *id = ObjectId(string(buf[:])) + return nil +} + +// Valid returns true if id is valid. A valid id must contain exactly 12 bytes. +func (id ObjectId) Valid() bool { + return len(id) == 12 +} + +// byteSlice returns byte slice of id from start to end. +// Calling this function with an invalid id will cause a runtime panic. 
+func (id ObjectId) byteSlice(start, end int) []byte { + if len(id) != 12 { + panic(fmt.Sprintf("invalid ObjectId: %q", string(id))) + } + return []byte(string(id)[start:end]) +} + +// Time returns the timestamp part of the id. +// It's a runtime error to call this method with an invalid id. +func (id ObjectId) Time() time.Time { + // First 4 bytes of ObjectId is 32-bit big-endian seconds from epoch. + secs := int64(binary.BigEndian.Uint32(id.byteSlice(0, 4))) + return time.Unix(secs, 0) +} + +// Machine returns the 3-byte machine id part of the id. +// It's a runtime error to call this method with an invalid id. +func (id ObjectId) Machine() []byte { + return id.byteSlice(4, 7) +} + +// Pid returns the process id part of the id. +// It's a runtime error to call this method with an invalid id. +func (id ObjectId) Pid() uint16 { + return binary.BigEndian.Uint16(id.byteSlice(7, 9)) +} + +// Counter returns the incrementing value part of the id. +// It's a runtime error to call this method with an invalid id. +func (id ObjectId) Counter() int32 { + b := id.byteSlice(9, 12) + // Counter is stored as big-endian 3-byte value + return int32(uint32(b[0])<<16 | uint32(b[1])<<8 | uint32(b[2])) +} + +// The Symbol type is similar to a string and is used in languages with a +// distinct symbol type. +type Symbol string + +// Now returns the current time with millisecond precision. MongoDB stores +// timestamps with the same precision, so a Time returned from this method +// will not change after a roundtrip to the database. That's the only reason +// why this function exists. Using the time.Now function also works fine +// otherwise. +func Now() time.Time { + return time.Unix(0, time.Now().UnixNano()/1e6*1e6) +} + +// MongoTimestamp is a special internal type used by MongoDB that for some +// strange reason has its own datatype defined in BSON. +type MongoTimestamp int64 + +// Time returns the time part of ts which is stored with second precision. +func (ts MongoTimestamp) Time() time.Time { + return time.Unix(int64(uint64(ts)>>32), 0) +} + +// Counter returns the counter part of ts. +func (ts MongoTimestamp) Counter() uint32 { + return uint32(ts) +} + +// NewMongoTimestamp creates a timestamp using the given +// date `t` (with second precision) and counter `c` (unique for `t`). +// +// Returns an error if time `t` is not between 1970-01-01T00:00:00Z +// and 2106-02-07T06:28:15Z (inclusive). +// +// Note that two MongoTimestamps should never have the same (time, counter) combination: +// the caller must ensure the counter `c` is increased if creating multiple MongoTimestamp +// values for the same time `t` (ignoring fractions of seconds). +func NewMongoTimestamp(t time.Time, c uint32) (MongoTimestamp, error) { + u := t.Unix() + if u < 0 || u > math.MaxUint32 { + return -1, errors.New("invalid value for time") + } + + i := int64(u<<32 | int64(c)) + + return MongoTimestamp(i), nil +} + +type orderKey int64 + +// MaxKey is a special value that compares higher than all other possible BSON +// values in a MongoDB database. +var MaxKey = orderKey(1<<63 - 1) + +// MinKey is a special value that compares lower than all other possible BSON +// values in a MongoDB database. +var MinKey = orderKey(-1 << 63) + +type undefined struct{} + +// Undefined represents the undefined BSON value. +var Undefined undefined + +// Binary is a representation for non-standard binary values. Any kind should +// work, but the following are known as of this writing: +// +// 0x00 - Generic. 
This is decoded as []byte(data), not Binary{0x00, data}. +// 0x01 - Function (!?) +// 0x02 - Obsolete generic. +// 0x03 - UUID +// 0x05 - MD5 +// 0x80 - User defined. +// +type Binary struct { + Kind byte + Data []byte +} + +// RegEx represents a regular expression. The Options field may contain +// individual characters defining the way in which the pattern should be +// applied, and must be sorted. Valid options as of this writing are 'i' for +// case insensitive matching, 'm' for multi-line matching, 'x' for verbose +// mode, 'l' to make \w, \W, and similar be locale-dependent, 's' for dot-all +// mode (a '.' matches everything), and 'u' to make \w, \W, and similar match +// unicode. The value of the Options parameter is not verified before being +// marshaled into the BSON format. +type RegEx struct { + Pattern string + Options string +} + +// JavaScript is a type that holds JavaScript code. If Scope is non-nil, it +// will be marshaled as a mapping from identifiers to values that may be +// used when evaluating the provided Code. +type JavaScript struct { + Code string + Scope interface{} +} + +// DBPointer refers to a document id in a namespace. +// +// This type is deprecated in the BSON specification and should not be used +// except for backwards compatibility with ancient applications. +type DBPointer struct { + Namespace string + Id ObjectId +} + +const initialBufferSize = 64 + +func handleErr(err *error) { + if r := recover(); r != nil { + if _, ok := r.(runtime.Error); ok { + panic(r) + } else if _, ok := r.(externalPanic); ok { + panic(r) + } else if s, ok := r.(string); ok { + *err = errors.New(s) + } else if e, ok := r.(error); ok { + *err = e + } else { + panic(r) + } + } +} + +// Marshal serializes the in value, which may be a map or a struct value. +// In the case of struct values, only exported fields will be serialized, +// and the order of serialized fields will match that of the struct itself. +// The lowercased field name is used as the key for each exported field, +// but this behavior may be changed using the respective field tag. +// The tag may also contain flags to tweak the marshalling behavior for +// the field. The tag formats accepted are: +// +// "[][,[,]]" +// +// `(...) bson:"[][,[,]]" (...)` +// +// The following flags are currently supported: +// +// omitempty Only include the field if it's not set to the zero +// value for the type or to empty slices or maps. +// +// minsize Marshal an int64 value as an int32, if that's feasible +// while preserving the numeric value. +// +// inline Inline the field, which must be a struct or a map, +// causing all of its fields or keys to be processed as if +// they were part of the outer struct. For maps, keys must +// not conflict with the bson keys of other struct fields. +// +// Some examples: +// +// type T struct { +// A bool +// B int "myb" +// C string "myc,omitempty" +// D string `bson:",omitempty" json:"jsonkey"` +// E int64 ",minsize" +// F int64 "myf,omitempty,minsize" +// } +// +func Marshal(in interface{}) (out []byte, err error) { + return MarshalBuffer(in, make([]byte, 0, initialBufferSize)) +} + +// MarshalBuffer behaves the same way as Marshal, except that instead of +// allocating a new byte slice it tries to use the received byte slice and +// only allocates more memory if necessary to fit the marshaled value. 
+func MarshalBuffer(in interface{}, buf []byte) (out []byte, err error) { + defer handleErr(&err) + e := &encoder{buf} + e.addDoc(reflect.ValueOf(in)) + return e.out, nil +} + +// Unmarshal deserializes data from in into the out value. The out value +// must be a map, a pointer to a struct, or a pointer to a bson.D value. +// In the case of struct values, only exported fields will be deserialized. +// The lowercased field name is used as the key for each exported field, +// but this behavior may be changed using the respective field tag. +// The tag may also contain flags to tweak the marshalling behavior for +// the field. The tag formats accepted are: +// +// "[][,[,]]" +// +// `(...) bson:"[][,[,]]" (...)` +// +// The following flags are currently supported during unmarshal (see the +// Marshal method for other flags): +// +// inline Inline the field, which must be a struct or a map. +// Inlined structs are handled as if its fields were part +// of the outer struct. An inlined map causes keys that do +// not match any other struct field to be inserted in the +// map rather than being discarded as usual. +// +// The target field or element types of out may not necessarily match +// the BSON values of the provided data. The following conversions are +// made automatically: +// +// - Numeric types are converted if at least the integer part of the +// value would be preserved correctly +// - Bools are converted to numeric types as 1 or 0 +// - Numeric types are converted to bools as true if not 0 or false otherwise +// - Binary and string BSON data is converted to a string, array or byte slice +// +// If the value would not fit the type and cannot be converted, it's +// silently skipped. +// +// Pointer values are initialized when necessary. +func Unmarshal(in []byte, out interface{}) (err error) { + if raw, ok := out.(*Raw); ok { + raw.Kind = 3 + raw.Data = in + return nil + } + defer handleErr(&err) + v := reflect.ValueOf(out) + switch v.Kind() { + case reflect.Ptr: + fallthrough + case reflect.Map: + d := newDecoder(in) + d.readDocTo(v) + if d.i < len(d.in) { + return errors.New("document is corrupted") + } + case reflect.Struct: + return errors.New("unmarshal can't deal with struct values. Use a pointer") + default: + return errors.New("unmarshal needs a map or a pointer to a struct") + } + return nil +} + +// Unmarshal deserializes raw into the out value. If the out value type +// is not compatible with raw, a *bson.TypeError is returned. +// +// See the Unmarshal function documentation for more details on the +// unmarshalling process. +func (raw Raw) Unmarshal(out interface{}) (err error) { + defer handleErr(&err) + v := reflect.ValueOf(out) + switch v.Kind() { + case reflect.Ptr: + v = v.Elem() + fallthrough + case reflect.Map: + d := newDecoder(raw.Data) + good := d.readElemTo(v, raw.Kind) + if !good { + return &TypeError{v.Type(), raw.Kind} + } + case reflect.Struct: + return errors.New("raw Unmarshal can't deal with struct values. 
Use a pointer") + default: + return errors.New("raw Unmarshal needs a map or a valid pointer") + } + return nil +} + +// TypeError store details for type error occuring +// during unmarshaling +type TypeError struct { + Type reflect.Type + Kind byte +} + +func (e *TypeError) Error() string { + return fmt.Sprintf("BSON kind 0x%02x isn't compatible with type %s", e.Kind, e.Type.String()) +} + +// -------------------------------------------------------------------------- +// Maintain a mapping of keys to structure field indexes + +type structInfo struct { + FieldsMap map[string]fieldInfo + FieldsList []fieldInfo + InlineMap int + Zero reflect.Value +} + +type fieldInfo struct { + Key string + Num int + OmitEmpty bool + MinSize bool + Inline []int +} + +var structMap = make(map[reflect.Type]*structInfo) +var structMapMutex sync.RWMutex + +type externalPanic string + +func (e externalPanic) String() string { + return string(e) +} + +func getStructInfo(st reflect.Type) (*structInfo, error) { + structMapMutex.RLock() + sinfo, found := structMap[st] + structMapMutex.RUnlock() + if found { + return sinfo, nil + } + n := st.NumField() + fieldsMap := make(map[string]fieldInfo) + fieldsList := make([]fieldInfo, 0, n) + inlineMap := -1 + for i := 0; i != n; i++ { + field := st.Field(i) + if field.PkgPath != "" && !field.Anonymous { + continue // Private field + } + + info := fieldInfo{Num: i} + + tag := field.Tag.Get("bson") + + // Fall-back to JSON struct tag, if feature flag is set. + if tag == "" && useJSONTagFallback { + tag = field.Tag.Get("json") + } + + // If there's no bson/json tag available. + if tag == "" { + // If there's no tag, and also no tag: value splits (i.e. no colon) + // then assume the entire tag is the value + if strings.Index(string(field.Tag), ":") < 0 { + tag = string(field.Tag) + } + } + + if tag == "-" { + continue + } + + inline := false + fields := strings.Split(tag, ",") + if len(fields) > 1 { + for _, flag := range fields[1:] { + switch flag { + case "omitempty": + info.OmitEmpty = true + case "minsize": + info.MinSize = true + case "inline": + inline = true + default: + msg := fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st) + panic(externalPanic(msg)) + } + } + tag = fields[0] + } + + if inline { + switch field.Type.Kind() { + case reflect.Map: + if inlineMap >= 0 { + return nil, errors.New("Multiple ,inline maps in struct " + st.String()) + } + if field.Type.Key() != reflect.TypeOf("") { + return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) + } + inlineMap = info.Num + case reflect.Ptr: + // allow only pointer to struct + if kind := field.Type.Elem().Kind(); kind != reflect.Struct { + return nil, errors.New("Option ,inline allows a pointer only to a struct, was given pointer to " + kind.String()) + } + + field.Type = field.Type.Elem() + fallthrough + case reflect.Struct: + sinfo, err := getStructInfo(field.Type) + if err != nil { + return nil, err + } + for _, finfo := range sinfo.FieldsList { + if _, found := fieldsMap[finfo.Key]; found { + msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + if finfo.Inline == nil { + finfo.Inline = []int{i, finfo.Num} + } else { + finfo.Inline = append([]int{i}, finfo.Inline...) 
+ } + fieldsMap[finfo.Key] = finfo + fieldsList = append(fieldsList, finfo) + } + default: + panic("Option ,inline needs a struct value or a pointer to a struct or map field") + } + continue + } + + if tag != "" { + info.Key = tag + } else { + info.Key = strings.ToLower(field.Name) + } + + if _, found = fieldsMap[info.Key]; found { + msg := "Duplicated key '" + info.Key + "' in struct " + st.String() + return nil, errors.New(msg) + } + + fieldsList = append(fieldsList, info) + fieldsMap[info.Key] = info + } + sinfo = &structInfo{ + fieldsMap, + fieldsList, + inlineMap, + reflect.New(st).Elem(), + } + structMapMutex.Lock() + structMap[st] = sinfo + structMapMutex.Unlock() + return sinfo, nil +} diff --git a/vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go b/vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go new file mode 100644 index 00000000000..3525a004b6c --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/bson_corpus_spec_test_generator.go @@ -0,0 +1,294 @@ +// +build ignore + +package main + +import ( + "bytes" + "fmt" + "go/format" + "html/template" + "io/ioutil" + "log" + "path/filepath" + "strings" + + "github.com/globalsign/mgo/internal/json" +) + +func main() { + log.SetFlags(0) + log.SetPrefix(name + ": ") + + var g Generator + + fmt.Fprintf(&g, "// Code generated by \"%s.go\"; DO NOT EDIT\n\n", name) + + src := g.generate() + + err := ioutil.WriteFile(fmt.Sprintf("%s.go", strings.TrimSuffix(name, "_generator")), src, 0644) + if err != nil { + log.Fatalf("writing output: %s", err) + } +} + +// Generator holds the state of the analysis. Primarily used to buffer +// the output for format.Source. +type Generator struct { + bytes.Buffer // Accumulated output. +} + +// format returns the gofmt-ed contents of the Generator's buffer. +func (g *Generator) format() []byte { + src, err := format.Source(g.Bytes()) + if err != nil { + // Should never happen, but can arise when developing this code. + // The user can compile the output to see the error. 
+ log.Printf("warning: internal error: invalid Go generated: %s", err) + log.Printf("warning: compile the package to analyze the error") + return g.Bytes() + } + return src +} + +// EVERYTHING ABOVE IS CONSTANT BETWEEN THE GENERATORS + +const name = "bson_corpus_spec_test_generator" + +func (g *Generator) generate() []byte { + + testFiles, err := filepath.Glob("./specdata/specifications/source/bson-corpus/tests/*.json") + if err != nil { + log.Fatalf("error reading bson-corpus files: %s", err) + } + + tests, err := g.loadTests(testFiles) + if err != nil { + log.Fatalf("error loading tests: %s", err) + } + + tmpl, err := g.getTemplate() + if err != nil { + log.Fatalf("error loading template: %s", err) + } + + tmpl.Execute(&g.Buffer, tests) + + return g.format() +} + +func (g *Generator) loadTests(filenames []string) ([]*testDef, error) { + var tests []*testDef + for _, filename := range filenames { + test, err := g.loadTest(filename) + if err != nil { + return nil, err + } + + tests = append(tests, test) + } + + return tests, nil +} + +func (g *Generator) loadTest(filename string) (*testDef, error) { + content, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + var testDef testDef + err = json.Unmarshal(content, &testDef) + if err != nil { + return nil, err + } + + names := make(map[string]struct{}) + + for i := len(testDef.Valid) - 1; i >= 0; i-- { + if testDef.BsonType == "0x05" && testDef.Valid[i].Description == "subtype 0x02" { + testDef.Valid = append(testDef.Valid[:i], testDef.Valid[i+1:]...) + continue + } + + name := cleanupFuncName(testDef.Description + "_" + testDef.Valid[i].Description) + nameIdx := name + j := 1 + for { + if _, ok := names[nameIdx]; !ok { + break + } + + nameIdx = fmt.Sprintf("%s_%d", name, j) + } + + names[nameIdx] = struct{}{} + + testDef.Valid[i].TestDef = &testDef + testDef.Valid[i].Name = nameIdx + testDef.Valid[i].StructTest = testDef.TestKey != "" && + (testDef.BsonType != "0x05" || strings.Contains(testDef.Valid[i].Description, "0x00")) && + !testDef.Deprecated + } + + for i := len(testDef.DecodeErrors) - 1; i >= 0; i-- { + if strings.Contains(testDef.DecodeErrors[i].Description, "UTF-8") { + testDef.DecodeErrors = append(testDef.DecodeErrors[:i], testDef.DecodeErrors[i+1:]...) + continue + } + + name := cleanupFuncName(testDef.Description + "_" + testDef.DecodeErrors[i].Description) + nameIdx := name + j := 1 + for { + if _, ok := names[nameIdx]; !ok { + break + } + + nameIdx = fmt.Sprintf("%s_%d", name, j) + } + names[nameIdx] = struct{}{} + + testDef.DecodeErrors[i].Name = nameIdx + } + + return &testDef, nil +} + +func (g *Generator) getTemplate() (*template.Template, error) { + content := `package bson_test + +import ( + "encoding/hex" + "time" + + . 
"gopkg.in/check.v1" + "github.com/globalsign/mgo/bson" +) + +func testValid(c *C, in []byte, expected []byte, result interface{}) { + err := bson.Unmarshal(in, result) + c.Assert(err, IsNil) + + out, err := bson.Marshal(result) + c.Assert(err, IsNil) + + c.Assert(string(expected), Equals, string(out), Commentf("roundtrip failed for %T, expected '%x' but got '%x'", result, expected, out)) +} + +func testDecodeSkip(c *C, in []byte) { + err := bson.Unmarshal(in, &struct{}{}) + c.Assert(err, IsNil) +} + +func testDecodeError(c *C, in []byte, result interface{}) { + err := bson.Unmarshal(in, result) + c.Assert(err, Not(IsNil)) +} + +{{range .}} +{{range .Valid}} +func (s *S) Test{{.Name}}(c *C) { + b, err := hex.DecodeString("{{.Bson}}") + c.Assert(err, IsNil) + + {{if .CanonicalBson}} + cb, err := hex.DecodeString("{{.CanonicalBson}}") + c.Assert(err, IsNil) + {{else}} + cb := b + {{end}} + + var resultD bson.D + testValid(c, b, cb, &resultD) + {{if .StructTest}}var resultS struct { + Element {{.TestDef.GoType}} ` + "`bson:\"{{.TestDef.TestKey}}\"`" + ` + } + testValid(c, b, cb, &resultS){{end}} + + testDecodeSkip(c, b) +} +{{end}} + +{{range .DecodeErrors}} +func (s *S) Test{{.Name}}(c *C) { + b, err := hex.DecodeString("{{.Bson}}") + c.Assert(err, IsNil) + + var resultD bson.D + testDecodeError(c, b, &resultD) +} +{{end}} +{{end}} +` + tmpl, err := template.New("").Parse(content) + if err != nil { + return nil, err + } + return tmpl, nil +} + +func cleanupFuncName(name string) string { + return strings.Map(func(r rune) rune { + if (r >= 48 && r <= 57) || (r >= 65 && r <= 90) || (r >= 97 && r <= 122) { + return r + } + return '_' + }, name) +} + +type testDef struct { + Description string `json:"description"` + BsonType string `json:"bson_type"` + TestKey string `json:"test_key"` + Valid []*valid `json:"valid"` + DecodeErrors []*decodeError `json:"decodeErrors"` + Deprecated bool `json:"deprecated"` +} + +func (t *testDef) GoType() string { + switch t.BsonType { + case "0x01": + return "float64" + case "0x02": + return "string" + case "0x03": + return "bson.D" + case "0x04": + return "[]interface{}" + case "0x05": + return "[]byte" + case "0x07": + return "bson.ObjectId" + case "0x08": + return "bool" + case "0x09": + return "time.Time" + case "0x0E": + return "string" + case "0x10": + return "int32" + case "0x12": + return "int64" + case "0x13": + return "bson.Decimal" + default: + return "interface{}" + } +} + +type valid struct { + Description string `json:"description"` + Bson string `json:"bson"` + CanonicalBson string `json:"canonical_bson"` + + Name string + StructTest bool + TestDef *testDef +} + +type decodeError struct { + Description string `json:"description"` + Bson string `json:"bson"` + + Name string +} diff --git a/vendor/github.com/globalsign/mgo/bson/compatibility.go b/vendor/github.com/globalsign/mgo/bson/compatibility.go new file mode 100644 index 00000000000..66efd465fac --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/compatibility.go @@ -0,0 +1,29 @@ +package bson + +// Current state of the JSON tag fallback option. +var useJSONTagFallback = false +var useRespectNilValues = false + +// SetJSONTagFallback enables or disables the JSON-tag fallback for structure tagging. When this is enabled, structures +// without BSON tags on a field will fall-back to using the JSON tag (if present). +func SetJSONTagFallback(state bool) { + useJSONTagFallback = state +} + +// JSONTagFallbackState returns the current status of the JSON tag fallback compatability option. 
See SetJSONTagFallback +// for more information. +func JSONTagFallbackState() bool { + return useJSONTagFallback +} + +// SetRespectNilValues enables or disables serializing nil slices or maps to `null` values. +// In other words it enables `encoding/json` compatible behaviour. +func SetRespectNilValues(state bool) { + useRespectNilValues = state +} + +// RespectNilValuesState returns the current status of the JSON nil slices and maps fallback compatibility option. +// See SetRespectNilValues for more information. +func RespectNilValuesState() bool { + return useRespectNilValues +} diff --git a/vendor/github.com/globalsign/mgo/bson/decimal.go b/vendor/github.com/globalsign/mgo/bson/decimal.go new file mode 100644 index 00000000000..672ba182594 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/decimal.go @@ -0,0 +1,312 @@ +// BSON library for Go +// +// Copyright (c) 2010-2012 - Gustavo Niemeyer +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this +// list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package bson + +import ( + "fmt" + "strconv" + "strings" +) + +// Decimal128 holds decimal128 BSON values. +type Decimal128 struct { + h, l uint64 +} + +func (d Decimal128) String() string { + var pos int // positive sign + var e int // exponent + var h, l uint64 // significand high/low + + if d.h>>63&1 == 0 { + pos = 1 + } + + switch d.h >> 58 & (1<<5 - 1) { + case 0x1F: + return "NaN" + case 0x1E: + return "-Inf"[pos:] + } + + l = d.l + if d.h>>61&3 == 3 { + // Bits: 1*sign 2*ignored 14*exponent 111*significand. + // Implicit 0b100 prefix in significand. + e = int(d.h>>47&(1<<14-1)) - 6176 + //h = 4<<47 | d.h&(1<<47-1) + // Spec says all of these values are out of range. + h, l = 0, 0 + } else { + // Bits: 1*sign 14*exponent 113*significand + e = int(d.h>>49&(1<<14-1)) - 6176 + h = d.h & (1<<49 - 1) + } + + // Would be handled by the logic below, but that's trivial and common. + if h == 0 && l == 0 && e == 0 { + return "-0"[pos:] + } + + var repr [48]byte // Loop 5 times over 9 digits plus dot, negative sign, and leading zero. + var last = len(repr) + var i = len(repr) + var dot = len(repr) + e + var rem uint32 +Loop: + for d9 := 0; d9 < 5; d9++ { + h, l, rem = divmod(h, l, 1e9) + for d1 := 0; d1 < 9; d1++ { + // Handle "-0.0", "0.00123400", "-1.00E-6", "1.050E+3", etc. 
+ if i < len(repr) && (dot == i || l == 0 && h == 0 && rem > 0 && rem < 10 && (dot < i-6 || e > 0)) { + e += len(repr) - i + i-- + repr[i] = '.' + last = i - 1 + dot = len(repr) // Unmark. + } + c := '0' + byte(rem%10) + rem /= 10 + i-- + repr[i] = c + // Handle "0E+3", "1E+3", etc. + if l == 0 && h == 0 && rem == 0 && i == len(repr)-1 && (dot < i-5 || e > 0) { + last = i + break Loop + } + if c != '0' { + last = i + } + // Break early. Works without it, but why. + if dot > i && l == 0 && h == 0 && rem == 0 { + break Loop + } + } + } + repr[last-1] = '-' + last-- + + if e > 0 { + return string(repr[last+pos:]) + "E+" + strconv.Itoa(e) + } + if e < 0 { + return string(repr[last+pos:]) + "E" + strconv.Itoa(e) + } + return string(repr[last+pos:]) +} + +func divmod(h, l uint64, div uint32) (qh, ql uint64, rem uint32) { + div64 := uint64(div) + a := h >> 32 + aq := a / div64 + ar := a % div64 + b := ar<<32 + h&(1<<32-1) + bq := b / div64 + br := b % div64 + c := br<<32 + l>>32 + cq := c / div64 + cr := c % div64 + d := cr<<32 + l&(1<<32-1) + dq := d / div64 + dr := d % div64 + return (aq<<32 | bq), (cq<<32 | dq), uint32(dr) +} + +var dNaN = Decimal128{0x1F << 58, 0} +var dPosInf = Decimal128{0x1E << 58, 0} +var dNegInf = Decimal128{0x3E << 58, 0} + +func dErr(s string) (Decimal128, error) { + return dNaN, fmt.Errorf("cannot parse %q as a decimal128", s) +} + +// ParseDecimal128 parse a string and return the corresponding value as +// a decimal128 +func ParseDecimal128(s string) (Decimal128, error) { + orig := s + if s == "" { + return dErr(orig) + } + neg := s[0] == '-' + if neg || s[0] == '+' { + s = s[1:] + } + + if (len(s) == 3 || len(s) == 8) && (s[0] == 'N' || s[0] == 'n' || s[0] == 'I' || s[0] == 'i') { + if s == "NaN" || s == "nan" || strings.EqualFold(s, "nan") { + return dNaN, nil + } + if s == "Inf" || s == "inf" || strings.EqualFold(s, "inf") || strings.EqualFold(s, "infinity") { + if neg { + return dNegInf, nil + } + return dPosInf, nil + } + return dErr(orig) + } + + var h, l uint64 + var e int + + var add, ovr uint32 + var mul uint32 = 1 + var dot = -1 + var digits = 0 + var i = 0 + for i < len(s) { + c := s[i] + if mul == 1e9 { + h, l, ovr = muladd(h, l, mul, add) + mul, add = 1, 0 + if ovr > 0 || h&((1<<15-1)<<49) > 0 { + return dErr(orig) + } + } + if c >= '0' && c <= '9' { + i++ + if c > '0' || digits > 0 { + digits++ + } + if digits > 34 { + if c == '0' { + // Exact rounding. + e++ + continue + } + return dErr(orig) + } + mul *= 10 + add *= 10 + add += uint32(c - '0') + continue + } + if c == '.' { + i++ + if dot >= 0 || i == 1 && len(s) == 1 { + return dErr(orig) + } + if i == len(s) { + break + } + if s[i] < '0' || s[i] > '9' || e > 0 { + return dErr(orig) + } + dot = i + continue + } + break + } + if i == 0 { + return dErr(orig) + } + if mul > 1 { + h, l, ovr = muladd(h, l, mul, add) + if ovr > 0 || h&((1<<15-1)<<49) > 0 { + return dErr(orig) + } + } + if dot >= 0 { + e += dot - i + } + if i+1 < len(s) && (s[i] == 'E' || s[i] == 'e') { + i++ + eneg := s[i] == '-' + if eneg || s[i] == '+' { + i++ + if i == len(s) { + return dErr(orig) + } + } + n := 0 + for i < len(s) && n < 1e4 { + c := s[i] + i++ + if c < '0' || c > '9' { + return dErr(orig) + } + n *= 10 + n += int(c - '0') + } + if eneg { + n = -n + } + e += n + for e < -6176 { + // Subnormal. + var div uint32 = 1 + for div < 1e9 && e < -6176 { + div *= 10 + e++ + } + var rem uint32 + h, l, rem = divmod(h, l, div) + if rem > 0 { + return dErr(orig) + } + } + for e > 6111 { + // Clamped. 
+ var mul uint32 = 1 + for mul < 1e9 && e > 6111 { + mul *= 10 + e-- + } + h, l, ovr = muladd(h, l, mul, 0) + if ovr > 0 || h&((1<<15-1)<<49) > 0 { + return dErr(orig) + } + } + if e < -6176 || e > 6111 { + return dErr(orig) + } + } + + if i < len(s) { + return dErr(orig) + } + + h |= uint64(e+6176) & uint64(1<<14-1) << 49 + if neg { + h |= 1 << 63 + } + return Decimal128{h, l}, nil +} + +func muladd(h, l uint64, mul uint32, add uint32) (resh, resl uint64, overflow uint32) { + mul64 := uint64(mul) + a := mul64 * (l & (1<<32 - 1)) + b := a>>32 + mul64*(l>>32) + c := b>>32 + mul64*(h&(1<<32-1)) + d := c>>32 + mul64*(h>>32) + + a = a&(1<<32-1) + uint64(add) + b = b&(1<<32-1) + a>>32 + c = c&(1<<32-1) + b>>32 + d = d&(1<<32-1) + c>>32 + + return (d<<32 | c&(1<<32-1)), (b<<32 | a&(1<<32-1)), uint32(d >> 32) +} diff --git a/vendor/github.com/globalsign/mgo/bson/decode.go b/vendor/github.com/globalsign/mgo/bson/decode.go new file mode 100644 index 00000000000..658856add04 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/decode.go @@ -0,0 +1,1055 @@ +// BSON library for Go +// +// Copyright (c) 2010-2012 - Gustavo Niemeyer +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this +// list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +// gobson - BSON library for Go. + +package bson + +import ( + "errors" + "fmt" + "io" + "math" + "net/url" + "reflect" + "strconv" + "sync" + "time" +) + +type decoder struct { + in []byte + i int + docType reflect.Type +} + +var typeM = reflect.TypeOf(M{}) + +func newDecoder(in []byte) *decoder { + return &decoder{in, 0, typeM} +} + +// -------------------------------------------------------------------------- +// Some helper functions. + +func corrupted() { + panic("Document is corrupted") +} + +// -------------------------------------------------------------------------- +// Unmarshaling of documents. 
+ +const ( + setterUnknown = iota + setterNone + setterType + setterAddr +) + +var setterStyles map[reflect.Type]int +var setterIface reflect.Type +var setterMutex sync.RWMutex + +func init() { + var iface Setter + setterIface = reflect.TypeOf(&iface).Elem() + setterStyles = make(map[reflect.Type]int) +} + +func setterStyle(outt reflect.Type) int { + setterMutex.RLock() + style := setterStyles[outt] + setterMutex.RUnlock() + if style != setterUnknown { + return style + } + + setterMutex.Lock() + defer setterMutex.Unlock() + if outt.Implements(setterIface) { + style = setterType + } else if reflect.PtrTo(outt).Implements(setterIface) { + style = setterAddr + } else { + style = setterNone + } + setterStyles[outt] = style + return style +} + +func getSetter(outt reflect.Type, out reflect.Value) Setter { + style := setterStyle(outt) + if style == setterNone { + return nil + } + if style == setterAddr { + if !out.CanAddr() { + return nil + } + out = out.Addr() + } else if outt.Kind() == reflect.Ptr && out.IsNil() { + out.Set(reflect.New(outt.Elem())) + } + return out.Interface().(Setter) +} + +func clearMap(m reflect.Value) { + var none reflect.Value + for _, k := range m.MapKeys() { + m.SetMapIndex(k, none) + } +} + +func (d *decoder) readDocTo(out reflect.Value) { + var elemType reflect.Type + outt := out.Type() + outk := outt.Kind() + + for { + if outk == reflect.Ptr && out.IsNil() { + out.Set(reflect.New(outt.Elem())) + } + if setter := getSetter(outt, out); setter != nil { + raw := d.readRaw(ElementDocument) + err := setter.SetBSON(raw) + if _, ok := err.(*TypeError); err != nil && !ok { + panic(err) + } + return + } + if outk == reflect.Ptr { + out = out.Elem() + outt = out.Type() + outk = out.Kind() + continue + } + break + } + + var fieldsMap map[string]fieldInfo + var inlineMap reflect.Value + if outt == typeRaw { + out.Set(reflect.ValueOf(d.readRaw(ElementDocument))) + return + } + + origout := out + if outk == reflect.Interface { + if d.docType.Kind() == reflect.Map { + mv := reflect.MakeMap(d.docType) + out.Set(mv) + out = mv + } else { + dv := reflect.New(d.docType).Elem() + out.Set(dv) + out = dv + } + outt = out.Type() + outk = outt.Kind() + } + + docType := d.docType + keyType := typeString + convertKey := false + switch outk { + case reflect.Map: + keyType = outt.Key() + if keyType != typeString { + convertKey = true + } + elemType = outt.Elem() + if elemType == typeIface { + d.docType = outt + } + if out.IsNil() { + out.Set(reflect.MakeMap(out.Type())) + } else if out.Len() > 0 { + clearMap(out) + } + case reflect.Struct: + sinfo, err := getStructInfo(out.Type()) + if err != nil { + panic(err) + } + fieldsMap = sinfo.FieldsMap + out.Set(sinfo.Zero) + if sinfo.InlineMap != -1 { + inlineMap = out.Field(sinfo.InlineMap) + if !inlineMap.IsNil() && inlineMap.Len() > 0 { + clearMap(inlineMap) + } + elemType = inlineMap.Type().Elem() + if elemType == typeIface { + d.docType = inlineMap.Type() + } + } + case reflect.Slice: + switch outt.Elem() { + case typeDocElem: + origout.Set(d.readDocElems(outt)) + return + case typeRawDocElem: + origout.Set(d.readRawDocElems(outt)) + return + } + fallthrough + default: + panic("Unsupported document type for unmarshalling: " + out.Type().String()) + } + + end := int(d.readInt32()) + end += d.i - 4 + if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { + corrupted() + } + for d.in[d.i] != '\x00' { + kind := d.readByte() + name := d.readCStr() + if d.i >= end { + corrupted() + } + + switch outk { + case reflect.Map: + e := 
reflect.New(elemType).Elem() + if d.readElemTo(e, kind) { + k := reflect.ValueOf(name) + if convertKey { + mapKeyType := out.Type().Key() + mapKeyKind := mapKeyType.Kind() + + switch mapKeyKind { + case reflect.Int: + fallthrough + case reflect.Int8: + fallthrough + case reflect.Int16: + fallthrough + case reflect.Int32: + fallthrough + case reflect.Int64: + fallthrough + case reflect.Uint: + fallthrough + case reflect.Uint8: + fallthrough + case reflect.Uint16: + fallthrough + case reflect.Uint32: + fallthrough + case reflect.Uint64: + fallthrough + case reflect.Float32: + fallthrough + case reflect.Float64: + parsed := d.parseMapKeyAsFloat(k, mapKeyKind) + k = reflect.ValueOf(parsed) + case reflect.String: + mapKeyType = keyType + default: + panic("BSON map must have string or decimal keys. Got: " + outt.String()) + } + + k = k.Convert(mapKeyType) + } + out.SetMapIndex(k, e) + } + case reflect.Struct: + if info, ok := fieldsMap[name]; ok { + if info.Inline == nil { + d.readElemTo(out.Field(info.Num), kind) + } else { + d.readElemTo(out.FieldByIndex(info.Inline), kind) + } + } else if inlineMap.IsValid() { + if inlineMap.IsNil() { + inlineMap.Set(reflect.MakeMap(inlineMap.Type())) + } + e := reflect.New(elemType).Elem() + if d.readElemTo(e, kind) { + inlineMap.SetMapIndex(reflect.ValueOf(name), e) + } + } else { + d.dropElem(kind) + } + case reflect.Slice: + } + + if d.i >= end { + corrupted() + } + } + d.i++ // '\x00' + if d.i != end { + corrupted() + } + d.docType = docType +} + +func (decoder) parseMapKeyAsFloat(k reflect.Value, mapKeyKind reflect.Kind) float64 { + parsed, err := strconv.ParseFloat(k.String(), 64) + if err != nil { + panic("Map key is defined to be a decimal type (" + mapKeyKind.String() + ") but got error " + + err.Error()) + } + + return parsed +} + +func (d *decoder) readArrayDocTo(out reflect.Value) { + end := int(d.readInt32()) + end += d.i - 4 + if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { + corrupted() + } + i := 0 + l := out.Len() + for d.in[d.i] != '\x00' { + if i >= l { + panic("Length mismatch on array field") + } + kind := d.readByte() + for d.i < end && d.in[d.i] != '\x00' { + d.i++ + } + if d.i >= end { + corrupted() + } + d.i++ + d.readElemTo(out.Index(i), kind) + if d.i >= end { + corrupted() + } + i++ + } + if i != l { + panic("Length mismatch on array field") + } + d.i++ // '\x00' + if d.i != end { + corrupted() + } +} + +func (d *decoder) readSliceDoc(t reflect.Type) interface{} { + tmp := make([]reflect.Value, 0, 8) + elemType := t.Elem() + if elemType == typeRawDocElem { + d.dropElem(ElementArray) + return reflect.Zero(t).Interface() + } + if elemType == typeRaw { + return d.readSliceOfRaw() + } + + end := int(d.readInt32()) + end += d.i - 4 + if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { + corrupted() + } + for d.in[d.i] != '\x00' { + kind := d.readByte() + for d.i < end && d.in[d.i] != '\x00' { + d.i++ + } + if d.i >= end { + corrupted() + } + d.i++ + e := reflect.New(elemType).Elem() + if d.readElemTo(e, kind) { + tmp = append(tmp, e) + } + if d.i >= end { + corrupted() + } + } + d.i++ // '\x00' + if d.i != end { + corrupted() + } + + n := len(tmp) + slice := reflect.MakeSlice(t, n, n) + for i := 0; i != n; i++ { + slice.Index(i).Set(tmp[i]) + } + return slice.Interface() +} + +func BSONElementSize(kind byte, offset int, buffer []byte) (int, error) { + switch kind { + case ElementFloat64: // Float64 + return 8, nil + case ElementJavaScriptWithoutScope: // JavaScript without scope + fallthrough + case ElementSymbol: 
// Symbol + fallthrough + case ElementString: // UTF-8 string + size, err := getSize(offset, buffer) + if err != nil { + return 0, err + } + if size < 1 { + return 0, errors.New("String size can't be less then one byte") + } + size += 4 + if offset+size > len(buffer) { + return 0, io.ErrUnexpectedEOF + } + if buffer[offset+size-1] != 0 { + return 0, errors.New("Invalid string: non zero-terminated") + } + return size, nil + case ElementArray: // Array + fallthrough + case ElementDocument: // Document + size, err := getSize(offset, buffer) + if err != nil { + return 0, err + } + if size < 5 { + return 0, errors.New("Declared document size is too small") + } + return size, nil + case ElementBinary: // Binary + size, err := getSize(offset, buffer) + if err != nil { + return 0, err + } + if size < 0 { + return 0, errors.New("Binary data size can't be negative") + } + return size + 5, nil + case Element06: // Undefined (obsolete, but still seen in the wild) + return 0, nil + case ElementObjectId: // ObjectId + return 12, nil + case ElementBool: // Bool + return 1, nil + case ElementDatetime: // Timestamp + return 8, nil + case ElementNil: // Nil + return 0, nil + case ElementRegEx: // RegEx + end := offset + for i := 0; i < 2; i++ { + for end < len(buffer) && buffer[end] != '\x00' { + end++ + } + end++ + } + if end > len(buffer) { + return 0, io.ErrUnexpectedEOF + } + return end - offset, nil + case ElementDBPointer: // DBPointer + size, err := getSize(offset, buffer) + if err != nil { + return 0, err + } + if size < 1 { + return 0, errors.New("String size can't be less then one byte") + } + return size + 12 + 4, nil + case ElementJavaScriptWithScope: // JavaScript with scope + size, err := getSize(offset, buffer) + if err != nil { + return 0, err + } + if size < 4+5+5 { + return 0, errors.New("Declared document element is too small") + } + return size, nil + case ElementInt32: // Int32 + return 4, nil + case ElementTimestamp: // Mongo-specific timestamp + return 8, nil + case ElementInt64: // Int64 + return 8, nil + case ElementDecimal128: // Decimal128 + return 16, nil + case ElementMaxKey: // Max key + return 0, nil + case ElementMinKey: // Min key + return 0, nil + default: + return 0, errors.New(fmt.Sprintf("Unknown element kind (0x%02X)", kind)) + } +} + +func (d *decoder) readRaw(kind byte) Raw { + size, err := BSONElementSize(kind, d.i, d.in) + if err != nil { + corrupted() + } + if d.i+size > len(d.in) { + corrupted() + } + d.i += size + return Raw{ + Kind: kind, + Data: d.in[d.i-size : d.i], + } +} + +func (d *decoder) readSliceOfRaw() interface{} { + tmp := make([]Raw, 0, 8) + end := int(d.readInt32()) + end += d.i - 4 + if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { + corrupted() + } + for d.in[d.i] != '\x00' { + kind := d.readByte() + for d.i < end && d.in[d.i] != '\x00' { + d.i++ + } + if d.i >= end { + corrupted() + } + d.i++ + e := d.readRaw(kind) + tmp = append(tmp, e) + if d.i >= end { + corrupted() + } + } + d.i++ // '\x00' + if d.i != end { + corrupted() + } + return tmp +} + +var typeSlice = reflect.TypeOf([]interface{}{}) +var typeIface = typeSlice.Elem() + +func (d *decoder) readDocElems(typ reflect.Type) reflect.Value { + docType := d.docType + d.docType = typ + slice := make([]DocElem, 0, 8) + d.readDocWith(func(kind byte, name string) { + e := DocElem{Name: name} + v := reflect.ValueOf(&e.Value) + if d.readElemTo(v.Elem(), kind) { + slice = append(slice, e) + } + }) + slicev := reflect.New(typ).Elem() + slicev.Set(reflect.ValueOf(slice)) + d.docType = 
docType + return slicev +} + +func (d *decoder) readRawDocElems(typ reflect.Type) reflect.Value { + docType := d.docType + d.docType = typ + slice := make([]RawDocElem, 0, 8) + d.readDocWith(func(kind byte, name string) { + e := RawDocElem{Name: name, Value: d.readRaw(kind)} + slice = append(slice, e) + }) + slicev := reflect.New(typ).Elem() + slicev.Set(reflect.ValueOf(slice)) + d.docType = docType + return slicev +} + +func (d *decoder) readDocWith(f func(kind byte, name string)) { + end := int(d.readInt32()) + end += d.i - 4 + if end <= d.i || end > len(d.in) || d.in[end-1] != '\x00' { + corrupted() + } + for d.in[d.i] != '\x00' { + kind := d.readByte() + name := d.readCStr() + if d.i >= end { + corrupted() + } + f(kind, name) + if d.i >= end { + corrupted() + } + } + d.i++ // '\x00' + if d.i != end { + corrupted() + } +} + +// -------------------------------------------------------------------------- +// Unmarshaling of individual elements within a document. +func (d *decoder) dropElem(kind byte) { + size, err := BSONElementSize(kind, d.i, d.in) + if err != nil { + corrupted() + } + if d.i+size > len(d.in) { + corrupted() + } + d.i += size +} + +// Attempt to decode an element from the document and put it into out. +// If the types are not compatible, the returned ok value will be +// false and out will be unchanged. +func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) { + outt := out.Type() + + if outt == typeRaw { + out.Set(reflect.ValueOf(d.readRaw(kind))) + return true + } + + if outt == typeRawPtr { + raw := d.readRaw(kind) + out.Set(reflect.ValueOf(&raw)) + return true + } + + if kind == ElementDocument { + // Delegate unmarshaling of documents. + outt := out.Type() + outk := out.Kind() + switch outk { + case reflect.Interface, reflect.Ptr, reflect.Struct, reflect.Map: + d.readDocTo(out) + return true + } + if setterStyle(outt) != setterNone { + d.readDocTo(out) + return true + } + if outk == reflect.Slice { + switch outt.Elem() { + case typeDocElem: + out.Set(d.readDocElems(outt)) + case typeRawDocElem: + out.Set(d.readRawDocElems(outt)) + default: + d.dropElem(kind) + } + return true + } + d.dropElem(kind) + return true + } + + if setter := getSetter(outt, out); setter != nil { + err := setter.SetBSON(d.readRaw(kind)) + if err == ErrSetZero { + out.Set(reflect.Zero(outt)) + return true + } + if err == nil { + return true + } + if _, ok := err.(*TypeError); !ok { + panic(err) + } + return false + } + + var in interface{} + + switch kind { + case ElementFloat64: + in = d.readFloat64() + case ElementString: + in = d.readStr() + case ElementDocument: + panic("Can't happen. Handled above.") + case ElementArray: + outt := out.Type() + if setterStyle(outt) != setterNone { + // Skip the value so its data is handed to the setter below. + d.dropElem(kind) + break + } + for outt.Kind() == reflect.Ptr { + outt = outt.Elem() + } + switch outt.Kind() { + case reflect.Array: + d.readArrayDocTo(out) + return true + case reflect.Slice: + in = d.readSliceDoc(outt) + default: + in = d.readSliceDoc(typeSlice) + } + case ElementBinary: + b := d.readBinary() + if b.Kind == BinaryGeneric || b.Kind == BinaryBinaryOld { + in = b.Data + } else { + in = b + } + case Element06: // Undefined (obsolete, but still seen in the wild) + in = Undefined + case ElementObjectId: + in = ObjectId(d.readBytes(12)) + case ElementBool: + in = d.readBool() + case ElementDatetime: // Timestamp + // MongoDB handles timestamps as milliseconds. 
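+		// -62135596800000 is time.Time{}'s zero instant (January 1, year 1 UTC)
+		// expressed as milliseconds since the Unix epoch.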
+ i := d.readInt64() + if i == -62135596800000 { + in = time.Time{} // In UTC for convenience. + } else { + in = time.Unix(i/1e3, i%1e3*1e6).UTC() + } + case ElementNil: + in = nil + case ElementRegEx: + in = d.readRegEx() + case ElementDBPointer: + in = DBPointer{Namespace: d.readStr(), Id: ObjectId(d.readBytes(12))} + case ElementJavaScriptWithoutScope: + in = JavaScript{Code: d.readStr()} + case ElementSymbol: + in = Symbol(d.readStr()) + case ElementJavaScriptWithScope: + start := d.i + l := int(d.readInt32()) + js := JavaScript{d.readStr(), make(M)} + d.readDocTo(reflect.ValueOf(js.Scope)) + if d.i != start+l { + corrupted() + } + in = js + case ElementInt32: + in = int(d.readInt32()) + case ElementTimestamp: // Mongo-specific timestamp + in = MongoTimestamp(d.readInt64()) + case ElementInt64: + switch out.Type() { + case typeTimeDuration: + in = time.Duration(time.Duration(d.readInt64()) * time.Millisecond) + default: + in = d.readInt64() + } + case ElementDecimal128: + in = Decimal128{ + l: uint64(d.readInt64()), + h: uint64(d.readInt64()), + } + case ElementMaxKey: + in = MaxKey + case ElementMinKey: + in = MinKey + default: + panic(fmt.Sprintf("Unknown element kind (0x%02X)", kind)) + } + + if in == nil { + out.Set(reflect.Zero(outt)) + return true + } + + outk := outt.Kind() + + // Dereference and initialize pointer if necessary. + first := true + for outk == reflect.Ptr { + if !out.IsNil() { + out = out.Elem() + } else { + elem := reflect.New(outt.Elem()) + if first { + // Only set if value is compatible. + first = false + defer func(out, elem reflect.Value) { + if good { + out.Set(elem) + } + }(out, elem) + } else { + out.Set(elem) + } + out = elem + } + outt = out.Type() + outk = outt.Kind() + } + + inv := reflect.ValueOf(in) + if outt == inv.Type() { + out.Set(inv) + return true + } + + switch outk { + case reflect.Interface: + out.Set(inv) + return true + case reflect.String: + switch inv.Kind() { + case reflect.String: + out.SetString(inv.String()) + return true + case reflect.Slice: + if b, ok := in.([]byte); ok { + out.SetString(string(b)) + return true + } + case reflect.Int, reflect.Int64: + if outt == typeJSONNumber { + out.SetString(strconv.FormatInt(inv.Int(), 10)) + return true + } + case reflect.Float64: + if outt == typeJSONNumber { + out.SetString(strconv.FormatFloat(inv.Float(), 'f', -1, 64)) + return true + } + } + case reflect.Slice, reflect.Array: + // Remember, array (0x04) slices are built with the correct + // element type. If we are here, must be a cross BSON kind + // conversion (e.g. 0x05 unmarshalling on string). 
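+		// Only byte slice/array destinations accept these cross-kind conversions.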
+ if outt.Elem().Kind() != reflect.Uint8 { + break + } + switch inv.Kind() { + case reflect.String: + slice := []byte(inv.String()) + out.Set(reflect.ValueOf(slice)) + return true + case reflect.Slice: + switch outt.Kind() { + case reflect.Array: + reflect.Copy(out, inv) + case reflect.Slice: + out.SetBytes(inv.Bytes()) + } + return true + } + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + switch inv.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + out.SetInt(inv.Int()) + return true + case reflect.Float32, reflect.Float64: + out.SetInt(int64(inv.Float())) + return true + case reflect.Bool: + if inv.Bool() { + out.SetInt(1) + } else { + out.SetInt(0) + } + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + panic("can't happen: no uint types in BSON (!?)") + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + switch inv.Kind() { + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + out.SetUint(uint64(inv.Int())) + return true + case reflect.Float32, reflect.Float64: + out.SetUint(uint64(inv.Float())) + return true + case reflect.Bool: + if inv.Bool() { + out.SetUint(1) + } else { + out.SetUint(0) + } + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + panic("Can't happen. No uint types in BSON.") + } + case reflect.Float32, reflect.Float64: + switch inv.Kind() { + case reflect.Float32, reflect.Float64: + out.SetFloat(inv.Float()) + return true + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + out.SetFloat(float64(inv.Int())) + return true + case reflect.Bool: + if inv.Bool() { + out.SetFloat(1) + } else { + out.SetFloat(0) + } + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + panic("Can't happen. No uint types in BSON?") + } + case reflect.Bool: + switch inv.Kind() { + case reflect.Bool: + out.SetBool(inv.Bool()) + return true + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + out.SetBool(inv.Int() != 0) + return true + case reflect.Float32, reflect.Float64: + out.SetBool(inv.Float() != 0) + return true + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + panic("Can't happen. No uint types in BSON?") + } + case reflect.Struct: + if outt == typeURL && inv.Kind() == reflect.String { + u, err := url.Parse(inv.String()) + if err != nil { + panic(err) + } + out.Set(reflect.ValueOf(u).Elem()) + return true + } + if outt == typeBinary { + if b, ok := in.([]byte); ok { + out.Set(reflect.ValueOf(Binary{Data: b})) + return true + } + } + } + + return false +} + +// -------------------------------------------------------------------------- +// Parsers of basic types. + +func (d *decoder) readRegEx() RegEx { + re := RegEx{} + re.Pattern = d.readCStr() + re.Options = d.readCStr() + return re +} + +func (d *decoder) readBinary() Binary { + l := d.readInt32() + b := Binary{} + b.Kind = d.readByte() + if b.Kind == BinaryBinaryOld && l > 4 { + // Weird obsolete format with redundant length. 
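+		// The inner length must match the outer length minus the 4 length bytes.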
+ rl := d.readInt32() + if rl != l-4 { + corrupted() + } + l = rl + } + b.Data = d.readBytes(l) + return b +} + +func (d *decoder) readStr() string { + l := d.readInt32() + b := d.readBytes(l - 1) + if d.readByte() != '\x00' { + corrupted() + } + return string(b) +} + +func (d *decoder) readCStr() string { + start := d.i + end := start + l := len(d.in) + for ; end != l; end++ { + if d.in[end] == '\x00' { + break + } + } + d.i = end + 1 + if d.i > l { + corrupted() + } + return string(d.in[start:end]) +} + +func (d *decoder) readBool() bool { + b := d.readByte() + if b == 0 { + return false + } + if b == 1 { + return true + } + panic(fmt.Sprintf("encoded boolean must be 1 or 0, found %d", b)) +} + +func (d *decoder) readFloat64() float64 { + return math.Float64frombits(uint64(d.readInt64())) +} + +func (d *decoder) readInt32() int32 { + b := d.readBytes(4) + return int32((uint32(b[0]) << 0) | + (uint32(b[1]) << 8) | + (uint32(b[2]) << 16) | + (uint32(b[3]) << 24)) +} + +func getSize(offset int, b []byte) (int, error) { + if offset+4 > len(b) { + return 0, io.ErrUnexpectedEOF + } + return int((uint32(b[offset]) << 0) | + (uint32(b[offset+1]) << 8) | + (uint32(b[offset+2]) << 16) | + (uint32(b[offset+3]) << 24)), nil +} + +func (d *decoder) readInt64() int64 { + b := d.readBytes(8) + return int64((uint64(b[0]) << 0) | + (uint64(b[1]) << 8) | + (uint64(b[2]) << 16) | + (uint64(b[3]) << 24) | + (uint64(b[4]) << 32) | + (uint64(b[5]) << 40) | + (uint64(b[6]) << 48) | + (uint64(b[7]) << 56)) +} + +func (d *decoder) readByte() byte { + i := d.i + d.i++ + if d.i > len(d.in) { + corrupted() + } + return d.in[i] +} + +func (d *decoder) readBytes(length int32) []byte { + if length < 0 { + corrupted() + } + start := d.i + d.i += int(length) + if d.i < start || d.i > len(d.in) { + corrupted() + } + return d.in[start : start+int(length)] +} diff --git a/vendor/github.com/globalsign/mgo/bson/encode.go b/vendor/github.com/globalsign/mgo/bson/encode.go new file mode 100644 index 00000000000..d0c6b2a855f --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/encode.go @@ -0,0 +1,645 @@ +// BSON library for Go +// +// Copyright (c) 2010-2012 - Gustavo Niemeyer +// +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are met: +// +// 1. Redistributions of source code must retain the above copyright notice, this +// list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright notice, +// this list of conditions and the following disclaimer in the documentation +// and/or other materials provided with the distribution. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+// gobson - BSON library for Go. + +package bson + +import ( + "encoding/json" + "fmt" + "math" + "net/url" + "reflect" + "sort" + "strconv" + "sync" + "time" +) + +// -------------------------------------------------------------------------- +// Some internal infrastructure. + +var ( + typeBinary = reflect.TypeOf(Binary{}) + typeObjectId = reflect.TypeOf(ObjectId("")) + typeDBPointer = reflect.TypeOf(DBPointer{"", ObjectId("")}) + typeSymbol = reflect.TypeOf(Symbol("")) + typeMongoTimestamp = reflect.TypeOf(MongoTimestamp(0)) + typeOrderKey = reflect.TypeOf(MinKey) + typeDocElem = reflect.TypeOf(DocElem{}) + typeRawDocElem = reflect.TypeOf(RawDocElem{}) + typeRaw = reflect.TypeOf(Raw{}) + typeRawPtr = reflect.PtrTo(reflect.TypeOf(Raw{})) + typeURL = reflect.TypeOf(url.URL{}) + typeTime = reflect.TypeOf(time.Time{}) + typeString = reflect.TypeOf("") + typeJSONNumber = reflect.TypeOf(json.Number("")) + typeTimeDuration = reflect.TypeOf(time.Duration(0)) +) + +var ( + // spec for []uint8 or []byte encoding + arrayOps = map[string]bool{ + "$in": true, + "$nin": true, + "$all": true, + } +) + +const itoaCacheSize = 32 + +const ( + getterUnknown = iota + getterNone + getterTypeVal + getterTypePtr + getterAddr +) + +var itoaCache []string + +var getterStyles map[reflect.Type]int +var getterIface reflect.Type +var getterMutex sync.RWMutex + +func init() { + itoaCache = make([]string, itoaCacheSize) + for i := 0; i != itoaCacheSize; i++ { + itoaCache[i] = strconv.Itoa(i) + } + var iface Getter + getterIface = reflect.TypeOf(&iface).Elem() + getterStyles = make(map[reflect.Type]int) +} + +func itoa(i int) string { + if i < itoaCacheSize { + return itoaCache[i] + } + return strconv.Itoa(i) +} + +func getterStyle(outt reflect.Type) int { + getterMutex.RLock() + style := getterStyles[outt] + getterMutex.RUnlock() + if style != getterUnknown { + return style + } + + getterMutex.Lock() + defer getterMutex.Unlock() + if outt.Implements(getterIface) { + vt := outt + for vt.Kind() == reflect.Ptr { + vt = vt.Elem() + } + if vt.Implements(getterIface) { + style = getterTypeVal + } else { + style = getterTypePtr + } + } else if reflect.PtrTo(outt).Implements(getterIface) { + style = getterAddr + } else { + style = getterNone + } + getterStyles[outt] = style + return style +} + +func getGetter(outt reflect.Type, out reflect.Value) Getter { + style := getterStyle(outt) + if style == getterNone { + return nil + } + if style == getterAddr { + if !out.CanAddr() { + return nil + } + return out.Addr().Interface().(Getter) + } + if style == getterTypeVal && out.Kind() == reflect.Ptr && out.IsNil() { + return nil + } + return out.Interface().(Getter) +} + +// -------------------------------------------------------------------------- +// Marshaling of the document value itself. + +type encoder struct { + out []byte +} + +func (e *encoder) addDoc(v reflect.Value) { + for { + if vi, ok := v.Interface().(Getter); ok { + getv, err := vi.GetBSON() + if err != nil { + panic(err) + } + v = reflect.ValueOf(getv) + continue + } + if v.Kind() == reflect.Ptr { + v = v.Elem() + continue + } + break + } + + if v.Type() == typeRaw { + raw := v.Interface().(Raw) + if raw.Kind != 0x03 && raw.Kind != 0x00 { + panic("Attempted to marshal Raw kind " + strconv.Itoa(int(raw.Kind)) + " as a document") + } + if len(raw.Data) == 0 { + panic("Attempted to marshal empty Raw document") + } + e.addBytes(raw.Data...) 
+ return + } + + start := e.reserveInt32() + + switch v.Kind() { + case reflect.Map: + e.addMap(v) + case reflect.Struct: + e.addStruct(v) + case reflect.Array, reflect.Slice: + e.addSlice(v) + default: + panic("Can't marshal " + v.Type().String() + " as a BSON document") + } + + e.addBytes(0) + e.setInt32(start, int32(len(e.out)-start)) +} + +func (e *encoder) addMap(v reflect.Value) { + for _, k := range v.MapKeys() { + e.addElem(fmt.Sprint(k), v.MapIndex(k), false) + } +} + +func (e *encoder) addStruct(v reflect.Value) { + sinfo, err := getStructInfo(v.Type()) + if err != nil { + panic(err) + } + var value reflect.Value + if sinfo.InlineMap >= 0 { + m := v.Field(sinfo.InlineMap) + if m.Len() > 0 { + for _, k := range m.MapKeys() { + ks := k.String() + if _, found := sinfo.FieldsMap[ks]; found { + panic(fmt.Sprintf("Can't have key %q in inlined map; conflicts with struct field", ks)) + } + e.addElem(ks, m.MapIndex(k), false) + } + } + } + for _, info := range sinfo.FieldsList { + if info.Inline == nil { + value = v.Field(info.Num) + } else { + // as pointers to struct are allowed here, + // there is no guarantee that pointer won't be nil. + // + // It is expected allowed behaviour + // so info.Inline MAY consist index to a nil pointer + // and that is why we safely call v.FieldByIndex and just continue on panic + field, errField := safeFieldByIndex(v, info.Inline) + if errField != nil { + continue + } + + value = field + } + if info.OmitEmpty && isZero(value) { + continue + } + if useRespectNilValues && + (value.Kind() == reflect.Slice || value.Kind() == reflect.Map) && + value.IsNil() { + e.addElem(info.Key, reflect.ValueOf(nil), info.MinSize) + continue + } + e.addElem(info.Key, value, info.MinSize) + } +} + +func safeFieldByIndex(v reflect.Value, index []int) (result reflect.Value, err error) { + defer func() { + if recovered := recover(); recovered != nil { + switch r := recovered.(type) { + case string: + err = fmt.Errorf("%s", r) + case error: + err = r + } + } + }() + + result = v.FieldByIndex(index) + return +} + +func isZero(v reflect.Value) bool { + switch v.Kind() { + case reflect.String: + return len(v.String()) == 0 + case reflect.Ptr, reflect.Interface: + return v.IsNil() + case reflect.Slice: + return v.Len() == 0 + case reflect.Map: + return v.Len() == 0 + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() == 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() == 0 + case reflect.Float32, reflect.Float64: + return v.Float() == 0 + case reflect.Bool: + return !v.Bool() + case reflect.Struct: + vt := v.Type() + if vt == typeTime { + return v.Interface().(time.Time).IsZero() + } + for i := 0; i < v.NumField(); i++ { + if vt.Field(i).PkgPath != "" && !vt.Field(i).Anonymous { + continue // Private field + } + if !isZero(v.Field(i)) { + return false + } + } + return true + } + return false +} + +func (e *encoder) addSlice(v reflect.Value) { + vi := v.Interface() + if d, ok := vi.(D); ok { + for _, elem := range d { + e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) + } + return + } + if d, ok := vi.(RawD); ok { + for _, elem := range d { + e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) + } + return + } + l := v.Len() + et := v.Type().Elem() + if et == typeDocElem { + for i := 0; i < l; i++ { + elem := v.Index(i).Interface().(DocElem) + e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) + } + return + } + if et == typeRawDocElem { + for i := 0; 
i < l; i++ { + elem := v.Index(i).Interface().(RawDocElem) + e.addElem(elem.Name, reflect.ValueOf(elem.Value), false) + } + return + } + for i := 0; i < l; i++ { + e.addElem(itoa(i), v.Index(i), false) + } +} + +// -------------------------------------------------------------------------- +// Marshaling of elements in a document. + +func (e *encoder) addElemName(kind byte, name string) { + e.addBytes(kind) + e.addBytes([]byte(name)...) + e.addBytes(0) +} + +func (e *encoder) addElem(name string, v reflect.Value, minSize bool) { + + if !v.IsValid() { + e.addElemName(0x0A, name) + return + } + + if getter := getGetter(v.Type(), v); getter != nil { + getv, err := getter.GetBSON() + if err != nil { + panic(err) + } + e.addElem(name, reflect.ValueOf(getv), minSize) + return + } + + switch v.Kind() { + + case reflect.Interface: + e.addElem(name, v.Elem(), minSize) + + case reflect.Ptr: + e.addElem(name, v.Elem(), minSize) + + case reflect.String: + s := v.String() + switch v.Type() { + case typeObjectId: + if len(s) != 12 { + panic("ObjectIDs must be exactly 12 bytes long (got " + + strconv.Itoa(len(s)) + ")") + } + e.addElemName(0x07, name) + e.addBytes([]byte(s)...) + case typeSymbol: + e.addElemName(0x0E, name) + e.addStr(s) + case typeJSONNumber: + n := v.Interface().(json.Number) + if i, err := n.Int64(); err == nil { + e.addElemName(0x12, name) + e.addInt64(i) + } else if f, err := n.Float64(); err == nil { + e.addElemName(0x01, name) + e.addFloat64(f) + } else { + panic("failed to convert json.Number to a number: " + s) + } + default: + e.addElemName(0x02, name) + e.addStr(s) + } + + case reflect.Float32, reflect.Float64: + e.addElemName(0x01, name) + e.addFloat64(v.Float()) + + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + u := v.Uint() + if int64(u) < 0 { + panic("BSON has no uint64 type, and value is too large to fit correctly in an int64") + } else if u <= math.MaxInt32 && (minSize || v.Kind() <= reflect.Uint32) { + e.addElemName(0x10, name) + e.addInt32(int32(u)) + } else { + e.addElemName(0x12, name) + e.addInt64(int64(u)) + } + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + switch v.Type() { + case typeMongoTimestamp: + e.addElemName(0x11, name) + e.addInt64(v.Int()) + + case typeOrderKey: + if v.Int() == int64(MaxKey) { + e.addElemName(0x7F, name) + } else { + e.addElemName(0xFF, name) + } + case typeTimeDuration: + // Stored as int64 + e.addElemName(0x12, name) + + e.addInt64(int64(v.Int() / 1e6)) + default: + i := v.Int() + if (minSize || v.Type().Kind() != reflect.Int64) && i >= math.MinInt32 && i <= math.MaxInt32 { + // It fits into an int32, encode as such. 
+ e.addElemName(0x10, name) + e.addInt32(int32(i)) + } else { + e.addElemName(0x12, name) + e.addInt64(i) + } + } + + case reflect.Bool: + e.addElemName(0x08, name) + if v.Bool() { + e.addBytes(1) + } else { + e.addBytes(0) + } + + case reflect.Map: + e.addElemName(0x03, name) + e.addDoc(v) + + case reflect.Slice: + vt := v.Type() + et := vt.Elem() + if et.Kind() == reflect.Uint8 { + if arrayOps[name] { + e.addElemName(0x04, name) + e.addDoc(v) + } else { + e.addElemName(0x05, name) + e.addBinary(0x00, v.Bytes()) + } + } else if et == typeDocElem || et == typeRawDocElem { + e.addElemName(0x03, name) + e.addDoc(v) + } else { + e.addElemName(0x04, name) + e.addDoc(v) + } + + case reflect.Array: + et := v.Type().Elem() + if et.Kind() == reflect.Uint8 { + if arrayOps[name] { + e.addElemName(0x04, name) + e.addDoc(v) + } else { + e.addElemName(0x05, name) + if v.CanAddr() { + e.addBinary(0x00, v.Slice(0, v.Len()).Interface().([]byte)) + } else { + n := v.Len() + e.addInt32(int32(n)) + e.addBytes(0x00) + for i := 0; i < n; i++ { + el := v.Index(i) + e.addBytes(byte(el.Uint())) + } + } + } + } else { + e.addElemName(0x04, name) + e.addDoc(v) + } + + case reflect.Struct: + switch s := v.Interface().(type) { + + case Raw: + kind := s.Kind + if kind == 0x00 { + kind = 0x03 + } + if len(s.Data) == 0 && kind != 0x06 && kind != 0x0A && kind != 0xFF && kind != 0x7F { + panic("Attempted to marshal empty Raw document") + } + e.addElemName(kind, name) + e.addBytes(s.Data...) + + case Binary: + e.addElemName(0x05, name) + e.addBinary(s.Kind, s.Data) + + case Decimal128: + e.addElemName(0x13, name) + e.addInt64(int64(s.l)) + e.addInt64(int64(s.h)) + + case DBPointer: + e.addElemName(0x0C, name) + e.addStr(s.Namespace) + if len(s.Id) != 12 { + panic("ObjectIDs must be exactly 12 bytes long (got " + + strconv.Itoa(len(s.Id)) + ")") + } + e.addBytes([]byte(s.Id)...) + + case RegEx: + e.addElemName(0x0B, name) + e.addCStr(s.Pattern) + options := runes(s.Options) + sort.Sort(options) + e.addCStr(string(options)) + + case JavaScript: + if s.Scope == nil { + e.addElemName(0x0D, name) + e.addStr(s.Code) + } else { + e.addElemName(0x0F, name) + start := e.reserveInt32() + e.addStr(s.Code) + e.addDoc(reflect.ValueOf(s.Scope)) + e.setInt32(start, int32(len(e.out)-start)) + } + + case time.Time: + // MongoDB handles timestamps as milliseconds. + e.addElemName(0x09, name) + e.addInt64(s.Unix()*1000 + int64(s.Nanosecond()/1e6)) + + case url.URL: + e.addElemName(0x02, name) + e.addStr(s.String()) + + case undefined: + e.addElemName(0x06, name) + + default: + e.addElemName(0x03, name) + e.addDoc(v) + } + + default: + panic("Can't marshal " + v.Type().String() + " in a BSON document") + } +} + +// ------------- +// Helper method for sorting regex options +type runes []rune + +func (a runes) Len() int { return len(a) } +func (a runes) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a runes) Less(i, j int) bool { return a[i] < a[j] } + +// -------------------------------------------------------------------------- +// Marshaling of base types. + +func (e *encoder) addBinary(subtype byte, v []byte) { + if subtype == 0x02 { + // Wonder how that brilliant idea came to life. Obsolete, luckily. + e.addInt32(int32(len(v) + 4)) + e.addBytes(subtype) + e.addInt32(int32(len(v))) + } else { + e.addInt32(int32(len(v))) + e.addBytes(subtype) + } + e.addBytes(v...) +} + +func (e *encoder) addStr(v string) { + e.addInt32(int32(len(v) + 1)) + e.addCStr(v) +} + +func (e *encoder) addCStr(v string) { + e.addBytes([]byte(v)...) 
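+	// BSON cstrings are NUL-terminated.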
+ e.addBytes(0) +} + +func (e *encoder) reserveInt32() (pos int) { + pos = len(e.out) + e.addBytes(0, 0, 0, 0) + return pos +} + +func (e *encoder) setInt32(pos int, v int32) { + e.out[pos+0] = byte(v) + e.out[pos+1] = byte(v >> 8) + e.out[pos+2] = byte(v >> 16) + e.out[pos+3] = byte(v >> 24) +} + +func (e *encoder) addInt32(v int32) { + u := uint32(v) + e.addBytes(byte(u), byte(u>>8), byte(u>>16), byte(u>>24)) +} + +func (e *encoder) addInt64(v int64) { + u := uint64(v) + e.addBytes(byte(u), byte(u>>8), byte(u>>16), byte(u>>24), + byte(u>>32), byte(u>>40), byte(u>>48), byte(u>>56)) +} + +func (e *encoder) addFloat64(v float64) { + e.addInt64(int64(math.Float64bits(v))) +} + +func (e *encoder) addBytes(v ...byte) { + e.out = append(e.out, v...) +} diff --git a/vendor/github.com/globalsign/mgo/bson/json.go b/vendor/github.com/globalsign/mgo/bson/json.go new file mode 100644 index 00000000000..045c713012b --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/json.go @@ -0,0 +1,384 @@ +package bson + +import ( + "bytes" + "encoding/base64" + "fmt" + "strconv" + "strings" + "time" + + "github.com/globalsign/mgo/internal/json" +) + +// UnmarshalJSON unmarshals a JSON value that may hold non-standard +// syntax as defined in BSON's extended JSON specification. +func UnmarshalJSON(data []byte, value interface{}) error { + d := json.NewDecoder(bytes.NewBuffer(data)) + d.Extend(&jsonExt) + return d.Decode(value) +} + +// MarshalJSON marshals a JSON value that may hold non-standard +// syntax as defined in BSON's extended JSON specification. +func MarshalJSON(value interface{}) ([]byte, error) { + var buf bytes.Buffer + e := json.NewEncoder(&buf) + e.Extend(&jsonExt) + err := e.Encode(value) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +// jdec is used internally by the JSON decoding functions +// so they may unmarshal functions without getting into endless +// recursion due to keyed objects. 
+func jdec(data []byte, value interface{}) error { + d := json.NewDecoder(bytes.NewBuffer(data)) + d.Extend(&funcExt) + return d.Decode(value) +} + +var jsonExt json.Extension +var funcExt json.Extension + +// TODO +// - Shell regular expressions ("/regexp/opts") + +func init() { + jsonExt.DecodeUnquotedKeys(true) + jsonExt.DecodeTrailingCommas(true) + + funcExt.DecodeFunc("BinData", "$binaryFunc", "$type", "$binary") + jsonExt.DecodeKeyed("$binary", jdecBinary) + jsonExt.DecodeKeyed("$binaryFunc", jdecBinary) + jsonExt.EncodeType([]byte(nil), jencBinarySlice) + jsonExt.EncodeType(Binary{}, jencBinaryType) + + funcExt.DecodeFunc("ISODate", "$dateFunc", "S") + funcExt.DecodeFunc("new Date", "$dateFunc", "S") + jsonExt.DecodeKeyed("$date", jdecDate) + jsonExt.DecodeKeyed("$dateFunc", jdecDate) + jsonExt.EncodeType(time.Time{}, jencDate) + + funcExt.DecodeFunc("Timestamp", "$timestamp", "t", "i") + jsonExt.DecodeKeyed("$timestamp", jdecTimestamp) + jsonExt.EncodeType(MongoTimestamp(0), jencTimestamp) + + funcExt.DecodeConst("undefined", Undefined) + + jsonExt.DecodeKeyed("$regex", jdecRegEx) + jsonExt.EncodeType(RegEx{}, jencRegEx) + + funcExt.DecodeFunc("ObjectId", "$oidFunc", "Id") + jsonExt.DecodeKeyed("$oid", jdecObjectId) + jsonExt.DecodeKeyed("$oidFunc", jdecObjectId) + jsonExt.EncodeType(ObjectId(""), jencObjectId) + + funcExt.DecodeFunc("DBRef", "$dbrefFunc", "$ref", "$id") + jsonExt.DecodeKeyed("$dbrefFunc", jdecDBRef) + + funcExt.DecodeFunc("NumberLong", "$numberLongFunc", "N") + jsonExt.DecodeKeyed("$numberLong", jdecNumberLong) + jsonExt.DecodeKeyed("$numberLongFunc", jdecNumberLong) + jsonExt.EncodeType(int64(0), jencNumberLong) + jsonExt.EncodeType(int(0), jencInt) + + funcExt.DecodeConst("MinKey", MinKey) + funcExt.DecodeConst("MaxKey", MaxKey) + jsonExt.DecodeKeyed("$minKey", jdecMinKey) + jsonExt.DecodeKeyed("$maxKey", jdecMaxKey) + jsonExt.EncodeType(orderKey(0), jencMinMaxKey) + + jsonExt.DecodeKeyed("$undefined", jdecUndefined) + jsonExt.EncodeType(Undefined, jencUndefined) + + jsonExt.Extend(&funcExt) +} + +func fbytes(format string, args ...interface{}) []byte { + var buf bytes.Buffer + fmt.Fprintf(&buf, format, args...) 
+ return buf.Bytes() +} + +func jdecBinary(data []byte) (interface{}, error) { + var v struct { + Binary []byte `json:"$binary"` + Type string `json:"$type"` + Func struct { + Binary []byte `json:"$binary"` + Type int64 `json:"$type"` + } `json:"$binaryFunc"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + } + + var binData []byte + var binKind int64 + if v.Type == "" && v.Binary == nil { + binData = v.Func.Binary + binKind = v.Func.Type + } else if v.Type == "" { + return v.Binary, nil + } else { + binData = v.Binary + binKind, err = strconv.ParseInt(v.Type, 0, 64) + if err != nil { + binKind = -1 + } + } + + if binKind == 0 { + return binData, nil + } + if binKind < 0 || binKind > 255 { + return nil, fmt.Errorf("invalid type in binary object: %s", data) + } + + return Binary{Kind: byte(binKind), Data: binData}, nil +} + +func jencBinarySlice(v interface{}) ([]byte, error) { + in := v.([]byte) + out := make([]byte, base64.StdEncoding.EncodedLen(len(in))) + base64.StdEncoding.Encode(out, in) + return fbytes(`{"$binary":"%s","$type":"0x0"}`, out), nil +} + +func jencBinaryType(v interface{}) ([]byte, error) { + in := v.(Binary) + out := make([]byte, base64.StdEncoding.EncodedLen(len(in.Data))) + base64.StdEncoding.Encode(out, in.Data) + return fbytes(`{"$binary":"%s","$type":"0x%x"}`, out, in.Kind), nil +} + +const jdateFormat = "2006-01-02T15:04:05.999Z07:00" + +func jdecDate(data []byte) (interface{}, error) { + var v struct { + S string `json:"$date"` + Func struct { + S string + } `json:"$dateFunc"` + } + _ = jdec(data, &v) + if v.S == "" { + v.S = v.Func.S + } + if v.S != "" { + var errs []string + for _, format := range []string{jdateFormat, "2006-01-02"} { + t, err := time.Parse(format, v.S) + if err == nil { + return t, nil + } + errs = append(errs, err.Error()) + } + return nil, fmt.Errorf("cannot parse date: %q [%s]", v.S, strings.Join(errs, ", ")) + } + + var vn struct { + Date struct { + N int64 `json:"$numberLong,string"` + } `json:"$date"` + Func struct { + S int64 + } `json:"$dateFunc"` + } + err := jdec(data, &vn) + if err != nil { + return nil, fmt.Errorf("cannot parse date: %q", data) + } + n := vn.Date.N + if n == 0 { + n = vn.Func.S + } + return time.Unix(n/1000, n%1000*1e6).UTC(), nil +} + +func jencDate(v interface{}) ([]byte, error) { + t := v.(time.Time) + return fbytes(`{"$date":%q}`, t.Format(jdateFormat)), nil +} + +func jdecTimestamp(data []byte) (interface{}, error) { + var v struct { + Func struct { + T int32 `json:"t"` + I int32 `json:"i"` + } `json:"$timestamp"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + } + return MongoTimestamp(uint64(v.Func.T)<<32 | uint64(uint32(v.Func.I))), nil +} + +func jencTimestamp(v interface{}) ([]byte, error) { + ts := uint64(v.(MongoTimestamp)) + return fbytes(`{"$timestamp":{"t":%d,"i":%d}}`, ts>>32, uint32(ts)), nil +} + +func jdecRegEx(data []byte) (interface{}, error) { + var v struct { + Regex string `json:"$regex"` + Options string `json:"$options"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + } + return RegEx{v.Regex, v.Options}, nil +} + +func jencRegEx(v interface{}) ([]byte, error) { + re := v.(RegEx) + type regex struct { + Regex string `json:"$regex"` + Options string `json:"$options"` + } + return json.Marshal(regex{re.Pattern, re.Options}) +} + +func jdecObjectId(data []byte) (interface{}, error) { + var v struct { + Id string `json:"$oid"` + Func struct { + Id string + } `json:"$oidFunc"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + 
} + if v.Id == "" { + v.Id = v.Func.Id + } + return ObjectIdHex(v.Id), nil +} + +func jencObjectId(v interface{}) ([]byte, error) { + return fbytes(`{"$oid":"%s"}`, v.(ObjectId).Hex()), nil +} + +func jdecDBRef(data []byte) (interface{}, error) { + // TODO Support unmarshaling $ref and $id into the input value. + var v struct { + Obj map[string]interface{} `json:"$dbrefFunc"` + } + // TODO Fix this. Must not be required. + v.Obj = make(map[string]interface{}) + err := jdec(data, &v) + if err != nil { + return nil, err + } + return v.Obj, nil +} + +func jdecNumberLong(data []byte) (interface{}, error) { + var v struct { + N int64 `json:"$numberLong,string"` + Func struct { + N int64 `json:",string"` + } `json:"$numberLongFunc"` + } + var vn struct { + N int64 `json:"$numberLong"` + Func struct { + N int64 + } `json:"$numberLongFunc"` + } + err := jdec(data, &v) + if err != nil { + err = jdec(data, &vn) + v.N = vn.N + v.Func.N = vn.Func.N + } + if err != nil { + return nil, err + } + if v.N != 0 { + return v.N, nil + } + return v.Func.N, nil +} + +func jencNumberLong(v interface{}) ([]byte, error) { + n := v.(int64) + f := `{"$numberLong":"%d"}` + if n <= 1<<53 { + f = `{"$numberLong":%d}` + } + return fbytes(f, n), nil +} + +func jencInt(v interface{}) ([]byte, error) { + n := v.(int) + f := `{"$numberLong":"%d"}` + if int64(n) <= 1<<53 { + f = `%d` + } + return fbytes(f, n), nil +} + +func jdecMinKey(data []byte) (interface{}, error) { + var v struct { + N int64 `json:"$minKey"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + } + if v.N != 1 { + return nil, fmt.Errorf("invalid $minKey object: %s", data) + } + return MinKey, nil +} + +func jdecMaxKey(data []byte) (interface{}, error) { + var v struct { + N int64 `json:"$maxKey"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + } + if v.N != 1 { + return nil, fmt.Errorf("invalid $maxKey object: %s", data) + } + return MaxKey, nil +} + +func jencMinMaxKey(v interface{}) ([]byte, error) { + switch v.(orderKey) { + case MinKey: + return []byte(`{"$minKey":1}`), nil + case MaxKey: + return []byte(`{"$maxKey":1}`), nil + } + panic(fmt.Sprintf("invalid $minKey/$maxKey value: %d", v)) +} + +func jdecUndefined(data []byte) (interface{}, error) { + var v struct { + B bool `json:"$undefined"` + } + err := jdec(data, &v) + if err != nil { + return nil, err + } + if !v.B { + return nil, fmt.Errorf("invalid $undefined object: %s", data) + } + return Undefined, nil +} + +func jencUndefined(v interface{}) ([]byte, error) { + return []byte(`{"$undefined":true}`), nil +} diff --git a/vendor/github.com/globalsign/mgo/bson/stream.go b/vendor/github.com/globalsign/mgo/bson/stream.go new file mode 100644 index 00000000000..466528457b5 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/bson/stream.go @@ -0,0 +1,90 @@ +package bson + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" +) + +const ( + // MinDocumentSize is the size of the smallest possible valid BSON document: + // an int32 size header + 0x00 (end of document). + MinDocumentSize = 5 + + // MaxDocumentSize is the largest possible size for a BSON document allowed by MongoDB, + // that is, 16 MiB (see https://docs.mongodb.com/manual/reference/limits/). + MaxDocumentSize = 16777216 +) + +// ErrInvalidDocumentSize is an error returned when a BSON document's header +// contains a size smaller than MinDocumentSize or greater than MaxDocumentSize. 
+type ErrInvalidDocumentSize struct { + DocumentSize int32 +} + +func (e ErrInvalidDocumentSize) Error() string { + return fmt.Sprintf("invalid document size %d", e.DocumentSize) +} + +// A Decoder reads and decodes BSON values from an input stream. +type Decoder struct { + source io.Reader +} + +// NewDecoder returns a new Decoder that reads from source. +// It does not add any extra buffering, and may not read data from source beyond the BSON values requested. +func NewDecoder(source io.Reader) *Decoder { + return &Decoder{source: source} +} + +// Decode reads the next BSON-encoded value from its input and stores it in the value pointed to by v. +// See the documentation for Unmarshal for details about the conversion of BSON into a Go value. +func (dec *Decoder) Decode(v interface{}) (err error) { + // BSON documents start with their size as a *signed* int32. + var docSize int32 + if err = binary.Read(dec.source, binary.LittleEndian, &docSize); err != nil { + return + } + + if docSize < MinDocumentSize || docSize > MaxDocumentSize { + return ErrInvalidDocumentSize{DocumentSize: docSize} + } + + docBuffer := bytes.NewBuffer(make([]byte, 0, docSize)) + if err = binary.Write(docBuffer, binary.LittleEndian, docSize); err != nil { + return + } + + // docSize is the *full* document's size (including the 4-byte size header, + // which has already been read). + if _, err = io.CopyN(docBuffer, dec.source, int64(docSize-4)); err != nil { + return + } + + // Let Unmarshal handle the rest. + defer handleErr(&err) + return Unmarshal(docBuffer.Bytes(), v) +} + +// An Encoder encodes and writes BSON values to an output stream. +type Encoder struct { + target io.Writer +} + +// NewEncoder returns a new Encoder that writes to target. +func NewEncoder(target io.Writer) *Encoder { + return &Encoder{target: target} +} + +// Encode encodes v to BSON, and if successful writes it to the Encoder's output stream. +// See the documentation for Marshal for details about the conversion of Go values to BSON. +func (enc *Encoder) Encode(v interface{}) error { + data, err := Marshal(v) + if err != nil { + return err + } + + _, err = enc.target.Write(data) + return err +} diff --git a/vendor/github.com/globalsign/mgo/internal/json/BUILD b/vendor/github.com/globalsign/mgo/internal/json/BUILD new file mode 100644 index 00000000000..89f9d09ce93 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/internal/json/BUILD @@ -0,0 +1,32 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +go_library( + name = "go_default_library", + srcs = [ + "decode.go", + "encode.go", + "extension.go", + "fold.go", + "indent.go", + "scanner.go", + "stream.go", + "tags.go", + ], + importmap = "k8s.io/kubernetes/vendor/github.com/globalsign/mgo/internal/json", + importpath = "github.com/globalsign/mgo/internal/json", + visibility = ["//vendor/github.com/globalsign/mgo:__subpackages__"], +) + +filegroup( + name = "package-srcs", + srcs = glob(["**"]), + tags = ["automanaged"], + visibility = ["//visibility:private"], +) + +filegroup( + name = "all-srcs", + srcs = [":package-srcs"], + tags = ["automanaged"], + visibility = ["//visibility:public"], +) diff --git a/vendor/github.com/globalsign/mgo/internal/json/LICENSE b/vendor/github.com/globalsign/mgo/internal/json/LICENSE new file mode 100644 index 00000000000..74487567632 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/internal/json/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2012 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/globalsign/mgo/internal/json/decode.go b/vendor/github.com/globalsign/mgo/internal/json/decode.go new file mode 100644 index 00000000000..d5ca1f9a851 --- /dev/null +++ b/vendor/github.com/globalsign/mgo/internal/json/decode.go @@ -0,0 +1,1685 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Represents JSON data structure using native Go types: booleans, floats, +// strings, arrays, and maps. + +package json + +import ( + "bytes" + "encoding" + "encoding/base64" + "errors" + "fmt" + "reflect" + "runtime" + "strconv" + "unicode" + "unicode/utf16" + "unicode/utf8" +) + +// Unmarshal parses the JSON-encoded data and stores the result +// in the value pointed to by v. +// +// Unmarshal uses the inverse of the encodings that +// Marshal uses, allocating maps, slices, and pointers as necessary, +// with the following additional rules: +// +// To unmarshal JSON into a pointer, Unmarshal first handles the case of +// the JSON being the JSON literal null. In that case, Unmarshal sets +// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into +// the value pointed at by the pointer. If the pointer is nil, Unmarshal +// allocates a new value for it to point to. +// +// To unmarshal JSON into a struct, Unmarshal matches incoming object +// keys to the keys used by Marshal (either the struct field name or its tag), +// preferring an exact match but also accepting a case-insensitive match. +// Unmarshal will only set exported fields of the struct. +// +// To unmarshal JSON into an interface value, +// Unmarshal stores one of these in the interface value: +// +// bool, for JSON booleans +// float64, for JSON numbers +// string, for JSON strings +// []interface{}, for JSON arrays +// map[string]interface{}, for JSON objects +// nil for JSON null +// +// To unmarshal a JSON array into a slice, Unmarshal resets the slice length +// to zero and then appends each element to the slice. 
+// As a special case, to unmarshal an empty JSON array into a slice, +// Unmarshal replaces the slice with a new empty slice. +// +// To unmarshal a JSON array into a Go array, Unmarshal decodes +// JSON array elements into corresponding Go array elements. +// If the Go array is smaller than the JSON array, +// the additional JSON array elements are discarded. +// If the JSON array is smaller than the Go array, +// the additional Go array elements are set to zero values. +// +// To unmarshal a JSON object into a map, Unmarshal first establishes a map to +// use, If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal +// reuses the existing map, keeping existing entries. Unmarshal then stores key- +// value pairs from the JSON object into the map. The map's key type must +// either be a string or implement encoding.TextUnmarshaler. +// +// If a JSON value is not appropriate for a given target type, +// or if a JSON number overflows the target type, Unmarshal +// skips that field and completes the unmarshaling as best it can. +// If no more serious errors are encountered, Unmarshal returns +// an UnmarshalTypeError describing the earliest such error. +// +// The JSON null value unmarshals into an interface, map, pointer, or slice +// by setting that Go value to nil. Because null is often used in JSON to mean +// ``not present,'' unmarshaling a JSON null into any other Go type has no effect +// on the value and produces no error. +// +// When unmarshaling quoted strings, invalid UTF-8 or +// invalid UTF-16 surrogate pairs are not treated as an error. +// Instead, they are replaced by the Unicode replacement +// character U+FFFD. +// +func Unmarshal(data []byte, v interface{}) error { + // Check for well-formedness. + // Avoids filling out half a data structure + // before discovering a JSON syntax error. + var d decodeState + err := checkValid(data, &d.scan) + if err != nil { + return err + } + + d.init(data) + return d.unmarshal(v) +} + +// Unmarshaler is the interface implemented by types +// that can unmarshal a JSON description of themselves. +// The input can be assumed to be a valid encoding of +// a JSON value. UnmarshalJSON must copy the JSON data +// if it wishes to retain the data after returning. +type Unmarshaler interface { + UnmarshalJSON([]byte) error +} + +// An UnmarshalTypeError describes a JSON value that was +// not appropriate for a value of a specific Go type. +type UnmarshalTypeError struct { + Value string // description of JSON value - "bool", "array", "number -5" + Type reflect.Type // type of Go value it could not be assigned to + Offset int64 // error occurred after reading Offset bytes +} + +func (e *UnmarshalTypeError) Error() string { + return "json: cannot unmarshal " + e.Value + " into Go value of type " + e.Type.String() +} + +// An UnmarshalFieldError describes a JSON object key that +// led to an unexported (and therefore unwritable) struct field. +// (No longer used; kept for compatibility.) +type UnmarshalFieldError struct { + Key string + Type reflect.Type + Field reflect.StructField +} + +func (e *UnmarshalFieldError) Error() string { + return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String() +} + +// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal. +// (The argument to Unmarshal must be a non-nil pointer.) 
+type InvalidUnmarshalError struct { + Type reflect.Type +} + +func (e *InvalidUnmarshalError) Error() string { + if e.Type == nil { + return "json: Unmarshal(nil)" + } + + if e.Type.Kind() != reflect.Ptr { + return "json: Unmarshal(non-pointer " + e.Type.String() + ")" + } + return "json: Unmarshal(nil " + e.Type.String() + ")" +} + +func (d *decodeState) unmarshal(v interface{}) (err error) { + defer func() { + if r := recover(); r != nil { + if _, ok := r.(runtime.Error); ok { + panic(r) + } + err = r.(error) + } + }() + + rv := reflect.ValueOf(v) + if rv.Kind() != reflect.Ptr || rv.IsNil() { + return &InvalidUnmarshalError{reflect.TypeOf(v)} + } + + d.scan.reset() + // We decode rv not rv.Elem because the Unmarshaler interface + // test must be applied at the top level of the value. + d.value(rv) + return d.savedError +} + +// A Number represents a JSON number literal. +type Number string + +// String returns the literal text of the number. +func (n Number) String() string { return string(n) } + +// Float64 returns the number as a float64. +func (n Number) Float64() (float64, error) { + return strconv.ParseFloat(string(n), 64) +} + +// Int64 returns the number as an int64. +func (n Number) Int64() (int64, error) { + return strconv.ParseInt(string(n), 10, 64) +} + +// isValidNumber reports whether s is a valid JSON number literal. +func isValidNumber(s string) bool { + // This function implements the JSON numbers grammar. + // See https://tools.ietf.org/html/rfc7159#section-6 + // and http://json.org/number.gif + + if s == "" { + return false + } + + // Optional - + if s[0] == '-' { + s = s[1:] + if s == "" { + return false + } + } + + // Digits + switch { + default: + return false + + case s[0] == '0': + s = s[1:] + + case '1' <= s[0] && s[0] <= '9': + s = s[1:] + for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { + s = s[1:] + } + } + + // . followed by 1 or more digits. + if len(s) >= 2 && s[0] == '.' && '0' <= s[1] && s[1] <= '9' { + s = s[2:] + for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { + s = s[1:] + } + } + + // e or E followed by an optional - or + and + // 1 or more digits. + if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') { + s = s[1:] + if s[0] == '+' || s[0] == '-' { + s = s[1:] + if s == "" { + return false + } + } + for len(s) > 0 && '0' <= s[0] && s[0] <= '9' { + s = s[1:] + } + } + + // Make sure we are at the end. + return s == "" +} + +// decodeState represents the state while decoding a JSON value. +type decodeState struct { + data []byte + off int // read offset in data + scan scanner + nextscan scanner // for calls to nextValue + savedError error + useNumber bool + ext Extension +} + +// errPhase is used for errors that should not happen unless +// there is a bug in the JSON decoder or something is editing +// the data slice while the decoder executes. +var errPhase = errors.New("JSON decoder out of sync - data changing underfoot?") + +func (d *decodeState) init(data []byte) *decodeState { + d.data = data + d.off = 0 + d.savedError = nil + return d +} + +// error aborts the decoding by panicking with err. +func (d *decodeState) error(err error) { + panic(err) +} + +// saveError saves the first err it is called with, +// for reporting at the end of the unmarshal. +func (d *decodeState) saveError(err error) { + if d.savedError == nil { + d.savedError = err + } +} + +// next cuts off and returns the next full JSON value in d.data[d.off:]. +// The next value is known to be an object or array, not a literal. 
+func (d *decodeState) next() []byte { + c := d.data[d.off] + item, rest, err := nextValue(d.data[d.off:], &d.nextscan) + if err != nil { + d.error(err) + } + d.off = len(d.data) - len(rest) + + // Our scanner has seen the opening brace/bracket + // and thinks we're still in the middle of the object. + // invent a closing brace/bracket to get it out. + if c == '{' { + d.scan.step(&d.scan, '}') + } else if c == '[' { + d.scan.step(&d.scan, ']') + } else { + // Was inside a function name. Get out of it. + d.scan.step(&d.scan, '(') + d.scan.step(&d.scan, ')') + } + + return item +} + +// scanWhile processes bytes in d.data[d.off:] until it +// receives a scan code not equal to op. +// It updates d.off and returns the new scan code. +func (d *decodeState) scanWhile(op int) int { + var newOp int + for { + if d.off >= len(d.data) { + newOp = d.scan.eof() + d.off = len(d.data) + 1 // mark processed EOF with len+1 + } else { + c := d.data[d.off] + d.off++ + newOp = d.scan.step(&d.scan, c) + } + if newOp != op { + break + } + } + return newOp +} + +// value decodes a JSON value from d.data[d.off:] into the value. +// it updates d.off to point past the decoded value. +func (d *decodeState) value(v reflect.Value) { + if !v.IsValid() { + _, rest, err := nextValue(d.data[d.off:], &d.nextscan) + if err != nil { + d.error(err) + } + d.off = len(d.data) - len(rest) + + // d.scan thinks we're still at the beginning of the item. + // Feed in an empty string - the shortest, simplest value - + // so that it knows we got to the end of the value. + if d.scan.redo { + // rewind. + d.scan.redo = false + d.scan.step = stateBeginValue + } + d.scan.step(&d.scan, '"') + d.scan.step(&d.scan, '"') + + n := len(d.scan.parseState) + if n > 0 && d.scan.parseState[n-1] == parseObjectKey { + // d.scan thinks we just read an object key; finish the object + d.scan.step(&d.scan, ':') + d.scan.step(&d.scan, '"') + d.scan.step(&d.scan, '"') + d.scan.step(&d.scan, '}') + } + + return + } + + switch op := d.scanWhile(scanSkipSpace); op { + default: + d.error(errPhase) + + case scanBeginArray: + d.array(v) + + case scanBeginObject: + d.object(v) + + case scanBeginLiteral: + d.literal(v) + + case scanBeginName: + d.name(v) + } +} + +type unquotedValue struct{} + +// valueQuoted is like value but decodes a +// quoted string literal or literal null into an interface value. +// If it finds anything other than a quoted string literal or null, +// valueQuoted returns unquotedValue{}. +func (d *decodeState) valueQuoted() interface{} { + switch op := d.scanWhile(scanSkipSpace); op { + default: + d.error(errPhase) + + case scanBeginArray: + d.array(reflect.Value{}) + + case scanBeginObject: + d.object(reflect.Value{}) + + case scanBeginName: + switch v := d.nameInterface().(type) { + case nil, string: + return v + } + + case scanBeginLiteral: + switch v := d.literalInterface().(type) { + case nil, string: + return v + } + } + return unquotedValue{} +} + +// indirect walks down v allocating pointers as needed, +// until it gets to a non-pointer. +// if it encounters an Unmarshaler, indirect stops and returns that. +// if decodingNull is true, indirect stops at the last pointer so it can be set to nil. +func (d *decodeState) indirect(v reflect.Value, decodingNull bool) (Unmarshaler, encoding.TextUnmarshaler, reflect.Value) { + // If v is a named type and is addressable, + // start with its address, so that if the type has pointer methods, + // we find them. 
+ if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() { + v = v.Addr() + } + for { + // Load value from interface, but only if the result will be + // usefully addressable. + if v.Kind() == reflect.Interface && !v.IsNil() { + e := v.Elem() + if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) { + v = e + continue + } + } + + if v.Kind() != reflect.Ptr { + break + } + + if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() { + break + } + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + if v.Type().NumMethod() > 0 { + if u, ok := v.Interface().(Unmarshaler); ok { + return u, nil, v + } + if u, ok := v.Interface().(encoding.TextUnmarshaler); ok { + return nil, u, v + } + } + v = v.Elem() + } + return nil, nil, v +} + +// array consumes an array from d.data[d.off-1:], decoding into the value v. +// the first byte of the array ('[') has been read already. +func (d *decodeState) array(v reflect.Value) { + // Check for unmarshaler. + u, ut, pv := d.indirect(v, false) + if u != nil { + d.off-- + err := u.UnmarshalJSON(d.next()) + if err != nil { + d.error(err) + } + return + } + if ut != nil { + d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)}) + d.off-- + d.next() + return + } + + v = pv + + // Check type of target. + switch v.Kind() { + case reflect.Interface: + if v.NumMethod() == 0 { + // Decoding into nil interface? Switch to non-reflect code. + v.Set(reflect.ValueOf(d.arrayInterface())) + return + } + // Otherwise it's invalid. + fallthrough + default: + d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)}) + d.off-- + d.next() + return + case reflect.Array: + case reflect.Slice: + break + } + + i := 0 + for { + // Look ahead for ] - can only happen on first iteration. + op := d.scanWhile(scanSkipSpace) + if op == scanEndArray { + break + } + + // Back up so d.value can have the byte we just read. + d.off-- + d.scan.undo(op) + + // Get element of array, growing if necessary. + if v.Kind() == reflect.Slice { + // Grow slice if necessary + if i >= v.Cap() { + newcap := v.Cap() + v.Cap()/2 + if newcap < 4 { + newcap = 4 + } + newv := reflect.MakeSlice(v.Type(), v.Len(), newcap) + reflect.Copy(newv, v) + v.Set(newv) + } + if i >= v.Len() { + v.SetLen(i + 1) + } + } + + if i < v.Len() { + // Decode into element. + d.value(v.Index(i)) + } else { + // Ran out of fixed array: skip. + d.value(reflect.Value{}) + } + i++ + + // Next token must be , or ]. + op = d.scanWhile(scanSkipSpace) + if op == scanEndArray { + break + } + if op != scanArrayValue { + d.error(errPhase) + } + } + + if i < v.Len() { + if v.Kind() == reflect.Array { + // Array. Zero the rest. + z := reflect.Zero(v.Type().Elem()) + for ; i < v.Len(); i++ { + v.Index(i).Set(z) + } + } else { + v.SetLen(i) + } + } + if i == 0 && v.Kind() == reflect.Slice { + v.Set(reflect.MakeSlice(v.Type(), 0, 0)) + } +} + +var nullLiteral = []byte("null") +var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem() + +// object consumes an object from d.data[d.off-1:], decoding into the value v. +// the first byte ('{') of the object has been read already. +func (d *decodeState) object(v reflect.Value) { + // Check for unmarshaler. 
+ u, ut, pv := d.indirect(v, false) + if d.storeKeyed(pv) { + return + } + if u != nil { + d.off-- + err := u.UnmarshalJSON(d.next()) + if err != nil { + d.error(err) + } + return + } + if ut != nil { + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + d.off-- + d.next() // skip over { } in input + return + } + v = pv + + // Decoding into nil interface? Switch to non-reflect code. + if v.Kind() == reflect.Interface && v.NumMethod() == 0 { + v.Set(reflect.ValueOf(d.objectInterface())) + return + } + + // Check type of target: + // struct or + // map[string]T or map[encoding.TextUnmarshaler]T + switch v.Kind() { + case reflect.Map: + // Map key must either have string kind or be an encoding.TextUnmarshaler. + t := v.Type() + if t.Key().Kind() != reflect.String && + !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) { + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + d.off-- + d.next() // skip over { } in input + return + } + if v.IsNil() { + v.Set(reflect.MakeMap(t)) + } + case reflect.Struct: + + default: + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + d.off-- + d.next() // skip over { } in input + return + } + + var mapElem reflect.Value + + empty := true + for { + // Read opening " of string key or closing }. + op := d.scanWhile(scanSkipSpace) + if op == scanEndObject { + if !empty && !d.ext.trailingCommas { + d.syntaxError("beginning of object key string") + } + break + } + empty = false + if op == scanBeginName { + if !d.ext.unquotedKeys { + d.syntaxError("beginning of object key string") + } + } else if op != scanBeginLiteral { + d.error(errPhase) + } + unquotedKey := op == scanBeginName + + // Read key. + start := d.off - 1 + op = d.scanWhile(scanContinue) + item := d.data[start : d.off-1] + var key []byte + if unquotedKey { + key = item + // TODO Fix code below to quote item when necessary. + } else { + var ok bool + key, ok = unquoteBytes(item) + if !ok { + d.error(errPhase) + } + } + + // Figure out field corresponding to key. + var subv reflect.Value + destring := false // whether the value is wrapped in a string to be decoded first + + if v.Kind() == reflect.Map { + elemType := v.Type().Elem() + if !mapElem.IsValid() { + mapElem = reflect.New(elemType).Elem() + } else { + mapElem.Set(reflect.Zero(elemType)) + } + subv = mapElem + } else { + var f *field + fields := cachedTypeFields(v.Type()) + for i := range fields { + ff := &fields[i] + if bytes.Equal(ff.nameBytes, key) { + f = ff + break + } + if f == nil && ff.equalFold(ff.nameBytes, key) { + f = ff + } + } + if f != nil { + subv = v + destring = f.quoted + for _, i := range f.index { + if subv.Kind() == reflect.Ptr { + if subv.IsNil() { + subv.Set(reflect.New(subv.Type().Elem())) + } + subv = subv.Elem() + } + subv = subv.Field(i) + } + } + } + + // Read : before value. + if op == scanSkipSpace { + op = d.scanWhile(scanSkipSpace) + } + if op != scanObjectKey { + d.error(errPhase) + } + + // Read value. + if destring { + switch qv := d.valueQuoted().(type) { + case nil: + d.literalStore(nullLiteral, subv, false) + case string: + d.literalStore([]byte(qv), subv, true) + default: + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type())) + } + } else { + d.value(subv) + } + + // Write value back to map; + // if using struct, subv points into struct already. 
+ if v.Kind() == reflect.Map { + kt := v.Type().Key() + var kv reflect.Value + switch { + case kt.Kind() == reflect.String: + kv = reflect.ValueOf(key).Convert(v.Type().Key()) + case reflect.PtrTo(kt).Implements(textUnmarshalerType): + kv = reflect.New(v.Type().Key()) + d.literalStore(item, kv, true) + kv = kv.Elem() + default: + panic("json: Unexpected key type") // should never occur + } + v.SetMapIndex(kv, subv) + } + + // Next token must be , or }. + op = d.scanWhile(scanSkipSpace) + if op == scanEndObject { + break + } + if op != scanObjectValue { + d.error(errPhase) + } + } +} + +// isNull returns whether there's a null literal at the provided offset. +func (d *decodeState) isNull(off int) bool { + if off+4 >= len(d.data) || d.data[off] != 'n' || d.data[off+1] != 'u' || d.data[off+2] != 'l' || d.data[off+3] != 'l' { + return false + } + d.nextscan.reset() + for i, c := range d.data[off:] { + if i > 4 { + return false + } + switch d.nextscan.step(&d.nextscan, c) { + case scanContinue, scanBeginName: + continue + } + break + } + return true +} + +// name consumes a const or function from d.data[d.off-1:], decoding into the value v. +// the first byte of the function name has been read already. +func (d *decodeState) name(v reflect.Value) { + if d.isNull(d.off - 1) { + d.literal(v) + return + } + + // Check for unmarshaler. + u, ut, pv := d.indirect(v, false) + if d.storeKeyed(pv) { + return + } + if u != nil { + d.off-- + err := u.UnmarshalJSON(d.next()) + if err != nil { + d.error(err) + } + return + } + if ut != nil { + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + d.off-- + d.next() // skip over function in input + return + } + v = pv + + // Decoding into nil interface? Switch to non-reflect code. + if v.Kind() == reflect.Interface && v.NumMethod() == 0 { + out := d.nameInterface() + if out == nil { + v.Set(reflect.Zero(v.Type())) + } else { + v.Set(reflect.ValueOf(out)) + } + return + } + + nameStart := d.off - 1 + + op := d.scanWhile(scanContinue) + + name := d.data[nameStart : d.off-1] + if op != scanParam { + // Back up so the byte just read is consumed next. + d.off-- + d.scan.undo(op) + if l, ok := d.convertLiteral(name); ok { + d.storeValue(v, l) + return + } + d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)}) + } + + funcName := string(name) + funcData := d.ext.funcs[funcName] + if funcData.key == "" { + d.error(fmt.Errorf("json: unknown function %q", funcName)) + } + + // Check type of target: + // struct or + // map[string]T or map[encoding.TextUnmarshaler]T + switch v.Kind() { + case reflect.Map: + // Map key must either have string kind or be an encoding.TextUnmarshaler. + t := v.Type() + if t.Key().Kind() != reflect.String && + !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) { + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + d.off-- + d.next() // skip over { } in input + return + } + if v.IsNil() { + v.Set(reflect.MakeMap(t)) + } + case reflect.Struct: + + default: + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + d.off-- + d.next() // skip over { } in input + return + } + + // TODO Fix case of func field as map. + //topv := v + + // Figure out field corresponding to function. 
+ key := []byte(funcData.key) + if v.Kind() == reflect.Map { + elemType := v.Type().Elem() + v = reflect.New(elemType).Elem() + } else { + var f *field + fields := cachedTypeFields(v.Type()) + for i := range fields { + ff := &fields[i] + if bytes.Equal(ff.nameBytes, key) { + f = ff + break + } + if f == nil && ff.equalFold(ff.nameBytes, key) { + f = ff + } + } + if f != nil { + for _, i := range f.index { + if v.Kind() == reflect.Ptr { + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + v = v.Elem() + } + v = v.Field(i) + } + if v.Kind() == reflect.Ptr { + if v.IsNil() { + v.Set(reflect.New(v.Type().Elem())) + } + v = v.Elem() + } + } + } + + // Check for unmarshaler on func field itself. + u, _, _ = d.indirect(v, false) + if u != nil { + d.off = nameStart + err := u.UnmarshalJSON(d.next()) + if err != nil { + d.error(err) + } + return + } + + var mapElem reflect.Value + + // Parse function arguments. + for i := 0; ; i++ { + // closing ) - can only happen on first iteration. + op := d.scanWhile(scanSkipSpace) + if op == scanEndParams { + break + } + + // Back up so d.value can have the byte we just read. + d.off-- + d.scan.undo(op) + + if i >= len(funcData.args) { + d.error(fmt.Errorf("json: too many arguments for function %s", funcName)) + } + key := []byte(funcData.args[i]) + + // Figure out field corresponding to key. + var subv reflect.Value + destring := false // whether the value is wrapped in a string to be decoded first + + if v.Kind() == reflect.Map { + elemType := v.Type().Elem() + if !mapElem.IsValid() { + mapElem = reflect.New(elemType).Elem() + } else { + mapElem.Set(reflect.Zero(elemType)) + } + subv = mapElem + } else { + var f *field + fields := cachedTypeFields(v.Type()) + for i := range fields { + ff := &fields[i] + if bytes.Equal(ff.nameBytes, key) { + f = ff + break + } + if f == nil && ff.equalFold(ff.nameBytes, key) { + f = ff + } + } + if f != nil { + subv = v + destring = f.quoted + for _, i := range f.index { + if subv.Kind() == reflect.Ptr { + if subv.IsNil() { + subv.Set(reflect.New(subv.Type().Elem())) + } + subv = subv.Elem() + } + subv = subv.Field(i) + } + } + } + + // Read value. + if destring { + switch qv := d.valueQuoted().(type) { + case nil: + d.literalStore(nullLiteral, subv, false) + case string: + d.literalStore([]byte(qv), subv, true) + default: + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type())) + } + } else { + d.value(subv) + } + + // Write value back to map; + // if using struct, subv points into struct already. + if v.Kind() == reflect.Map { + kt := v.Type().Key() + var kv reflect.Value + switch { + case kt.Kind() == reflect.String: + kv = reflect.ValueOf(key).Convert(v.Type().Key()) + case reflect.PtrTo(kt).Implements(textUnmarshalerType): + kv = reflect.New(v.Type().Key()) + d.literalStore(key, kv, true) + kv = kv.Elem() + default: + panic("json: Unexpected key type") // should never occur + } + v.SetMapIndex(kv, subv) + } + + // Next token must be , or ). + op = d.scanWhile(scanSkipSpace) + if op == scanEndParams { + break + } + if op != scanParam { + d.error(errPhase) + } + } +} + +// keyed attempts to decode an object or function using a keyed doc extension, +// and returns the value and true on success, or nil and false otherwise. +func (d *decodeState) keyed() (interface{}, bool) { + if len(d.ext.keyed) == 0 { + return nil, false + } + + unquote := false + + // Look-ahead first key to check for a keyed document extension. 
+ d.nextscan.reset() + var start, end int + for i, c := range d.data[d.off-1:] { + switch op := d.nextscan.step(&d.nextscan, c); op { + case scanSkipSpace, scanContinue, scanBeginObject: + continue + case scanBeginLiteral, scanBeginName: + unquote = op == scanBeginLiteral + start = i + continue + } + end = i + break + } + + name := bytes.Trim(d.data[d.off-1+start:d.off-1+end], " \n\t") + + var key []byte + var ok bool + if unquote { + key, ok = unquoteBytes(name) + if !ok { + d.error(errPhase) + } + } else { + funcData, ok := d.ext.funcs[string(name)] + if !ok { + return nil, false + } + key = []byte(funcData.key) + } + + decode, ok := d.ext.keyed[string(key)] + if !ok { + return nil, false + } + + d.off-- + out, err := decode(d.next()) + if err != nil { + d.error(err) + } + return out, true +} + +func (d *decodeState) storeKeyed(v reflect.Value) bool { + keyed, ok := d.keyed() + if !ok { + return false + } + d.storeValue(v, keyed) + return true +} + +var ( + trueBytes = []byte("true") + falseBytes = []byte("false") + nullBytes = []byte("null") +) + +func (d *decodeState) storeValue(v reflect.Value, from interface{}) { + switch from { + case nil: + d.literalStore(nullBytes, v, false) + return + case true: + d.literalStore(trueBytes, v, false) + return + case false: + d.literalStore(falseBytes, v, false) + return + } + fromv := reflect.ValueOf(from) + for fromv.Kind() == reflect.Ptr && !fromv.IsNil() { + fromv = fromv.Elem() + } + fromt := fromv.Type() + for v.Kind() == reflect.Ptr && !v.IsNil() { + v = v.Elem() + } + vt := v.Type() + if fromt.AssignableTo(vt) { + v.Set(fromv) + } else if fromt.ConvertibleTo(vt) { + v.Set(fromv.Convert(vt)) + } else { + d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)}) + } +} + +func (d *decodeState) convertLiteral(name []byte) (interface{}, bool) { + if len(name) == 0 { + return nil, false + } + switch name[0] { + case 't': + if bytes.Equal(name, trueBytes) { + return true, true + } + case 'f': + if bytes.Equal(name, falseBytes) { + return false, true + } + case 'n': + if bytes.Equal(name, nullBytes) { + return nil, true + } + } + if l, ok := d.ext.consts[string(name)]; ok { + return l, true + } + return nil, false +} + +// literal consumes a literal from d.data[d.off-1:], decoding into the value v. +// The first byte of the literal has been read already +// (that's how the caller knows it's a literal). +func (d *decodeState) literal(v reflect.Value) { + // All bytes inside literal return scanContinue op code. + start := d.off - 1 + op := d.scanWhile(scanContinue) + + // Scan read one byte too far; back up. + d.off-- + d.scan.undo(op) + + d.literalStore(d.data[start:d.off], v, false) +} + +// convertNumber converts the number literal s to a float64 or a Number +// depending on the setting of d.useNumber. +func (d *decodeState) convertNumber(s string) (interface{}, error) { + if d.useNumber { + return Number(s), nil + } + f, err := strconv.ParseFloat(s, 64) + if err != nil { + return nil, &UnmarshalTypeError{"number " + s, reflect.TypeOf(0.0), int64(d.off)} + } + return f, nil +} + +var numberType = reflect.TypeOf(Number("")) + +// literalStore decodes a literal stored in item into v. +// +// fromQuoted indicates whether this literal came from unwrapping a +// string from the ",string" struct tag option. this is used only to +// produce more helpful error messages. +func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) { + // Check for unmarshaler. 
+ if len(item) == 0 { + //Empty string given + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + return + } + wantptr := item[0] == 'n' // null + u, ut, pv := d.indirect(v, wantptr) + if u != nil { + err := u.UnmarshalJSON(item) + if err != nil { + d.error(err) + } + return + } + if ut != nil { + if item[0] != '"' { + if fromQuoted { + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) + } + return + } + s, ok := unquoteBytes(item) + if !ok { + if fromQuoted { + d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.error(errPhase) + } + } + err := ut.UnmarshalText(s) + if err != nil { + d.error(err) + } + return + } + + v = pv + + switch c := item[0]; c { + case 'n': // null + switch v.Kind() { + case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice: + v.Set(reflect.Zero(v.Type())) + // otherwise, ignore null for primitives/string + } + case 't', 'f': // true, false + value := c == 't' + switch v.Kind() { + default: + if fromQuoted { + d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)}) + } + case reflect.Bool: + v.SetBool(value) + case reflect.Interface: + if v.NumMethod() == 0 { + v.Set(reflect.ValueOf(value)) + } else { + d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)}) + } + } + + case '"': // string + s, ok := unquoteBytes(item) + if !ok { + if fromQuoted { + d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.error(errPhase) + } + } + switch v.Kind() { + default: + d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) + case reflect.Slice: + if v.Type().Elem().Kind() != reflect.Uint8 { + d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) + break + } + b := make([]byte, base64.StdEncoding.DecodedLen(len(s))) + n, err := base64.StdEncoding.Decode(b, s) + if err != nil { + d.saveError(err) + break + } + v.SetBytes(b[:n]) + case reflect.String: + v.SetString(string(s)) + case reflect.Interface: + if v.NumMethod() == 0 { + v.Set(reflect.ValueOf(string(s))) + } else { + d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)}) + } + } + + default: // number + if c != '-' && (c < '0' || c > '9') { + if fromQuoted { + d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.error(errPhase) + } + } + s := string(item) + switch v.Kind() { + default: + if v.Kind() == reflect.String && v.Type() == numberType { + v.SetString(s) + if !isValidNumber(s) { + d.error(fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item)) + } + break + } + if fromQuoted { + d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type())) + } else { + d.error(&UnmarshalTypeError{"number", v.Type(), int64(d.off)}) + } + case reflect.Interface: + n, err := d.convertNumber(s) + if err != nil { + d.saveError(err) + break + } + if v.NumMethod() != 0 { + d.saveError(&UnmarshalTypeError{"number", v.Type(), int64(d.off)}) + break + } + v.Set(reflect.ValueOf(n)) + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, 
reflect.Int64: + n, err := strconv.ParseInt(s, 10, 64) + if err != nil || v.OverflowInt(n) { + d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)}) + break + } + v.SetInt(n) + + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + n, err := strconv.ParseUint(s, 10, 64) + if err != nil || v.OverflowUint(n) { + d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)}) + break + } + v.SetUint(n) + + case reflect.Float32, reflect.Float64: + n, err := strconv.ParseFloat(s, v.Type().Bits()) + if err != nil || v.OverflowFloat(n) { + d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)}) + break + } + v.SetFloat(n) + } + } +} + +// The xxxInterface routines build up a value to be stored +// in an empty interface. They are not strictly necessary, +// but they avoid the weight of reflection in this common case. + +// valueInterface is like value but returns interface{} +func (d *decodeState) valueInterface() interface{} { + switch d.scanWhile(scanSkipSpace) { + default: + d.error(errPhase) + panic("unreachable") + case scanBeginArray: + return d.arrayInterface() + case scanBeginObject: + return d.objectInterface() + case scanBeginLiteral: + return d.literalInterface() + case scanBeginName: + return d.nameInterface() + } +} + +func (d *decodeState) syntaxError(expected string) { + msg := fmt.Sprintf("invalid character '%c' looking for %s", d.data[d.off-1], expected) + d.error(&SyntaxError{msg, int64(d.off)}) +} + +// arrayInterface is like array but returns []interface{}. +func (d *decodeState) arrayInterface() []interface{} { + var v = make([]interface{}, 0) + for { + // Look ahead for ] - can only happen on first iteration. + op := d.scanWhile(scanSkipSpace) + if op == scanEndArray { + if len(v) > 0 && !d.ext.trailingCommas { + d.syntaxError("beginning of value") + } + break + } + + // Back up so d.value can have the byte we just read. + d.off-- + d.scan.undo(op) + + v = append(v, d.valueInterface()) + + // Next token must be , or ]. + op = d.scanWhile(scanSkipSpace) + if op == scanEndArray { + break + } + if op != scanArrayValue { + d.error(errPhase) + } + } + return v +} + +// objectInterface is like object but returns map[string]interface{}. +func (d *decodeState) objectInterface() interface{} { + v, ok := d.keyed() + if ok { + return v + } + + m := make(map[string]interface{}) + for { + // Read opening " of string key or closing }. + op := d.scanWhile(scanSkipSpace) + if op == scanEndObject { + if len(m) > 0 && !d.ext.trailingCommas { + d.syntaxError("beginning of object key string") + } + break + } + if op == scanBeginName { + if !d.ext.unquotedKeys { + d.syntaxError("beginning of object key string") + } + } else if op != scanBeginLiteral { + d.error(errPhase) + } + unquotedKey := op == scanBeginName + + // Read string key. + start := d.off - 1 + op = d.scanWhile(scanContinue) + item := d.data[start : d.off-1] + var key string + if unquotedKey { + key = string(item) + } else { + var ok bool + key, ok = unquote(item) + if !ok { + d.error(errPhase) + } + } + + // Read : before value. + if op == scanSkipSpace { + op = d.scanWhile(scanSkipSpace) + } + if op != scanObjectKey { + d.error(errPhase) + } + + // Read value. + m[key] = d.valueInterface() + + // Next token must be , or }. + op = d.scanWhile(scanSkipSpace) + if op == scanEndObject { + break + } + if op != scanObjectValue { + d.error(errPhase) + } + } + return m +} + +// literalInterface is like literal but returns an interface value. 
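For plain JSON (that is, without this fork's function and constant extensions), decoding into an empty interface through the xxxInterface routines yields the familiar concrete types; a small sketch with the standard library's encoding/json, which uses the same mapping:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	func main() {
		var v interface{}
		data := []byte(`{"name":"a","tags":["x","y"],"n":3,"ok":true,"gone":null}`)
		if err := json.Unmarshal(data, &v); err != nil {
			panic(err)
		}
		m := v.(map[string]interface{}) // objects decode to map[string]interface{}
		fmt.Printf("%T %T %T %T %v\n",
			m["name"], m["tags"], m["n"], m["ok"], m["gone"] == nil)
		// string []interface {} float64 bool true
	}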
+func (d *decodeState) literalInterface() interface{} { + // All bytes inside literal return scanContinue op code. + start := d.off - 1 + op := d.scanWhile(scanContinue) + + // Scan read one byte too far; back up. + d.off-- + d.scan.undo(op) + item := d.data[start:d.off] + + switch c := item[0]; c { + case 'n': // null + return nil + + case 't', 'f': // true, false + return c == 't' + + case '"': // string + s, ok := unquote(item) + if !ok { + d.error(errPhase) + } + return s + + default: // number + if c != '-' && (c < '0' || c > '9') { + d.error(errPhase) + } + n, err := d.convertNumber(string(item)) + if err != nil { + d.saveError(err) + } + return n + } +} + +// nameInterface is like function but returns map[string]interface{}. +func (d *decodeState) nameInterface() interface{} { + v, ok := d.keyed() + if ok { + return v + } + + nameStart := d.off - 1 + + op := d.scanWhile(scanContinue) + + name := d.data[nameStart : d.off-1] + if op != scanParam { + // Back up so the byte just read is consumed next. + d.off-- + d.scan.undo(op) + if l, ok := d.convertLiteral(name); ok { + return l + } + d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)}) + } + + funcName := string(name) + funcData := d.ext.funcs[funcName] + if funcData.key == "" { + d.error(fmt.Errorf("json: unknown function %q", funcName)) + } + + m := make(map[string]interface{}) + for i := 0; ; i++ { + // Look ahead for ) - can only happen on first iteration. + op := d.scanWhile(scanSkipSpace) + if op == scanEndParams { + break + } + + // Back up so d.value can have the byte we just read. + d.off-- + d.scan.undo(op) + + if i >= len(funcData.args) { + d.error(fmt.Errorf("json: too many arguments for function %s", funcName)) + } + m[funcData.args[i]] = d.valueInterface() + + // Next token must be , or ). + op = d.scanWhile(scanSkipSpace) + if op == scanEndParams { + break + } + if op != scanParam { + d.error(errPhase) + } + } + return map[string]interface{}{funcData.key: m} +} + +// getu4 decodes \uXXXX from the beginning of s, returning the hex value, +// or it returns -1. +func getu4(s []byte) rune { + if len(s) < 6 || s[0] != '\\' || s[1] != 'u' { + return -1 + } + r, err := strconv.ParseUint(string(s[2:6]), 16, 64) + if err != nil { + return -1 + } + return rune(r) +} + +// unquote converts a quoted JSON string literal s into an actual string t. +// The rules are different than for Go, so cannot use strconv.Unquote. +func unquote(s []byte) (t string, ok bool) { + s, ok = unquoteBytes(s) + t = string(s) + return +} + +func unquoteBytes(s []byte) (t []byte, ok bool) { + if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' { + return + } + s = s[1 : len(s)-1] + + // Check for unusual characters. If there are none, + // then no unquoting is needed, so return a slice of the + // original bytes. + r := 0 + for r < len(s) { + c := s[r] + if c == '\\' || c == '"' || c < ' ' { + break + } + if c < utf8.RuneSelf { + r++ + continue + } + rr, size := utf8.DecodeRune(s[r:]) + if rr == utf8.RuneError && size == 1 { + break + } + r += size + } + if r == len(s) { + return s, true + } + + b := make([]byte, len(s)+2*utf8.UTFMax) + w := copy(b, s[0:r]) + for r < len(s) { + // Out of room? Can only happen if s is full of + // malformed UTF-8 and we're replacing each + // byte with RuneError. 
+ if w >= len(b)-2*utf8.UTFMax { + nb := make([]byte, (len(b)+utf8.UTFMax)*2) + copy(nb, b[0:w]) + b = nb + } + switch c := s[r]; { + case c == '\\': + r++ + if r >= len(s) { + return + } + switch s[r] { + default: + return + case '"', '\\', '/', '\'': + b[w] = s[r] + r++ + w++ + case 'b': + b[w] = '\b' + r++ + w++ + case 'f': + b[w] = '\f' + r++ + w++ + case 'n': + b[w] = '\n' + r++ + w++ + case 'r': + b[w] = '\r' + r++ + w++ + case 't': + b[w] = '\t' + r++ + w++ + case 'u': + r-- + rr := getu4(s[r:]) + if rr < 0 { + return + } + r += 6 + if utf16.IsSurrogate(rr) { + rr1 := getu4(s[r:]) + if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar { + // A valid pair; consume. + r += 6 + w += utf8.EncodeRune(b[w:], dec) + break + } + // Invalid surrogate; fall back to replacement rune. + rr = unicode.ReplacementChar + } + w += utf8.EncodeRune(b[w:], rr) + } + + // Quote, control characters are invalid. + case c == '"', c < ' ': + return + + // ASCII + case c < utf8.RuneSelf: + b[w] = c + r++ + w++ + + // Coerce to well-formed UTF-8. + default: + rr, size := utf8.DecodeRune(s[r:]) + r += size + w += utf8.EncodeRune(b[w:], rr) + } + } + return b[0:w], true +} diff --git a/vendor/github.com/globalsign/mgo/internal/json/encode.go b/vendor/github.com/globalsign/mgo/internal/json/encode.go new file mode 100644 index 00000000000..e4b8f86487c --- /dev/null +++ b/vendor/github.com/globalsign/mgo/internal/json/encode.go @@ -0,0 +1,1260 @@ +// Copyright 2010 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package json implements encoding and decoding of JSON as defined in +// RFC 4627. The mapping between JSON and Go values is described +// in the documentation for the Marshal and Unmarshal functions. +// +// See "JSON and Go" for an introduction to this package: +// https://golang.org/doc/articles/json_and_go.html +package json + +import ( + "bytes" + "encoding" + "encoding/base64" + "fmt" + "math" + "reflect" + "runtime" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +// Marshal returns the JSON encoding of v. +// +// Marshal traverses the value v recursively. +// If an encountered value implements the Marshaler interface +// and is not a nil pointer, Marshal calls its MarshalJSON method +// to produce JSON. If no MarshalJSON method is present but the +// value implements encoding.TextMarshaler instead, Marshal calls +// its MarshalText method. +// The nil pointer exception is not strictly necessary +// but mimics a similar, necessary exception in the behavior of +// UnmarshalJSON. +// +// Otherwise, Marshal uses the following type-dependent default encodings: +// +// Boolean values encode as JSON booleans. +// +// Floating point, integer, and Number values encode as JSON numbers. +// +// String values encode as JSON strings coerced to valid UTF-8, +// replacing invalid bytes with the Unicode replacement rune. +// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e" +// to keep some browsers from misinterpreting JSON output as HTML. +// Ampersand "&" is also escaped to "\u0026" for the same reason. +// This escaping can be disabled using an Encoder with DisableHTMLEscaping. +// +// Array and slice values encode as JSON arrays, except that +// []byte encodes as a base64-encoded string, and a nil slice +// encodes as the null JSON value. +// +// Struct values encode as JSON objects. 
Each exported struct field +// becomes a member of the object unless +// - the field's tag is "-", or +// - the field is empty and its tag specifies the "omitempty" option. +// The empty values are false, 0, any +// nil pointer or interface value, and any array, slice, map, or string of +// length zero. The object's default key string is the struct field name +// but can be specified in the struct field's tag value. The "json" key in +// the struct field's tag value is the key name, followed by an optional comma +// and options. Examples: +// +// // Field is ignored by this package. +// Field int `json:"-"` +// +// // Field appears in JSON as key "myName". +// Field int `json:"myName"` +// +// // Field appears in JSON as key "myName" and +// // the field is omitted from the object if its value is empty, +// // as defined above. +// Field int `json:"myName,omitempty"` +// +// // Field appears in JSON as key "Field" (the default), but +// // the field is skipped if empty. +// // Note the leading comma. +// Field int `json:",omitempty"` +// +// The "string" option signals that a field is stored as JSON inside a +// JSON-encoded string. It applies only to fields of string, floating point, +// integer, or boolean types. This extra level of encoding is sometimes used +// when communicating with JavaScript programs: +// +// Int64String int64 `json:",string"` +// +// The key name will be used if it's a non-empty string consisting of +// only Unicode letters, digits, dollar signs, percent signs, hyphens, +// underscores and slashes. +// +// Anonymous struct fields are usually marshaled as if their inner exported fields +// were fields in the outer struct, subject to the usual Go visibility rules amended +// as described in the next paragraph. +// An anonymous struct field with a name given in its JSON tag is treated as +// having that name, rather than being anonymous. +// An anonymous struct field of interface type is treated the same as having +// that type as its name, rather than being anonymous. +// +// The Go visibility rules for struct fields are amended for JSON when +// deciding which field to marshal or unmarshal. If there are +// multiple fields at the same level, and that level is the least +// nested (and would therefore be the nesting level selected by the +// usual Go rules), the following extra rules apply: +// +// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered, +// even if there are multiple untagged fields that would otherwise conflict. +// 2) If there is exactly one field (tagged or not according to the first rule), that is selected. +// 3) Otherwise there are multiple fields, and all are ignored; no error occurs. +// +// Handling of anonymous struct fields is new in Go 1.1. +// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of +// an anonymous struct field in both current and earlier versions, give the field +// a JSON tag of "-". +// +// Map values encode as JSON objects. The map's key type must either be a string +// or implement encoding.TextMarshaler. The map keys are used as JSON object +// keys, subject to the UTF-8 coercion described for string values above. +// +// Pointer values encode as the value pointed to. +// A nil pointer encodes as the null JSON value. +// +// Interface values encode as the value contained in the interface. +// A nil interface value encodes as the null JSON value. +// +// Channel, complex, and function values cannot be encoded in JSON. 
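The tag options described above can be combined; a hedged sketch with the standard library's encoding/json (from which this file derives), using made-up field names, that also shows the []byte and nil-slice encodings:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	type event struct {
		Name    string   `json:"myName"`
		Retries int      `json:"retries,omitempty"` // dropped while zero
		ID      int64    `json:",string"`           // written as a JSON string
		Blob    []byte   `json:"blob"`              // base64-encoded
		Tags    []string `json:"tags"`              // nil slice encodes as null
	}

	func main() {
		out, _ := json.Marshal(event{Name: "<boot>", ID: 7, Blob: []byte("hi")})
		fmt.Println(string(out))
		// {"myName":"\u003cboot\u003e","ID":"7","blob":"aGk=","tags":null}
	}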
+// Attempting to encode such a value causes Marshal to return +// an UnsupportedTypeError. +// +// JSON cannot represent cyclic data structures and Marshal does not +// handle them. Passing cyclic structures to Marshal will result in +// an infinite recursion. +// +func Marshal(v interface{}) ([]byte, error) { + e := &encodeState{} + err := e.marshal(v, encOpts{escapeHTML: true}) + if err != nil { + return nil, err + } + return e.Bytes(), nil +} + +// MarshalIndent is like Marshal but applies Indent to format the output. +func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) { + b, err := Marshal(v) + if err != nil { + return nil, err + } + var buf bytes.Buffer + err = Indent(&buf, b, prefix, indent) + if err != nil { + return nil, err + } + return buf.Bytes(), nil +} + +// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029 +// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029 +// so that the JSON will be safe to embed inside HTML