Update to gazelle 0.14.0 and run hack/update-bazel.sh
Godeps/Godeps.json (generated, 72 changed lines)
@@ -336,58 +336,78 @@
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/cmd/gazelle",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/config",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
-            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/generator",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/flag",
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/label",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
+        },
+        {
+            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/language",
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
+        },
+        {
+            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/language/go",
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
+        },
+        {
+            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/language/proto",
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/merger",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
-        },
-        {
-            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/packages",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/pathtools",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/repos",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/resolve",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
+        },
+        {
+            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/rule",
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/version",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
+        },
+        {
+            "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/walk",
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/bazel-gazelle/internal/wspace",
-            "Comment": "0.12.0",
-            "Rev": "7f30ba724af9495b221e2df0f5ac58511179485f"
+            "Comment": "0.14.0",
+            "Rev": "6a1b93cc9b1c7e55e7d05a6d324bcf9d87ea3ab1"
         },
         {
             "ImportPath": "github.com/bazelbuild/buildtools/build",
Godeps/LICENSES (generated, 846 changed lines)
@@ -11268,7 +11268,7 @@ THE SOFTWARE.
 
 
 ================================================================================
-= vendor/github.com/bazelbuild/bazel-gazelle/internal/generator licensed under: =
+= vendor/github.com/bazelbuild/bazel-gazelle/internal/flag licensed under: =
 
 
 Apache License
@@ -11688,7 +11688,7 @@ THE SOFTWARE.
 
 
 ================================================================================
-= vendor/github.com/bazelbuild/bazel-gazelle/internal/merger licensed under: =
+= vendor/github.com/bazelbuild/bazel-gazelle/internal/language licensed under: =
 
 
 Apache License
@@ -11898,7 +11898,427 @@ THE SOFTWARE.
 
 
 ================================================================================
-= vendor/github.com/bazelbuild/bazel-gazelle/internal/packages licensed under: =
+= vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go licensed under: =
 
 
+[full text of the Apache License, Version 2.0]
+
+= vendor/github.com/bazelbuild/bazel-gazelle/LICENSE 3b83ef96387f14655fc854ddc3c6bd57
+================================================================================
+
+
+================================================================================
+= vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto licensed under: =
+
+
+[full text of the Apache License, Version 2.0]
+
+= vendor/github.com/bazelbuild/bazel-gazelle/LICENSE 3b83ef96387f14655fc854ddc3c6bd57
+================================================================================
+
+
+================================================================================
+= vendor/github.com/bazelbuild/bazel-gazelle/internal/merger licensed under: =
+
+
 Apache License
@@ -12737,6 +13157,216 @@ THE SOFTWARE.
 ================================================================================
 
 
+================================================================================
+= vendor/github.com/bazelbuild/bazel-gazelle/internal/rule licensed under: =
+
+
+[full text of the Apache License, Version 2.0]
+
+= vendor/github.com/bazelbuild/bazel-gazelle/LICENSE 3b83ef96387f14655fc854ddc3c6bd57
+================================================================================
+
+
 ================================================================================
 = vendor/github.com/bazelbuild/bazel-gazelle/internal/version licensed under: =
 
@@ -12947,6 +13577,216 @@ THE SOFTWARE.
|
|||||||
================================================================================
|
================================================================================
|
||||||
|
|
||||||
|
|
||||||
|
================================================================================
|
||||||
|
= vendor/github.com/bazelbuild/bazel-gazelle/internal/walk licensed under: =
|
||||||
|
|
||||||
|
|
||||||
|
Apache License
|
||||||
|
Version 2.0, January 2004
|
||||||
|
http://www.apache.org/licenses/
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||||
|
|
||||||
|
1. Definitions.
|
||||||
|
|
||||||
|
"License" shall mean the terms and conditions for use, reproduction,
|
||||||
|
and distribution as defined by Sections 1 through 9 of this document.
|
||||||
|
|
||||||
|
"Licensor" shall mean the copyright owner or entity authorized by
|
||||||
|
the copyright owner that is granting the License.
|
||||||
|
|
||||||
|
"Legal Entity" shall mean the union of the acting entity and all
|
||||||
|
other entities that control, are controlled by, or are under common
|
||||||
|
control with that entity. For the purposes of this definition,
|
||||||
|
"control" means (i) the power, direct or indirect, to cause the
|
||||||
|
direction or management of such entity, whether by contract or
|
||||||
|
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||||
|
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||||
|
|
||||||
|
"You" (or "Your") shall mean an individual or Legal Entity
|
||||||
|
exercising permissions granted by this License.
|
||||||
|
|
||||||
|
"Source" form shall mean the preferred form for making modifications,
|
||||||
|
including but not limited to software source code, documentation
|
||||||
|
source, and configuration files.
|
||||||
|
|
||||||
|
"Object" form shall mean any form resulting from mechanical
|
||||||
|
transformation or translation of a Source form, including but
|
||||||
|
not limited to compiled object code, generated documentation,
|
||||||
|
and conversions to other media types.
|
||||||
|
|
||||||
|
"Work" shall mean the work of authorship, whether in Source or
|
||||||
|
Object form, made available under the License, as indicated by a
|
||||||
|
copyright notice that is included in or attached to the work
|
||||||
|
(an example is provided in the Appendix below).
|
||||||
|
|
||||||
|
"Derivative Works" shall mean any work, whether in Source or Object
|
||||||
|
form, that is based on (or derived from) the Work and for which the
|
||||||
|
editorial revisions, annotations, elaborations, or other modifications
|
||||||
|
represent, as a whole, an original work of authorship. For the purposes
|
||||||
|
of this License, Derivative Works shall not include works that remain
|
||||||
|
separable from, or merely link (or bind by name) to the interfaces of,
|
||||||
|
the Work and Derivative Works thereof.
|
||||||
|
|
||||||
|
"Contribution" shall mean any work of authorship, including
|
||||||
|
the original version of the Work and any modifications or additions
|
||||||
|
to that Work or Derivative Works thereof, that is intentionally
|
||||||
|
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||||
|
or by an individual or Legal Entity authorized to submit on behalf of
|
||||||
|
the copyright owner. For the purposes of this definition, "submitted"
|
||||||
|
means any form of electronic, verbal, or written communication sent
|
||||||
|
to the Licensor or its representatives, including but not limited to
|
||||||
|
communication on electronic mailing lists, source code control systems,
|
||||||
|
and issue tracking systems that are managed by, or on behalf of, the
|
||||||
|
Licensor for the purpose of discussing and improving the Work, but
|
||||||
|
excluding communication that is conspicuously marked or otherwise
|
||||||
|
designated in writing by the copyright owner as "Not a Contribution."
|
||||||
|
|
||||||
|
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||||
|
on behalf of whom a Contribution has been received by Licensor and
|
||||||
|
subsequently incorporated within the Work.
|
||||||
|
|
||||||
|
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
copyright license to reproduce, prepare Derivative Works of,
|
||||||
|
publicly display, publicly perform, sublicense, and distribute the
|
||||||
|
Work and such Derivative Works in Source or Object form.
|
||||||
|
|
||||||
|
3. Grant of Patent License. Subject to the terms and conditions of
|
||||||
|
this License, each Contributor hereby grants to You a perpetual,
|
||||||
|
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||||
|
(except as stated in this section) patent license to make, have made,
|
||||||
|
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||||
|
where such license applies only to those patent claims licensable
|
||||||
|
by such Contributor that are necessarily infringed by their
|
||||||
|
Contribution(s) alone or by combination of their Contribution(s)
|
||||||
|
with the Work to which such Contribution(s) was submitted. If You
|
||||||
|
institute patent litigation against any entity (including a
|
||||||
|
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||||
|
or a Contribution incorporated within the Work constitutes direct
|
||||||
|
or contributory patent infringement, then any patent licenses
|
||||||
|
granted to You under this License for that Work shall terminate
|
||||||
|
as of the date such litigation is filed.
|
||||||
|
|
||||||
|
4. Redistribution. You may reproduce and distribute copies of the
|
||||||
|
Work or Derivative Works thereof in any medium, with or without
|
||||||
|
modifications, and in Source or Object form, provided that You
|
||||||
|
meet the following conditions:
|
||||||
|
|
||||||
|
(a) You must give any other recipients of the Work or
|
||||||
|
Derivative Works a copy of this License; and
|
||||||
|
|
||||||
|
(b) You must cause any modified files to carry prominent notices
|
||||||
|
stating that You changed the files; and
|
||||||
|
|
||||||
|
(c) You must retain, in the Source form of any Derivative Works
|
||||||
|
that You distribute, all copyright, patent, trademark, and
|
||||||
|
attribution notices from the Source form of the Work,
|
||||||
|
excluding those notices that do not pertain to any part of
|
||||||
|
the Derivative Works; and
|
||||||
|
|
||||||
|
(d) If the Work includes a "NOTICE" text file as part of its
|
||||||
|
distribution, then any Derivative Works that You distribute must
|
||||||
|
include a readable copy of the attribution notices contained
|
||||||
|
within such NOTICE file, excluding those notices that do not
|
||||||
|
pertain to any part of the Derivative Works, in at least one
|
||||||
|
of the following places: within a NOTICE text file distributed
|
||||||
|
as part of the Derivative Works; within the Source form or
|
||||||
|
documentation, if provided along with the Derivative Works; or,
|
||||||
|
within a display generated by the Derivative Works, if and
|
||||||
|
wherever such third-party notices normally appear. The contents
|
||||||
|
of the NOTICE file are for informational purposes only and
|
||||||
|
do not modify the License. You may add Your own attribution
|
||||||
|
notices within Derivative Works that You distribute, alongside
|
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.

You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.

6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.

7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.

8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.

9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

= vendor/github.com/bazelbuild/bazel-gazelle/LICENSE 3b83ef96387f14655fc854ddc3c6bd57
================================================================================


================================================================================
= vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace licensed under: =

6 vendor/BUILD vendored
@@ -49,14 +49,16 @@ filegroup(
         "//vendor/github.com/aws/aws-sdk-go/service/sts:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:all-srcs",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/generator:all-srcs",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/flag:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:all-srcs",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/merger:all-srcs",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/packages:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:all-srcs",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/version:all-srcs",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/walk:all-srcs",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace:all-srcs",
         "//vendor/github.com/bazelbuild/buildtools/build:all-srcs",
         "//vendor/github.com/bazelbuild/buildtools/tables:all-srcs",
12 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/BUILD generated vendored
@@ -6,8 +6,8 @@ go_library(
         "diff.go",
         "fix.go",
         "fix-update.go",
-        "flags.go",
         "gazelle.go",
+        "langs.go",
         "print.go",
         "update-repos.go",
         "version.go",
@@ -17,15 +17,17 @@ go_library(
     visibility = ["//visibility:private"],
     deps = [
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/generator:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/flag:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/merger:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/packages:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/version:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace:go_default_library",
-        "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/walk:go_default_library",
     ],
 )
 
11 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/diff.go generated vendored
@@ -20,21 +20,18 @@ import (
 	"io/ioutil"
 	"os"
 	"os/exec"
+	"path/filepath"
-	"github.com/bazelbuild/bazel-gazelle/internal/config"
-	bf "github.com/bazelbuild/buildtools/build"
 )
 
-func diffFile(c *config.Config, file *bf.File, path string) error {
+func diffFile(path string, newContents []byte) error {
-	oldContents, err := ioutil.ReadFile(file.Path)
+	oldContents, err := ioutil.ReadFile(path)
 	if err != nil {
 		oldContents = nil
 	}
-	newContents := bf.Format(file)
 	if bytes.Equal(oldContents, newContents) {
 		return nil
 	}
-	f, err := ioutil.TempFile("", c.DefaultBuildFileName())
+	f, err := ioutil.TempFile("", filepath.Base(path))
 	if err != nil {
 		return err
 	}
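A note on the signature change visible above: in gazelle 0.14.0 the emit functions (printFile, fixFile, diffFile) no longer receive a *config.Config and a buildtools *bf.File. The caller formats the build file and each emitter only gets the output path and the final bytes. The following is a rough, self-contained sketch of how the three modes line up behind the shared emitFunc type; the demo main, the sample content, and the printEmit/fixEmit names are illustrative only and are not the vendored code.

package main

import (
	"fmt"
	"io/ioutil"
	"os"
	"path/filepath"
)

// emitFunc matches the narrower signature used in 0.14.0: the caller formats
// the build file and the emitter only decides where the bytes go.
type emitFunc func(path string, data []byte) error

// printEmit writes the formatted file to stdout, in the spirit of printFile.
func printEmit(_ string, data []byte) error {
	_, err := os.Stdout.Write(data)
	return err
}

// fixEmit writes the file in place, in the spirit of fixFile.
func fixEmit(path string, data []byte) error {
	if err := os.MkdirAll(filepath.Dir(path), 0777); err != nil {
		return err
	}
	return ioutil.WriteFile(path, data, 0666)
}

func main() {
	// Hypothetical content; in gazelle the bytes come from formatting a rule.File.
	content := []byte(`go_library(name = "go_default_library")` + "\n")
	modes := map[string]emitFunc{"print": printEmit, "fix": fixEmit}
	if err := modes["print"]("BUILD.bazel", content); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}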
429 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix-update.go generated vendored
@@ -16,7 +16,6 @@ limitations under the License.
 package main
 
 import (
-	"errors"
 	"flag"
 	"fmt"
 	"io/ioutil"
@@ -26,27 +25,24 @@ import (
 	"strings"
 
 	"github.com/bazelbuild/bazel-gazelle/internal/config"
-	"github.com/bazelbuild/bazel-gazelle/internal/generator"
+	gzflag "github.com/bazelbuild/bazel-gazelle/internal/flag"
 	"github.com/bazelbuild/bazel-gazelle/internal/label"
 	"github.com/bazelbuild/bazel-gazelle/internal/merger"
-	"github.com/bazelbuild/bazel-gazelle/internal/packages"
 	"github.com/bazelbuild/bazel-gazelle/internal/repos"
 	"github.com/bazelbuild/bazel-gazelle/internal/resolve"
-	"github.com/bazelbuild/bazel-gazelle/internal/wspace"
-	bf "github.com/bazelbuild/buildtools/build"
+	"github.com/bazelbuild/bazel-gazelle/internal/rule"
+	"github.com/bazelbuild/bazel-gazelle/internal/walk"
 )
 
 // updateConfig holds configuration information needed to run the fix and
 // update commands. This includes everything in config.Config, but it also
 // includes some additional fields that aren't relevant to other packages.
 type updateConfig struct {
-	c *config.Config
-	emit emitFunc
-	outDir, outSuffix string
-	repos []repos.Repo
+	emit emitFunc
+	repos []repos.Repo
 }
 
-type emitFunc func(*config.Config, *bf.File, string) error
+type emitFunc func(path string, data []byte) error
 
 var modeFromName = map[string]emitFunc{
 	"print": printFile,
@@ -54,6 +50,59 @@ var modeFromName = map[string]emitFunc{
 	"diff": diffFile,
 }
 
+const updateName = "_update"
+
+func getUpdateConfig(c *config.Config) *updateConfig {
+	return c.Exts[updateName].(*updateConfig)
+}
+
+type updateConfigurer struct {
+	mode string
+}
+
+func (ucr *updateConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
+	uc := &updateConfig{}
+	c.Exts[updateName] = uc
+
+	c.ShouldFix = cmd == "fix"
+
+	fs.StringVar(&ucr.mode, "mode", "fix", "print: prints all of the updated BUILD files\n\tfix: rewrites all of the BUILD files in place\n\tdiff: computes the rewrite but then just does a diff")
+}
+
+func (ucr *updateConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error {
+	uc := getUpdateConfig(c)
+	var ok bool
+	uc.emit, ok = modeFromName[ucr.mode]
+	if !ok {
+		return fmt.Errorf("unrecognized emit mode: %q", ucr.mode)
+	}
+
+	c.Dirs = fs.Args()
+	if len(c.Dirs) == 0 {
+		c.Dirs = []string{"."}
+	}
+	for i := range c.Dirs {
+		dir, err := filepath.Abs(c.Dirs[i])
+		if err != nil {
+			return fmt.Errorf("%s: failed to find absolute path: %v", c.Dirs[i], err)
+		}
+		dir, err = filepath.EvalSymlinks(dir)
+		if err != nil {
+			return fmt.Errorf("%s: failed to resolve symlinks: %v", c.Dirs[i], err)
+		}
+		if !isDescendingDir(dir, c.RepoRoot) {
+			return fmt.Errorf("dir %q is not a subdirectory of repo root %q", dir, c.RepoRoot)
+		}
+		c.Dirs[i] = dir
+	}
+
+	return nil
+}
+
+func (ucr *updateConfigurer) KnownDirectives() []string { return nil }
+
+func (ucr *updateConfigurer) Configure(c *config.Config, rel string, f *rule.File) {}
+
 // visitRecord stores information about about a directory visited with
 // packages.Walk.
 type visitRecord struct {
@@ -62,13 +111,13 @@ type visitRecord struct {
 	pkgRel string
 
 	// rules is a list of generated Go rules.
-	rules []bf.Expr
+	rules []*rule.Rule
 
 	// empty is a list of empty Go rules that may be deleted.
-	empty []bf.Expr
+	empty []*rule.Rule
 
 	// file is the build file being processed.
-	file *bf.File
+	file *rule.File
 }
 
 type byPkgRel []visitRecord
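The updateConfigurer added above follows the extension pattern gazelle 0.14.0 uses throughout: each subcommand or language registers its flags against the shared *config.Config and stashes its own settings under a private key in c.Exts, then validates them in CheckFlags. The sketch below is a simplified, self-contained stand-in for that pattern; the Config and Configurer types here are not the vendored internal/config API, and the flag handling is reduced to the minimum.

package main

import (
	"flag"
	"fmt"
)

// Config is a simplified stand-in for gazelle's config.Config: a few shared
// fields plus an Exts map that extensions use to store their own settings.
type Config struct {
	ShouldFix bool
	Exts      map[string]interface{}
}

// Configurer mirrors the shape of the interface added in internal/config:
// register flags first, then validate them once parsing is done.
type Configurer interface {
	RegisterFlags(fs *flag.FlagSet, cmd string, c *Config)
	CheckFlags(fs *flag.FlagSet, c *Config) error
}

const updateName = "_update"

type updateConfig struct{ mode string }

type updateConfigurer struct{ mode string }

func (ucr *updateConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *Config) {
	// Store an empty per-command config up front; flags fill it in later.
	c.Exts[updateName] = &updateConfig{}
	c.ShouldFix = cmd == "fix"
	fs.StringVar(&ucr.mode, "mode", "fix", "print, fix, or diff")
}

func (ucr *updateConfigurer) CheckFlags(fs *flag.FlagSet, c *Config) error {
	uc := c.Exts[updateName].(*updateConfig)
	switch ucr.mode {
	case "print", "fix", "diff":
		uc.mode = ucr.mode
		return nil
	default:
		return fmt.Errorf("unrecognized emit mode: %q", ucr.mode)
	}
}

func main() {
	c := &Config{Exts: map[string]interface{}{}}
	fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
	var cext Configurer = &updateConfigurer{}
	cext.RegisterFlags(fs, "fix", c)
	if err := fs.Parse([]string{"-mode", "diff"}); err != nil {
		fmt.Println(err)
		return
	}
	if err := cext.CheckFlags(fs, c); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println("mode:", c.Exts[updateName].(*updateConfig).mode, "shouldFix:", c.ShouldFix)
}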
@@ -77,230 +126,163 @@ func (vs byPkgRel) Len() int { return len(vs) }
|
|||||||
func (vs byPkgRel) Less(i, j int) bool { return vs[i].pkgRel < vs[j].pkgRel }
|
func (vs byPkgRel) Less(i, j int) bool { return vs[i].pkgRel < vs[j].pkgRel }
|
||||||
func (vs byPkgRel) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] }
|
func (vs byPkgRel) Swap(i, j int) { vs[i], vs[j] = vs[j], vs[i] }
|
||||||
|
|
||||||
|
var genericLoads = []rule.LoadInfo{
|
||||||
|
{
|
||||||
|
Name: "@bazel_gazelle//:def.bzl",
|
||||||
|
Symbols: []string{"gazelle"},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
func runFixUpdate(cmd command, args []string) error {
|
func runFixUpdate(cmd command, args []string) error {
|
||||||
uc, err := newFixUpdateConfiguration(cmd, args)
|
cexts := make([]config.Configurer, 0, len(languages)+2)
|
||||||
|
cexts = append(cexts, &config.CommonConfigurer{}, &updateConfigurer{})
|
||||||
|
kindToResolver := make(map[string]resolve.Resolver)
|
||||||
|
kinds := make(map[string]rule.KindInfo)
|
||||||
|
loads := genericLoads
|
||||||
|
for _, lang := range languages {
|
||||||
|
cexts = append(cexts, lang)
|
||||||
|
for kind, info := range lang.Kinds() {
|
||||||
|
kindToResolver[kind] = lang
|
||||||
|
kinds[kind] = info
|
||||||
|
}
|
||||||
|
loads = append(loads, lang.Loads()...)
|
||||||
|
}
|
||||||
|
ruleIndex := resolve.NewRuleIndex(kindToResolver)
|
||||||
|
|
||||||
|
c, err := newFixUpdateConfiguration(cmd, args, cexts, loads)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if cmd == fixCmd {
|
if cmd == fixCmd {
|
||||||
// Only check the version when "fix" is run. Generated build files
|
// Only check the version when "fix" is run. Generated build files
|
||||||
// frequently work with older version of rules_go, and we don't want to
|
// frequently work with older version of rules_go, and we don't want to
|
||||||
// nag too much since there's no way to disable this warning.
|
// nag too much since there's no way to disable this warning.
|
||||||
checkRulesGoVersion(uc.c.RepoRoot)
|
checkRulesGoVersion(c.RepoRoot)
|
||||||
}
|
}
|
||||||
|
|
||||||
l := label.NewLabeler(uc.c)
|
|
||||||
ruleIndex := resolve.NewRuleIndex()
|
|
||||||
|
|
||||||
var visits []visitRecord
|
|
||||||
|
|
||||||
// Visit all directories in the repository.
|
// Visit all directories in the repository.
|
||||||
packages.Walk(uc.c, uc.c.RepoRoot, func(dir, rel string, c *config.Config, pkg *packages.Package, file *bf.File, isUpdateDir bool) {
|
var visits []visitRecord
|
||||||
|
walk.Walk(c, cexts, func(dir, rel string, c *config.Config, update bool, f *rule.File, subdirs, regularFiles, genFiles []string) {
|
||||||
// If this file is ignored or if Gazelle was not asked to update this
|
// If this file is ignored or if Gazelle was not asked to update this
|
||||||
// directory, just index the build file and move on.
|
// directory, just index the build file and move on.
|
||||||
if !isUpdateDir {
|
if !update {
|
||||||
if file != nil {
|
if f != nil {
|
||||||
ruleIndex.AddRulesFromFile(c, file)
|
for _, r := range f.Rules {
|
||||||
|
ruleIndex.AddRule(c, r, f)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Fix any problems in the file.
|
// Fix any problems in the file.
|
||||||
if file != nil {
|
if f != nil {
|
||||||
merger.FixFile(c, file)
|
for _, l := range languages {
|
||||||
|
l.Fix(c, f)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// If the file exists, but no Go code is present, create an empty package.
|
// Generate rules.
|
||||||
// This lets us delete existing rules.
|
var empty, gen []*rule.Rule
|
||||||
if pkg == nil && file != nil {
|
for _, l := range languages {
|
||||||
pkg = packages.EmptyPackage(c, dir, rel)
|
lempty, lgen := l.GenerateRules(c, dir, rel, f, subdirs, regularFiles, genFiles, empty, gen)
|
||||||
|
empty = append(empty, lempty...)
|
||||||
|
gen = append(gen, lgen...)
|
||||||
|
}
|
||||||
|
if f == nil && len(gen) == 0 {
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
// Generate new rules and merge them into the existing file (if present).
|
// Insert or merge rules into the build file.
|
||||||
if pkg != nil {
|
if f == nil {
|
||||||
g := generator.NewGenerator(c, l, file)
|
f = rule.EmptyFile(filepath.Join(dir, c.DefaultBuildFileName()), rel)
|
||||||
rules, empty, err := g.GenerateRules(pkg)
|
for _, r := range gen {
|
||||||
if err != nil {
|
r.Insert(f)
|
||||||
log.Print(err)
|
|
||||||
return
|
|
||||||
}
|
}
|
||||||
if file == nil {
|
} else {
|
||||||
file = &bf.File{
|
merger.MergeFile(f, empty, gen, merger.PreResolve, kinds)
|
||||||
Path: filepath.Join(c.RepoRoot, filepath.FromSlash(rel), c.DefaultBuildFileName()),
|
|
||||||
Stmt: rules,
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
rules = merger.MergeFile(rules, empty, file, merger.PreResolveAttrs)
|
|
||||||
}
|
|
||||||
visits = append(visits, visitRecord{
|
|
||||||
pkgRel: rel,
|
|
||||||
rules: rules,
|
|
||||||
empty: empty,
|
|
||||||
file: file,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
visits = append(visits, visitRecord{
|
||||||
|
pkgRel: rel,
|
||||||
|
rules: gen,
|
||||||
|
empty: empty,
|
||||||
|
file: f,
|
||||||
|
})
|
||||||
|
|
||||||
// Add library rules to the dependency resolution table.
|
// Add library rules to the dependency resolution table.
|
||||||
if file != nil {
|
for _, r := range f.Rules {
|
||||||
ruleIndex.AddRulesFromFile(c, file)
|
ruleIndex.AddRule(c, r, f)
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
uc := getUpdateConfig(c)
|
||||||
|
|
||||||
// Finish building the index for dependency resolution.
|
// Finish building the index for dependency resolution.
|
||||||
ruleIndex.Finish()
|
ruleIndex.Finish()
|
||||||
|
|
||||||
// Resolve dependencies.
|
// Resolve dependencies.
|
||||||
rc := repos.NewRemoteCache(uc.repos)
|
rc := repos.NewRemoteCache(uc.repos)
|
||||||
resolver := resolve.NewResolver(uc.c, l, ruleIndex, rc)
|
for _, v := range visits {
|
||||||
for i := range visits {
|
for _, r := range v.rules {
|
||||||
for j := range visits[i].rules {
|
from := label.New(c.RepoName, v.pkgRel, r.Name())
|
||||||
visits[i].rules[j] = resolver.ResolveRule(visits[i].rules[j], visits[i].pkgRel)
|
kindToResolver[r.Kind()].Resolve(c, ruleIndex, rc, r, from)
|
||||||
}
|
}
|
||||||
merger.MergeFile(visits[i].rules, visits[i].empty, visits[i].file, merger.PostResolveAttrs)
|
merger.MergeFile(v.file, v.empty, v.rules, merger.PostResolve, kinds)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Emit merged files.
|
// Emit merged files.
|
||||||
for _, v := range visits {
|
for _, v := range visits {
|
||||||
generator.SortLabels(v.file)
|
merger.FixLoads(v.file, loads)
|
||||||
merger.FixLoads(v.file)
|
content := v.file.Format()
|
||||||
bf.Rewrite(v.file, nil) // have buildifier 'format' our rules.
|
outputPath := findOutputPath(c, v.file)
|
||||||
|
if err := uc.emit(outputPath, content); err != nil {
|
||||||
path := v.file.Path
|
|
||||||
if uc.outDir != "" {
|
|
||||||
stem := filepath.Base(v.file.Path) + uc.outSuffix
|
|
||||||
path = filepath.Join(uc.outDir, v.pkgRel, stem)
|
|
||||||
}
|
|
||||||
if err := uc.emit(uc.c, v.file, path); err != nil {
|
|
||||||
log.Print(err)
|
log.Print(err)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func newFixUpdateConfiguration(cmd command, args []string) (*updateConfig, error) {
|
func newFixUpdateConfiguration(cmd command, args []string, cexts []config.Configurer, loads []rule.LoadInfo) (*config.Config, error) {
|
||||||
uc := &updateConfig{c: &config.Config{}}
|
c := config.New()
|
||||||
var err error
|
|
||||||
|
|
||||||
fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
|
fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
|
||||||
// Flag will call this on any parse error. Don't print usage unless
|
// Flag will call this on any parse error. Don't print usage unless
|
||||||
// -h or -help were passed explicitly.
|
// -h or -help were passed explicitly.
|
||||||
fs.Usage = func() {}
|
fs.Usage = func() {}
|
||||||
|
|
||||||
knownImports := multiFlag{}
|
var knownImports []string
|
||||||
buildFileName := fs.String("build_file_name", "BUILD.bazel,BUILD", "comma-separated list of valid build file names.\nThe first element of the list is the name of output build files to generate.")
|
fs.Var(&gzflag.MultiFlag{Values: &knownImports}, "known_import", "import path for which external resolution is skipped (can specify multiple times)")
|
||||||
buildTags := fs.String("build_tags", "", "comma-separated list of build tags. If not specified, Gazelle will not\n\tfilter sources with build constraints.")
|
|
||||||
external := fs.String("external", "external", "external: resolve external packages with go_repository\n\tvendored: resolve external packages as packages in vendor/")
|
for _, cext := range cexts {
|
||||||
var goPrefix explicitFlag
|
cext.RegisterFlags(fs, cmd.String(), c)
|
||||||
fs.Var(&goPrefix, "go_prefix", "prefix of import paths in the current workspace")
|
}
|
||||||
repoRoot := fs.String("repo_root", "", "path to a directory which corresponds to go_prefix, otherwise gazelle searches for it.")
|
|
||||||
fs.Var(&knownImports, "known_import", "import path for which external resolution is skipped (can specify multiple times)")
|
|
||||||
mode := fs.String("mode", "fix", "print: prints all of the updated BUILD files\n\tfix: rewrites all of the BUILD files in place\n\tdiff: computes the rewrite but then just does a diff")
|
|
||||||
outDir := fs.String("experimental_out_dir", "", "write build files to an alternate directory tree")
|
|
||||||
outSuffix := fs.String("experimental_out_suffix", "", "extra suffix appended to build file names. Only used if -experimental_out_dir is also set.")
|
|
||||||
var proto explicitFlag
|
|
||||||
fs.Var(&proto, "proto", "default: generates new proto rules\n\tdisable: does not touch proto rules\n\tlegacy (deprecated): generates old proto rules")
|
|
||||||
if err := fs.Parse(args); err != nil {
|
if err := fs.Parse(args); err != nil {
|
||||||
if err == flag.ErrHelp {
|
if err == flag.ErrHelp {
|
||||||
fixUpdateUsage(fs)
|
fixUpdateUsage(fs)
|
||||||
os.Exit(0)
|
return nil, err
|
||||||
}
|
}
|
||||||
// flag already prints the error; don't print it again.
|
// flag already prints the error; don't print it again.
|
||||||
log.Fatal("Try -help for more information.")
|
log.Fatal("Try -help for more information.")
|
||||||
}
|
}
|
||||||
|
|
||||||
uc.c.Dirs = fs.Args()
|
for _, cext := range cexts {
|
||||||
if len(uc.c.Dirs) == 0 {
|
if err := cext.CheckFlags(fs, c); err != nil {
|
||||||
uc.c.Dirs = []string{"."}
|
|
||||||
}
|
|
||||||
for i := range uc.c.Dirs {
|
|
||||||
uc.c.Dirs[i], err = filepath.Abs(uc.c.Dirs[i])
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if *repoRoot != "" {
|
uc := getUpdateConfig(c)
|
||||||
uc.c.RepoRoot = *repoRoot
|
workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE")
|
||||||
} else if len(uc.c.Dirs) == 1 {
|
if workspace, err := rule.LoadFile(workspacePath, ""); err != nil {
|
||||||
uc.c.RepoRoot, err = wspace.Find(uc.c.Dirs[0])
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("-repo_root not specified, and WORKSPACE cannot be found: %v", err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
uc.c.RepoRoot, err = wspace.Find(".")
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("-repo_root not specified, and WORKSPACE cannot be found: %v", err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
uc.c.RepoRoot, err = filepath.EvalSymlinks(uc.c.RepoRoot)
|
|
||||||
if err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to evaluate symlinks for repo root: %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, dir := range uc.c.Dirs {
|
|
||||||
if !isDescendingDir(dir, uc.c.RepoRoot) {
|
|
||||||
return nil, fmt.Errorf("dir %q is not a subdirectory of repo root %q", dir, uc.c.RepoRoot)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
uc.c.ValidBuildFileNames = strings.Split(*buildFileName, ",")
|
|
||||||
if len(uc.c.ValidBuildFileNames) == 0 {
|
|
||||||
return nil, fmt.Errorf("no valid build file names specified")
|
|
||||||
}
|
|
||||||
|
|
||||||
uc.c.SetBuildTags(*buildTags)
|
|
||||||
uc.c.PreprocessTags()
|
|
||||||
|
|
||||||
if goPrefix.set {
|
|
||||||
uc.c.GoPrefix = goPrefix.value
|
|
||||||
} else {
|
|
||||||
uc.c.GoPrefix, err = loadGoPrefix(uc.c)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := config.CheckPrefix(uc.c.GoPrefix); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
uc.c.ShouldFix = cmd == fixCmd
|
|
||||||
|
|
||||||
uc.c.DepMode, err = config.DependencyModeFromString(*external)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if proto.set {
|
|
||||||
uc.c.ProtoMode, err = config.ProtoModeFromString(proto.value)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
uc.c.ProtoModeExplicit = true
|
|
||||||
}
|
|
||||||
|
|
||||||
emit, ok := modeFromName[*mode]
|
|
||||||
if !ok {
|
|
||||||
return nil, fmt.Errorf("unrecognized emit mode: %q", *mode)
|
|
||||||
}
|
|
||||||
uc.emit = emit
|
|
||||||
|
|
||||||
uc.outDir = *outDir
|
|
||||||
uc.outSuffix = *outSuffix
|
|
||||||
|
|
||||||
workspacePath := filepath.Join(uc.c.RepoRoot, "WORKSPACE")
|
|
||||||
if workspaceContent, err := ioutil.ReadFile(workspacePath); err != nil {
|
|
||||||
if !os.IsNotExist(err) {
|
if !os.IsNotExist(err) {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
workspace, err := bf.Parse(workspacePath, workspaceContent)
|
if err := fixWorkspace(c, workspace, loads); err != nil {
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
if err := fixWorkspace(uc, workspace); err != nil {
|
c.RepoName = findWorkspaceName(workspace)
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
uc.c.RepoName = findWorkspaceName(workspace)
|
|
||||||
uc.repos = repos.ListRepositories(workspace)
|
uc.repos = repos.ListRepositories(workspace)
|
||||||
}
|
}
|
||||||
repoPrefixes := make(map[string]bool)
|
repoPrefixes := make(map[string]bool)
|
||||||
@@ -318,7 +300,7 @@ func newFixUpdateConfiguration(cmd command, args []string) (*updateConfig, error
|
|||||||
uc.repos = append(uc.repos, repo)
|
uc.repos = append(uc.repos, repo)
|
||||||
}
|
}
|
||||||
|
|
||||||
return uc, nil
|
return c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func fixUpdateUsage(fs *flag.FlagSet) {
|
func fixUpdateUsage(fs *flag.FlagSet) {
|
||||||
@@ -350,74 +332,14 @@ FLAGS:
|
|||||||
fs.PrintDefaults()
|
fs.PrintDefaults()
|
||||||
}
|
}
|
||||||
|
|
||||||
func loadBuildFile(c *config.Config, dir string) (*bf.File, error) {
|
func fixWorkspace(c *config.Config, workspace *rule.File, loads []rule.LoadInfo) error {
|
||||||
var buildPath string
|
uc := getUpdateConfig(c)
|
||||||
for _, base := range c.ValidBuildFileNames {
|
if !c.ShouldFix {
|
||||||
p := filepath.Join(dir, base)
|
|
||||||
fi, err := os.Stat(p)
|
|
||||||
if err == nil {
|
|
||||||
if fi.Mode().IsRegular() {
|
|
||||||
buildPath = p
|
|
||||||
break
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if !os.IsNotExist(err) {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if buildPath == "" {
|
|
||||||
return nil, os.ErrNotExist
|
|
||||||
}
|
|
||||||
|
|
||||||
data, err := ioutil.ReadFile(buildPath)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return bf.Parse(buildPath, data)
|
|
||||||
}
|
|
||||||
|
|
||||||
func loadGoPrefix(c *config.Config) (string, error) {
|
|
||||||
f, err := loadBuildFile(c, c.RepoRoot)
|
|
||||||
if err != nil {
|
|
||||||
return "", errors.New("-go_prefix not set")
|
|
||||||
}
|
|
||||||
for _, d := range config.ParseDirectives(f) {
|
|
||||||
if d.Key == "prefix" {
|
|
||||||
return d.Value, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, s := range f.Stmt {
|
|
||||||
c, ok := s.(*bf.CallExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
l, ok := c.X.(*bf.LiteralExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if l.Token != "go_prefix" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if len(c.List) != 1 {
|
|
||||||
return "", fmt.Errorf("-go_prefix not set, and %s has go_prefix(%v) with too many args", f.Path, c.List)
|
|
||||||
}
|
|
||||||
v, ok := c.List[0].(*bf.StringExpr)
|
|
||||||
if !ok {
|
|
||||||
return "", fmt.Errorf("-go_prefix not set, and %s has go_prefix(%v) which is not a string", f.Path, bf.FormatString(c.List[0]))
|
|
||||||
}
|
|
||||||
return v.Value, nil
|
|
||||||
}
|
|
||||||
return "", fmt.Errorf("-go_prefix not set, and no # gazelle:prefix directive found in %s", f.Path)
|
|
||||||
}
|
|
||||||
|
|
||||||
func fixWorkspace(uc *updateConfig, workspace *bf.File) error {
|
|
||||||
if !uc.c.ShouldFix {
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
shouldFix := false
|
shouldFix := false
|
||||||
for _, d := range uc.c.Dirs {
|
for _, d := range c.Dirs {
|
||||||
if d == uc.c.RepoRoot {
|
if d == c.RepoRoot {
|
||||||
shouldFix = true
|
shouldFix = true
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -426,22 +348,17 @@ func fixWorkspace(uc *updateConfig, workspace *bf.File) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
merger.FixWorkspace(workspace)
|
merger.FixWorkspace(workspace)
|
||||||
merger.FixLoads(workspace)
|
merger.FixLoads(workspace, loads)
|
||||||
if err := merger.CheckGazelleLoaded(workspace); err != nil {
|
if err := merger.CheckGazelleLoaded(workspace); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
return uc.emit(uc.c, workspace, workspace.Path)
|
return uc.emit(workspace.Path, workspace.Format())
|
||||||
}
|
}
|
||||||
|
|
||||||
func findWorkspaceName(f *bf.File) string {
|
func findWorkspaceName(f *rule.File) string {
|
||||||
for _, stmt := range f.Stmt {
|
for _, r := range f.Rules {
|
||||||
call, ok := stmt.(*bf.CallExpr)
|
if r.Kind() == "workspace" {
|
||||||
if !ok {
|
return r.Name()
|
||||||
continue
|
|
||||||
}
|
|
||||||
rule := bf.Rule{Call: call}
|
|
||||||
if rule.Kind() == "workspace" {
|
|
||||||
return rule.Name()
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return ""
|
return ""
|
||||||
@@ -457,3 +374,25 @@ func isDescendingDir(dir, root string) bool {
|
|||||||
}
|
}
|
||||||
return !strings.HasPrefix(rel, "..")
|
return !strings.HasPrefix(rel, "..")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func findOutputPath(c *config.Config, f *rule.File) string {
|
||||||
|
if c.ReadBuildFilesDir == "" && c.WriteBuildFilesDir == "" {
|
||||||
|
return f.Path
|
||||||
|
}
|
||||||
|
baseDir := c.WriteBuildFilesDir
|
||||||
|
if c.WriteBuildFilesDir == "" {
|
||||||
|
baseDir = c.RepoRoot
|
||||||
|
}
|
||||||
|
outputDir := filepath.Join(baseDir, filepath.FromSlash(f.Pkg))
|
||||||
|
defaultOutputPath := filepath.Join(outputDir, c.DefaultBuildFileName())
|
||||||
|
files, err := ioutil.ReadDir(outputDir)
|
||||||
|
if err != nil {
|
||||||
|
// Ignore error. Directory probably doesn't exist.
|
||||||
|
return defaultOutputPath
|
||||||
|
}
|
||||||
|
outputPath := rule.MatchBuildFileName(outputDir, c.ValidBuildFileNames, files)
|
||||||
|
if outputPath == "" {
|
||||||
|
return defaultOutputPath
|
||||||
|
}
|
||||||
|
return outputPath
|
||||||
|
}
|
||||||
|
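The runFixUpdate changes above (still shown in the raw two-column form) reorganize the command around registered languages: flags and directives come from Configurers, rule kinds and load statements are collected from each language, walk.Walk visits directories, each language generates rules that are merged into the build file, and resolvers keyed by rule kind fill in deps before the merged file is emitted through findOutputPath. The sketch below is a toy model of that generate-and-index loop using simplified stand-in types; it is not the vendored internal/language or internal/rule API.

package main

import "fmt"

// Rule and Language are simplified stand-ins for the types runFixUpdate wires
// together above; the real gazelle interfaces carry much more information.
type Rule struct {
	Kind, Name string
}

type Language interface {
	Kinds() []string
	GenerateRules(rel string, files []string) []Rule
}

type goLang struct{}

func (goLang) Kinds() []string { return []string{"go_library"} }

func (goLang) GenerateRules(rel string, files []string) []Rule {
	if len(files) == 0 {
		return nil
	}
	return []Rule{{Kind: "go_library", Name: "go_default_library"}}
}

func main() {
	languages := []Language{goLang{}}

	// Index which language owns which kind, mirroring kindToResolver above.
	kindToLang := map[string]Language{}
	for _, lang := range languages {
		for _, k := range lang.Kinds() {
			kindToLang[k] = lang
		}
	}
	fmt.Printf("registered kinds: %d\n", len(kindToLang))

	// "Walk" two directories and let every language generate rules for each.
	dirs := map[string][]string{"pkg/a": {"a.go"}, "pkg/empty": {}}
	for rel, files := range dirs {
		var gen []Rule
		for _, lang := range languages {
			gen = append(gen, lang.GenerateRules(rel, files)...)
		}
		fmt.Printf("%s: %d generated rule(s)\n", rel, len(gen))
	}
}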
10 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/fix.go generated vendored
@@ -19,17 +19,11 @@ import (
 	"io/ioutil"
 	"os"
 	"path/filepath"
 
-	"github.com/bazelbuild/bazel-gazelle/internal/config"
-	bf "github.com/bazelbuild/buildtools/build"
 )
 
-func fixFile(c *config.Config, file *bf.File, path string) error {
+func fixFile(path string, data []byte) error {
 	if err := os.MkdirAll(filepath.Dir(path), 0777); err != nil {
 		return err
 	}
-	if err := ioutil.WriteFile(path, bf.Format(file), 0666); err != nil {
-		return err
-	}
-	return nil
+	return ioutil.WriteFile(path, data, 0666)
 }
51 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/flags.go generated vendored
@@ -1,51 +0,0 @@
|
|||||||
// Copyright 2017 The Bazel Authors. All rights reserved.
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package main
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
// multiFlag allows repeated string flags to be collected into a slice
|
|
||||||
type multiFlag []string
|
|
||||||
|
|
||||||
func (m *multiFlag) String() string {
|
|
||||||
if len(*m) == 0 {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return fmt.Sprint(*m)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (m *multiFlag) Set(v string) error {
|
|
||||||
(*m) = append(*m, v)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// explicitFlag is a string flag that tracks whether it was set.
|
|
||||||
type explicitFlag struct {
|
|
||||||
set bool
|
|
||||||
value string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *explicitFlag) Set(value string) error {
|
|
||||||
f.set = true
|
|
||||||
f.value = value
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (f *explicitFlag) String() string {
|
|
||||||
if f == nil {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return f.value
|
|
||||||
}
|
|
||||||
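flags.go and its local multiFlag and explicitFlag helpers are deleted above; fix-update.go now wires -known_import through gzflag.MultiFlag from the new internal/flag package, passing a pointer to the destination slice (Values: &knownImports). The following is a self-contained sketch of a repeated-string flag written in that style; it illustrates the flag.Value pattern and is not the vendored internal/flag implementation.

package main

import (
	"flag"
	"fmt"
	"strings"
)

// MultiFlag collects repeated string flags into a caller-owned slice, similar
// in spirit to the Values field used with gzflag.MultiFlag in fix-update.go.
type MultiFlag struct {
	Values *[]string
}

func (m *MultiFlag) String() string {
	if m == nil || m.Values == nil {
		return ""
	}
	return strings.Join(*m.Values, ",")
}

func (m *MultiFlag) Set(v string) error {
	*m.Values = append(*m.Values, v)
	return nil
}

func main() {
	var knownImports []string
	fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
	fs.Var(&MultiFlag{Values: &knownImports}, "known_import", "import path to skip (repeatable)")
	if err := fs.Parse([]string{"-known_import", "example.com/a", "-known_import", "example.com/b"}); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(knownImports) // [example.com/a example.com/b]
}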
18 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/gazelle.go generated vendored
@@ -18,6 +18,7 @@ limitations under the License.
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
"log"
|
||||||
"os"
|
"os"
|
||||||
@@ -39,6 +40,18 @@ var commandFromName = map[string]command{
|
|||||||
"update-repos": updateReposCmd,
|
"update-repos": updateReposCmd,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var nameFromCommand = []string{
|
||||||
|
// keep in sync with definition above
|
||||||
|
"update",
|
||||||
|
"fix",
|
||||||
|
"update-repos",
|
||||||
|
"help",
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cmd command) String() string {
|
||||||
|
return nameFromCommand[cmd]
|
||||||
|
}
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
log.SetPrefix("gazelle: ")
|
log.SetPrefix("gazelle: ")
|
||||||
log.SetFlags(0) // don't print timestamps
|
log.SetFlags(0) // don't print timestamps
|
||||||
@@ -64,7 +77,7 @@ func run(args []string) error {
|
|||||||
case fixCmd, updateCmd:
|
case fixCmd, updateCmd:
|
||||||
return runFixUpdate(cmd, args)
|
return runFixUpdate(cmd, args)
|
||||||
case helpCmd:
|
case helpCmd:
|
||||||
help()
|
return help()
|
||||||
case updateReposCmd:
|
case updateReposCmd:
|
||||||
return updateRepos(args)
|
return updateRepos(args)
|
||||||
default:
|
default:
|
||||||
@@ -73,7 +86,7 @@ func run(args []string) error {
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func help() {
|
func help() error {
|
||||||
fmt.Fprint(os.Stderr, `usage: gazelle <command> [args...]
|
fmt.Fprint(os.Stderr, `usage: gazelle <command> [args...]
|
||||||
|
|
||||||
Gazelle is a BUILD file generator for Go projects. It can create new BUILD files
|
Gazelle is a BUILD file generator for Go projects. It can create new BUILD files
|
||||||
@@ -103,4 +116,5 @@ Gazelle is under active delevopment, and its interface may change
|
|||||||
without notice.
|
without notice.
|
||||||
|
|
||||||
`)
|
`)
|
||||||
|
return flag.ErrHelp
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
-/* Copyright 2016 The Bazel Authors. All rights reserved.
+/* Copyright 2018 The Bazel Authors. All rights reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -13,5 +13,15 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-// Package packages provides Go package traversal in a Bazel repository.
-package packages
+package main
+
+import (
+	"github.com/bazelbuild/bazel-gazelle/internal/language"
+	"github.com/bazelbuild/bazel-gazelle/internal/language/go"
+	"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
+)
+
+var languages = []language.Language{
+	proto.New(),
+	golang.New(),
+}
7 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/print.go generated vendored
@@ -17,12 +17,9 @@ package main
 
 import (
 	"os"
-
-	"github.com/bazelbuild/bazel-gazelle/internal/config"
-	bf "github.com/bazelbuild/buildtools/build"
 )
 
-func printFile(c *config.Config, f *bf.File, _ string) error {
+func printFile(_ string, data []byte) error {
-	_, err := os.Stdout.Write(bf.Format(f))
+	_, err := os.Stdout.Write(data)
 	return err
 }
130 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/update-repos.go generated vendored
@@ -19,101 +19,121 @@ import (
|
|||||||
"errors"
|
"errors"
|
||||||
"flag"
|
"flag"
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/ioutil"
|
|
||||||
"os"
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"sync"
|
"sync"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/merger"
|
"github.com/bazelbuild/bazel-gazelle/internal/merger"
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/repos"
|
"github.com/bazelbuild/bazel-gazelle/internal/repos"
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/wspace"
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type updateReposFn func(c *updateReposConfiguration, oldFile *bf.File) error
|
type updateReposFn func(c *updateReposConfig, oldFile *rule.File, kinds map[string]rule.KindInfo) error
|
||||||
|
|
||||||
type updateReposConfiguration struct {
|
type updateReposConfig struct {
|
||||||
fn updateReposFn
|
fn updateReposFn
|
||||||
repoRoot string
|
|
||||||
lockFilename string
|
lockFilename string
|
||||||
importPaths []string
|
importPaths []string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const updateReposName = "_update-repos"
|
||||||
|
|
||||||
|
func getUpdateReposConfig(c *config.Config) *updateReposConfig {
|
||||||
|
return c.Exts[updateReposName].(*updateReposConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
type updateReposConfigurer struct{}
|
||||||
|
|
||||||
|
func (_ *updateReposConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
|
||||||
|
uc := &updateReposConfig{}
|
||||||
|
c.Exts[updateReposName] = uc
|
||||||
|
fs.StringVar(&uc.lockFilename, "from_file", "", "Gazelle will translate repositories listed in this file into repository rules in WORKSPACE. Currently only dep's Gopkg.lock is supported.")
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *updateReposConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error {
|
||||||
|
uc := getUpdateReposConfig(c)
|
||||||
|
switch {
|
||||||
|
case uc.lockFilename != "":
|
||||||
|
if len(fs.Args()) != 0 {
|
||||||
|
return fmt.Errorf("Got %d positional arguments with -from_file; wanted 0.\nTry -help for more information.", len(fs.Args()))
|
||||||
|
}
|
||||||
|
uc.fn = importFromLockFile
|
||||||
|
|
||||||
|
default:
|
||||||
|
if len(fs.Args()) == 0 {
|
||||||
|
return fmt.Errorf("No repositories specified\nTry -help for more information.")
|
||||||
|
}
|
||||||
|
uc.fn = updateImportPaths
|
||||||
|
uc.importPaths = fs.Args()
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *updateReposConfigurer) KnownDirectives() []string { return nil }
|
||||||
|
|
||||||
|
func (_ *updateReposConfigurer) Configure(c *config.Config, rel string, f *rule.File) {}
|
||||||
|
|
||||||
func updateRepos(args []string) error {
|
func updateRepos(args []string) error {
|
||||||
c, err := newUpdateReposConfiguration(args)
|
cexts := make([]config.Configurer, 0, len(languages)+2)
|
||||||
|
cexts = append(cexts, &config.CommonConfigurer{}, &updateReposConfigurer{})
|
||||||
|
kinds := make(map[string]rule.KindInfo)
|
||||||
|
loads := []rule.LoadInfo{}
|
||||||
|
for _, lang := range languages {
|
||||||
|
cexts = append(cexts, lang)
|
||||||
|
loads = append(loads, lang.Loads()...)
|
||||||
|
for kind, info := range lang.Kinds() {
|
||||||
|
kinds[kind] = info
|
||||||
|
}
|
||||||
|
}
|
||||||
|
c, err := newUpdateReposConfiguration(args, cexts)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
uc := getUpdateReposConfig(c)
|
||||||
|
|
||||||
workspacePath := filepath.Join(c.repoRoot, "WORKSPACE")
|
workspacePath := filepath.Join(c.RepoRoot, "WORKSPACE")
|
||||||
content, err := ioutil.ReadFile(workspacePath)
|
f, err := rule.LoadFile(workspacePath, "")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("error reading %q: %v", workspacePath, err)
|
return fmt.Errorf("error loading %q: %v", workspacePath, err)
|
||||||
}
|
|
||||||
f, err := bf.Parse(workspacePath, content)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("error parsing %q: %v", workspacePath, err)
|
|
||||||
}
|
}
|
||||||
merger.FixWorkspace(f)
|
merger.FixWorkspace(f)
|
||||||
|
|
||||||
if err := c.fn(c, f); err != nil {
|
if err := uc.fn(uc, f, kinds); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
merger.FixLoads(f)
|
merger.FixLoads(f, loads)
|
||||||
if err := merger.CheckGazelleLoaded(f); err != nil {
|
if err := merger.CheckGazelleLoaded(f); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
if err := ioutil.WriteFile(f.Path, bf.Format(f), 0666); err != nil {
|
if err := f.Save(f.Path); err != nil {
|
||||||
return fmt.Errorf("error writing %q: %v", f.Path, err)
|
return fmt.Errorf("error writing %q: %v", f.Path, err)
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func newUpdateReposConfiguration(args []string) (*updateReposConfiguration, error) {
|
func newUpdateReposConfiguration(args []string, cexts []config.Configurer) (*config.Config, error) {
|
||||||
c := new(updateReposConfiguration)
|
c := config.New()
|
||||||
fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
|
fs := flag.NewFlagSet("gazelle", flag.ContinueOnError)
|
||||||
// Flag will call this on any parse error. Don't print usage unless
|
// Flag will call this on any parse error. Don't print usage unless
|
||||||
// -h or -help were passed explicitly.
|
// -h or -help were passed explicitly.
|
||||||
fs.Usage = func() {}
|
fs.Usage = func() {}
|
||||||
|
for _, cext := range cexts {
|
||||||
fromFileFlag := fs.String("from_file", "", "Gazelle will translate repositories listed in this file into repository rules in WORKSPACE. Currently only dep's Gopkg.lock is supported.")
|
cext.RegisterFlags(fs, "update-repos", c)
|
||||||
repoRootFlag := fs.String("repo_root", "", "path to the root directory of the repository. If unspecified, this is assumed to be the directory containing WORKSPACE.")
|
}
|
||||||
if err := fs.Parse(args); err != nil {
|
if err := fs.Parse(args); err != nil {
|
||||||
if err == flag.ErrHelp {
|
if err == flag.ErrHelp {
|
||||||
updateReposUsage(fs)
|
updateReposUsage(fs)
|
||||||
os.Exit(0)
|
return nil, err
|
||||||
}
|
}
|
||||||
// flag already prints the error; don't print it again.
|
// flag already prints the error; don't print it again.
|
||||||
return nil, errors.New("Try -help for more information")
|
return nil, errors.New("Try -help for more information")
|
||||||
}
|
}
|
||||||
|
for _, cext := range cexts {
|
||||||
// Handle general flags that apply to all subcommands.
|
if err := cext.CheckFlags(fs, c); err != nil {
|
||||||
c.repoRoot = *repoRootFlag
|
|
||||||
if c.repoRoot == "" {
|
|
||||||
if repoRoot, err := wspace.Find("."); err != nil {
|
|
||||||
return nil, err
|
return nil, err
|
||||||
} else {
|
|
||||||
c.repoRoot = repoRoot
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Handle flags specific to each subcommand.
|
|
||||||
switch {
|
|
||||||
case *fromFileFlag != "":
|
|
||||||
if len(fs.Args()) != 0 {
|
|
||||||
return nil, fmt.Errorf("Got %d positional arguments with -from_file; wanted 0.\nTry -help for more information.", len(fs.Args()))
|
|
||||||
}
|
|
||||||
c.fn = importFromLockFile
|
|
||||||
c.lockFilename = *fromFileFlag
|
|
||||||
|
|
||||||
default:
|
|
||||||
if len(fs.Args()) == 0 {
|
|
||||||
return nil, fmt.Errorf("No repositories specified\nTry -help for more information.")
|
|
||||||
}
|
|
||||||
c.fn = updateImportPaths
|
|
||||||
c.importPaths = fs.Args()
|
|
||||||
}
|
|
||||||
|
|
||||||
return c, nil
|
return c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -136,16 +156,16 @@ FLAGS:
|
|||||||
`)
|
`)
|
||||||
}
|
}
|
||||||
|
|
||||||
func updateImportPaths(c *updateReposConfiguration, f *bf.File) error {
|
func updateImportPaths(c *updateReposConfig, f *rule.File, kinds map[string]rule.KindInfo) error {
|
||||||
rs := repos.ListRepositories(f)
|
rs := repos.ListRepositories(f)
|
||||||
rc := repos.NewRemoteCache(rs)
|
rc := repos.NewRemoteCache(rs)
|
||||||
|
|
||||||
genRules := make([]bf.Expr, len(c.importPaths))
|
genRules := make([]*rule.Rule, len(c.importPaths))
|
||||||
errs := make([]error, len(c.importPaths))
|
errs := make([]error, len(c.importPaths))
|
||||||
var wg sync.WaitGroup
|
var wg sync.WaitGroup
|
||||||
wg.Add(len(c.importPaths))
|
wg.Add(len(c.importPaths))
|
||||||
for i, imp := range c.importPaths {
|
for i, imp := range c.importPaths {
|
||||||
go func(i int) {
|
go func(i int, imp string) {
|
||||||
defer wg.Done()
|
defer wg.Done()
|
||||||
repo, err := repos.UpdateRepo(rc, imp)
|
repo, err := repos.UpdateRepo(rc, imp)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@@ -156,7 +176,7 @@ func updateImportPaths(c *updateReposConfiguration, f *bf.File) error {
|
|||||||
repo.VCS = ""
|
repo.VCS = ""
|
||||||
rule := repos.GenerateRule(repo)
|
rule := repos.GenerateRule(repo)
|
||||||
genRules[i] = rule
|
genRules[i] = rule
|
||||||
}(i)
|
}(i, imp)
|
||||||
}
|
}
|
||||||
wg.Wait()
|
wg.Wait()
|
||||||
|
|
||||||
@@ -165,16 +185,16 @@ func updateImportPaths(c *updateReposConfiguration, f *bf.File) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
merger.MergeFile(genRules, nil, f, merger.RepoAttrs)
|
merger.MergeFile(f, nil, genRules, merger.PreResolve, kinds)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func importFromLockFile(c *updateReposConfiguration, f *bf.File) error {
|
func importFromLockFile(c *updateReposConfig, f *rule.File, kinds map[string]rule.KindInfo) error {
|
||||||
genRules, err := repos.ImportRepoRules(c.lockFilename)
|
genRules, err := repos.ImportRepoRules(c.lockFilename)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
merger.MergeFile(genRules, nil, f, merger.RepoAttrs)
|
merger.MergeFile(f, nil, genRules, merger.PreResolve, kinds)
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
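One small but easy-to-miss fix in the update-repos.go changes above: the worker goroutine now receives imp as an argument, go func(i int, imp string) { ... }(i, imp), instead of capturing the loop variable, so each goroutine sees the import path for its own iteration. The following stand-alone illustration shows why passing the values in matters; the import paths are made up.

package main

import (
	"fmt"
	"sync"
)

func main() {
	imports := []string{"example.com/a", "example.com/b", "example.com/c"}
	results := make([]string, len(imports))

	var wg sync.WaitGroup
	wg.Add(len(imports))
	for i, imp := range imports {
		// Passing i and imp as arguments snapshots them for this iteration.
		// Capturing the range variables directly could observe later values
		// (before Go 1.22 changed range variables to per-iteration scope).
		go func(i int, imp string) {
			defer wg.Done()
			results[i] = imp
		}(i, imp)
	}
	wg.Wait()
	fmt.Println(results)
}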
2 vendor/github.com/bazelbuild/bazel-gazelle/cmd/gazelle/version.go generated vendored
@@ -26,7 +26,7 @@ import (
 	"github.com/bazelbuild/bazel-gazelle/internal/version"
 )
 
-var minimumRulesGoVersion = version.Version{0, 11, 0}
+var minimumRulesGoVersion = version.Version{0, 13, 0}
 
 // checkRulesGoVersion checks whether a compatible version of rules_go is
 // being used in the workspace. A message will be logged if an incompatible
7 vendor/github.com/bazelbuild/bazel-gazelle/internal/config/BUILD generated vendored
@@ -5,13 +5,14 @@ go_library(
     srcs = [
         "config.go",
         "constants.go",
-        "directives.go",
-        "platform.go",
     ],
     importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/config",
     importpath = "github.com/bazelbuild/bazel-gazelle/internal/config",
     visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
-    deps = ["//vendor/github.com/bazelbuild/buildtools/build:go_default_library"],
+    deps = [
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/wspace:go_default_library",
+    ],
 )
 
 filegroup(
183 vendor/github.com/bazelbuild/bazel-gazelle/internal/config/config.go generated vendored
@@ -16,15 +16,28 @@ limitations under the License.
 package config
 
 import (
+	"flag"
 	"fmt"
 	"go/build"
+	"path/filepath"
 	"strings"
 
+	"github.com/bazelbuild/bazel-gazelle/internal/rule"
+	"github.com/bazelbuild/bazel-gazelle/internal/wspace"
 )
 
-// Config holds information about how Gazelle should run. This is mostly
-// based on command-line arguments.
+// Config holds information about how Gazelle should run. This is based on
+// command line arguments, directives, other hints in build files.
+//
+// A Config applies to a single directory. A Config is created for the
+// repository root directory, then copied and modified for each subdirectory.
+//
+// Config itself contains only general information. Most configuration
+// information is language-specific and is stored in Exts. This information
+// is modified by extensions that implement Configurer.
 type Config struct {
-	// Dirs is a list of absolute paths to directories where Gazelle should run.
+	// Dirs is a list of absolute, canonical paths to directories where Gazelle
+	// should run.
 	Dirs []string
 
 	// RepoRoot is the absolute, canonical path to the root directory of the
@@ -34,45 +47,47 @@ type Config struct {
 	// RepoName is the name of the repository.
 	RepoName string
 
+	// ReadBuildFilesDir is the absolute path to a directory where
+	// build files should be read from instead of RepoRoot.
+	ReadBuildFilesDir string
+
+	// WriteBuildFilesDir is the absolute path to a directory where
+	// build files should be written to instead of RepoRoot.
+	WriteBuildFilesDir string
+
 	// ValidBuildFileNames is a list of base names that are considered valid
 	// build files. Some repositories may have files named "BUILD" that are not
 	// used by Bazel and should be ignored. Must contain at least one string.
 	ValidBuildFileNames []string
 
-	// GenericTags is a set of build constraints that are true on all platforms.
-	// It should not be nil.
-	GenericTags BuildTags
-
-	// GoPrefix is the portion of the import path for the root of this repository.
-	// This is used to map imports to labels within the repository.
-	GoPrefix string
-
-	// GoPrefixRel is the slash-separated path to the directory where GoPrefix
-	// was set, relative to the repository root. "" for the repository root.
-	GoPrefixRel string
-
-	// GoImportMapPrefix is a string that should appear at the beginning of
-	// importmap attributes in go_library rules generated by Gazelle. These
-	// attributes are not generated when this is empty.
-	GoImportMapPrefix string
-
-	// GoImportMapPrefixRel is the slash-separated path to the directory where
-	// GoImportMapPrefix was set, relative to the repository root. "" for the
-	// repository root.
-	GoImportMapPrefixRel string
-
 	// ShouldFix determines whether Gazelle attempts to remove and replace
 	// usage of deprecated rules.
 	ShouldFix bool
 
-	// DepMode determines how imports outside of GoPrefix are resolved.
-	DepMode DependencyMode
-
-	// ProtoMode determines how rules are generated for protos.
-	ProtoMode ProtoMode
-
-	// ProtoModeExplicit indicates whether the proto mode was set explicitly.
-	ProtoModeExplicit bool
+	// Exts is a set of configurable extensions. Generally, each language
+	// has its own set of extensions, but other modules may provide their own
+	// extensions as well. Values in here may be populated by command line
+	// arguments, directives in build files, or other mechanisms.
+	Exts map[string]interface{}
+}
+
+func New() *Config {
+	return &Config{
+		ValidBuildFileNames: DefaultValidBuildFileNames,
+		Exts:                make(map[string]interface{}),
+	}
+}
+
+// Clone creates a copy of the configuration for use in a subdirectory.
+// Note that the Exts map is copied, but its contents are not.
+// Configurer.Configure should do this, if needed.
+func (c *Config) Clone() *Config {
+	cc := *c
+	cc.Exts = make(map[string]interface{})
+	for k, v := range c.Exts {
+		cc.Exts[k] = v
+	}
+	return &cc
 }
 
 var DefaultValidBuildFileNames = []string{"BUILD.bazel", "BUILD"}
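The Clone comment above is worth illustrating: the Exts map itself is duplicated, but the values stored in it are shared between parent and child configurations until an extension swaps in its own copy. A minimal sketch of that behavior, assuming a hypothetical extension value stored under the key "example" (neither the key nor the type exists in Gazelle):

type exampleExt struct{ tags []string } // hypothetical extension state, for illustration only

func demoClone(parent *Config) {
	parent.Exts["example"] = &exampleExt{tags: []string{"a"}}
	child := parent.Clone()

	// The map was copied, so a key added to child is invisible to parent.
	child.Exts["other"] = 42

	// The *exampleExt pointer is shared: mutating through child is visible
	// through parent unless Configure stores a fresh copy first.
	child.Exts["example"].(*exampleExt).tags = append(
		child.Exts["example"].(*exampleExt).tags, "b")
}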
@@ -90,33 +105,99 @@ func (c *Config) DefaultBuildFileName() string {
 	return c.ValidBuildFileNames[0]
 }
 
-// BuildTags is a set of build constraints.
-type BuildTags map[string]bool
-
-// SetBuildTags sets GenericTags by parsing as a comma separated list. An
-// error will be returned for tags that wouldn't be recognized by "go build".
-// PreprocessTags should be called after this.
-func (c *Config) SetBuildTags(tags string) error {
-	c.GenericTags = make(BuildTags)
-	if tags == "" {
-		return nil
-	}
-	for _, t := range strings.Split(tags, ",") {
-		if strings.HasPrefix(t, "!") {
-			return fmt.Errorf("build tags can't be negated: %s", t)
-		}
-		c.GenericTags[t] = true
-	}
-	return nil
-}
-
-// PreprocessTags adds some tags which are on by default before they are
-// used to match files.
-func (c *Config) PreprocessTags() {
-	if c.GenericTags == nil {
-		c.GenericTags = make(BuildTags)
-	}
-	c.GenericTags["gc"] = true
-}
+// Configurer is the interface for language or library-specific configuration
+// extensions. Most (ideally all) modifications to Config should happen
+// via this interface.
+type Configurer interface {
+	// RegisterFlags registers command-line flags used by the extension. This
+	// method is called once with the root configuration when Gazelle
+	// starts. RegisterFlags may set an initial values in Config.Exts. When flags
+	// are set, they should modify these values.
+	RegisterFlags(fs *flag.FlagSet, cmd string, c *Config)
+
+	// CheckFlags validates the configuration after command line flags are parsed.
+	// This is called once with the root configuration when Gazelle starts.
+	// CheckFlags may set default values in flags or make implied changes.
+	CheckFlags(fs *flag.FlagSet, c *Config) error
+
+	// KnownDirectives returns a list of directive keys that this Configurer can
+	// interpret. Gazelle prints errors for directives that are not recoginized by
+	// any Configurer.
+	KnownDirectives() []string
+
+	// Configure modifies the configuration using directives and other information
+	// extracted from a build file. Configure is called in each directory.
+	//
+	// c is the configuration for the current directory. It starts out as a copy
+	// of the configuration for the parent directory.
+	//
+	// rel is the slash-separated relative path from the repository root to
+	// the current directory. It is "" for the root directory itself.
+	//
+	// f is the build file for the current directory or nil if there is no
+	// existing build file.
+	Configure(c *Config, rel string, f *rule.File)
+}
+
+// CommonConfigurer handles language-agnostic command-line flags and directives,
+// i.e., those that apply to Config itself and not to Config.Exts.
+type CommonConfigurer struct {
+	repoRoot, buildFileNames, readBuildFilesDir, writeBuildFilesDir string
+}
+
+func (cc *CommonConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *Config) {
+	fs.StringVar(&cc.repoRoot, "repo_root", "", "path to a directory which corresponds to go_prefix, otherwise gazelle searches for it.")
+	fs.StringVar(&cc.buildFileNames, "build_file_name", strings.Join(DefaultValidBuildFileNames, ","), "comma-separated list of valid build file names.\nThe first element of the list is the name of output build files to generate.")
+	fs.StringVar(&cc.readBuildFilesDir, "experimental_read_build_files_dir", "", "path to a directory where build files should be read from (instead of -repo_root)")
+	fs.StringVar(&cc.writeBuildFilesDir, "experimental_write_build_files_dir", "", "path to a directory where build files should be written to (instead of -repo_root)")
+}
+
+func (cc *CommonConfigurer) CheckFlags(fs *flag.FlagSet, c *Config) error {
+	var err error
+	if cc.repoRoot == "" {
+		cc.repoRoot, err = wspace.Find(".")
+		if err != nil {
+			return fmt.Errorf("-repo_root not specified, and WORKSPACE cannot be found: %v", err)
+		}
+	}
+	c.RepoRoot, err = filepath.Abs(cc.repoRoot)
+	if err != nil {
+		return fmt.Errorf("%s: failed to find absolute path of repo root: %v", cc.repoRoot, err)
+	}
+	c.RepoRoot, err = filepath.EvalSymlinks(c.RepoRoot)
+	if err != nil {
+		return fmt.Errorf("%s: failed to resolve symlinks: %v", cc.repoRoot, err)
+	}
+	c.ValidBuildFileNames = strings.Split(cc.buildFileNames, ",")
+	if cc.readBuildFilesDir != "" {
+		c.ReadBuildFilesDir, err = filepath.Abs(cc.readBuildFilesDir)
+		if err != nil {
+			return fmt.Errorf("%s: failed to find absolute path of -read_build_files_dir: %v", cc.readBuildFilesDir, err)
+		}
+	}
+	if cc.writeBuildFilesDir != "" {
+		c.WriteBuildFilesDir, err = filepath.Abs(cc.writeBuildFilesDir)
+		if err != nil {
+			return fmt.Errorf("%s: failed to find absolute path of -write_build_files_dir: %v", cc.writeBuildFilesDir, err)
+		}
+	}
+	return nil
+}
+
+func (cc *CommonConfigurer) KnownDirectives() []string {
+	return []string{"build_file_name"}
+}
+
+func (cc *CommonConfigurer) Configure(c *Config, rel string, f *rule.File) {
+	if f == nil {
+		return
+	}
+	for _, d := range f.Directives {
+		if d.Key == "build_file_name" {
+			c.ValidBuildFileNames = strings.Split(d.Value, ",")
+		}
+	}
+}
 
 // CheckPrefix checks that a string may be used as a prefix. We forbid local
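To show how the new extension point is meant to be used, here is a minimal sketch of a Configurer implementation. The package name, the "mylang" key, and the directive name are invented for illustration; only the interface methods and the rule.File.Directives field come from the code above.

package mylang // hypothetical extension package, not part of gazelle

import (
	"flag"

	"github.com/bazelbuild/bazel-gazelle/internal/config"
	"github.com/bazelbuild/bazel-gazelle/internal/rule"
)

type myConfig struct{ verbose bool } // hypothetical per-directory state

type myConfigurer struct{}

var _ config.Configurer = (*myConfigurer)(nil)

func (*myConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
	mc := &myConfig{}
	fs.BoolVar(&mc.verbose, "mylang_verbose", false, "enable verbose output (example flag)")
	c.Exts["mylang"] = mc // initial value; flags and directives modify it later
}

func (*myConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error { return nil }

func (*myConfigurer) KnownDirectives() []string { return []string{"mylang_verbose"} }

func (*myConfigurer) Configure(c *config.Config, rel string, f *rule.File) {
	if f == nil {
		return
	}
	for _, d := range f.Directives {
		if d.Key == "mylang_verbose" {
			// Store a fresh value so the parent directory's config is untouched.
			c.Exts["mylang"] = &myConfig{verbose: d.Value == "true"}
		}
	}
}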
12 vendor/github.com/bazelbuild/bazel-gazelle/internal/config/constants.go generated vendored
@@ -39,18 +39,6 @@ const (
 	// "compilers" attribute of go_proto_library rules.
 	GrpcCompilerLabel = "@io_bazel_rules_go//proto:go_grpc"
 
-	// WellKnownTypesProtoRepo is the repository containing proto_library rules
-	// for the Well Known Types.
-	WellKnownTypesProtoRepo = "com_google_protobuf"
-	// WellKnownTypeProtoPrefix is the proto import path prefix for the
-	// Well Known Types.
-	WellKnownTypesProtoPrefix = "google/protobuf"
-	// WellKnownTypesGoPrefix is the import path for the Go repository containing
-	// pre-generated code for the Well Known Types.
-	WellKnownTypesGoPrefix = "github.com/golang/protobuf"
-	// WellKnownTypesPkg is the package name for the predefined WKTs in rules_go.
-	WellKnownTypesPkg = "proto/wkt"
-
 	// GazelleImportsKey is an internal attribute that lists imported packages
 	// on generated rules. It is replaced with "deps" during import resolution.
 	GazelleImportsKey = "_gazelle_imports"
213 vendor/github.com/bazelbuild/bazel-gazelle/internal/config/directives.go generated vendored
@@ -1,213 +0,0 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"path"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Directive is a key-value pair extracted from a top-level comment in
|
|
||||||
// a build file. Directives have the following format:
|
|
||||||
//
|
|
||||||
// # gazelle:key value
|
|
||||||
//
|
|
||||||
// Keys may not contain spaces. Values may be empty and may contain spaces,
|
|
||||||
// but surrounding space is trimmed.
|
|
||||||
type Directive struct {
|
|
||||||
Key, Value string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Top-level directives apply to the whole package or build file. They must
|
|
||||||
// appear before the first statement.
|
|
||||||
var knownTopLevelDirectives = map[string]bool{
|
|
||||||
"build_file_name": true,
|
|
||||||
"build_tags": true,
|
|
||||||
"exclude": true,
|
|
||||||
"ignore": true,
|
|
||||||
"importmap_prefix": true,
|
|
||||||
"repo": true,
|
|
||||||
"prefix": true,
|
|
||||||
"proto": true,
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(jayconrod): annotation directives will apply to an individual rule.
|
|
||||||
// They must appear in the block of comments above that rule.
|
|
||||||
|
|
||||||
// ParseDirectives scans f for Gazelle directives. The full list of directives
|
|
||||||
// is returned. Errors are reported for unrecognized directives and directives
|
|
||||||
// out of place (after the first statement).
|
|
||||||
func ParseDirectives(f *bf.File) []Directive {
|
|
||||||
var directives []Directive
|
|
||||||
parseComment := func(com bf.Comment) {
|
|
||||||
match := directiveRe.FindStringSubmatch(com.Token)
|
|
||||||
if match == nil {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
key, value := match[1], match[2]
|
|
||||||
if _, ok := knownTopLevelDirectives[key]; !ok {
|
|
||||||
log.Printf("%s:%d: unknown directive: %s", f.Path, com.Start.Line, com.Token)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
directives = append(directives, Directive{key, value})
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, s := range f.Stmt {
|
|
||||||
coms := s.Comment()
|
|
||||||
for _, com := range coms.Before {
|
|
||||||
parseComment(com)
|
|
||||||
}
|
|
||||||
for _, com := range coms.After {
|
|
||||||
parseComment(com)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return directives
|
|
||||||
}
|
|
||||||
|
|
||||||
var directiveRe = regexp.MustCompile(`^#\s*gazelle:(\w+)\s*(.*?)\s*$`)
|
|
||||||
|
|
||||||
// ApplyDirectives applies directives that modify the configuration to a copy of
|
|
||||||
// c, which is returned. If there are no configuration directives, c is returned
|
|
||||||
// unmodified.
|
|
||||||
func ApplyDirectives(c *Config, directives []Directive, rel string) *Config {
|
|
||||||
modified := *c
|
|
||||||
didModify := false
|
|
||||||
for _, d := range directives {
|
|
||||||
switch d.Key {
|
|
||||||
case "build_file_name":
|
|
||||||
modified.ValidBuildFileNames = strings.Split(d.Value, ",")
|
|
||||||
didModify = true
|
|
||||||
case "build_tags":
|
|
||||||
if err := modified.SetBuildTags(d.Value); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
modified.GenericTags = c.GenericTags
|
|
||||||
} else {
|
|
||||||
modified.PreprocessTags()
|
|
||||||
didModify = true
|
|
||||||
}
|
|
||||||
case "importmap_prefix":
|
|
||||||
if err := CheckPrefix(d.Value); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
modified.GoImportMapPrefix = d.Value
|
|
||||||
modified.GoImportMapPrefixRel = rel
|
|
||||||
didModify = true
|
|
||||||
case "prefix":
|
|
||||||
if err := CheckPrefix(d.Value); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
modified.GoPrefix = d.Value
|
|
||||||
modified.GoPrefixRel = rel
|
|
||||||
didModify = true
|
|
||||||
case "proto":
|
|
||||||
protoMode, err := ProtoModeFromString(d.Value)
|
|
||||||
if err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
modified.ProtoMode = protoMode
|
|
||||||
modified.ProtoModeExplicit = true
|
|
||||||
didModify = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if !didModify {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
return &modified
|
|
||||||
}
|
|
||||||
|
|
||||||
// InferProtoMode sets Config.ProtoMode, based on the contents of f. If the
|
|
||||||
// proto mode is already set to something other than the default, or if the mode
|
|
||||||
// is set explicitly in directives, this function does not change it. If the
|
|
||||||
// legacy go_proto_library.bzl is loaded, or if this is the Well Known Types
|
|
||||||
// repository, legacy mode is used. If go_proto_library is loaded from another
|
|
||||||
// file, proto rule generation is disabled.
|
|
||||||
func InferProtoMode(c *Config, rel string, f *bf.File, directives []Directive) *Config {
|
|
||||||
if c.ProtoMode != DefaultProtoMode || c.ProtoModeExplicit {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
for _, d := range directives {
|
|
||||||
if d.Key == "proto" {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if c.GoPrefix == WellKnownTypesGoPrefix {
|
|
||||||
// Use legacy mode in this repo. We don't need proto_library or
|
|
||||||
// go_proto_library, since we get that from @com_google_protobuf.
|
|
||||||
// Legacy rules still refer to .proto files in here, which need are
|
|
||||||
// exposed by filegroup. go_library rules from .pb.go files will be
|
|
||||||
// generated, which are depended upon by the new rules.
|
|
||||||
modified := *c
|
|
||||||
modified.ProtoMode = LegacyProtoMode
|
|
||||||
return &modified
|
|
||||||
}
|
|
||||||
if path.Base(rel) == "vendor" {
|
|
||||||
modified := *c
|
|
||||||
modified.ProtoMode = DisableProtoMode
|
|
||||||
return &modified
|
|
||||||
}
|
|
||||||
if f == nil {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
mode := DefaultProtoMode
|
|
||||||
for _, stmt := range f.Stmt {
|
|
||||||
c, ok := stmt.(*bf.CallExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
x, ok := c.X.(*bf.LiteralExpr)
|
|
||||||
if !ok || x.Token != "load" || len(c.List) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
name, ok := c.List[0].(*bf.StringExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if name.Value == "@io_bazel_rules_go//proto:def.bzl" {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if name.Value == "@io_bazel_rules_go//proto:go_proto_library.bzl" {
|
|
||||||
mode = LegacyProtoMode
|
|
||||||
break
|
|
||||||
}
|
|
||||||
for _, arg := range c.List[1:] {
|
|
||||||
if sym, ok := arg.(*bf.StringExpr); ok && sym.Value == "go_proto_library" {
|
|
||||||
mode = DisableProtoMode
|
|
||||||
break
|
|
||||||
}
|
|
||||||
kwarg, ok := arg.(*bf.BinaryExpr)
|
|
||||||
if !ok || kwarg.Op != "=" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if key, ok := kwarg.X.(*bf.LiteralExpr); ok && key.Token == "go_proto_library" {
|
|
||||||
mode = DisableProtoMode
|
|
||||||
break
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if mode == DefaultProtoMode || c.ProtoMode == mode || c.ShouldFix && mode == LegacyProtoMode {
|
|
||||||
return c
|
|
||||||
}
|
|
||||||
modified := *c
|
|
||||||
modified.ProtoMode = mode
|
|
||||||
return &modified
|
|
||||||
}
|
|
||||||
23 vendor/github.com/bazelbuild/bazel-gazelle/internal/flag/BUILD generated vendored Normal file
@@ -0,0 +1,23 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "go_default_library",
+    srcs = ["flag.go"],
+    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/flag",
+    importpath = "github.com/bazelbuild/bazel-gazelle/internal/flag",
+    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
+)
+
+filegroup(
+    name = "package-srcs",
+    srcs = glob(["**"]),
+    tags = ["automanaged"],
+    visibility = ["//visibility:private"],
+)
+
+filegroup(
+    name = "all-srcs",
+    srcs = [":package-srcs"],
+    tags = ["automanaged"],
+    visibility = ["//visibility:public"],
+)
60 vendor/github.com/bazelbuild/bazel-gazelle/internal/flag/flag.go generated vendored Normal file
@@ -0,0 +1,60 @@
+// Copyright 2017 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package flag
+
+import (
+	stdflag "flag"
+	"strings"
+)
+
+// MultiFlag allows repeated string flags to be collected into a slice
+type MultiFlag struct {
+	Values *[]string
+}
+
+var _ stdflag.Value = (*MultiFlag)(nil)
+
+func (m *MultiFlag) Set(v string) error {
+	*m.Values = append(*m.Values, v)
+	return nil
+}
+
+func (m *MultiFlag) String() string {
+	if m == nil || m.Values == nil {
+		return ""
+	}
+	return strings.Join(*m.Values, ",")
+}
+
+// ExplicitFlag is a string flag that tracks whether it was set.
+type ExplicitFlag struct {
+	IsSet *bool
+	Value *string
+}
+
+var _ stdflag.Value = (*ExplicitFlag)(nil)
+
+func (f *ExplicitFlag) Set(value string) error {
+	*f.IsSet = true
+	*f.Value = value
+	return nil
+}
+
+func (f *ExplicitFlag) String() string {
+	if f == nil || f.Value == nil {
+		return ""
+	}
+	return *f.Value
+}
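A short, self-contained usage sketch of the two flag types above, wired into a standard library FlagSet. The "lang" flag name is made up for illustration; "go_prefix" mirrors how ExplicitFlag is used later in language/go/config.go.

package main

import (
	stdflag "flag"
	"fmt"

	gzflag "github.com/bazelbuild/bazel-gazelle/internal/flag"
)

func main() {
	var langs []string
	var prefix string
	var prefixSet bool

	fs := stdflag.NewFlagSet("example", stdflag.ContinueOnError)
	fs.Var(&gzflag.MultiFlag{Values: &langs}, "lang", "repeated flag, collected into a slice")
	fs.Var(&gzflag.ExplicitFlag{IsSet: &prefixSet, Value: &prefix}, "go_prefix", "string flag that records whether it was set")

	_ = fs.Parse([]string{"-lang", "go", "-lang", "proto", "-go_prefix", "example.com/repo"})
	fmt.Println(langs, prefixSet, prefix) // [go proto] true example.com/repo
}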
217 vendor/github.com/bazelbuild/bazel-gazelle/internal/generator/construct.go generated vendored
@@ -1,217 +0,0 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package generator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"reflect"
|
|
||||||
"sort"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/packages"
|
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
bt "github.com/bazelbuild/buildtools/tables"
|
|
||||||
)
|
|
||||||
|
|
||||||
// KeyValue represents a key-value pair. This gets converted into a
|
|
||||||
// rule attribute, i.e., a Skylark keyword argument.
|
|
||||||
type KeyValue struct {
|
|
||||||
Key string
|
|
||||||
Value interface{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// GlobValue represents a Bazel glob expression.
|
|
||||||
type GlobValue struct {
|
|
||||||
Patterns []string
|
|
||||||
Excludes []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// EmptyRule generates an empty rule with the given kind and name.
|
|
||||||
func EmptyRule(kind, name string) *bf.CallExpr {
|
|
||||||
return NewRule(kind, []KeyValue{{"name", name}})
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewRule generates a rule of the given kind with the given attributes.
|
|
||||||
func NewRule(kind string, kwargs []KeyValue) *bf.CallExpr {
|
|
||||||
sort.Sort(byAttrName(kwargs))
|
|
||||||
|
|
||||||
var list []bf.Expr
|
|
||||||
for _, arg := range kwargs {
|
|
||||||
expr := newValue(arg.Value)
|
|
||||||
list = append(list, &bf.BinaryExpr{
|
|
||||||
X: &bf.LiteralExpr{Token: arg.Key},
|
|
||||||
Op: "=",
|
|
||||||
Y: expr,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
return &bf.CallExpr{
|
|
||||||
X: &bf.LiteralExpr{Token: kind},
|
|
||||||
List: list,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// newValue converts a Go value into the corresponding expression in Bazel BUILD file.
|
|
||||||
func newValue(val interface{}) bf.Expr {
|
|
||||||
rv := reflect.ValueOf(val)
|
|
||||||
switch rv.Kind() {
|
|
||||||
case reflect.Bool:
|
|
||||||
tok := "False"
|
|
||||||
if rv.Bool() {
|
|
||||||
tok = "True"
|
|
||||||
}
|
|
||||||
return &bf.LiteralExpr{Token: tok}
|
|
||||||
|
|
||||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
|
|
||||||
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
|
|
||||||
return &bf.LiteralExpr{Token: fmt.Sprintf("%d", val)}
|
|
||||||
|
|
||||||
case reflect.Float32, reflect.Float64:
|
|
||||||
return &bf.LiteralExpr{Token: fmt.Sprintf("%f", val)}
|
|
||||||
|
|
||||||
case reflect.String:
|
|
||||||
return &bf.StringExpr{Value: val.(string)}
|
|
||||||
|
|
||||||
case reflect.Slice, reflect.Array:
|
|
||||||
var list []bf.Expr
|
|
||||||
for i := 0; i < rv.Len(); i++ {
|
|
||||||
elem := newValue(rv.Index(i).Interface())
|
|
||||||
list = append(list, elem)
|
|
||||||
}
|
|
||||||
return &bf.ListExpr{List: list}
|
|
||||||
|
|
||||||
case reflect.Map:
|
|
||||||
rkeys := rv.MapKeys()
|
|
||||||
sort.Sort(byString(rkeys))
|
|
||||||
args := make([]bf.Expr, len(rkeys))
|
|
||||||
for i, rk := range rkeys {
|
|
||||||
label := fmt.Sprintf("@%s//go/platform:%s", config.RulesGoRepoName, mapKeyString(rk))
|
|
||||||
k := &bf.StringExpr{Value: label}
|
|
||||||
v := newValue(rv.MapIndex(rk).Interface())
|
|
||||||
if l, ok := v.(*bf.ListExpr); ok {
|
|
||||||
l.ForceMultiLine = true
|
|
||||||
}
|
|
||||||
args[i] = &bf.KeyValueExpr{Key: k, Value: v}
|
|
||||||
}
|
|
||||||
args = append(args, &bf.KeyValueExpr{
|
|
||||||
Key: &bf.StringExpr{Value: "//conditions:default"},
|
|
||||||
Value: &bf.ListExpr{},
|
|
||||||
})
|
|
||||||
sel := &bf.CallExpr{
|
|
||||||
X: &bf.LiteralExpr{Token: "select"},
|
|
||||||
List: []bf.Expr{&bf.DictExpr{List: args, ForceMultiLine: true}},
|
|
||||||
}
|
|
||||||
return sel
|
|
||||||
|
|
||||||
case reflect.Struct:
|
|
||||||
switch val := val.(type) {
|
|
||||||
case GlobValue:
|
|
||||||
patternsValue := newValue(val.Patterns)
|
|
||||||
globArgs := []bf.Expr{patternsValue}
|
|
||||||
if len(val.Excludes) > 0 {
|
|
||||||
excludesValue := newValue(val.Excludes)
|
|
||||||
globArgs = append(globArgs, &bf.KeyValueExpr{
|
|
||||||
Key: &bf.StringExpr{Value: "excludes"},
|
|
||||||
Value: excludesValue,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
return &bf.CallExpr{
|
|
||||||
X: &bf.LiteralExpr{Token: "glob"},
|
|
||||||
List: globArgs,
|
|
||||||
}
|
|
||||||
|
|
||||||
case packages.PlatformStrings:
|
|
||||||
var pieces []bf.Expr
|
|
||||||
if len(val.Generic) > 0 {
|
|
||||||
pieces = append(pieces, newValue(val.Generic))
|
|
||||||
}
|
|
||||||
if len(val.OS) > 0 {
|
|
||||||
pieces = append(pieces, newValue(val.OS))
|
|
||||||
}
|
|
||||||
if len(val.Arch) > 0 {
|
|
||||||
pieces = append(pieces, newValue(val.Arch))
|
|
||||||
}
|
|
||||||
if len(val.Platform) > 0 {
|
|
||||||
pieces = append(pieces, newValue(val.Platform))
|
|
||||||
}
|
|
||||||
if len(pieces) == 0 {
|
|
||||||
return &bf.ListExpr{}
|
|
||||||
} else if len(pieces) == 1 {
|
|
||||||
return pieces[0]
|
|
||||||
} else {
|
|
||||||
e := pieces[0]
|
|
||||||
if list, ok := e.(*bf.ListExpr); ok {
|
|
||||||
list.ForceMultiLine = true
|
|
||||||
}
|
|
||||||
for _, piece := range pieces[1:] {
|
|
||||||
e = &bf.BinaryExpr{X: e, Y: piece, Op: "+"}
|
|
||||||
}
|
|
||||||
return e
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Panicf("type not supported: %T", val)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func mapKeyString(k reflect.Value) string {
|
|
||||||
switch s := k.Interface().(type) {
|
|
||||||
case string:
|
|
||||||
return s
|
|
||||||
case config.Platform:
|
|
||||||
return s.String()
|
|
||||||
default:
|
|
||||||
log.Panicf("unexpected map key: %v", k)
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type byAttrName []KeyValue
|
|
||||||
|
|
||||||
var _ sort.Interface = byAttrName{}
|
|
||||||
|
|
||||||
func (s byAttrName) Len() int {
|
|
||||||
return len(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s byAttrName) Less(i, j int) bool {
|
|
||||||
if cmp := bt.NamePriority[s[i].Key] - bt.NamePriority[s[j].Key]; cmp != 0 {
|
|
||||||
return cmp < 0
|
|
||||||
}
|
|
||||||
return s[i].Key < s[j].Key
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s byAttrName) Swap(i, j int) {
|
|
||||||
s[i], s[j] = s[j], s[i]
|
|
||||||
}
|
|
||||||
|
|
||||||
type byString []reflect.Value
|
|
||||||
|
|
||||||
var _ sort.Interface = byString{}
|
|
||||||
|
|
||||||
func (s byString) Len() int {
|
|
||||||
return len(s)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s byString) Less(i, j int) bool {
|
|
||||||
return mapKeyString(s[i]) < mapKeyString(s[j])
|
|
||||||
}
|
|
||||||
|
|
||||||
func (s byString) Swap(i, j int) {
|
|
||||||
s[i], s[j] = s[j], s[i]
|
|
||||||
}
|
|
||||||
334 vendor/github.com/bazelbuild/bazel-gazelle/internal/generator/generator.go generated vendored
@@ -1,334 +0,0 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package generator
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"path"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/label"
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/packages"
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
|
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
)
|
|
||||||
|
|
||||||
// NewGenerator returns a new instance of Generator.
|
|
||||||
// "oldFile" is the existing build file. May be nil.
|
|
||||||
func NewGenerator(c *config.Config, l *label.Labeler, oldFile *bf.File) *Generator {
|
|
||||||
shouldSetVisibility := oldFile == nil || !hasDefaultVisibility(oldFile)
|
|
||||||
return &Generator{c: c, l: l, shouldSetVisibility: shouldSetVisibility}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generator generates Bazel build rules for Go build targets.
|
|
||||||
type Generator struct {
|
|
||||||
c *config.Config
|
|
||||||
l *label.Labeler
|
|
||||||
shouldSetVisibility bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// GenerateRules generates a list of rules for targets in "pkg". It also returns
|
|
||||||
// a list of empty rules that may be deleted from an existing file.
|
|
||||||
func (g *Generator) GenerateRules(pkg *packages.Package) (rules []bf.Expr, empty []bf.Expr, err error) {
|
|
||||||
var rs []bf.Expr
|
|
||||||
|
|
||||||
protoLibName, protoRules := g.generateProto(pkg)
|
|
||||||
rs = append(rs, protoRules...)
|
|
||||||
|
|
||||||
libName, libRule := g.generateLib(pkg, protoLibName)
|
|
||||||
rs = append(rs, libRule)
|
|
||||||
|
|
||||||
rs = append(rs,
|
|
||||||
g.generateBin(pkg, libName),
|
|
||||||
g.generateTest(pkg, libName))
|
|
||||||
|
|
||||||
for _, r := range rs {
|
|
||||||
if isEmpty(r) {
|
|
||||||
empty = append(empty, r)
|
|
||||||
} else {
|
|
||||||
rules = append(rules, r)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return rules, empty, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) generateProto(pkg *packages.Package) (string, []bf.Expr) {
|
|
||||||
if g.c.ProtoMode == config.DisableProtoMode {
|
|
||||||
// Don't create or delete proto rules in this mode. Any existing rules
|
|
||||||
// are likely hand-written.
|
|
||||||
return "", nil
|
|
||||||
}
|
|
||||||
|
|
||||||
filegroupName := config.DefaultProtosName
|
|
||||||
protoName := g.l.ProtoLabel(pkg.Rel, pkg.Name).Name
|
|
||||||
goProtoName := g.l.GoProtoLabel(pkg.Rel, pkg.Name).Name
|
|
||||||
|
|
||||||
if g.c.ProtoMode == config.LegacyProtoMode {
|
|
||||||
if !pkg.Proto.HasProto() {
|
|
||||||
return "", []bf.Expr{EmptyRule("filegroup", filegroupName)}
|
|
||||||
}
|
|
||||||
attrs := []KeyValue{
|
|
||||||
{Key: "name", Value: filegroupName},
|
|
||||||
{Key: "srcs", Value: pkg.Proto.Sources},
|
|
||||||
}
|
|
||||||
if g.shouldSetVisibility {
|
|
||||||
attrs = append(attrs, KeyValue{"visibility", []string{checkInternalVisibility(pkg.Rel, "//visibility:public")}})
|
|
||||||
}
|
|
||||||
return "", []bf.Expr{NewRule("filegroup", attrs)}
|
|
||||||
}
|
|
||||||
|
|
||||||
if !pkg.Proto.HasProto() {
|
|
||||||
return "", []bf.Expr{
|
|
||||||
EmptyRule("filegroup", filegroupName),
|
|
||||||
EmptyRule("proto_library", protoName),
|
|
||||||
EmptyRule("go_proto_library", goProtoName),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var rules []bf.Expr
|
|
||||||
visibility := []string{checkInternalVisibility(pkg.Rel, "//visibility:public")}
|
|
||||||
protoAttrs := []KeyValue{
|
|
||||||
{"name", protoName},
|
|
||||||
{"srcs", pkg.Proto.Sources},
|
|
||||||
}
|
|
||||||
if g.shouldSetVisibility {
|
|
||||||
protoAttrs = append(protoAttrs, KeyValue{"visibility", visibility})
|
|
||||||
}
|
|
||||||
imports := pkg.Proto.Imports
|
|
||||||
if !imports.IsEmpty() {
|
|
||||||
protoAttrs = append(protoAttrs, KeyValue{config.GazelleImportsKey, imports})
|
|
||||||
}
|
|
||||||
rules = append(rules, NewRule("proto_library", protoAttrs))
|
|
||||||
|
|
||||||
goProtoAttrs := []KeyValue{
|
|
||||||
{"name", goProtoName},
|
|
||||||
{"proto", ":" + protoName},
|
|
||||||
}
|
|
||||||
goProtoAttrs = append(goProtoAttrs, g.importAttrs(pkg)...)
|
|
||||||
if pkg.Proto.HasServices {
|
|
||||||
goProtoAttrs = append(goProtoAttrs, KeyValue{"compilers", []string{"@io_bazel_rules_go//proto:go_grpc"}})
|
|
||||||
}
|
|
||||||
if g.shouldSetVisibility {
|
|
||||||
goProtoAttrs = append(goProtoAttrs, KeyValue{"visibility", visibility})
|
|
||||||
}
|
|
||||||
if !imports.IsEmpty() {
|
|
||||||
goProtoAttrs = append(goProtoAttrs, KeyValue{config.GazelleImportsKey, imports})
|
|
||||||
}
|
|
||||||
rules = append(rules, NewRule("go_proto_library", goProtoAttrs))
|
|
||||||
|
|
||||||
return goProtoName, rules
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) generateBin(pkg *packages.Package, library string) bf.Expr {
|
|
||||||
name := g.l.BinaryLabel(pkg.Rel).Name
|
|
||||||
if !pkg.IsCommand() || pkg.Binary.Sources.IsEmpty() && library == "" {
|
|
||||||
return EmptyRule("go_binary", name)
|
|
||||||
}
|
|
||||||
visibility := checkInternalVisibility(pkg.Rel, "//visibility:public")
|
|
||||||
attrs := g.commonAttrs(pkg.Rel, name, visibility, pkg.Binary)
|
|
||||||
if library != "" {
|
|
||||||
attrs = append(attrs, KeyValue{"embed", []string{":" + library}})
|
|
||||||
}
|
|
||||||
return NewRule("go_binary", attrs)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) generateLib(pkg *packages.Package, goProtoName string) (string, *bf.CallExpr) {
|
|
||||||
name := g.l.LibraryLabel(pkg.Rel).Name
|
|
||||||
if !pkg.Library.HasGo() && goProtoName == "" {
|
|
||||||
return "", EmptyRule("go_library", name)
|
|
||||||
}
|
|
||||||
var visibility string
|
|
||||||
if pkg.IsCommand() {
|
|
||||||
// Libraries made for a go_binary should not be exposed to the public.
|
|
||||||
visibility = "//visibility:private"
|
|
||||||
} else {
|
|
||||||
visibility = checkInternalVisibility(pkg.Rel, "//visibility:public")
|
|
||||||
}
|
|
||||||
|
|
||||||
attrs := g.commonAttrs(pkg.Rel, name, visibility, pkg.Library)
|
|
||||||
attrs = append(attrs, g.importAttrs(pkg)...)
|
|
||||||
if goProtoName != "" {
|
|
||||||
attrs = append(attrs, KeyValue{"embed", []string{":" + goProtoName}})
|
|
||||||
}
|
|
||||||
|
|
||||||
rule := NewRule("go_library", attrs)
|
|
||||||
return name, rule
|
|
||||||
}
|
|
||||||
|
|
||||||
// hasDefaultVisibility returns whether oldFile contains a "package" rule with
|
|
||||||
// a "default_visibility" attribute. Rules generated by Gazelle should not
|
|
||||||
// have their own visibility attributes if this is the case.
|
|
||||||
func hasDefaultVisibility(oldFile *bf.File) bool {
|
|
||||||
for _, s := range oldFile.Stmt {
|
|
||||||
c, ok := s.(*bf.CallExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
r := bf.Rule{Call: c}
|
|
||||||
if r.Kind() == "package" && r.Attr("default_visibility") != nil {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkInternalVisibility overrides the given visibility if the package is
|
|
||||||
// internal.
|
|
||||||
func checkInternalVisibility(rel, visibility string) string {
|
|
||||||
if i := strings.LastIndex(rel, "/internal/"); i >= 0 {
|
|
||||||
visibility = fmt.Sprintf("//%s:__subpackages__", rel[:i])
|
|
||||||
} else if strings.HasPrefix(rel, "internal/") {
|
|
||||||
visibility = "//:__subpackages__"
|
|
||||||
}
|
|
||||||
return visibility
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) generateTest(pkg *packages.Package, library string) bf.Expr {
|
|
||||||
name := g.l.TestLabel(pkg.Rel).Name
|
|
||||||
if !pkg.Test.HasGo() {
|
|
||||||
return EmptyRule("go_test", name)
|
|
||||||
}
|
|
||||||
attrs := g.commonAttrs(pkg.Rel, name, "", pkg.Test)
|
|
||||||
if library != "" {
|
|
||||||
attrs = append(attrs, KeyValue{"embed", []string{":" + library}})
|
|
||||||
}
|
|
||||||
if pkg.HasTestdata {
|
|
||||||
glob := GlobValue{Patterns: []string{"testdata/**"}}
|
|
||||||
attrs = append(attrs, KeyValue{"data", glob})
|
|
||||||
}
|
|
||||||
return NewRule("go_test", attrs)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) commonAttrs(pkgRel, name, visibility string, target packages.GoTarget) []KeyValue {
|
|
||||||
attrs := []KeyValue{{"name", name}}
|
|
||||||
if !target.Sources.IsEmpty() {
|
|
||||||
attrs = append(attrs, KeyValue{"srcs", target.Sources.Flat()})
|
|
||||||
}
|
|
||||||
if target.Cgo {
|
|
||||||
attrs = append(attrs, KeyValue{"cgo", true})
|
|
||||||
}
|
|
||||||
if !target.CLinkOpts.IsEmpty() {
|
|
||||||
attrs = append(attrs, KeyValue{"clinkopts", g.options(target.CLinkOpts, pkgRel)})
|
|
||||||
}
|
|
||||||
if !target.COpts.IsEmpty() {
|
|
||||||
attrs = append(attrs, KeyValue{"copts", g.options(target.COpts, pkgRel)})
|
|
||||||
}
|
|
||||||
if g.shouldSetVisibility && visibility != "" {
|
|
||||||
attrs = append(attrs, KeyValue{"visibility", []string{visibility}})
|
|
||||||
}
|
|
||||||
imports := target.Imports
|
|
||||||
if !imports.IsEmpty() {
|
|
||||||
attrs = append(attrs, KeyValue{config.GazelleImportsKey, imports})
|
|
||||||
}
|
|
||||||
return attrs
|
|
||||||
}
|
|
||||||
|
|
||||||
func (g *Generator) importAttrs(pkg *packages.Package) []KeyValue {
|
|
||||||
attrs := []KeyValue{{"importpath", pkg.ImportPath}}
|
|
||||||
if g.c.GoImportMapPrefix != "" {
|
|
||||||
fromPrefixRel := pathtools.TrimPrefix(pkg.Rel, g.c.GoImportMapPrefixRel)
|
|
||||||
importMap := path.Join(g.c.GoImportMapPrefix, fromPrefixRel)
|
|
||||||
if importMap != pkg.ImportPath {
|
|
||||||
attrs = append(attrs, KeyValue{"importmap", importMap})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return attrs
|
|
||||||
}
|
|
||||||
|
|
||||||
var (
|
|
||||||
// shortOptPrefixes are strings that come at the beginning of an option
|
|
||||||
// argument that includes a path, e.g., -Ifoo/bar.
|
|
||||||
shortOptPrefixes = []string{"-I", "-L", "-F"}
|
|
||||||
|
|
||||||
// longOptPrefixes are separate arguments that come before a path argument,
|
|
||||||
// e.g., -iquote foo/bar.
|
|
||||||
longOptPrefixes = []string{"-I", "-L", "-F", "-iquote", "-isystem"}
|
|
||||||
)
|
|
||||||
|
|
||||||
// options transforms package-relative paths in cgo options into repository-
|
|
||||||
// root-relative paths that Bazel can understand. For example, if a cgo file
|
|
||||||
// in //foo declares an include flag in its copts: "-Ibar", this method
|
|
||||||
// will transform that flag into "-Ifoo/bar".
|
|
||||||
func (g *Generator) options(opts packages.PlatformStrings, pkgRel string) packages.PlatformStrings {
|
|
||||||
fixPath := func(opt string) string {
|
|
||||||
if strings.HasPrefix(opt, "/") {
|
|
||||||
return opt
|
|
||||||
}
|
|
||||||
return path.Clean(path.Join(pkgRel, opt))
|
|
||||||
}
|
|
||||||
|
|
||||||
fixGroups := func(groups []string) ([]string, error) {
|
|
||||||
fixedGroups := make([]string, len(groups))
|
|
||||||
for i, group := range groups {
|
|
||||||
opts := strings.Split(group, packages.OptSeparator)
|
|
||||||
fixedOpts := make([]string, len(opts))
|
|
||||||
isPath := false
|
|
||||||
for j, opt := range opts {
|
|
||||||
if isPath {
|
|
||||||
opt = fixPath(opt)
|
|
||||||
isPath = false
|
|
||||||
goto next
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, short := range shortOptPrefixes {
|
|
||||||
if strings.HasPrefix(opt, short) && len(opt) > len(short) {
|
|
||||||
opt = short + fixPath(opt[len(short):])
|
|
||||||
goto next
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, long := range longOptPrefixes {
|
|
||||||
if opt == long {
|
|
||||||
isPath = true
|
|
||||||
goto next
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
next:
|
|
||||||
fixedOpts[j] = escapeOption(opt)
|
|
||||||
}
|
|
||||||
fixedGroups[i] = strings.Join(fixedOpts, " ")
|
|
||||||
}
|
|
||||||
|
|
||||||
return fixedGroups, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
opts, errs := opts.MapSlice(fixGroups)
|
|
||||||
if errs != nil {
|
|
||||||
log.Panicf("unexpected error when transforming options with pkg %q: %v", pkgRel, errs)
|
|
||||||
}
|
|
||||||
return opts
|
|
||||||
}
|
|
||||||
|
|
||||||
func escapeOption(opt string) string {
|
|
||||||
return strings.NewReplacer(
|
|
||||||
`\`, `\\`,
|
|
||||||
`'`, `\'`,
|
|
||||||
`"`, `\"`,
|
|
||||||
` `, `\ `,
|
|
||||||
"\t", "\\\t",
|
|
||||||
"\n", "\\\n",
|
|
||||||
"\r", "\\\r",
|
|
||||||
).Replace(opt)
|
|
||||||
}
|
|
||||||
|
|
||||||
func isEmpty(r bf.Expr) bool {
|
|
||||||
c, ok := r.(*bf.CallExpr)
|
|
||||||
return ok && len(c.List) == 1 // name
|
|
||||||
}
|
|
||||||
10 vendor/github.com/bazelbuild/bazel-gazelle/internal/label/BUILD generated vendored
@@ -2,17 +2,11 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library")
 
 go_library(
     name = "go_default_library",
-    srcs = [
-        "label.go",
-        "labeler.go",
-    ],
+    srcs = ["label.go"],
     importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/label",
     importpath = "github.com/bazelbuild/bazel-gazelle/internal/label",
     visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
-    deps = [
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
-    ],
+    deps = ["//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library"],
 )
 
 filegroup(
10 vendor/github.com/bazelbuild/bazel-gazelle/internal/label/label.go generated vendored
@@ -127,6 +127,16 @@ func (l Label) Abs(repo, pkg string) Label {
 	return Label{Repo: repo, Pkg: pkg, Name: l.Name}
 }
 
+func (l Label) Rel(repo, pkg string) Label {
+	if l.Relative || l.Repo != repo {
+		return l
+	}
+	if l.Pkg == pkg {
+		return Label{Name: l.Name, Relative: true}
+	}
+	return Label{Pkg: l.Pkg, Name: l.Name}
+}
+
 func (l Label) Equal(other Label) bool {
 	return l.Repo == other.Repo &&
 		l.Pkg == other.Pkg &&
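The new Rel method is the counterpart of Abs: it shortens a label for use inside a given repository and package. A small sketch of the behavior implied by the code above (not a test from the Gazelle repository):

// Sketch of the semantics only; Label fields Repo, Pkg, Name, Relative are as shown above.
func demoRel() {
	l := Label{Pkg: "foo/bar", Name: "baz"}

	same := l.Rel("", "foo/bar") // Label{Name: "baz", Relative: true}
	sib := l.Rel("", "foo")      // Label{Pkg: "foo/bar", Name: "baz"}
	other := l.Rel("other", "")  // returned unchanged: l.Repo ("") does not match "other"
	_, _, _ = same, sib, other
}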
52 vendor/github.com/bazelbuild/bazel-gazelle/internal/label/labeler.go generated vendored
@@ -1,52 +0,0 @@
-/* Copyright 2017 The Bazel Authors. All rights reserved.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-   http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-
-package label
-
-import (
-	"github.com/bazelbuild/bazel-gazelle/internal/config"
-	"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
-)
-
-// Labeler generates Bazel labels for rules, based on their locations
-// within the repository.
-type Labeler struct {
-	c *config.Config
-}
-
-func NewLabeler(c *config.Config) *Labeler {
-	return &Labeler{c}
-}
-
-func (l *Labeler) LibraryLabel(rel string) Label {
-	return Label{Pkg: rel, Name: config.DefaultLibName}
-}
-
-func (l *Labeler) TestLabel(rel string) Label {
-	return Label{Pkg: rel, Name: config.DefaultTestName}
-}
-
-func (l *Labeler) BinaryLabel(rel string) Label {
-	name := pathtools.RelBaseName(rel, l.c.GoPrefix, l.c.RepoRoot)
-	return Label{Pkg: rel, Name: name}
-}
-
-func (l *Labeler) ProtoLabel(rel, name string) Label {
-	return Label{Pkg: rel, Name: name + "_proto"}
-}
-
-func (l *Labeler) GoProtoLabel(rel, name string) Label {
-	return Label{Pkg: rel, Name: name + "_go_proto"}
-}
32 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/BUILD generated vendored Normal file
@@ -0,0 +1,32 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "go_default_library",
+    srcs = ["lang.go"],
+    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/language",
+    importpath = "github.com/bazelbuild/bazel-gazelle/internal/language",
+    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
+    deps = [
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
+    ],
+)
+
+filegroup(
+    name = "package-srcs",
+    srcs = glob(["**"]),
+    tags = ["automanaged"],
+    visibility = ["//visibility:private"],
+)
+
+filegroup(
+    name = "all-srcs",
+    srcs = [
+        ":package-srcs",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go:all-srcs",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto:all-srcs",
+    ],
+    tags = ["automanaged"],
+    visibility = ["//visibility:public"],
+)
48 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/BUILD generated vendored Normal file
@@ -0,0 +1,48 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_library")
+
+go_library(
+    name = "go_default_library",
+    srcs = [
+        "config.go",
+        "constants.go",
+        "fileinfo.go",
+        "fix.go",
+        "generate.go",
+        "kinds.go",
+        "known_go_imports.go",
+        "known_proto_imports.go",
+        "lang.go",
+        "package.go",
+        "resolve.go",
+        "std_package_list.go",
+    ],
+    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go",
+    importpath = "github.com/bazelbuild/bazel-gazelle/internal/language/go",
+    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
+    deps = [
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/flag:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
+        "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
+    ],
+)
+
+filegroup(
+    name = "package-srcs",
+    srcs = glob(["**"]),
+    tags = ["automanaged"],
+    visibility = ["//visibility:private"],
+)
+
+filegroup(
+    name = "all-srcs",
+    srcs = [":package-srcs"],
+    tags = ["automanaged"],
+    visibility = ["//visibility:public"],
+)
276 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/config.go generated vendored Normal file
@@ -0,0 +1,276 @@
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
import (
|
||||||
|
"flag"
|
||||||
|
"fmt"
|
||||||
|
"go/build"
|
||||||
|
"log"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
gzflag "github.com/bazelbuild/bazel-gazelle/internal/flag"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
bzl "github.com/bazelbuild/buildtools/build"
|
||||||
|
)
|
||||||
|
|
||||||
|
// goConfig contains configuration values related to Go rules.
|
||||||
|
type goConfig struct {
|
||||||
|
// genericTags is a set of tags that Gazelle considers to be true. Set with
|
||||||
|
// -build_tags or # gazelle:build_tags. Some tags, like gc, are always on.
|
||||||
|
genericTags map[string]bool
|
||||||
|
|
||||||
|
// prefix is a prefix of an import path, used to generate importpath
|
||||||
|
// attributes. Set with -go_prefix or # gazelle:prefix.
|
||||||
|
prefix string
|
||||||
|
|
||||||
|
// prefixRel is the package name of the directory where the prefix was set
|
||||||
|
// ("" for the root directory).
|
||||||
|
prefixRel string
|
||||||
|
|
||||||
|
// prefixSet indicates whether the prefix was set explicitly. It is an error
|
||||||
|
// to infer an importpath for a rule without setting the prefix.
|
||||||
|
prefixSet bool
|
||||||
|
|
||||||
|
// importMapPrefix is a prefix of a package path, used to generate importmap
|
||||||
|
// attributes. Set with # gazelle:importmap_prefix.
|
||||||
|
importMapPrefix string
|
||||||
|
|
||||||
|
// importMapPrefixRel is the package name of the directory where importMapPrefix
|
||||||
|
// was set ("" for the root directory).
|
||||||
|
importMapPrefixRel string
|
||||||
|
|
||||||
|
// depMode determines how imports that are not standard, indexed, or local
|
||||||
|
// (under the current prefix) should be resolved.
|
||||||
|
depMode dependencyMode
|
||||||
|
}
|
||||||
|
|
||||||
|
func newGoConfig() *goConfig {
|
||||||
|
gc := &goConfig{}
|
||||||
|
gc.preprocessTags()
|
||||||
|
return gc
|
||||||
|
}
|
||||||
|
|
||||||
|
func getGoConfig(c *config.Config) *goConfig {
|
||||||
|
return c.Exts[goName].(*goConfig)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (gc *goConfig) clone() *goConfig {
|
||||||
|
gcCopy := *gc
|
||||||
|
gcCopy.genericTags = make(map[string]bool)
|
||||||
|
for k, v := range gc.genericTags {
|
||||||
|
gcCopy.genericTags[k] = v
|
||||||
|
}
|
||||||
|
return &gcCopy
|
||||||
|
}
|
||||||
|
|
||||||
|
// preprocessTags adds some tags which are on by default before they are
|
||||||
|
// used to match files.
|
||||||
|
func (gc *goConfig) preprocessTags() {
|
||||||
|
if gc.genericTags == nil {
|
||||||
|
gc.genericTags = make(map[string]bool)
|
||||||
|
}
|
||||||
|
gc.genericTags["gc"] = true
|
||||||
|
}
|
||||||
|
|
||||||
|
// setBuildTags sets genericTags by parsing as a comma separated list. An
|
||||||
|
// error will be returned for tags that wouldn't be recognized by "go build".
|
||||||
|
// preprocessTags should be called before this.
|
||||||
|
func (gc *goConfig) setBuildTags(tags string) error {
|
||||||
|
if tags == "" {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for _, t := range strings.Split(tags, ",") {
|
||||||
|
if strings.HasPrefix(t, "!") {
|
||||||
|
return fmt.Errorf("build tags can't be negated: %s", t)
|
||||||
|
}
|
||||||
|
gc.genericTags[t] = true
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// dependencyMode determines how imports of packages outside of the prefix
|
||||||
|
// are resolved.
|
||||||
|
type dependencyMode int
|
||||||
|
|
||||||
|
const (
|
||||||
|
// externalMode indicates imports should be resolved to external dependencies
|
||||||
|
// (declared in WORKSPACE).
|
||||||
|
externalMode dependencyMode = iota
|
||||||
|
|
||||||
|
// vendorMode indicates imports should be resolved to libraries in the
|
||||||
|
// vendor directory.
|
||||||
|
vendorMode
|
||||||
|
)
|
||||||
|
|
||||||
|
func (m dependencyMode) String() string {
|
||||||
|
if m == externalMode {
|
||||||
|
return "external"
|
||||||
|
} else {
|
||||||
|
return "vendored"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
type externalFlag struct {
|
||||||
|
depMode *dependencyMode
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *externalFlag) Set(value string) error {
|
||||||
|
switch value {
|
||||||
|
case "external":
|
||||||
|
*f.depMode = externalMode
|
||||||
|
case "vendored":
|
||||||
|
*f.depMode = vendorMode
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("unrecognized dependency mode: %q", value)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f *externalFlag) String() string {
|
||||||
|
if f == nil || f.depMode == nil {
|
||||||
|
return "external"
|
||||||
|
}
|
||||||
|
return f.depMode.String()
|
||||||
|
}
|
||||||
|
|
||||||
|
type tagsFlag func(string) error
|
||||||
|
|
||||||
|
func (f tagsFlag) Set(value string) error {
|
||||||
|
return f(value)
|
||||||
|
}
|
||||||
|
|
||||||
|
func (f tagsFlag) String() string {
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *goLang) KnownDirectives() []string {
|
||||||
|
return []string{
|
||||||
|
"build_tags",
|
||||||
|
"importmap_prefix",
|
||||||
|
"prefix",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *goLang) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
|
||||||
|
gc := newGoConfig()
|
||||||
|
switch cmd {
|
||||||
|
case "fix", "update":
|
||||||
|
fs.Var(
|
||||||
|
tagsFlag(gc.setBuildTags),
|
||||||
|
"build_tags",
|
||||||
|
"comma-separated list of build tags. If not specified, Gazelle will not\n\tfilter sources with build constraints.")
|
||||||
|
fs.Var(
|
||||||
|
&gzflag.ExplicitFlag{Value: &gc.prefix, IsSet: &gc.prefixSet},
|
||||||
|
"go_prefix",
|
||||||
|
"prefix of import paths in the current workspace")
|
||||||
|
fs.Var(
|
||||||
|
&externalFlag{&gc.depMode},
|
||||||
|
"external",
|
||||||
|
"external: resolve external packages with go_repository\n\tvendored: resolve external packages as packages in vendor/")
|
||||||
|
}
|
||||||
|
c.Exts[goName] = gc
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *goLang) CheckFlags(fs *flag.FlagSet, c *config.Config) error {
|
||||||
|
// The base of the -go_prefix flag may be used to generate proto_library
|
||||||
|
// rule names when there are no .proto sources (empty rules to be deleted)
|
||||||
|
// or when the package name can't be determined.
|
||||||
|
// TODO(jayconrod): deprecate and remove this behavior.
|
||||||
|
gc := getGoConfig(c)
|
||||||
|
pc := proto.GetProtoConfig(c)
|
||||||
|
pc.GoPrefix = gc.prefix
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *goLang) Configure(c *config.Config, rel string, f *rule.File) {
|
||||||
|
var gc *goConfig
|
||||||
|
if raw, ok := c.Exts[goName]; !ok {
|
||||||
|
gc = newGoConfig()
|
||||||
|
} else {
|
||||||
|
gc = raw.(*goConfig).clone()
|
||||||
|
}
|
||||||
|
c.Exts[goName] = gc
|
||||||
|
|
||||||
|
if path.Base(rel) == "vendor" {
|
||||||
|
gc.importMapPrefix = inferImportPath(gc, rel)
|
||||||
|
gc.importMapPrefixRel = rel
|
||||||
|
gc.prefix = ""
|
||||||
|
gc.prefixRel = rel
|
||||||
|
}
|
||||||
|
|
||||||
|
if f != nil {
|
||||||
|
setPrefix := func(prefix string) {
|
||||||
|
if err := checkPrefix(prefix); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
gc.prefix = prefix
|
||||||
|
gc.prefixSet = true
|
||||||
|
gc.prefixRel = rel
|
||||||
|
}
|
||||||
|
for _, d := range f.Directives {
|
||||||
|
switch d.Key {
|
||||||
|
case "build_tags":
|
||||||
|
if err := gc.setBuildTags(d.Value); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
gc.preprocessTags()
|
||||||
|
gc.setBuildTags(d.Value)
|
||||||
|
case "importmap_prefix":
|
||||||
|
gc.importMapPrefix = d.Value
|
||||||
|
gc.importMapPrefixRel = rel
|
||||||
|
case "prefix":
|
||||||
|
setPrefix(d.Value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if !gc.prefixSet {
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
switch r.Kind() {
|
||||||
|
case "go_prefix":
|
||||||
|
args := r.Args()
|
||||||
|
if len(args) != 1 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
s, ok := args[0].(*bzl.StringExpr)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
setPrefix(s.Value)
|
||||||
|
|
||||||
|
case "gazelle":
|
||||||
|
if prefix := r.AttrString("prefix"); prefix != "" {
|
||||||
|
setPrefix(prefix)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkPrefix checks that a string may be used as a prefix. We forbid local
|
||||||
|
// (relative) imports and those beginning with "/". We allow the empty string,
|
||||||
|
// but generated rules must not have an empty importpath.
|
||||||
|
func checkPrefix(prefix string) error {
|
||||||
|
if strings.HasPrefix(prefix, "/") || build.IsLocalImport(prefix) {
|
||||||
|
return fmt.Errorf("invalid prefix: %q", prefix)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
27
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/constants.go
generated
vendored
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
const (
|
||||||
|
// legacyProtoFilegroupName is the name of a filegroup created in legacy
|
||||||
|
// mode for libraries that contained .pb.go files and .proto files.
|
||||||
|
legacyProtoFilegroupName = "go_default_library_protos"
|
||||||
|
// wellKnownTypesGoPrefix is the import path for the Go repository containing
|
||||||
|
// pre-generated code for the Well Known Types.
|
||||||
|
wellKnownTypesGoPrefix = "github.com/golang/protobuf"
|
||||||
|
// wellKnownTypesPkg is the package name for the predefined WKTs in rules_go.
|
||||||
|
wellKnownTypesPkg = "proto/wkt"
|
||||||
|
)
|
||||||
26
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/def.bzl
generated
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
load("@io_bazel_rules_go//go:def.bzl", "go_context", "go_rule")
|
||||||
|
|
||||||
|
def _std_package_list_impl(ctx):
|
||||||
|
go = go_context(ctx)
|
||||||
|
args = ctx.actions.args()
|
||||||
|
args.add_all([go.package_list, ctx.outputs.out])
|
||||||
|
ctx.actions.run(
|
||||||
|
inputs = [go.package_list],
|
||||||
|
outputs = [ctx.outputs.out],
|
||||||
|
executable = ctx.executable._gen_std_package_list,
|
||||||
|
arguments = [args],
|
||||||
|
mnemonic = "GoStdPackageList",
|
||||||
|
)
|
||||||
|
return [DefaultInfo(files = depset([ctx.outputs.out]))]
|
||||||
|
|
||||||
|
std_package_list = go_rule(
|
||||||
|
_std_package_list_impl,
|
||||||
|
attrs = {
|
||||||
|
"out": attr.output(mandatory = True),
|
||||||
|
"_gen_std_package_list": attr.label(
|
||||||
|
default = "//internal/language/go/gen_std_package_list",
|
||||||
|
cfg = "host",
|
||||||
|
executable = True,
|
||||||
|
),
|
||||||
|
},
|
||||||
|
)
|
||||||
675
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/fileinfo.go
generated
vendored
Normal file
@@ -0,0 +1,675 @@
|
|||||||
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"bytes"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
"go/parser"
|
||||||
|
"go/token"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strconv"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
"unicode/utf8"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
)
|
||||||
|
|
||||||
|
// fileInfo holds information used to decide how to build a file. This
|
||||||
|
// information comes from the file's name, from package and import declarations
|
||||||
|
// (in .go files), and from +build and cgo comments.
|
||||||
|
type fileInfo struct {
|
||||||
|
path string
|
||||||
|
name string
|
||||||
|
|
||||||
|
// ext is the type of file, based on extension.
|
||||||
|
ext ext
|
||||||
|
|
||||||
|
// packageName is the Go package name of a .go file, without the
|
||||||
|
// "_test" suffix if it was present. It is empty for non-Go files.
|
||||||
|
packageName string
|
||||||
|
|
||||||
|
// importPath is the canonical import path for this file's package.
|
||||||
|
// This may be read from a package comment (in Go) or a go_package
|
||||||
|
// option (in proto). This field is empty for files that don't specify
|
||||||
|
// an import path.
|
||||||
|
importPath string
|
||||||
|
|
||||||
|
// isTest is true if the file stem (the part before the extension)
|
||||||
|
// ends with "_test.go". This is never true for non-Go files.
|
||||||
|
isTest bool
|
||||||
|
|
||||||
|
// imports is a list of packages imported by a file. It does not include
|
||||||
|
// "C" or anything from the standard library.
|
||||||
|
imports []string
|
||||||
|
|
||||||
|
// isCgo is true for .go files that import "C".
|
||||||
|
isCgo bool
|
||||||
|
|
||||||
|
// goos and goarch contain the OS and architecture suffixes in the filename,
|
||||||
|
// if they were present.
|
||||||
|
goos, goarch string
|
||||||
|
|
||||||
|
// tags is a list of build tag lines. Each entry is the trimmed text of
|
||||||
|
// a line after a "+build" prefix.
|
||||||
|
tags []tagLine
|
||||||
|
|
||||||
|
// copts and clinkopts contain flags that are part of CFLAGS, CPPFLAGS,
|
||||||
|
// CXXFLAGS, and LDFLAGS directives in cgo comments.
|
||||||
|
copts, clinkopts []taggedOpts
|
||||||
|
|
||||||
|
// hasServices indicates whether a .proto file has service definitions.
|
||||||
|
hasServices bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// tagLine represents the space-separated disjunction of build tag groups
|
||||||
|
// in a line comment.
|
||||||
|
type tagLine []tagGroup
|
||||||
|
|
||||||
|
// check returns true if at least one of the tag groups is satisfied.
|
||||||
|
func (l tagLine) check(c *config.Config, os, arch string) bool {
|
||||||
|
if len(l) == 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, g := range l {
|
||||||
|
if g.check(c, os, arch) {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// tagGroup represents a comma-separated conjunction of build tags.
|
||||||
|
type tagGroup []string
|
||||||
|
|
||||||
|
// check returns true if all of the tags are true. Tags that start with
|
||||||
|
// "!" are negated (but "!!") is not allowed. Go release tags (e.g., "go1.8")
|
||||||
|
// are ignored. If the group contains an os or arch tag, but the os or arch
|
||||||
|
// parameters are empty, check returns false even if the tag is negated.
|
||||||
|
func (g tagGroup) check(c *config.Config, os, arch string) bool {
|
||||||
|
goConf := getGoConfig(c)
|
||||||
|
for _, t := range g {
|
||||||
|
if strings.HasPrefix(t, "!!") { // bad syntax, reject always
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
not := strings.HasPrefix(t, "!")
|
||||||
|
if not {
|
||||||
|
t = t[1:]
|
||||||
|
}
|
||||||
|
if isIgnoredTag(t) {
|
||||||
|
// Release tags are treated as "unknown" and are considered true,
|
||||||
|
// whether or not they are negated.
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
var match bool
|
||||||
|
if _, ok := rule.KnownOSSet[t]; ok {
|
||||||
|
if os == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
match = os == t
|
||||||
|
} else if _, ok := rule.KnownArchSet[t]; ok {
|
||||||
|
if arch == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
match = arch == t
|
||||||
|
} else {
|
||||||
|
match = goConf.genericTags[t]
|
||||||
|
}
|
||||||
|
if not {
|
||||||
|
match = !match
|
||||||
|
}
|
||||||
|
if !match {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// taggedOpts is a list of compile or link options which should only be applied
|
||||||
|
// if the given set of build tags are satisfied. These options have already
|
||||||
|
// been tokenized using the same algorithm that "go build" uses, then joined
|
||||||
|
// with OptSeparator.
|
||||||
|
type taggedOpts struct {
|
||||||
|
tags tagLine
|
||||||
|
opts string
|
||||||
|
}
|
||||||
|
|
||||||
|
// optSeparator is a special character inserted between options that appeared
|
||||||
|
// together in a #cgo directive. This allows options to be split, modified,
|
||||||
|
// and escaped by other packages.
|
||||||
|
//
|
||||||
|
// It's important to keep options grouped together in the same string. For
|
||||||
|
// example, if we have "-framework IOKit" together in a #cgo directive,
|
||||||
|
// "-framework" shouldn't be treated as a separate string for the purposes of
|
||||||
|
// sorting and de-duplicating.
|
||||||
|
const optSeparator = "\x1D"
|
||||||
|
|
||||||
|
// ext indicates how a file should be treated, based on extension.
|
||||||
|
type ext int
|
||||||
|
|
||||||
|
const (
|
||||||
|
// unknownExt is applied to files that aren't buildable with Go.
|
||||||
|
unknownExt ext = iota
|
||||||
|
|
||||||
|
// goExt is applied to .go files.
|
||||||
|
goExt
|
||||||
|
|
||||||
|
// cExt is applied to C and C++ files.
|
||||||
|
cExt
|
||||||
|
|
||||||
|
// hExt is applied to header files. If cgo code is present, these may be
|
||||||
|
// C or C++ headers. If not, they are treated as Go assembly headers.
|
||||||
|
hExt
|
||||||
|
|
||||||
|
// sExt is applied to Go assembly files, ending with .s.
|
||||||
|
sExt
|
||||||
|
|
||||||
|
// csExt is applied to other assembly files, ending with .S. These are built
|
||||||
|
// with the C compiler if cgo code is present.
|
||||||
|
csExt
|
||||||
|
|
||||||
|
// protoExt is applied to .proto files.
|
||||||
|
protoExt
|
||||||
|
)
|
||||||
|
|
||||||
|
// fileNameInfo returns information that can be inferred from the name of
|
||||||
|
// a file. It does not read data from the file.
|
||||||
|
func fileNameInfo(path_ string) fileInfo {
|
||||||
|
name := filepath.Base(path_)
|
||||||
|
var ext ext
|
||||||
|
switch path.Ext(name) {
|
||||||
|
case ".go":
|
||||||
|
ext = goExt
|
||||||
|
case ".c", ".cc", ".cpp", ".cxx", ".m", ".mm":
|
||||||
|
ext = cExt
|
||||||
|
case ".h", ".hh", ".hpp", ".hxx":
|
||||||
|
ext = hExt
|
||||||
|
case ".s":
|
||||||
|
ext = sExt
|
||||||
|
case ".S":
|
||||||
|
ext = csExt
|
||||||
|
case ".proto":
|
||||||
|
ext = protoExt
|
||||||
|
default:
|
||||||
|
ext = unknownExt
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine test, goos, and goarch. This is intended to match the logic
|
||||||
|
// in goodOSArchFile in go/build.
|
||||||
|
var isTest bool
|
||||||
|
var goos, goarch string
|
||||||
|
l := strings.Split(name[:len(name)-len(path.Ext(name))], "_")
|
||||||
|
if len(l) >= 2 && l[len(l)-1] == "test" {
|
||||||
|
isTest = ext == goExt
|
||||||
|
l = l[:len(l)-1]
|
||||||
|
}
|
||||||
|
switch {
|
||||||
|
case len(l) >= 3 && rule.KnownOSSet[l[len(l)-2]] && rule.KnownArchSet[l[len(l)-1]]:
|
||||||
|
goos = l[len(l)-2]
|
||||||
|
goarch = l[len(l)-1]
|
||||||
|
case len(l) >= 2 && rule.KnownOSSet[l[len(l)-1]]:
|
||||||
|
goos = l[len(l)-1]
|
||||||
|
case len(l) >= 2 && rule.KnownArchSet[l[len(l)-1]]:
|
||||||
|
goarch = l[len(l)-1]
|
||||||
|
}
|
||||||
|
|
||||||
|
return fileInfo{
|
||||||
|
path: path_,
|
||||||
|
name: name,
|
||||||
|
ext: ext,
|
||||||
|
isTest: isTest,
|
||||||
|
goos: goos,
|
||||||
|
goarch: goarch,
|
||||||
|
}
|
||||||
|
}
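
To make the filename conventions concrete, a hedged sketch as a hypothetical test in the same package (not part of the vendored source): a name such as "foo_linux_amd64_test.go" yields the test flag plus OS and architecture suffixes.

package golang

import "testing"

// TestFileNameInfoSketch: the "_test", "_linux", and "_amd64" components are
// peeled off the file stem in that order.
func TestFileNameInfoSketch(t *testing.T) {
	fi := fileNameInfo("pkg/foo_linux_amd64_test.go")
	if fi.ext != goExt || !fi.isTest || fi.goos != "linux" || fi.goarch != "amd64" {
		t.Errorf("unexpected fileInfo: %+v", fi)
	}
}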
|
||||||
|
|
||||||
|
// otherFileInfo returns information about a non-.go file. It will parse
|
||||||
|
// part of the file to determine build tags. If the file can't be read, an
|
||||||
|
// error will be logged, and partial information will be returned.
|
||||||
|
func otherFileInfo(path string) fileInfo {
|
||||||
|
info := fileNameInfo(path)
|
||||||
|
if info.ext == unknownExt {
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
tags, err := readTags(info.path)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("%s: error reading file: %v", info.path, err)
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
info.tags = tags
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
// goFileInfo returns information about a .go file. It will parse part of the
|
||||||
|
// file to determine the package name, imports, and build constraints.
|
||||||
|
// If the file can't be read, an error will be logged, and partial information
|
||||||
|
// will be returned.
|
||||||
|
// This function is intended to match go/build.Context.Import.
|
||||||
|
// TODO(#53): extract canonical import path
|
||||||
|
func goFileInfo(path, rel string) fileInfo {
|
||||||
|
info := fileNameInfo(path)
|
||||||
|
fset := token.NewFileSet()
|
||||||
|
pf, err := parser.ParseFile(fset, info.path, nil, parser.ImportsOnly|parser.ParseComments)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("%s: error reading go file: %v", info.path, err)
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
info.packageName = pf.Name.Name
|
||||||
|
if info.isTest && strings.HasSuffix(info.packageName, "_test") {
|
||||||
|
info.packageName = info.packageName[:len(info.packageName)-len("_test")]
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, decl := range pf.Decls {
|
||||||
|
d, ok := decl.(*ast.GenDecl)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
for _, dspec := range d.Specs {
|
||||||
|
spec, ok := dspec.(*ast.ImportSpec)
|
||||||
|
if !ok {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
quoted := spec.Path.Value
|
||||||
|
path, err := strconv.Unquote(quoted)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("%s: error reading go file: %v", info.path, err)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if path == "C" {
|
||||||
|
if info.isTest {
|
||||||
|
log.Printf("%s: warning: use of cgo in test not supported", info.path)
|
||||||
|
}
|
||||||
|
info.isCgo = true
|
||||||
|
cg := spec.Doc
|
||||||
|
if cg == nil && len(d.Specs) == 1 {
|
||||||
|
cg = d.Doc
|
||||||
|
}
|
||||||
|
if cg != nil {
|
||||||
|
if err := saveCgo(&info, rel, cg); err != nil {
|
||||||
|
log.Printf("%s: error reading go file: %v", info.path, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
info.imports = append(info.imports, path)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tags, err := readTags(info.path)
|
||||||
|
if err != nil {
|
||||||
|
log.Printf("%s: error reading go file: %v", info.path, err)
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
info.tags = tags
|
||||||
|
|
||||||
|
return info
|
||||||
|
}
|
||||||
|
|
||||||
|
// saveCgo extracts CFLAGS, CPPFLAGS, CXXFLAGS, and LDFLAGS directives
|
||||||
|
// from a comment above a "C" import. This is intended to match logic in
|
||||||
|
// go/build.Context.saveCgo.
|
||||||
|
func saveCgo(info *fileInfo, rel string, cg *ast.CommentGroup) error {
|
||||||
|
text := cg.Text()
|
||||||
|
for _, line := range strings.Split(text, "\n") {
|
||||||
|
orig := line
|
||||||
|
|
||||||
|
// Line is
|
||||||
|
// #cgo [GOOS/GOARCH...] LDFLAGS: stuff
|
||||||
|
//
|
||||||
|
line = strings.TrimSpace(line)
|
||||||
|
if len(line) < 5 || line[:4] != "#cgo" || (line[4] != ' ' && line[4] != '\t') {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split at colon.
|
||||||
|
line = strings.TrimSpace(line[4:])
|
||||||
|
i := strings.Index(line, ":")
|
||||||
|
if i < 0 {
|
||||||
|
return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
|
||||||
|
}
|
||||||
|
line, optstr := strings.TrimSpace(line[:i]), strings.TrimSpace(line[i+1:])
|
||||||
|
|
||||||
|
// Parse tags and verb.
|
||||||
|
f := strings.Fields(line)
|
||||||
|
if len(f) < 1 {
|
||||||
|
return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
|
||||||
|
}
|
||||||
|
verb := f[len(f)-1]
|
||||||
|
tags := parseTagsInGroups(f[:len(f)-1])
|
||||||
|
|
||||||
|
// Parse options.
|
||||||
|
opts, err := splitQuoted(optstr)
|
||||||
|
if err != nil {
|
||||||
|
return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
|
||||||
|
}
|
||||||
|
var ok bool
|
||||||
|
for i, opt := range opts {
|
||||||
|
if opt, ok = expandSrcDir(opt, rel); !ok {
|
||||||
|
return fmt.Errorf("%s: malformed #cgo argument: %s", info.path, orig)
|
||||||
|
}
|
||||||
|
opts[i] = opt
|
||||||
|
}
|
||||||
|
joinedStr := strings.Join(opts, optSeparator)
|
||||||
|
|
||||||
|
// Add tags to appropriate list.
|
||||||
|
switch verb {
|
||||||
|
case "CFLAGS", "CPPFLAGS", "CXXFLAGS":
|
||||||
|
info.copts = append(info.copts, taggedOpts{tags, joinedStr})
|
||||||
|
case "LDFLAGS":
|
||||||
|
info.clinkopts = append(info.clinkopts, taggedOpts{tags, joinedStr})
|
||||||
|
case "pkg-config":
|
||||||
|
return fmt.Errorf("%s: pkg-config not supported: %s", info.path, orig)
|
||||||
|
default:
|
||||||
|
return fmt.Errorf("%s: invalid #cgo verb: %s", info.path, orig)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// splitQuoted splits the string s around each instance of one or more consecutive
|
||||||
|
// white space characters while taking into account quotes and escaping, and
|
||||||
|
// returns an array of substrings of s or an empty list if s contains only white space.
|
||||||
|
// Single quotes and double quotes are recognized to prevent splitting within the
|
||||||
|
// quoted region, and are removed from the resulting substrings. If a quote in s
|
||||||
|
// isn't closed err will be set and r will have the unclosed argument as the
|
||||||
|
// last element. The backslash is used for escaping.
|
||||||
|
//
|
||||||
|
// For example, the following string:
|
||||||
|
//
|
||||||
|
// a b:"c d" 'e''f' "g\""
|
||||||
|
//
|
||||||
|
// Would be parsed as:
|
||||||
|
//
|
||||||
|
// []string{"a", "b:c d", "ef", `g"`}
|
||||||
|
//
|
||||||
|
// Copied from go/build.splitQuoted
|
||||||
|
func splitQuoted(s string) (r []string, err error) {
|
||||||
|
var args []string
|
||||||
|
arg := make([]rune, len(s))
|
||||||
|
escaped := false
|
||||||
|
quoted := false
|
||||||
|
quote := '\x00'
|
||||||
|
i := 0
|
||||||
|
for _, rune := range s {
|
||||||
|
switch {
|
||||||
|
case escaped:
|
||||||
|
escaped = false
|
||||||
|
case rune == '\\':
|
||||||
|
escaped = true
|
||||||
|
continue
|
||||||
|
case quote != '\x00':
|
||||||
|
if rune == quote {
|
||||||
|
quote = '\x00'
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
case rune == '"' || rune == '\'':
|
||||||
|
quoted = true
|
||||||
|
quote = rune
|
||||||
|
continue
|
||||||
|
case unicode.IsSpace(rune):
|
||||||
|
if quoted || i > 0 {
|
||||||
|
quoted = false
|
||||||
|
args = append(args, string(arg[:i]))
|
||||||
|
i = 0
|
||||||
|
}
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
arg[i] = rune
|
||||||
|
i++
|
||||||
|
}
|
||||||
|
if quoted || i > 0 {
|
||||||
|
args = append(args, string(arg[:i]))
|
||||||
|
}
|
||||||
|
if quote != 0 {
|
||||||
|
err = errors.New("unclosed quote")
|
||||||
|
} else if escaped {
|
||||||
|
err = errors.New("unfinished escaping")
|
||||||
|
}
|
||||||
|
return args, err
|
||||||
|
}
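
The example in the doc comment above can be exercised directly; the following is a hypothetical test in the same package (an assumption, not upstream code):

package golang

import "testing"

// TestSplitQuotedSketch mirrors the doc comment's example: quotes group
// words and are stripped from the resulting substrings.
func TestSplitQuotedSketch(t *testing.T) {
	got, err := splitQuoted(`a b:"c d" 'e''f' "g\""`)
	if err != nil {
		t.Fatal(err)
	}
	want := []string{"a", "b:c d", "ef", `g"`}
	if len(got) != len(want) {
		t.Fatalf("got %v, want %v", got, want)
	}
	for i := range want {
		if got[i] != want[i] {
			t.Errorf("arg %d: got %q, want %q", i, got[i], want[i])
		}
	}
}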
|
||||||
|
|
||||||
|
// expandSrcDir expands any occurrence of ${SRCDIR}, making sure
|
||||||
|
// the result is safe for the shell.
|
||||||
|
//
|
||||||
|
// Copied from go/build.expandSrcDir
|
||||||
|
func expandSrcDir(str string, srcdir string) (string, bool) {
|
||||||
|
// "\" delimited paths cause safeCgoName to fail
|
||||||
|
// so convert native paths with a different delimiter
|
||||||
|
// to "/" before starting (eg: on windows).
|
||||||
|
srcdir = filepath.ToSlash(srcdir)
|
||||||
|
|
||||||
|
// Spaces are tolerated in ${SRCDIR}, but not anywhere else.
|
||||||
|
chunks := strings.Split(str, "${SRCDIR}")
|
||||||
|
if len(chunks) < 2 {
|
||||||
|
return str, safeCgoName(str, false)
|
||||||
|
}
|
||||||
|
ok := true
|
||||||
|
for _, chunk := range chunks {
|
||||||
|
ok = ok && (chunk == "" || safeCgoName(chunk, false))
|
||||||
|
}
|
||||||
|
ok = ok && (srcdir == "" || safeCgoName(srcdir, true))
|
||||||
|
res := strings.Join(chunks, srcdir)
|
||||||
|
return res, ok && res != ""
|
||||||
|
}
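
A small sketch of ${SRCDIR} expansion, again as a hypothetical same-package test (names assumed): the placeholder is replaced with the slash-converted source directory, and arguments containing characters outside the safe set are rejected.

package golang

import "testing"

// TestExpandSrcDirSketch: ${SRCDIR} is substituted, and unsafe characters
// cause the second return value to be false.
func TestExpandSrcDirSketch(t *testing.T) {
	got, ok := expandSrcDir("-I${SRCDIR}/include", "pkg/foo")
	if !ok || got != "-Ipkg/foo/include" {
		t.Errorf("got %q, ok=%v", got, ok)
	}
	if _, ok := expandSrcDir("-I${SRCDIR}/`evil`", "pkg/foo"); ok {
		t.Error("expected unsafe expansion to be rejected")
	}
}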
|
||||||
|
|
||||||
|
// NOTE: $ is not safe for the shell, but it is allowed here because of linker options like -Wl,$ORIGIN.
|
||||||
|
// We never pass these arguments to a shell (just to programs we construct argv for), so this should be okay.
|
||||||
|
// See golang.org/issue/6038.
|
||||||
|
// The @ is for OS X. See golang.org/issue/13720.
|
||||||
|
// The % is for Jenkins. See golang.org/issue/16959.
|
||||||
|
const safeString = "+-.,/0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz:$@%"
|
||||||
|
const safeSpaces = " "
|
||||||
|
|
||||||
|
var safeBytes = []byte(safeSpaces + safeString)
|
||||||
|
|
||||||
|
// Copied from go/build.safeCgoName
|
||||||
|
func safeCgoName(s string, spaces bool) bool {
|
||||||
|
if s == "" {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
safe := safeBytes
|
||||||
|
if !spaces {
|
||||||
|
safe = safe[len(safeSpaces):]
|
||||||
|
}
|
||||||
|
for i := 0; i < len(s); i++ {
|
||||||
|
if c := s[i]; c < utf8.RuneSelf && bytes.IndexByte(safe, c) < 0 {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// readTags reads and extracts build tags from the block of comments
|
||||||
|
// and blank lines at the start of a file which is separated from the
|
||||||
|
// rest of the file by a blank line. Each string in the returned slice
|
||||||
|
// is the trimmed text of a line after a "+build" prefix.
|
||||||
|
// Based on go/build.Context.shouldBuild.
|
||||||
|
func readTags(path string) ([]tagLine, error) {
|
||||||
|
f, err := os.Open(path)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
scanner := bufio.NewScanner(f)
|
||||||
|
|
||||||
|
// Pass 1: Identify leading run of // comments and blank lines,
|
||||||
|
// which must be followed by a blank line.
|
||||||
|
var lines []string
|
||||||
|
end := 0
|
||||||
|
for scanner.Scan() {
|
||||||
|
line := strings.TrimSpace(scanner.Text())
|
||||||
|
if line == "" {
|
||||||
|
end = len(lines)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.HasPrefix(line, "//") {
|
||||||
|
lines = append(lines, line[len("//"):])
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
break
|
||||||
|
}
|
||||||
|
if err := scanner.Err(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
lines = lines[:end]
|
||||||
|
|
||||||
|
// Pass 2: Process each line in the run.
|
||||||
|
var tagLines []tagLine
|
||||||
|
for _, line := range lines {
|
||||||
|
fields := strings.Fields(line)
|
||||||
|
if len(fields) > 0 && fields[0] == "+build" {
|
||||||
|
tagLines = append(tagLines, parseTagsInGroups(fields[1:]))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return tagLines, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseTagsInGroups(groups []string) tagLine {
|
||||||
|
var l tagLine
|
||||||
|
for _, g := range groups {
|
||||||
|
l = append(l, tagGroup(strings.Split(g, ",")))
|
||||||
|
}
|
||||||
|
return l
|
||||||
|
}
|
||||||
|
|
||||||
|
func isOSArchSpecific(info fileInfo, cgoTags tagLine) (osSpecific, archSpecific bool) {
|
||||||
|
if info.goos != "" {
|
||||||
|
osSpecific = true
|
||||||
|
}
|
||||||
|
if info.goarch != "" {
|
||||||
|
archSpecific = true
|
||||||
|
}
|
||||||
|
lines := info.tags
|
||||||
|
if len(cgoTags) > 0 {
|
||||||
|
lines = append(lines, cgoTags)
|
||||||
|
}
|
||||||
|
for _, line := range lines {
|
||||||
|
for _, group := range line {
|
||||||
|
for _, tag := range group {
|
||||||
|
if strings.HasPrefix(tag, "!") {
|
||||||
|
tag = tag[1:]
|
||||||
|
}
|
||||||
|
_, osOk := rule.KnownOSSet[tag]
|
||||||
|
if osOk {
|
||||||
|
osSpecific = true
|
||||||
|
}
|
||||||
|
_, archOk := rule.KnownArchSet[tag]
|
||||||
|
if archOk {
|
||||||
|
archSpecific = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return osSpecific, archSpecific
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkConstraints determines whether build constraints are satisfied on
|
||||||
|
// a given platform.
|
||||||
|
//
|
||||||
|
// The first few arguments describe the platform. genericTags is the set
|
||||||
|
// of build tags that are true on all platforms. os and arch are the platform
|
||||||
|
// GOOS and GOARCH strings. If os or arch is empty, checkConstraints will
|
||||||
|
// return false in the presence of OS and architecture constraints, even
|
||||||
|
// if they are negated.
|
||||||
|
//
|
||||||
|
// The remaining arguments describe the file being tested. All of these may
|
||||||
|
// be empty or nil. osSuffix and archSuffix are filename suffixes. fileTags
|
||||||
|
// is a list of tags from +build comments found near the top of the file. cgoTags
|
||||||
|
// is an extra set of tags in a #cgo directive.
|
||||||
|
func checkConstraints(c *config.Config, os, arch, osSuffix, archSuffix string, fileTags []tagLine, cgoTags tagLine) bool {
|
||||||
|
if osSuffix != "" && osSuffix != os || archSuffix != "" && archSuffix != arch {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, l := range fileTags {
|
||||||
|
if !l.check(c, os, arch) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(cgoTags) > 0 && !cgoTags.check(c, os, arch) {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
|
||||||
|
// isIgnoredTag returns whether the tag is "cgo", "race", "msan", or a release tag.
|
||||||
|
// Release tags match the pattern "go[0-9]\.[0-9]+".
|
||||||
|
// Gazelle won't consider whether an ignored tag is satisfied when evaluating
|
||||||
|
// build constraints for a file.
|
||||||
|
func isIgnoredTag(tag string) bool {
|
||||||
|
if tag == "cgo" || tag == "race" || tag == "msan" {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
if len(tag) < 5 || !strings.HasPrefix(tag, "go") {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
if tag[2] < '0' || tag[2] > '9' || tag[3] != '.' {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
for _, c := range tag[4:] {
|
||||||
|
if c < '0' || c > '9' {
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
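
To show which tags the constraint evaluator treats as always satisfied, a hypothetical same-package test (an illustrative assumption):

package golang

import "testing"

// TestIsIgnoredTagSketch: "cgo", "race", "msan", and release tags such as
// "go1.8" are ignored; ordinary platform or custom tags are not.
func TestIsIgnoredTagSketch(t *testing.T) {
	for tag, want := range map[string]bool{
		"cgo":    true,
		"race":   true,
		"go1.8":  true,
		"go1.10": true,
		"linux":  false,
		"gofoo":  false,
	} {
		if got := isIgnoredTag(tag); got != want {
			t.Errorf("isIgnoredTag(%q) = %v, want %v", tag, got, want)
		}
	}
}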
|
||||||
|
|
||||||
|
// protoFileInfo extracts metadata from a proto file. The proto extension
|
||||||
|
// already "parses" these and stores metadata in proto.FileInfo, so this is
|
||||||
|
// just processing relevant options.
|
||||||
|
func protoFileInfo(path_ string, protoInfo proto.FileInfo) fileInfo {
|
||||||
|
info := fileNameInfo(path_)
|
||||||
|
|
||||||
|
// Look for "option go_package". If there's no / in the package option, then
|
||||||
|
// it's just a simple package name, not a full import path.
|
||||||
|
for _, opt := range protoInfo.Options {
|
||||||
|
if opt.Key != "go_package" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if strings.LastIndexByte(opt.Value, '/') == -1 {
|
||||||
|
info.packageName = opt.Value
|
||||||
|
} else {
|
||||||
|
if i := strings.LastIndexByte(opt.Value, ';'); i != -1 {
|
||||||
|
info.importPath = opt.Value[:i]
|
||||||
|
info.packageName = opt.Value[i+1:]
|
||||||
|
} else {
|
||||||
|
info.importPath = opt.Value
|
||||||
|
info.packageName = path.Base(opt.Value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Set the Go package name from the proto package name if there was no
|
||||||
|
// option go_package.
|
||||||
|
if info.packageName == "" && protoInfo.PackageName != "" {
|
||||||
|
info.packageName = strings.Replace(protoInfo.PackageName, ".", "_", -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
info.imports = protoInfo.Imports
|
||||||
|
info.hasServices = protoInfo.HasServices
|
||||||
|
return info
|
||||||
|
}
|
||||||
254
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/fix.go
generated
vendored
Normal file
@@ -0,0 +1,254 @@
|
|||||||
|
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
bzl "github.com/bazelbuild/buildtools/build"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (_ *goLang) Fix(c *config.Config, f *rule.File) {
|
||||||
|
migrateLibraryEmbed(c, f)
|
||||||
|
migrateGrpcCompilers(c, f)
|
||||||
|
flattenSrcs(c, f)
|
||||||
|
squashCgoLibrary(c, f)
|
||||||
|
squashXtest(c, f)
|
||||||
|
removeLegacyProto(c, f)
|
||||||
|
removeLegacyGazelle(c, f)
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateLibraryEmbed converts "library" attributes to "embed" attributes,
|
||||||
|
// preserving comments. This only applies to Go rules, and only if there is
|
||||||
|
// no keep comment on "library" and no existing "embed" attribute.
|
||||||
|
func migrateLibraryEmbed(c *config.Config, f *rule.File) {
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
if !isGoRule(r.Kind()) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
libExpr := r.Attr("library")
|
||||||
|
if libExpr == nil || rule.ShouldKeep(libExpr) || r.Attr("embed") != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.DelAttr("library")
|
||||||
|
r.SetAttr("embed", &bzl.ListExpr{List: []bzl.Expr{libExpr}})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// migrateGrpcCompilers converts "go_grpc_library" rules into "go_proto_library"
|
||||||
|
// rules with a "compilers" attribute.
|
||||||
|
func migrateGrpcCompilers(c *config.Config, f *rule.File) {
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
if r.Kind() != "go_grpc_library" || r.ShouldKeep() || r.Attr("compilers") != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
r.SetKind("go_proto_library")
|
||||||
|
r.SetAttr("compilers", []string{config.GrpcCompilerLabel})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// squashCgoLibrary removes cgo_library rules with the default name and
|
||||||
|
// merges their attributes with go_library with the default name. If no
|
||||||
|
// go_library rule exists, a new one will be created.
|
||||||
|
//
|
||||||
|
// Note that the library attribute is disregarded, so cgo_library and
|
||||||
|
// go_library attributes will be squashed even if the cgo_library was unlinked.
|
||||||
|
// MergeFile will remove unused values and attributes later.
|
||||||
|
func squashCgoLibrary(c *config.Config, f *rule.File) {
|
||||||
|
// Find the default cgo_library and go_library rules.
|
||||||
|
var cgoLibrary, goLibrary *rule.Rule
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
if r.Kind() == "cgo_library" && r.Name() == config.DefaultCgoLibName && !r.ShouldKeep() {
|
||||||
|
if cgoLibrary != nil {
|
||||||
|
log.Printf("%s: when fixing existing file, multiple cgo_library rules with default name found", f.Path)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
cgoLibrary = r
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if r.Kind() == "go_library" && r.Name() == config.DefaultLibName {
|
||||||
|
if goLibrary != nil {
|
||||||
|
log.Printf("%s: when fixing existing file, multiple go_library rules with default name referencing cgo_library found", f.Path)
|
||||||
|
}
|
||||||
|
goLibrary = r
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if cgoLibrary == nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !c.ShouldFix {
|
||||||
|
log.Printf("%s: cgo_library is deprecated. Run 'gazelle fix' to squash with go_library.", f.Path)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if goLibrary == nil {
|
||||||
|
cgoLibrary.SetKind("go_library")
|
||||||
|
cgoLibrary.SetName(config.DefaultLibName)
|
||||||
|
cgoLibrary.SetAttr("cgo", true)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := rule.SquashRules(cgoLibrary, goLibrary, f.Path); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
goLibrary.DelAttr("embed")
|
||||||
|
goLibrary.SetAttr("cgo", true)
|
||||||
|
cgoLibrary.Delete()
|
||||||
|
}
|
||||||
|
|
||||||
|
// squashXtest removes go_test rules with the default external name and merges
|
||||||
|
// their attributes with a go_test rule with the default internal name. If
|
||||||
|
// no internal go_test rule exists, a new one will be created (effectively
|
||||||
|
// renaming the old rule).
|
||||||
|
func squashXtest(c *config.Config, f *rule.File) {
|
||||||
|
// Search for internal and external tests.
|
||||||
|
var itest, xtest *rule.Rule
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
if r.Kind() != "go_test" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if r.Name() == config.DefaultTestName {
|
||||||
|
itest = r
|
||||||
|
} else if r.Name() == config.DefaultXTestName {
|
||||||
|
xtest = r
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if xtest == nil || xtest.ShouldKeep() || (itest != nil && itest.ShouldKeep()) {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !c.ShouldFix {
|
||||||
|
if itest == nil {
|
||||||
|
log.Printf("%s: go_default_xtest is no longer necessary. Run 'gazelle fix' to rename to go_default_test.", f.Path)
|
||||||
|
} else {
|
||||||
|
log.Printf("%s: go_default_xtest is no longer necessary. Run 'gazelle fix' to squash with go_default_test.", f.Path)
|
||||||
|
}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// If there was no internal test, we can just rename the external test.
|
||||||
|
if itest == nil {
|
||||||
|
xtest.SetName(config.DefaultTestName)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Attempt to squash.
|
||||||
|
if err := rule.SquashRules(xtest, itest, f.Path); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
xtest.Delete()
|
||||||
|
}
|
||||||
|
|
||||||
|
// flattenSrcs transforms srcs attributes structured as concatenations of
|
||||||
|
// lists and selects (generated from PlatformStrings; see
|
||||||
|
// extractPlatformStringsExprs for matching details) into a sorted,
|
||||||
|
// de-duplicated list. Comments are accumulated and de-duplicated across
|
||||||
|
// duplicate expressions.
|
||||||
|
func flattenSrcs(c *config.Config, f *rule.File) {
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
if !isGoRule(r.Kind()) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
oldSrcs := r.Attr("srcs")
|
||||||
|
if oldSrcs == nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
flatSrcs := rule.FlattenExpr(oldSrcs)
|
||||||
|
if flatSrcs != oldSrcs {
|
||||||
|
r.SetAttr("srcs", flatSrcs)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeLegacyProto removes uses of the old proto rules. It deletes loads
|
||||||
|
// from go_proto_library.bzl. It deletes proto filegroups. It removes
|
||||||
|
// go_proto_library attributes which are no longer recognized. New rules
|
||||||
|
// are generated in place of the deleted rules, but attributes and comments
|
||||||
|
// are not migrated.
|
||||||
|
func removeLegacyProto(c *config.Config, f *rule.File) {
|
||||||
|
// Don't fix if the proto mode was set to something other than the default.
|
||||||
|
pc := proto.GetProtoConfig(c)
|
||||||
|
if pc.Mode != proto.DefaultMode {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Scan for definitions to delete.
|
||||||
|
var protoLoads []*rule.Load
|
||||||
|
for _, l := range f.Loads {
|
||||||
|
if l.Name() == "@io_bazel_rules_go//proto:go_proto_library.bzl" {
|
||||||
|
protoLoads = append(protoLoads, l)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
var protoFilegroups, protoRules []*rule.Rule
|
||||||
|
for _, r := range f.Rules {
|
||||||
|
if r.Kind() == "filegroup" && r.Name() == legacyProtoFilegroupName {
|
||||||
|
protoFilegroups = append(protoFilegroups, r)
|
||||||
|
}
|
||||||
|
if r.Kind() == "go_proto_library" {
|
||||||
|
protoRules = append(protoRules, r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(protoLoads)+len(protoFilegroups) == 0 {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if !c.ShouldFix {
|
||||||
|
log.Printf("%s: go_proto_library.bzl is deprecated. Run 'gazelle fix' to replace old rules.", f.Path)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete legacy proto loads and filegroups. Only delete go_proto_library
|
||||||
|
// rules if we deleted a load.
|
||||||
|
for _, l := range protoLoads {
|
||||||
|
l.Delete()
|
||||||
|
}
|
||||||
|
for _, r := range protoFilegroups {
|
||||||
|
r.Delete()
|
||||||
|
}
|
||||||
|
if len(protoLoads) > 0 {
|
||||||
|
for _, r := range protoRules {
|
||||||
|
r.Delete()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// removeLegacyGazelle removes loads of the "gazelle" macro from
|
||||||
|
// @io_bazel_rules_go//go:def.bzl. The definition has moved to
|
||||||
|
// @bazel_gazelle//:def.bzl, and the old one will be deleted soon.
|
||||||
|
func removeLegacyGazelle(c *config.Config, f *rule.File) {
|
||||||
|
for _, l := range f.Loads {
|
||||||
|
if l.Name() == "@io_bazel_rules_go//go:def.bzl" && l.Has("gazelle") {
|
||||||
|
l.Remove("gazelle")
|
||||||
|
if l.IsEmpty() {
|
||||||
|
l.Delete()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func isGoRule(kind string) bool {
|
||||||
|
return kind == "go_library" ||
|
||||||
|
kind == "go_binary" ||
|
||||||
|
kind == "go_test" ||
|
||||||
|
kind == "go_proto_library" ||
|
||||||
|
kind == "go_grpc_library"
|
||||||
|
}
|
||||||
570
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/generate.go
generated
vendored
Normal file
@@ -0,0 +1,570 @@
|
|||||||
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/build"
|
||||||
|
"log"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
"sync"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (gl *goLang) GenerateRules(c *config.Config, dir, rel string, f *rule.File, subdirs, regularFiles, genFiles []string, otherEmpty, otherGen []*rule.Rule) (empty, gen []*rule.Rule) {
|
||||||
|
// Extract information about proto files. We need this to exclude .pb.go
|
||||||
|
// files and generate go_proto_library rules.
|
||||||
|
gc := getGoConfig(c)
|
||||||
|
pc := proto.GetProtoConfig(c)
|
||||||
|
var protoRuleNames []string
|
||||||
|
protoPackages := make(map[string]proto.Package)
|
||||||
|
protoFileInfo := make(map[string]proto.FileInfo)
|
||||||
|
for _, r := range otherGen {
|
||||||
|
if r.Kind() != "proto_library" {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
pkg := r.PrivateAttr(proto.PackageKey).(proto.Package)
|
||||||
|
protoPackages[r.Name()] = pkg
|
||||||
|
for name, info := range pkg.Files {
|
||||||
|
protoFileInfo[name] = info
|
||||||
|
}
|
||||||
|
protoRuleNames = append(protoRuleNames, r.Name())
|
||||||
|
}
|
||||||
|
sort.Strings(protoRuleNames)
|
||||||
|
var emptyProtoRuleNames []string
|
||||||
|
for _, r := range otherEmpty {
|
||||||
|
if r.Kind() == "proto_library" {
|
||||||
|
emptyProtoRuleNames = append(emptyProtoRuleNames, r.Name())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If proto rule generation is enabled, exclude .pb.go files that correspond
|
||||||
|
// to any .proto files present.
|
||||||
|
if !pc.Mode.ShouldIncludePregeneratedFiles() {
|
||||||
|
keep := func(f string) bool {
|
||||||
|
if strings.HasSuffix(f, ".pb.go") {
|
||||||
|
_, ok := protoFileInfo[strings.TrimSuffix(f, ".pb.go")+".proto"]
|
||||||
|
return !ok
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
filterFiles(®ularFiles, keep)
|
||||||
|
filterFiles(&genFiles, keep)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Split regular files into files which can determine the package name and
|
||||||
|
// import path and other files.
|
||||||
|
var goFiles, otherFiles []string
|
||||||
|
for _, f := range regularFiles {
|
||||||
|
if strings.HasSuffix(f, ".go") {
|
||||||
|
goFiles = append(goFiles, f)
|
||||||
|
} else {
|
||||||
|
otherFiles = append(otherFiles, f)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Look for a subdirectory named testdata. Only treat it as data if it does
|
||||||
|
// not contain a buildable package.
|
||||||
|
var hasTestdata bool
|
||||||
|
for _, sub := range subdirs {
|
||||||
|
if sub == "testdata" {
|
||||||
|
hasTestdata = !gl.goPkgRels[path.Join(rel, "testdata")]
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a set of packages from files in this directory.
|
||||||
|
goPackageMap, goFilesWithUnknownPackage := buildPackages(c, dir, rel, goFiles, hasTestdata)
|
||||||
|
|
||||||
|
// Select a package to generate rules for. If there is no package, create
|
||||||
|
// an empty package so we can generate empty rules.
|
||||||
|
var protoName string
|
||||||
|
pkg, err := selectPackage(c, dir, goPackageMap)
|
||||||
|
if err != nil {
|
||||||
|
if _, ok := err.(*build.NoGoError); ok {
|
||||||
|
if len(protoPackages) == 1 {
|
||||||
|
for name, ppkg := range protoPackages {
|
||||||
|
pkg = &goPackage{
|
||||||
|
name: goProtoPackageName(ppkg),
|
||||||
|
importPath: goProtoImportPath(gc, ppkg, rel),
|
||||||
|
proto: protoTargetFromProtoPackage(name, ppkg),
|
||||||
|
}
|
||||||
|
protoName = name
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
pkg = emptyPackage(c, dir, rel)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Try to link the selected package with a proto package.
|
||||||
|
if pkg != nil {
|
||||||
|
if pkg.importPath == "" {
|
||||||
|
if err := pkg.inferImportPath(c); err != nil && pkg.firstGoFile() != "" {
|
||||||
|
inferImportPathErrorOnce.Do(func() { log.Print(err) })
|
||||||
|
}
|
||||||
|
}
|
||||||
|
for _, name := range protoRuleNames {
|
||||||
|
ppkg := protoPackages[name]
|
||||||
|
if pkg.importPath == goProtoImportPath(gc, ppkg, rel) {
|
||||||
|
protoName = name
|
||||||
|
pkg.proto = protoTargetFromProtoPackage(name, ppkg)
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate rules for proto packages. These should come before the other
|
||||||
|
// Go rules.
|
||||||
|
g := newGenerator(c, f, rel)
|
||||||
|
var rules []*rule.Rule
|
||||||
|
var protoEmbed string
|
||||||
|
for _, name := range protoRuleNames {
|
||||||
|
ppkg := protoPackages[name]
|
||||||
|
var rs []*rule.Rule
|
||||||
|
if name == protoName {
|
||||||
|
protoEmbed, rs = g.generateProto(pc.Mode, pkg.proto, pkg.importPath)
|
||||||
|
} else {
|
||||||
|
target := protoTargetFromProtoPackage(name, ppkg)
|
||||||
|
importPath := goProtoImportPath(gc, ppkg, rel)
|
||||||
|
_, rs = g.generateProto(pc.Mode, target, importPath)
|
||||||
|
}
|
||||||
|
rules = append(rules, rs...)
|
||||||
|
}
|
||||||
|
for _, name := range emptyProtoRuleNames {
|
||||||
|
goProtoName := strings.TrimSuffix(name, "_proto") + "_go_proto"
|
||||||
|
empty = append(empty, rule.NewRule("go_proto_library", goProtoName))
|
||||||
|
}
|
||||||
|
if pkg != nil && pc.Mode == proto.PackageMode && pkg.firstGoFile() == "" {
|
||||||
|
// In proto package mode, don't generate a go_library embedding a
|
||||||
|
// go_proto_library unless there are actually go files.
|
||||||
|
protoEmbed = ""
|
||||||
|
}
|
||||||
|
|
||||||
|
// Complete the Go package and generate rules for that.
|
||||||
|
if pkg != nil {
|
||||||
|
// Add files with unknown packages. This happens when there are parse
|
||||||
|
// or I/O errors. We should keep the file in the srcs list and let the
|
||||||
|
// compiler deal with the error.
|
||||||
|
cgo := pkg.haveCgo()
|
||||||
|
for _, info := range goFilesWithUnknownPackage {
|
||||||
|
if err := pkg.addFile(c, info, cgo); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process the other static files.
|
||||||
|
for _, file := range otherFiles {
|
||||||
|
info := otherFileInfo(filepath.Join(dir, file))
|
||||||
|
if err := pkg.addFile(c, info, cgo); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process generated files. Note that generated files may have the same names
|
||||||
|
// as static files. Bazel will use the generated files, but we will look at
|
||||||
|
// the content of static files, assuming they will be the same.
|
||||||
|
regularFileSet := make(map[string]bool)
|
||||||
|
for _, f := range regularFiles {
|
||||||
|
regularFileSet[f] = true
|
||||||
|
}
|
||||||
|
for _, f := range genFiles {
|
||||||
|
if regularFileSet[f] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
info := fileNameInfo(filepath.Join(dir, f))
|
||||||
|
if err := pkg.addFile(c, info, cgo); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Generate Go rules.
|
||||||
|
if protoName == "" {
|
||||||
|
// Empty proto rules for deletion.
|
||||||
|
_, rs := g.generateProto(pc.Mode, pkg.proto, pkg.importPath)
|
||||||
|
rules = append(rules, rs...)
|
||||||
|
}
|
||||||
|
lib := g.generateLib(pkg, protoEmbed)
|
||||||
|
var libName string
|
||||||
|
if !lib.IsEmpty(goKinds[lib.Kind()]) {
|
||||||
|
libName = lib.Name()
|
||||||
|
}
|
||||||
|
rules = append(rules, lib)
|
||||||
|
rules = append(rules,
|
||||||
|
g.generateBin(pkg, libName),
|
||||||
|
g.generateTest(pkg, libName))
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, r := range rules {
|
||||||
|
if r.IsEmpty(goKinds[r.Kind()]) {
|
||||||
|
empty = append(empty, r)
|
||||||
|
} else {
|
||||||
|
gen = append(gen, r)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if f != nil || len(gen) > 0 {
|
||||||
|
gl.goPkgRels[rel] = true
|
||||||
|
} else {
|
||||||
|
for _, sub := range subdirs {
|
||||||
|
if gl.goPkgRels[path.Join(rel, sub)] {
|
||||||
|
gl.goPkgRels[rel] = true
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return empty, gen
|
||||||
|
}
|
||||||
|
|
||||||
|
func filterFiles(files *[]string, pred func(string) bool) {
|
||||||
|
w := 0
|
||||||
|
for r := 0; r < len(*files); r++ {
|
||||||
|
f := (*files)[r]
|
||||||
|
if pred(f) {
|
||||||
|
(*files)[w] = f
|
||||||
|
w++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
*files = (*files)[:w]
|
||||||
|
}
|
||||||
|
|
||||||
|
func buildPackages(c *config.Config, dir, rel string, goFiles []string, hasTestdata bool) (packageMap map[string]*goPackage, goFilesWithUnknownPackage []fileInfo) {
|
||||||
|
// Process .go and .proto files first, since these determine the package name.
|
||||||
|
packageMap = make(map[string]*goPackage)
|
||||||
|
for _, f := range goFiles {
|
||||||
|
path := filepath.Join(dir, f)
|
||||||
|
info := goFileInfo(path, rel)
|
||||||
|
if info.packageName == "" {
|
||||||
|
goFilesWithUnknownPackage = append(goFilesWithUnknownPackage, info)
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if info.packageName == "documentation" {
|
||||||
|
// go/build ignores this package
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := packageMap[info.packageName]; !ok {
|
||||||
|
packageMap[info.packageName] = &goPackage{
|
||||||
|
name: info.packageName,
|
||||||
|
dir: dir,
|
||||||
|
rel: rel,
|
||||||
|
hasTestdata: hasTestdata,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if err := packageMap[info.packageName].addFile(c, info, false); err != nil {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return packageMap, goFilesWithUnknownPackage
|
||||||
|
}
|
||||||
|
|
||||||
|
var inferImportPathErrorOnce sync.Once
|
||||||
|
|
||||||
|
// selectPackage selects one Go package out of the buildable packages found
|
||||||
|
// in a directory. If multiple packages are found, it returns the package
|
||||||
|
// whose name matches the directory if such a package exists.
|
||||||
|
func selectPackage(c *config.Config, dir string, packageMap map[string]*goPackage) (*goPackage, error) {
|
||||||
|
buildablePackages := make(map[string]*goPackage)
|
||||||
|
for name, pkg := range packageMap {
|
||||||
|
if pkg.isBuildable(c) {
|
||||||
|
buildablePackages[name] = pkg
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(buildablePackages) == 0 {
|
||||||
|
return nil, &build.NoGoError{Dir: dir}
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(buildablePackages) == 1 {
|
||||||
|
for _, pkg := range buildablePackages {
|
||||||
|
return pkg, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if pkg, ok := buildablePackages[defaultPackageName(c, dir)]; ok {
|
||||||
|
return pkg, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
err := &build.MultiplePackageError{Dir: dir}
|
||||||
|
for name, pkg := range buildablePackages {
|
||||||
|
// Add the first file for each package for the error message.
|
||||||
|
// Error() method expects these lists to be the same length. File
|
||||||
|
// lists must be non-empty. These lists are only created by
|
||||||
|
// buildPackage for packages with .go files present.
|
||||||
|
err.Packages = append(err.Packages, name)
|
||||||
|
err.Files = append(err.Files, pkg.firstGoFile())
|
||||||
|
}
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func emptyPackage(c *config.Config, dir, rel string) *goPackage {
|
||||||
|
pkg := &goPackage{
|
||||||
|
name: defaultPackageName(c, dir),
|
||||||
|
dir: dir,
|
||||||
|
rel: rel,
|
||||||
|
}
|
||||||
|
pkg.inferImportPath(c)
|
||||||
|
return pkg
|
||||||
|
}
|
||||||
|
|
||||||
|
func defaultPackageName(c *config.Config, rel string) string {
|
||||||
|
gc := getGoConfig(c)
|
||||||
|
return pathtools.RelBaseName(rel, gc.prefix, "")
|
||||||
|
}
|
||||||
|
|
||||||
|
// hasDefaultVisibility returns whether oldFile contains a "package" rule with
|
||||||
|
// a "default_visibility" attribute. Rules generated by Gazelle should not
|
||||||
|
// have their own visibility attributes if this is the case.
|
||||||
|
func hasDefaultVisibility(oldFile *rule.File) bool {
|
||||||
|
for _, r := range oldFile.Rules {
|
||||||
|
if r.Kind() == "package" && r.Attr("default_visibility") != nil {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
// checkInternalVisibility overrides the given visibility if the package is
|
||||||
|
// internal.
|
||||||
|
func checkInternalVisibility(rel, visibility string) string {
|
||||||
|
if i := strings.LastIndex(rel, "/internal/"); i >= 0 {
|
||||||
|
visibility = fmt.Sprintf("//%s:__subpackages__", rel[:i])
|
||||||
|
} else if strings.HasPrefix(rel, "internal/") {
|
||||||
|
visibility = "//:__subpackages__"
|
||||||
|
}
|
||||||
|
return visibility
|
||||||
|
}
|
||||||
|
|
||||||
|
type generator struct {
|
||||||
|
c *config.Config
|
||||||
|
rel string
|
||||||
|
shouldSetVisibility bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func newGenerator(c *config.Config, f *rule.File, rel string) *generator {
|
||||||
|
shouldSetVisibility := f == nil || !hasDefaultVisibility(f)
|
||||||
|
return &generator{c: c, rel: rel, shouldSetVisibility: shouldSetVisibility}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *generator) generateProto(mode proto.Mode, target protoTarget, importPath string) (string, []*rule.Rule) {
|
||||||
|
if !mode.ShouldGenerateRules() && mode != proto.LegacyMode {
|
||||||
|
// Don't create or delete proto rules in this mode. Any existing rules
|
||||||
|
// are likely hand-written.
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
|
||||||
|
filegroupName := config.DefaultProtosName
|
||||||
|
protoName := target.name
|
||||||
|
if protoName == "" {
|
||||||
|
importPath := inferImportPath(getGoConfig(g.c), g.rel)
|
||||||
|
protoName = proto.RuleName(importPath)
|
||||||
|
}
|
||||||
|
goProtoName := strings.TrimSuffix(protoName, "_proto") + "_go_proto"
|
||||||
|
visibility := []string{checkInternalVisibility(g.rel, "//visibility:public")}
|
||||||
|
|
||||||
|
if mode == proto.LegacyMode {
|
||||||
|
filegroup := rule.NewRule("filegroup", filegroupName)
|
||||||
|
if target.sources.isEmpty() {
|
||||||
|
return "", []*rule.Rule{filegroup}
|
||||||
|
}
|
||||||
|
filegroup.SetAttr("srcs", target.sources.build())
|
||||||
|
if g.shouldSetVisibility {
|
||||||
|
filegroup.SetAttr("visibility", visibility)
|
||||||
|
}
|
||||||
|
return "", []*rule.Rule{filegroup}
|
||||||
|
}
|
||||||
|
|
||||||
|
if target.sources.isEmpty() {
|
||||||
|
return "", []*rule.Rule{
|
||||||
|
rule.NewRule("filegroup", filegroupName),
|
||||||
|
rule.NewRule("go_proto_library", goProtoName),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
goProtoLibrary := rule.NewRule("go_proto_library", goProtoName)
|
||||||
|
goProtoLibrary.SetAttr("proto", ":"+protoName)
|
||||||
|
g.setImportAttrs(goProtoLibrary, importPath)
|
||||||
|
if target.hasServices {
|
||||||
|
goProtoLibrary.SetAttr("compilers", []string{"@io_bazel_rules_go//proto:go_grpc"})
|
||||||
|
}
|
||||||
|
if g.shouldSetVisibility {
|
||||||
|
goProtoLibrary.SetAttr("visibility", visibility)
|
||||||
|
}
|
||||||
|
goProtoLibrary.SetPrivateAttr(config.GazelleImportsKey, target.imports.build())
|
||||||
|
return goProtoName, []*rule.Rule{goProtoLibrary}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *generator) generateLib(pkg *goPackage, embed string) *rule.Rule {
|
||||||
|
goLibrary := rule.NewRule("go_library", config.DefaultLibName)
|
||||||
|
if !pkg.library.sources.hasGo() && embed == "" {
|
||||||
|
return goLibrary // empty
|
||||||
|
}
|
||||||
|
var visibility string
|
||||||
|
if pkg.isCommand() {
|
||||||
|
// Libraries made for a go_binary should not be exposed to the public.
|
||||||
|
visibility = "//visibility:private"
|
||||||
|
} else {
|
||||||
|
visibility = checkInternalVisibility(pkg.rel, "//visibility:public")
|
||||||
|
}
|
||||||
|
g.setCommonAttrs(goLibrary, pkg.rel, visibility, pkg.library, embed)
|
||||||
|
g.setImportAttrs(goLibrary, pkg.importPath)
|
||||||
|
return goLibrary
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *generator) generateBin(pkg *goPackage, library string) *rule.Rule {
|
||||||
|
name := pathtools.RelBaseName(pkg.rel, getGoConfig(g.c).prefix, g.c.RepoRoot)
|
||||||
|
goBinary := rule.NewRule("go_binary", name)
|
||||||
|
if !pkg.isCommand() || pkg.binary.sources.isEmpty() && library == "" {
|
||||||
|
return goBinary // empty
|
||||||
|
}
|
||||||
|
visibility := checkInternalVisibility(pkg.rel, "//visibility:public")
|
||||||
|
g.setCommonAttrs(goBinary, pkg.rel, visibility, pkg.binary, library)
|
||||||
|
return goBinary
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *generator) generateTest(pkg *goPackage, library string) *rule.Rule {
|
||||||
|
goTest := rule.NewRule("go_test", config.DefaultTestName)
|
||||||
|
if !pkg.test.sources.hasGo() {
|
||||||
|
return goTest // empty
|
||||||
|
}
|
||||||
|
g.setCommonAttrs(goTest, pkg.rel, "", pkg.test, library)
|
||||||
|
if pkg.hasTestdata {
|
||||||
|
goTest.SetAttr("data", rule.GlobValue{Patterns: []string{"testdata/**"}})
|
||||||
|
}
|
||||||
|
return goTest
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *generator) setCommonAttrs(r *rule.Rule, pkgRel, visibility string, target goTarget, embed string) {
|
||||||
|
if !target.sources.isEmpty() {
|
||||||
|
r.SetAttr("srcs", target.sources.buildFlat())
|
||||||
|
}
|
||||||
|
if target.cgo {
|
||||||
|
r.SetAttr("cgo", true)
|
||||||
|
}
|
||||||
|
if !target.clinkopts.isEmpty() {
|
||||||
|
r.SetAttr("clinkopts", g.options(target.clinkopts.build(), pkgRel))
|
||||||
|
}
|
||||||
|
if !target.copts.isEmpty() {
|
||||||
|
r.SetAttr("copts", g.options(target.copts.build(), pkgRel))
|
||||||
|
}
|
||||||
|
if g.shouldSetVisibility && visibility != "" {
|
||||||
|
r.SetAttr("visibility", []string{visibility})
|
||||||
|
}
|
||||||
|
if embed != "" {
|
||||||
|
r.SetAttr("embed", []string{":" + embed})
|
||||||
|
}
|
||||||
|
r.SetPrivateAttr(config.GazelleImportsKey, target.imports.build())
|
||||||
|
}
|
||||||
|
|
||||||
|
func (g *generator) setImportAttrs(r *rule.Rule, importPath string) {
|
||||||
|
r.SetAttr("importpath", importPath)
|
||||||
|
goConf := getGoConfig(g.c)
|
||||||
|
if goConf.importMapPrefix != "" {
|
||||||
|
fromPrefixRel := pathtools.TrimPrefix(g.rel, goConf.importMapPrefixRel)
|
||||||
|
importMap := path.Join(goConf.importMapPrefix, fromPrefixRel)
|
||||||
|
if importMap != importPath {
|
||||||
|
r.SetAttr("importmap", importMap)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
// shortOptPrefixes are strings that come at the beginning of an option
|
||||||
|
// argument that includes a path, e.g., -Ifoo/bar.
|
||||||
|
shortOptPrefixes = []string{"-I", "-L", "-F"}
|
||||||
|
|
||||||
|
// longOptPrefixes are separate arguments that come before a path argument,
|
||||||
|
// e.g., -iquote foo/bar.
|
||||||
|
longOptPrefixes = []string{"-I", "-L", "-F", "-iquote", "-isystem"}
|
||||||
|
)
|
||||||
|
|
||||||
|
// options transforms package-relative paths in cgo options into repository-
|
||||||
|
// root-relative paths that Bazel can understand. For example, if a cgo file
|
||||||
|
// in //foo declares an include flag in its copts: "-Ibar", this method
|
||||||
|
// will transform that flag into "-Ifoo/bar".
|
||||||
|
func (g *generator) options(opts rule.PlatformStrings, pkgRel string) rule.PlatformStrings {
|
||||||
|
fixPath := func(opt string) string {
|
||||||
|
if strings.HasPrefix(opt, "/") {
|
||||||
|
return opt
|
||||||
|
}
|
||||||
|
return path.Clean(path.Join(pkgRel, opt))
|
||||||
|
}
|
||||||
|
|
||||||
|
fixGroups := func(groups []string) ([]string, error) {
|
||||||
|
fixedGroups := make([]string, len(groups))
|
||||||
|
for i, group := range groups {
|
||||||
|
opts := strings.Split(group, optSeparator)
|
||||||
|
fixedOpts := make([]string, len(opts))
|
||||||
|
isPath := false
|
||||||
|
for j, opt := range opts {
|
||||||
|
if isPath {
|
||||||
|
opt = fixPath(opt)
|
||||||
|
isPath = false
|
||||||
|
goto next
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, short := range shortOptPrefixes {
|
||||||
|
if strings.HasPrefix(opt, short) && len(opt) > len(short) {
|
||||||
|
opt = short + fixPath(opt[len(short):])
|
||||||
|
goto next
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, long := range longOptPrefixes {
|
||||||
|
if opt == long {
|
||||||
|
isPath = true
|
||||||
|
goto next
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
next:
|
||||||
|
fixedOpts[j] = escapeOption(opt)
|
||||||
|
}
|
||||||
|
fixedGroups[i] = strings.Join(fixedOpts, " ")
|
||||||
|
}
|
||||||
|
|
||||||
|
return fixedGroups, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
opts, errs := opts.MapSlice(fixGroups)
|
||||||
|
if errs != nil {
|
||||||
|
log.Panicf("unexpected error when transforming options with pkg %q: %v", pkgRel, errs)
|
||||||
|
}
|
||||||
|
return opts
|
||||||
|
}
|
||||||
|
|
||||||
|
func escapeOption(opt string) string {
|
||||||
|
return strings.NewReplacer(
|
||||||
|
`\`, `\\`,
|
||||||
|
`'`, `\'`,
|
||||||
|
`"`, `\"`,
|
||||||
|
` `, `\ `,
|
||||||
|
"\t", "\\\t",
|
||||||
|
"\n", "\\\n",
|
||||||
|
"\r", "\\\r",
|
||||||
|
).Replace(opt)
|
||||||
|
}
|
||||||
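
The internal-visibility override above is easiest to see with concrete values. The following is a minimal, self-contained sketch (not part of the vendored file; the helper restates the rule from checkInternalVisibility, and the package paths are invented purely for illustration):

package main

import (
	"fmt"
	"strings"
)

// internalVisibility restates the rule from checkInternalVisibility above:
// a package under an "internal" directory is only visible to the subtree
// rooted at the directory containing "internal".
func internalVisibility(rel, visibility string) string {
	if i := strings.LastIndex(rel, "/internal/"); i >= 0 {
		visibility = fmt.Sprintf("//%s:__subpackages__", rel[:i])
	} else if strings.HasPrefix(rel, "internal/") {
		visibility = "//:__subpackages__"
	}
	return visibility
}

func main() {
	// hypothetical package paths, chosen only to show the three cases
	fmt.Println(internalVisibility("pkg/util", "//visibility:public"))           // //visibility:public (unchanged)
	fmt.Println(internalVisibility("internal/cache", "//visibility:public"))     // //:__subpackages__
	fmt.Println(internalVisibility("pkg/internal/cache", "//visibility:public")) // //pkg:__subpackages__
}
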
147
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/kinds.go
generated
vendored
Normal file
@@ -0,0 +1,147 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package golang

import "github.com/bazelbuild/bazel-gazelle/internal/rule"

var goKinds = map[string]rule.KindInfo{
	"filegroup": {
		NonEmptyAttrs:  map[string]bool{"srcs": true},
		MergeableAttrs: map[string]bool{"srcs": true},
	},
	"go_binary": {
		MatchAny: true,
		NonEmptyAttrs: map[string]bool{
			"deps":  true,
			"embed": true,
			"srcs":  true,
		},
		SubstituteAttrs: map[string]bool{"embed": true},
		MergeableAttrs: map[string]bool{
			"cgo":       true,
			"clinkopts": true,
			"copts":     true,
			"embed":     true,
			"srcs":      true,
		},
		ResolveAttrs: map[string]bool{"deps": true},
	},
	"go_library": {
		MatchAttrs: []string{"importpath"},
		NonEmptyAttrs: map[string]bool{
			"deps":  true,
			"embed": true,
			"srcs":  true,
		},
		SubstituteAttrs: map[string]bool{
			"embed": true,
		},
		MergeableAttrs: map[string]bool{
			"cgo":        true,
			"clinkopts":  true,
			"copts":      true,
			"embed":      true,
			"importmap":  true,
			"importpath": true,
			"srcs":       true,
		},
		ResolveAttrs: map[string]bool{"deps": true},
	},
	"go_proto_library": {
		MatchAttrs: []string{"importpath"},
		NonEmptyAttrs: map[string]bool{
			"deps":  true,
			"embed": true,
			"proto": true,
			"srcs":  true,
		},
		SubstituteAttrs: map[string]bool{"proto": true},
		MergeableAttrs: map[string]bool{
			"srcs":       true,
			"importpath": true,
			"importmap":  true,
			"cgo":        true,
			"clinkopts":  true,
			"copts":      true,
			"embed":      true,
			"proto":      true,
		},
		ResolveAttrs: map[string]bool{"deps": true},
	},
	"go_repository": {
		MatchAttrs:    []string{"importpath"},
		NonEmptyAttrs: nil, // never empty
		MergeableAttrs: map[string]bool{
			"commit":       true,
			"importpath":   true,
			"remote":       true,
			"sha256":       true,
			"strip_prefix": true,
			"tag":          true,
			"type":         true,
			"urls":         true,
			"vcs":          true,
		},
	},
	"go_test": {
		NonEmptyAttrs: map[string]bool{
			"deps":  true,
			"embed": true,
			"srcs":  true,
		},
		MergeableAttrs: map[string]bool{
			"cgo":       true,
			"clinkopts": true,
			"copts":     true,
			"embed":     true,
			"srcs":      true,
		},
		ResolveAttrs: map[string]bool{"deps": true},
	},
}

var goLoads = []rule.LoadInfo{
	{
		Name: "@io_bazel_rules_go//go:def.bzl",
		Symbols: []string{
			"cgo_library",
			"go_binary",
			"go_library",
			"go_prefix",
			"go_repository",
			"go_test",
		},
	}, {
		Name: "@io_bazel_rules_go//proto:def.bzl",
		Symbols: []string{
			"go_grpc_library",
			"go_proto_library",
		},
	}, {
		Name: "@bazel_gazelle//:deps.bzl",
		Symbols: []string{
			"go_repository",
		},
		After: []string{
			"go_rules_dependencies",
			"go_register_toolchains",
			"gazelle_dependencies",
		},
	},
}

func (_ *goLang) Kinds() map[string]rule.KindInfo { return goKinds }
func (_ *goLang) Loads() []rule.LoadInfo          { return goLoads }
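
To make the intent of these tables concrete, here is a brief sketch of how a NonEmptyAttrs entry could be consulted to decide whether a generated rule is "empty" and safe to prune. This is a simplified illustration with stand-in types, not Gazelle's actual merger logic:

package main

import "fmt"

// kindInfo and fakeRule are simplified stand-ins for rule.KindInfo and
// rule.Rule, used only to illustrate the idea.
type kindInfo struct {
	nonEmptyAttrs map[string]bool
}

type fakeRule struct {
	kind  string
	attrs map[string]interface{}
}

// isEmpty reports whether none of the kind's "non-empty" attributes are set
// on the rule; a rule like that builds nothing and could be deleted.
func isEmpty(info kindInfo, r fakeRule) bool {
	for attr := range info.nonEmptyAttrs {
		if _, ok := r.attrs[attr]; ok {
			return false
		}
	}
	return true
}

func main() {
	goLibraryInfo := kindInfo{nonEmptyAttrs: map[string]bool{"deps": true, "embed": true, "srcs": true}}
	stale := fakeRule{kind: "go_library", attrs: map[string]interface{}{"importpath": "example.com/x"}}
	live := fakeRule{kind: "go_library", attrs: map[string]interface{}{"srcs": []string{"x.go"}}}
	fmt.Println(isEmpty(goLibraryInfo, stale)) // true: only importpath is set
	fmt.Println(isEmpty(goLibraryInfo, live))  // false: srcs is present
}
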
136
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/known_go_imports.go
generated
vendored
Normal file
@@ -0,0 +1,136 @@
// Generated by internal/language/proto/gen/gen_known_imports.go
// From internal/language/proto/proto.csv

package golang

import "github.com/bazelbuild/bazel-gazelle/internal/label"

var knownGoProtoImports = map[string]label.Label{

	"github.com/golang/protobuf/ptypes/any": label.New("io_bazel_rules_go", "proto/wkt", "any_go_proto"),
	"google.golang.org/genproto/protobuf/api": label.New("io_bazel_rules_go", "proto/wkt", "api_go_proto"),
	"github.com/golang/protobuf/protoc-gen-go/plugin": label.New("io_bazel_rules_go", "proto/wkt", "compiler_plugin_go_proto"),
	"github.com/golang/protobuf/protoc-gen-go/descriptor": label.New("io_bazel_rules_go", "proto/wkt", "descriptor_go_proto"),
	"github.com/golang/protobuf/ptypes/duration": label.New("io_bazel_rules_go", "proto/wkt", "duration_go_proto"),
	"github.com/golang/protobuf/ptypes/empty": label.New("io_bazel_rules_go", "proto/wkt", "empty_go_proto"),
	"google.golang.org/genproto/protobuf/field_mask": label.New("io_bazel_rules_go", "proto/wkt", "field_mask_go_proto"),
	"google.golang.org/genproto/protobuf/source_context": label.New("io_bazel_rules_go", "proto/wkt", "source_context_go_proto"),
	"github.com/golang/protobuf/ptypes/struct": label.New("io_bazel_rules_go", "proto/wkt", "struct_go_proto"),
	"github.com/golang/protobuf/ptypes/timestamp": label.New("io_bazel_rules_go", "proto/wkt", "timestamp_go_proto"),
	"google.golang.org/genproto/protobuf/ptype": label.New("io_bazel_rules_go", "proto/wkt", "type_go_proto"),
	"github.com/golang/protobuf/ptypes/wrappers": label.New("io_bazel_rules_go", "proto/wkt", "wrappers_go_proto"),
	"google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2": label.New("go_googleapis", "google/assistant/embedded/v1alpha2", "embedded_go_proto"),
	"google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1": label.New("go_googleapis", "google/assistant/embedded/v1alpha1", "embedded_go_proto"),
	"google.golang.org/genproto/googleapis/home/graph/v1": label.New("go_googleapis", "google/home/graph/v1", "graph_go_proto"),
	"google.golang.org/genproto/googleapis/genomics/v1": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google.golang.org/genproto/googleapis/genomics/v1alpha2": label.New("go_googleapis", "google/genomics/v1alpha2", "genomics_go_proto"),
	"google.golang.org/genproto/googleapis/bigtable/v1": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"),
	"google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"),
	"google.golang.org/genproto/googleapis/bigtable/admin/v2": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"),
	"google.golang.org/genproto/googleapis/bigtable/admin/table/v1": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"),
	"google.golang.org/genproto/googleapis/bigtable/v2": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"),
	"google.golang.org/genproto/googleapis/privacy/dlp/v2": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_go_proto"),
	"google.golang.org/genproto/googleapis/watcher/v1": label.New("go_googleapis", "google/watcher/v1", "watcher_go_proto"),
	"google.golang.org/genproto/googleapis/firestore/admin/v1beta1": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"),
	"google.golang.org/genproto/googleapis/firestore/v1beta1": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"),
	"google.golang.org/genproto/googleapis/example/library/v1": label.New("go_googleapis", "google/example/library/v1", "library_go_proto"),
	"google.golang.org/genproto/googleapis/appengine/v1": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google.golang.org/genproto/googleapis/appengine/legacy": label.New("go_googleapis", "google/appengine/legacy", "legacy_go_proto"),
	"google.golang.org/genproto/googleapis/appengine/logging/v1": label.New("go_googleapis", "google/appengine/logging/v1", "logging_go_proto"),
	"google.golang.org/genproto/googleapis/storagetransfer/v1": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"),
	"google.golang.org/genproto/googleapis/longrunning": label.New("go_googleapis", "google/longrunning", "longrunning_go_proto"),
	"google.golang.org/genproto/googleapis/container/v1": label.New("go_googleapis", "google/container/v1", "container_go_proto"),
	"google.golang.org/genproto/googleapis/container/v1beta1": label.New("go_googleapis", "google/container/v1beta1", "container_go_proto"),
	"google.golang.org/genproto/googleapis/container/v1alpha1": label.New("go_googleapis", "google/container/v1alpha1", "container_go_proto"),
	"google.golang.org/genproto/googleapis/datastore/v1beta3": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"),
	"google.golang.org/genproto/googleapis/datastore/v1": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"),
	"google.golang.org/genproto/googleapis/datastore/admin/v1": label.New("go_googleapis", "google/datastore/admin/v1", "admin_go_proto"),
	"google.golang.org/genproto/googleapis/datastore/admin/v1beta1": label.New("go_googleapis", "google/datastore/admin/v1beta1", "admin_go_proto"),
	"google.golang.org/genproto/googleapis/bytestream": label.New("go_googleapis", "google/bytestream", "bytestream_go_proto"),
	"google.golang.org/genproto/googleapis/iam/v1": label.New("go_googleapis", "google/iam/v1", "iam_go_proto"),
	"google.golang.org/genproto/googleapis/iam/v1/logging": label.New("go_googleapis", "google/iam/v1/logging", "logging_go_proto"),
	"google.golang.org/genproto/googleapis/iam/admin/v1": label.New("go_googleapis", "google/iam/admin/v1", "admin_go_proto"),
	"google.golang.org/genproto/googleapis/type/money": label.New("go_googleapis", "google/type", "money_go_proto"),
	"google.golang.org/genproto/googleapis/type/latlng": label.New("go_googleapis", "google/type", "latlng_go_proto"),
	"google.golang.org/genproto/googleapis/type/color": label.New("go_googleapis", "google/type", "color_go_proto"),
	"google.golang.org/genproto/googleapis/type/timeofday": label.New("go_googleapis", "google/type", "timeofday_go_proto"),
	"google.golang.org/genproto/googleapis/type/date": label.New("go_googleapis", "google/type", "date_go_proto"),
	"google.golang.org/genproto/googleapis/type/dayofweek": label.New("go_googleapis", "google/type", "dayofweek_go_proto"),
	"google.golang.org/genproto/googleapis/type/postaladdress": label.New("go_googleapis", "google/type", "postaladdress_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/resultstore/v2": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/source/v1": label.New("go_googleapis", "google/devtools/source/v1", "source_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test": label.New("go_googleapis", "google/devtools/remoteexecution/v1test", "remoteexecution_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/cloudbuild/v1": label.New("go_googleapis", "google/devtools/cloudbuild/v1", "cloudbuild_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/sourcerepo/v1": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/cloudtrace/v1": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/cloudtrace/v2": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/build/v1": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"),
	"google.golang.org/genproto/googleapis/devtools/clouddebugger/v2": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/resourcemanager/v2": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/kms/v1": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/tasks/v2beta2": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/oslogin/v1": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/oslogin/common": label.New("go_googleapis", "google/cloud/oslogin/common", "common_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/oslogin/v1beta": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/dialogflow/v2": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/redis/v1beta1": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/location": label.New("go_googleapis", "google/cloud/location", "location_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/language/v1": label.New("go_googleapis", "google/cloud/language/v1", "language_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/language/v1beta2": label.New("go_googleapis", "google/cloud/language/v1beta2", "language_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/language/v1beta1": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/vision/v1": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/speech/v1": label.New("go_googleapis", "google/cloud/speech/v1", "speech_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/speech/v1beta1": label.New("go_googleapis", "google/cloud/speech/v1beta1", "speech_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/iot/v1": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/videointelligence/v1": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/audit": label.New("go_googleapis", "google/cloud/audit", "audit_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/support/common": label.New("go_googleapis", "google/cloud/support", "common_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/support/v1alpha1": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/ml/v1": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/texttospeech/v1": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/functions/v1beta2": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/billing/v1": label.New("go_googleapis", "google/cloud/billing/v1", "billing_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/dataproc/v1": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"),
	"google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
	"google.golang.org/genproto/googleapis/api/serviceconfig": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
	"google.golang.org/genproto/googleapis/api/annotations": label.New("go_googleapis", "google/api", "annotations_go_proto"),
	"google.golang.org/genproto/googleapis/api/configchange": label.New("go_googleapis", "google/api", "configchange_go_proto"),
	"google.golang.org/genproto/googleapis/api/distribution": label.New("go_googleapis", "google/api", "distribution_go_proto"),
	"google.golang.org/genproto/googleapis/api/monitoredres": label.New("go_googleapis", "google/api", "monitoredres_go_proto"),
	"google.golang.org/genproto/googleapis/api/metric": label.New("go_googleapis", "google/api", "metric_go_proto"),
	"google.golang.org/genproto/googleapis/api/label": label.New("go_googleapis", "google/api", "label_go_proto"),
	"google.golang.org/genproto/googleapis/api/httpbody": label.New("go_googleapis", "google/api", "httpbody_go_proto"),
	"google.golang.org/genproto/googleapis/api": label.New("go_googleapis", "google/api/experimental", "api_go_proto"),
	"google.golang.org/genproto/googleapis/api/servicemanagement/v1": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_go_proto"),
	"google.golang.org/genproto/googleapis/api/servicecontrol/v1": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
	"google.golang.org/genproto/googleapis/pubsub/v1": label.New("go_googleapis", "google/pubsub/v1", "pubsub_go_proto"),
	"google.golang.org/genproto/googleapis/pubsub/v1beta2": label.New("go_googleapis", "google/pubsub/v1beta2", "pubsub_go_proto"),
	"google.golang.org/genproto/googleapis/spanner/v1": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
	"google.golang.org/genproto/googleapis/spanner/admin/database/v1": label.New("go_googleapis", "google/spanner/admin/database/v1", "database_go_proto"),
	"google.golang.org/genproto/googleapis/spanner/admin/instance/v1": label.New("go_googleapis", "google/spanner/admin/instance/v1", "instance_go_proto"),
	"google.golang.org/genproto/googleapis/monitoring/v3": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
	"google.golang.org/genproto/googleapis/rpc/code": label.New("go_googleapis", "google/rpc", "code_go_proto"),
	"google.golang.org/genproto/googleapis/rpc/status": label.New("go_googleapis", "google/rpc", "status_go_proto"),
	"google.golang.org/genproto/googleapis/rpc/errdetails": label.New("go_googleapis", "google/rpc", "errdetails_go_proto"),
	"google.golang.org/genproto/googleapis/streetview/publish/v1": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"),
	"google.golang.org/genproto/googleapis/logging/v2": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"),
	"google.golang.org/genproto/googleapis/logging/type": label.New("go_googleapis", "google/logging/type", "ltype_go_proto"),
}
300
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/known_proto_imports.go
generated
vendored
Normal file
@@ -0,0 +1,300 @@
// Generated by internal/language/proto/gen/gen_known_imports.go
// From internal/language/proto/proto.csv

package golang

import "github.com/bazelbuild/bazel-gazelle/internal/label"

var knownProtoImports = map[string]label.Label{

	"google/protobuf/any.proto": label.New("io_bazel_rules_go", "proto/wkt", "any_go_proto"),
	"google/protobuf/api.proto": label.New("io_bazel_rules_go", "proto/wkt", "api_go_proto"),
	"google/protobuf/compiler/plugin.proto": label.New("io_bazel_rules_go", "proto/wkt", "compiler_plugin_go_proto"),
	"google/protobuf/descriptor.proto": label.New("io_bazel_rules_go", "proto/wkt", "descriptor_go_proto"),
	"google/protobuf/duration.proto": label.New("io_bazel_rules_go", "proto/wkt", "duration_go_proto"),
	"google/protobuf/empty.proto": label.New("io_bazel_rules_go", "proto/wkt", "empty_go_proto"),
	"google/protobuf/field_mask.proto": label.New("io_bazel_rules_go", "proto/wkt", "field_mask_go_proto"),
	"google/protobuf/source_context.proto": label.New("io_bazel_rules_go", "proto/wkt", "source_context_go_proto"),
	"google/protobuf/struct.proto": label.New("io_bazel_rules_go", "proto/wkt", "struct_go_proto"),
	"google/protobuf/timestamp.proto": label.New("io_bazel_rules_go", "proto/wkt", "timestamp_go_proto"),
	"google/protobuf/type.proto": label.New("io_bazel_rules_go", "proto/wkt", "type_go_proto"),
	"google/protobuf/wrappers.proto": label.New("io_bazel_rules_go", "proto/wkt", "wrappers_go_proto"),
	"google/assistant/embedded/v1alpha2/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha2", "embedded_go_proto"),
	"google/assistant/embedded/v1alpha1/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha1", "embedded_go_proto"),
	"google/home/graph/v1/device.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_go_proto"),
	"google/home/graph/v1/homegraph.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_go_proto"),
	"google/genomics/v1/operations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/variants.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/position.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/references.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/cigar.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/datasets.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/readalignment.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/annotations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/reads.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/readgroup.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/readgroupset.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1/range.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_go_proto"),
	"google/genomics/v1alpha2/pipelines.proto": label.New("go_googleapis", "google/genomics/v1alpha2", "genomics_go_proto"),
	"google/bigtable/v1/bigtable_service_messages.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"),
	"google/bigtable/v1/bigtable_service.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"),
	"google/bigtable/v1/bigtable_data.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_go_proto"),
	"google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"),
	"google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"),
	"google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_go_proto"),
	"google/bigtable/admin/v2/bigtable_instance_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"),
	"google/bigtable/admin/v2/instance.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"),
	"google/bigtable/admin/v2/table.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"),
	"google/bigtable/admin/v2/bigtable_table_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"),
	"google/bigtable/admin/v2/common.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_go_proto"),
	"google/bigtable/admin/table/v1/bigtable_table_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"),
	"google/bigtable/admin/table/v1/bigtable_table_service.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"),
	"google/bigtable/admin/table/v1/bigtable_table_data.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_go_proto"),
	"google/bigtable/v2/bigtable.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"),
	"google/bigtable/v2/data.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_go_proto"),
	"google/privacy/dlp/v2/storage.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_go_proto"),
	"google/privacy/dlp/v2/dlp.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_go_proto"),
	"google/watcher/v1/watch.proto": label.New("go_googleapis", "google/watcher/v1", "watcher_go_proto"),
	"google/firestore/admin/v1beta1/firestore_admin.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"),
	"google/firestore/admin/v1beta1/index.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_go_proto"),
	"google/firestore/v1beta1/write.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"),
	"google/firestore/v1beta1/document.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"),
	"google/firestore/v1beta1/firestore.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"),
	"google/firestore/v1beta1/query.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"),
	"google/firestore/v1beta1/common.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_go_proto"),
	"google/example/library/v1/library.proto": label.New("go_googleapis", "google/example/library/v1", "library_go_proto"),
	"google/appengine/v1/instance.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/audit_data.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/appengine.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/application.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/operation.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/app_yaml.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/location.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/service.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/deploy.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/v1/version.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_go_proto"),
	"google/appengine/legacy/audit_data.proto": label.New("go_googleapis", "google/appengine/legacy", "legacy_go_proto"),
	"google/appengine/logging/v1/request_log.proto": label.New("go_googleapis", "google/appengine/logging/v1", "logging_go_proto"),
	"google/storagetransfer/v1/transfer.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"),
	"google/storagetransfer/v1/transfer_types.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_go_proto"),
	"google/longrunning/operations.proto": label.New("go_googleapis", "google/longrunning", "longrunning_go_proto"),
	"google/container/v1/cluster_service.proto": label.New("go_googleapis", "google/container/v1", "container_go_proto"),
	"google/container/v1beta1/cluster_service.proto": label.New("go_googleapis", "google/container/v1beta1", "container_go_proto"),
	"google/container/v1alpha1/cluster_service.proto": label.New("go_googleapis", "google/container/v1alpha1", "container_go_proto"),
	"google/datastore/v1beta3/datastore.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"),
	"google/datastore/v1beta3/query.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"),
	"google/datastore/v1beta3/entity.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_go_proto"),
	"google/datastore/v1/datastore.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"),
	"google/datastore/v1/query.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"),
	"google/datastore/v1/entity.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_go_proto"),
	"google/datastore/admin/v1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1", "admin_go_proto"),
	"google/datastore/admin/v1beta1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1beta1", "admin_go_proto"),
	"google/bytestream/bytestream.proto": label.New("go_googleapis", "google/bytestream", "bytestream_go_proto"),
	"google/iam/v1/iam_policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_go_proto"),
	"google/iam/v1/policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_go_proto"),
	"google/iam/v1/logging/audit_data.proto": label.New("go_googleapis", "google/iam/v1/logging", "logging_go_proto"),
	"google/iam/admin/v1/iam.proto": label.New("go_googleapis", "google/iam/admin/v1", "admin_go_proto"),
	"google/type/money.proto": label.New("go_googleapis", "google/type", "money_go_proto"),
	"google/type/latlng.proto": label.New("go_googleapis", "google/type", "latlng_go_proto"),
	"google/type/color.proto": label.New("go_googleapis", "google/type", "color_go_proto"),
	"google/type/timeofday.proto": label.New("go_googleapis", "google/type", "timeofday_go_proto"),
	"google/type/date.proto": label.New("go_googleapis", "google/type", "date_go_proto"),
	"google/type/dayofweek.proto": label.New("go_googleapis", "google/type", "dayofweek_go_proto"),
	"google/type/postal_address.proto": label.New("go_googleapis", "google/type", "postaladdress_go_proto"),
	"google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"),
	"google/devtools/clouderrorreporting/v1beta1/error_group_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"),
	"google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"),
	"google/devtools/clouderrorreporting/v1beta1/common.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_go_proto"),
	"google/devtools/resultstore/v2/file.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/resultstore_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/configuration.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/action.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/resultstore_file_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/test_suite.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/file_set.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/coverage.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/coverage_summary.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/configured_target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/invocation.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/resultstore/v2/common.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_go_proto"),
	"google/devtools/source/v1/source_context.proto": label.New("go_googleapis", "google/devtools/source/v1", "source_go_proto"),
	"google/devtools/remoteexecution/v1test/remote_execution.proto": label.New("go_googleapis", "google/devtools/remoteexecution/v1test", "remoteexecution_go_proto"),
	"google/devtools/cloudbuild/v1/cloudbuild.proto": label.New("go_googleapis", "google/devtools/cloudbuild/v1", "cloudbuild_go_proto"),
	"google/devtools/sourcerepo/v1/sourcerepo.proto": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_go_proto"),
	"google/devtools/remoteworkers/v1test2/worker.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"),
	"google/devtools/remoteworkers/v1test2/tasks.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"),
	"google/devtools/remoteworkers/v1test2/bots.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"),
	"google/devtools/remoteworkers/v1test2/command.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_go_proto"),
	"google/devtools/cloudtrace/v1/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_go_proto"),
	"google/devtools/cloudtrace/v2/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"),
	"google/devtools/cloudtrace/v2/tracing.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_go_proto"),
	"google/devtools/cloudprofiler/v2/profiler.proto": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_go_proto"),
	"google/devtools/containeranalysis/v1alpha1/containeranalysis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google/devtools/containeranalysis/v1alpha1/provenance.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google/devtools/containeranalysis/v1alpha1/source_context.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google/devtools/containeranalysis/v1alpha1/image_basis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_go_proto"),
	"google/devtools/build/v1/build_events.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"),
	"google/devtools/build/v1/build_status.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"),
	"google/devtools/build/v1/publish_build_event.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_go_proto"),
	"google/devtools/clouddebugger/v2/debugger.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"),
	"google/devtools/clouddebugger/v2/data.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"),
	"google/devtools/clouddebugger/v2/controller.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_go_proto"),
	"google/cloud/resourcemanager/v2/folders.proto": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_go_proto"),
	"google/cloud/kms/v1/resources.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"),
	"google/cloud/kms/v1/service.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_go_proto"),
	"google/cloud/runtimeconfig/v1beta1/resources.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"),
	"google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_go_proto"),
	"google/cloud/tasks/v2beta2/queue.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"),
	"google/cloud/tasks/v2beta2/task.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"),
	"google/cloud/tasks/v2beta2/target.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"),
	"google/cloud/tasks/v2beta2/cloudtasks.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_go_proto"),
	"google/cloud/oslogin/v1/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_go_proto"),
	"google/cloud/oslogin/v1alpha/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_go_proto"),
	"google/cloud/oslogin/common/common.proto": label.New("go_googleapis", "google/cloud/oslogin/common", "common_go_proto"),
	"google/cloud/oslogin/v1beta/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_go_proto"),
	"google/cloud/dialogflow/v2beta1/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2beta1/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2beta1/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2beta1/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2beta1/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2beta1/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2beta1/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/dialogflow/v2/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_go_proto"),
	"google/cloud/redis/v1beta1/cloud_redis.proto": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_go_proto"),
	"google/cloud/location/locations.proto": label.New("go_googleapis", "google/cloud/location", "location_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/finding.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/scan_config.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/crawled_url.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/scan_run.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/websecurityscanner/v1alpha/finding_addon.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_go_proto"),
	"google/cloud/language/v1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1", "language_go_proto"),
	"google/cloud/language/v1beta2/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta2", "language_go_proto"),
	"google/cloud/language/v1beta1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_go_proto"),
	"google/cloud/bigquery/datatransfer/v1/transfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"),
	"google/cloud/bigquery/datatransfer/v1/datatransfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_go_proto"),
	"google/cloud/bigquery/logging/v1/audit_data.proto": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_go_proto"),
	"google/cloud/vision/v1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"),
	"google/cloud/vision/v1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"),
	"google/cloud/vision/v1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"),
	"google/cloud/vision/v1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_go_proto"),
	"google/cloud/vision/v1p2beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"),
	"google/cloud/vision/v1p2beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"),
	"google/cloud/vision/v1p2beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"),
	"google/cloud/vision/v1p2beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_go_proto"),
	"google/cloud/vision/v1p1beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"),
	"google/cloud/vision/v1p1beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"),
	"google/cloud/vision/v1p1beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"),
	"google/cloud/vision/v1p1beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_go_proto"),
	"google/cloud/speech/v1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1", "speech_go_proto"),
	"google/cloud/speech/v1beta1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1beta1", "speech_go_proto"),
	"google/cloud/speech/v1p1beta1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_go_proto"),
	"google/cloud/iot/v1/device_manager.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"),
	"google/cloud/iot/v1/resources.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_go_proto"),
	"google/cloud/videointelligence/v1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_go_proto"),
	"google/cloud/videointelligence/v1beta2/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_go_proto"),
	"google/cloud/videointelligence/v1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_go_proto"),
	"google/cloud/videointelligence/v1p1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_go_proto"),
	"google/cloud/audit/audit_log.proto": label.New("go_googleapis", "google/cloud/audit", "audit_go_proto"),
	"google/cloud/support/common.proto": label.New("go_googleapis", "google/cloud/support", "common_go_proto"),
|
||||||
|
"google/cloud/support/v1alpha1/cloud_support.proto": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_go_proto"),
|
||||||
|
"google/cloud/ml/v1/operation_metadata.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"),
|
||||||
|
"google/cloud/ml/v1/job_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"),
|
||||||
|
"google/cloud/ml/v1/prediction_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"),
|
||||||
|
"google/cloud/ml/v1/model_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"),
|
||||||
|
"google/cloud/ml/v1/project_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_go_proto"),
|
||||||
|
"google/cloud/texttospeech/v1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_go_proto"),
|
||||||
|
"google/cloud/texttospeech/v1beta1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_go_proto"),
|
||||||
|
"google/cloud/functions/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"),
|
||||||
|
"google/cloud/functions/v1beta2/functions.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_go_proto"),
|
||||||
|
"google/cloud/billing/v1/cloud_billing.proto": label.New("go_googleapis", "google/cloud/billing/v1", "billing_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/workflow_templates.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/shared.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_go_proto"),
|
||||||
|
"google/api/context.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/http.proto": label.New("go_googleapis", "google/api", "annotations_go_proto"),
|
||||||
|
"google/api/config_change.proto": label.New("go_googleapis", "google/api", "configchange_go_proto"),
|
||||||
|
"google/api/system_parameter.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/monitoring.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/distribution.proto": label.New("go_googleapis", "google/api", "distribution_go_proto"),
|
||||||
|
"google/api/endpoint.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/usage.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/monitored_resource.proto": label.New("go_googleapis", "google/api", "monitoredres_go_proto"),
|
||||||
|
"google/api/annotations.proto": label.New("go_googleapis", "google/api", "annotations_go_proto"),
|
||||||
|
"google/api/control.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/metric.proto": label.New("go_googleapis", "google/api", "metric_go_proto"),
|
||||||
|
"google/api/label.proto": label.New("go_googleapis", "google/api", "label_go_proto"),
|
||||||
|
"google/api/consumer.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/log.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/billing.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/service.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/logging.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/documentation.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/quota.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/auth.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/backend.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/source_info.proto": label.New("go_googleapis", "google/api", "serviceconfig_go_proto"),
|
||||||
|
"google/api/httpbody.proto": label.New("go_googleapis", "google/api", "httpbody_go_proto"),
|
||||||
|
"google/api/experimental/authorization_config.proto": label.New("go_googleapis", "google/api/experimental", "api_go_proto"),
|
||||||
|
"google/api/experimental/experimental.proto": label.New("go_googleapis", "google/api/experimental", "api_go_proto"),
|
||||||
|
"google/api/servicemanagement/v1/servicemanager.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_go_proto"),
|
||||||
|
"google/api/servicemanagement/v1/resources.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/quota_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/distribution.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/check_error.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/operation.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/metric_value.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/log_entry.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/api/servicecontrol/v1/service_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_go_proto"),
|
||||||
|
"google/pubsub/v1/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1", "pubsub_go_proto"),
|
||||||
|
"google/pubsub/v1beta2/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1beta2", "pubsub_go_proto"),
|
||||||
|
"google/spanner/v1/mutation.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/v1/spanner.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/v1/transaction.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/v1/keys.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/v1/type.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/v1/query_plan.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/v1/result_set.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_go_proto"),
|
||||||
|
"google/spanner/admin/database/v1/spanner_database_admin.proto": label.New("go_googleapis", "google/spanner/admin/database/v1", "database_go_proto"),
|
||||||
|
"google/spanner/admin/instance/v1/spanner_instance_admin.proto": label.New("go_googleapis", "google/spanner/admin/instance/v1", "instance_go_proto"),
|
||||||
|
"google/monitoring/v3/group.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/mutation_record.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/notification.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/alert_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/uptime_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/group_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/alert.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/uptime.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/metric.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/notification_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/metric_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/monitoring/v3/common.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_go_proto"),
|
||||||
|
"google/rpc/code.proto": label.New("go_googleapis", "google/rpc", "code_go_proto"),
|
||||||
|
"google/rpc/status.proto": label.New("go_googleapis", "google/rpc", "status_go_proto"),
|
||||||
|
"google/rpc/error_details.proto": label.New("go_googleapis", "google/rpc", "errdetails_go_proto"),
|
||||||
|
"google/streetview/publish/v1/resources.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"),
|
||||||
|
"google/streetview/publish/v1/rpcmessages.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"),
|
||||||
|
"google/streetview/publish/v1/streetview_publish.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_go_proto"),
|
||||||
|
"google/logging/v2/logging_metrics.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"),
|
||||||
|
"google/logging/v2/logging_config.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"),
|
||||||
|
"google/logging/v2/log_entry.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"),
|
||||||
|
"google/logging/v2/logging.proto": label.New("go_googleapis", "google/logging/v2", "logging_go_proto"),
|
||||||
|
"google/logging/type/log_severity.proto": label.New("go_googleapis", "google/logging/type", "ltype_go_proto"),
|
||||||
|
"google/logging/type/http_request.proto": label.New("go_googleapis", "google/logging/type", "ltype_go_proto"),
|
||||||
|
}
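The generated table above maps proto import paths to pre-built go_proto_library labels in the go_googleapis repository. As a hedged illustration of how such a table is consumed, the standalone sketch below performs the same kind of lookup with a simplified Label type and a two-entry excerpt; the names here are invented for the example and are not gazelle's internal API.

// Illustrative sketch only: a simplified stand-in for the lookup that the
// generated table above enables. The Label type here is hypothetical;
// gazelle's real code uses its internal label package.
package main

import "fmt"

// Label mirrors the (repo, pkg, name) triple passed to label.New above.
type Label struct {
	Repo, Pkg, Name string
}

// String renders the label in Bazel's @repo//pkg:name form.
func (l Label) String() string {
	return fmt.Sprintf("@%s//%s:%s", l.Repo, l.Pkg, l.Name)
}

// knownImports is a tiny excerpt of the table above, keyed by proto import path.
var knownImports = map[string]Label{
	"google/rpc/status.proto": {"go_googleapis", "google/rpc", "status_go_proto"},
	"google/api/http.proto":   {"go_googleapis", "google/api", "annotations_go_proto"},
}

func main() {
	// Resolving a proto import becomes a map lookup instead of a heuristic guess.
	if l, ok := knownImports["google/rpc/status.proto"]; ok {
		fmt.Println(l) // @go_googleapis//google/rpc:status_go_proto
	}
}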
71
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/lang.go
generated
vendored
Normal file
@@ -0,0 +1,71 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package golang provides support for Go and Go proto rules. It generates
// go_library, go_binary, go_test, and go_proto_library rules.
//
// Configuration
//
// Go rules support the flags -build_tags, -go_prefix, and -external.
// They also support the directives # gazelle:build_tags, # gazelle:prefix,
// and # gazelle:importmap_prefix. See
// https://github.com/bazelbuild/bazel-gazelle/blob/master/README.rst#directives
// for information on these.
//
// Rule generation
//
// Currently, Gazelle generates rules for one Go package per directory. In
// general, we aim to support Go code which is compatible with "go build". If
// there are no buildable packages, Gazelle will delete existing rules with
// default names. If there are multiple packages, Gazelle will pick one that
// matches the directory name or will print an error if no such package is
// found.
//
// Gazelle names library and test rules somewhat oddly: go_default_library, and
// go_default_test. This is for historic reasons: before the importpath
// attribute was mandatory, import paths were inferred from label names. Even if
// we never support multiple packages in the future (we should), we should
// migrate away from this because it's surprising. Libraries should generally
// be named after their directories.
//
// Dependency resolution
//
// Go libraries are indexed by their importpath attribute. Gazelle attempts to
// resolve libraries by import path using the index, filtered using the
// vendoring algorithm. If an import doesn't match any known library, Gazelle
// guesses a name for it, locally (if the import path is under the current
// prefix), or in an external repository or vendor directory (depending
// on external mode).
//
// Gazelle has special cases for import paths associated with proto Well
// Known Types and Google APIs. rules_go declares canonical rules for these.

package golang

import "github.com/bazelbuild/bazel-gazelle/internal/language"

const goName = "go"

type goLang struct {
	// goPkgDirs is a set of relative paths to directories containing buildable
	// Go code, including in subdirectories.
	goPkgRels map[string]bool
}

func (_ *goLang) Name() string { return goName }

func New() language.Language {
	return &goLang{goPkgRels: make(map[string]bool)}
}
488
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/package.go
generated
vendored
Normal file
@@ -0,0 +1,488 @@
|
|||||||
|
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"path"
|
||||||
|
"sort"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
)
|
||||||
|
|
||||||
|
// goPackage contains metadata for a set of .go and .proto files that can be
|
||||||
|
// used to generate Go rules.
|
||||||
|
type goPackage struct {
|
||||||
|
name, dir, rel string
|
||||||
|
library, binary, test goTarget
|
||||||
|
proto protoTarget
|
||||||
|
hasTestdata bool
|
||||||
|
importPath string
|
||||||
|
}
|
||||||
|
|
||||||
|
// goTarget contains information used to generate an individual Go rule
|
||||||
|
// (library, binary, or test).
|
||||||
|
type goTarget struct {
|
||||||
|
sources, imports, copts, clinkopts platformStringsBuilder
|
||||||
|
cgo bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// protoTarget contains information used to generate a go_proto_library rule.
|
||||||
|
type protoTarget struct {
|
||||||
|
name string
|
||||||
|
sources platformStringsBuilder
|
||||||
|
imports platformStringsBuilder
|
||||||
|
hasServices bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// platformStringsBuilder is used to construct rule.PlatformStrings. Bazel
|
||||||
|
// has some requirements for deps list (a dependency cannot appear in more
|
||||||
|
// than one select expression; dependencies cannot be duplicated), so we need
|
||||||
|
// to build these carefully.
|
||||||
|
type platformStringsBuilder struct {
|
||||||
|
strs map[string]platformStringInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
// platformStringInfo contains information about a single string (source,
|
||||||
|
// import, or option).
|
||||||
|
type platformStringInfo struct {
|
||||||
|
set platformStringSet
|
||||||
|
oss map[string]bool
|
||||||
|
archs map[string]bool
|
||||||
|
platforms map[rule.Platform]bool
|
||||||
|
}
|
||||||
|
|
||||||
|
type platformStringSet int
|
||||||
|
|
||||||
|
const (
|
||||||
|
genericSet platformStringSet = iota
|
||||||
|
osSet
|
||||||
|
archSet
|
||||||
|
platformSet
|
||||||
|
)
|
||||||
|
|
||||||
|
// addFile adds the file described by "info" to a target in the package "p" if
|
||||||
|
// the file is buildable.
|
||||||
|
//
|
||||||
|
// "cgo" tells whether any ".go" file in the package contains cgo code. This
|
||||||
|
// affects whether C files are added to targets.
|
||||||
|
//
|
||||||
|
// An error is returned if a file is buildable but invalid (for example, a
|
||||||
|
// test .go file containing cgo code). Files that are not buildable will not
|
||||||
|
// be added to any target (for example, .txt files).
|
||||||
|
func (pkg *goPackage) addFile(c *config.Config, info fileInfo, cgo bool) error {
|
||||||
|
switch {
|
||||||
|
case info.ext == unknownExt || !cgo && (info.ext == cExt || info.ext == csExt):
|
||||||
|
return nil
|
||||||
|
case info.ext == protoExt:
|
||||||
|
if proto.GetProtoConfig(c).Mode == proto.LegacyMode {
|
||||||
|
// Only add files in legacy mode. This is used to generate a filegroup
|
||||||
|
// that contains all protos. In other modes, we get the .proto files
|
||||||
|
// from information emitted by the proto language extension.
|
||||||
|
pkg.proto.addFile(c, info)
|
||||||
|
}
|
||||||
|
case info.isTest:
|
||||||
|
if info.isCgo {
|
||||||
|
return fmt.Errorf("%s: use of cgo in test not supported", info.path)
|
||||||
|
}
|
||||||
|
pkg.test.addFile(c, info)
|
||||||
|
default:
|
||||||
|
pkg.library.addFile(c, info)
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// isCommand returns true if the package name is "main".
|
||||||
|
func (pkg *goPackage) isCommand() bool {
|
||||||
|
return pkg.name == "main"
|
||||||
|
}
|
||||||
|
|
||||||
|
// isBuildable returns true if anything in the package is buildable.
|
||||||
|
// This is true if the package has Go code that satisfies build constraints
|
||||||
|
// on any platform or has proto files not in legacy mode.
|
||||||
|
func (pkg *goPackage) isBuildable(c *config.Config) bool {
|
||||||
|
return pkg.firstGoFile() != "" || !pkg.proto.sources.isEmpty()
|
||||||
|
}
|
||||||
|
|
||||||
|
// firstGoFile returns the name of a .go file if the package contains at least
|
||||||
|
// one .go file, or "" otherwise.
|
||||||
|
func (pkg *goPackage) firstGoFile() string {
|
||||||
|
goSrcs := []platformStringsBuilder{
|
||||||
|
pkg.library.sources,
|
||||||
|
pkg.binary.sources,
|
||||||
|
pkg.test.sources,
|
||||||
|
}
|
||||||
|
for _, sb := range goSrcs {
|
||||||
|
if sb.strs != nil {
|
||||||
|
for s := range sb.strs {
|
||||||
|
if strings.HasSuffix(s, ".go") {
|
||||||
|
return s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ""
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pkg *goPackage) haveCgo() bool {
|
||||||
|
return pkg.library.cgo || pkg.binary.cgo || pkg.test.cgo
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pkg *goPackage) inferImportPath(c *config.Config) error {
|
||||||
|
if pkg.importPath != "" {
|
||||||
|
log.Panic("importPath already set")
|
||||||
|
}
|
||||||
|
gc := getGoConfig(c)
|
||||||
|
if !gc.prefixSet {
|
||||||
|
return fmt.Errorf("%s: go prefix is not set, so importpath can't be determined for rules. Set a prefix with a '# gazelle:prefix' comment or with -go_prefix on the command line", pkg.dir)
|
||||||
|
}
|
||||||
|
pkg.importPath = inferImportPath(gc, pkg.rel)
|
||||||
|
|
||||||
|
if pkg.rel == gc.prefixRel {
|
||||||
|
pkg.importPath = gc.prefix
|
||||||
|
} else {
|
||||||
|
fromPrefixRel := strings.TrimPrefix(pkg.rel, gc.prefixRel+"/")
|
||||||
|
pkg.importPath = path.Join(gc.prefix, fromPrefixRel)
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func inferImportPath(gc *goConfig, rel string) string {
|
||||||
|
if rel == gc.prefixRel {
|
||||||
|
return gc.prefix
|
||||||
|
} else {
|
||||||
|
fromPrefixRel := strings.TrimPrefix(rel, gc.prefixRel+"/")
|
||||||
|
return path.Join(gc.prefix, fromPrefixRel)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func goProtoPackageName(pkg proto.Package) string {
|
||||||
|
if value, ok := pkg.Options["go_package"]; ok {
|
||||||
|
if strings.LastIndexByte(value, '/') == -1 {
|
||||||
|
return value
|
||||||
|
} else {
|
||||||
|
if i := strings.LastIndexByte(value, ';'); i != -1 {
|
||||||
|
return value[i+1:]
|
||||||
|
} else {
|
||||||
|
return path.Base(value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return strings.Replace(pkg.Name, ".", "_", -1)
|
||||||
|
}
|
||||||
|
|
||||||
|
func goProtoImportPath(gc *goConfig, pkg proto.Package, rel string) string {
|
||||||
|
if value, ok := pkg.Options["go_package"]; ok {
|
||||||
|
if strings.LastIndexByte(value, '/') == -1 {
|
||||||
|
return inferImportPath(gc, rel)
|
||||||
|
} else if i := strings.LastIndexByte(value, ';'); i != -1 {
|
||||||
|
return value[:i]
|
||||||
|
} else {
|
||||||
|
return value
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return inferImportPath(gc, rel)
|
||||||
|
}
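goProtoPackageName and goProtoImportPath above derive a Go package name and import path from a proto file's go_package option. The standalone sketch below restates that parsing with a hypothetical parseGoPackage helper so the three option shapes can be tried directly; it is an illustration, not gazelle code.

// Standalone sketch of the go_package parsing shown above; parseGoPackage is a
// hypothetical helper, not part of gazelle. It returns the Go package name and
// import path implied by a go_package option value, with empty strings where
// the real code would fall back to inference from the directory.
package main

import (
	"fmt"
	"path"
	"strings"
)

func parseGoPackage(value string) (pkgName, importPath string) {
	if strings.LastIndexByte(value, '/') == -1 {
		// "foo" names only the package; the import path must be inferred.
		return value, ""
	}
	if i := strings.LastIndexByte(value, ';'); i != -1 {
		// "example.com/foo;bar" carries both an import path and a package name.
		return value[i+1:], value[:i]
	}
	// "example.com/foo" implies package "foo".
	return path.Base(value), value
}

func main() {
	for _, v := range []string{"foo", "example.com/repo/foo", "example.com/repo/foo;foopb"} {
		name, imp := parseGoPackage(v)
		fmt.Printf("%-30s -> name=%q importPath=%q\n", v, name, imp)
	}
}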
|
||||||
|
|
||||||
|
func (t *goTarget) addFile(c *config.Config, info fileInfo) {
|
||||||
|
t.cgo = t.cgo || info.isCgo
|
||||||
|
add := getPlatformStringsAddFunction(c, info, nil)
|
||||||
|
add(&t.sources, info.name)
|
||||||
|
add(&t.imports, info.imports...)
|
||||||
|
for _, copts := range info.copts {
|
||||||
|
optAdd := add
|
||||||
|
if len(copts.tags) > 0 {
|
||||||
|
optAdd = getPlatformStringsAddFunction(c, info, copts.tags)
|
||||||
|
}
|
||||||
|
optAdd(&t.copts, copts.opts)
|
||||||
|
}
|
||||||
|
for _, clinkopts := range info.clinkopts {
|
||||||
|
optAdd := add
|
||||||
|
if len(clinkopts.tags) > 0 {
|
||||||
|
optAdd = getPlatformStringsAddFunction(c, info, clinkopts.tags)
|
||||||
|
}
|
||||||
|
optAdd(&t.clinkopts, clinkopts.opts)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func protoTargetFromProtoPackage(name string, pkg proto.Package) protoTarget {
|
||||||
|
target := protoTarget{name: name}
|
||||||
|
for f := range pkg.Files {
|
||||||
|
target.sources.addGenericString(f)
|
||||||
|
}
|
||||||
|
for i := range pkg.Imports {
|
||||||
|
target.imports.addGenericString(i)
|
||||||
|
}
|
||||||
|
target.hasServices = pkg.HasServices
|
||||||
|
return target
|
||||||
|
}
|
||||||
|
|
||||||
|
func (t *protoTarget) addFile(c *config.Config, info fileInfo) {
|
||||||
|
t.sources.addGenericString(info.name)
|
||||||
|
for _, imp := range info.imports {
|
||||||
|
t.imports.addGenericString(imp)
|
||||||
|
}
|
||||||
|
t.hasServices = t.hasServices || info.hasServices
|
||||||
|
}
|
||||||
|
|
||||||
|
// getPlatformStringsAddFunction returns a function used to add strings to
|
||||||
|
// a *platformStringsBuilder under the same set of constraints. This is a
|
||||||
|
// performance optimization to avoid evaluating constraints repeatedly.
|
||||||
|
func getPlatformStringsAddFunction(c *config.Config, info fileInfo, cgoTags tagLine) func(sb *platformStringsBuilder, ss ...string) {
|
||||||
|
isOSSpecific, isArchSpecific := isOSArchSpecific(info, cgoTags)
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case !isOSSpecific && !isArchSpecific:
|
||||||
|
if checkConstraints(c, "", "", info.goos, info.goarch, info.tags, cgoTags) {
|
||||||
|
return func(sb *platformStringsBuilder, ss ...string) {
|
||||||
|
for _, s := range ss {
|
||||||
|
sb.addGenericString(s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case isOSSpecific && !isArchSpecific:
|
||||||
|
var osMatch []string
|
||||||
|
for _, os := range rule.KnownOSs {
|
||||||
|
if checkConstraints(c, os, "", info.goos, info.goarch, info.tags, cgoTags) {
|
||||||
|
osMatch = append(osMatch, os)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(osMatch) > 0 {
|
||||||
|
return func(sb *platformStringsBuilder, ss ...string) {
|
||||||
|
for _, s := range ss {
|
||||||
|
sb.addOSString(s, osMatch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
case !isOSSpecific && isArchSpecific:
|
||||||
|
var archMatch []string
|
||||||
|
for _, arch := range rule.KnownArchs {
|
||||||
|
if checkConstraints(c, "", arch, info.goos, info.goarch, info.tags, cgoTags) {
|
||||||
|
archMatch = append(archMatch, arch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(archMatch) > 0 {
|
||||||
|
return func(sb *platformStringsBuilder, ss ...string) {
|
||||||
|
for _, s := range ss {
|
||||||
|
sb.addArchString(s, archMatch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
default:
|
||||||
|
var platformMatch []rule.Platform
|
||||||
|
for _, platform := range rule.KnownPlatforms {
|
||||||
|
if checkConstraints(c, platform.OS, platform.Arch, info.goos, info.goarch, info.tags, cgoTags) {
|
||||||
|
platformMatch = append(platformMatch, platform)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(platformMatch) > 0 {
|
||||||
|
return func(sb *platformStringsBuilder, ss ...string) {
|
||||||
|
for _, s := range ss {
|
||||||
|
sb.addPlatformString(s, platformMatch)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return func(_ *platformStringsBuilder, _ ...string) {}
|
||||||
|
}
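getPlatformStringsAddFunction above evaluates build constraints once and returns a closure specialized to the outcome, so the per-string work is only an append. The toy sketch below shows the same pattern with a much simpler, invented "constraint" (a list of OS names); it is not gazelle's implementation.

// Toy illustration of the closure pattern used above: evaluate a condition once,
// then hand back a specialized add function. The matcher below is hypothetical
// and much simpler than gazelle's real constraint checking.
package main

import "fmt"

type builder struct {
	generic []string
	byOS    map[string][]string
}

// addFuncFor decides once which bucket strings belong in and returns a closure
// that only does the cheap append work afterwards.
func addFuncFor(osList []string) func(b *builder, ss ...string) {
	if len(osList) == 0 {
		return func(b *builder, ss ...string) { b.generic = append(b.generic, ss...) }
	}
	return func(b *builder, ss ...string) {
		if b.byOS == nil {
			b.byOS = make(map[string][]string)
		}
		for _, os := range osList {
			b.byOS[os] = append(b.byOS[os], ss...)
		}
	}
}

func main() {
	var b builder
	addGeneric := addFuncFor(nil)
	addLinux := addFuncFor([]string{"linux"})
	addGeneric(&b, "a.go", "b.go")
	addLinux(&b, "c_linux.go")
	fmt.Println(b.generic, b.byOS)
}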
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) isEmpty() bool {
|
||||||
|
return sb.strs == nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) hasGo() bool {
|
||||||
|
for s := range sb.strs {
|
||||||
|
if strings.HasSuffix(s, ".go") {
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) addGenericString(s string) {
|
||||||
|
if sb.strs == nil {
|
||||||
|
sb.strs = make(map[string]platformStringInfo)
|
||||||
|
}
|
||||||
|
sb.strs[s] = platformStringInfo{set: genericSet}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) addOSString(s string, oss []string) {
|
||||||
|
if sb.strs == nil {
|
||||||
|
sb.strs = make(map[string]platformStringInfo)
|
||||||
|
}
|
||||||
|
si, ok := sb.strs[s]
|
||||||
|
if !ok {
|
||||||
|
si.set = osSet
|
||||||
|
si.oss = make(map[string]bool)
|
||||||
|
}
|
||||||
|
switch si.set {
|
||||||
|
case genericSet:
|
||||||
|
return
|
||||||
|
case osSet:
|
||||||
|
for _, os := range oss {
|
||||||
|
si.oss[os] = true
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
si.convertToPlatforms()
|
||||||
|
for _, os := range oss {
|
||||||
|
for _, arch := range rule.KnownOSArchs[os] {
|
||||||
|
si.platforms[rule.Platform{OS: os, Arch: arch}] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sb.strs[s] = si
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) addArchString(s string, archs []string) {
|
||||||
|
if sb.strs == nil {
|
||||||
|
sb.strs = make(map[string]platformStringInfo)
|
||||||
|
}
|
||||||
|
si, ok := sb.strs[s]
|
||||||
|
if !ok {
|
||||||
|
si.set = archSet
|
||||||
|
si.archs = make(map[string]bool)
|
||||||
|
}
|
||||||
|
switch si.set {
|
||||||
|
case genericSet:
|
||||||
|
return
|
||||||
|
case archSet:
|
||||||
|
for _, arch := range archs {
|
||||||
|
si.archs[arch] = true
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
si.convertToPlatforms()
|
||||||
|
for _, arch := range archs {
|
||||||
|
for _, os := range rule.KnownArchOSs[arch] {
|
||||||
|
si.platforms[rule.Platform{OS: os, Arch: arch}] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sb.strs[s] = si
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) addPlatformString(s string, platforms []rule.Platform) {
|
||||||
|
if sb.strs == nil {
|
||||||
|
sb.strs = make(map[string]platformStringInfo)
|
||||||
|
}
|
||||||
|
si, ok := sb.strs[s]
|
||||||
|
if !ok {
|
||||||
|
si.set = platformSet
|
||||||
|
si.platforms = make(map[rule.Platform]bool)
|
||||||
|
}
|
||||||
|
switch si.set {
|
||||||
|
case genericSet:
|
||||||
|
return
|
||||||
|
default:
|
||||||
|
si.convertToPlatforms()
|
||||||
|
for _, p := range platforms {
|
||||||
|
si.platforms[p] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sb.strs[s] = si
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) build() rule.PlatformStrings {
|
||||||
|
var ps rule.PlatformStrings
|
||||||
|
for s, si := range sb.strs {
|
||||||
|
switch si.set {
|
||||||
|
case genericSet:
|
||||||
|
ps.Generic = append(ps.Generic, s)
|
||||||
|
case osSet:
|
||||||
|
if ps.OS == nil {
|
||||||
|
ps.OS = make(map[string][]string)
|
||||||
|
}
|
||||||
|
for os := range si.oss {
|
||||||
|
ps.OS[os] = append(ps.OS[os], s)
|
||||||
|
}
|
||||||
|
case archSet:
|
||||||
|
if ps.Arch == nil {
|
||||||
|
ps.Arch = make(map[string][]string)
|
||||||
|
}
|
||||||
|
for arch := range si.archs {
|
||||||
|
ps.Arch[arch] = append(ps.Arch[arch], s)
|
||||||
|
}
|
||||||
|
case platformSet:
|
||||||
|
if ps.Platform == nil {
|
||||||
|
ps.Platform = make(map[rule.Platform][]string)
|
||||||
|
}
|
||||||
|
for p := range si.platforms {
|
||||||
|
ps.Platform[p] = append(ps.Platform[p], s)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sort.Strings(ps.Generic)
|
||||||
|
if ps.OS != nil {
|
||||||
|
for _, ss := range ps.OS {
|
||||||
|
sort.Strings(ss)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps.Arch != nil {
|
||||||
|
for _, ss := range ps.Arch {
|
||||||
|
sort.Strings(ss)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ps.Platform != nil {
|
||||||
|
for _, ss := range ps.Platform {
|
||||||
|
sort.Strings(ss)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return ps
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sb *platformStringsBuilder) buildFlat() []string {
|
||||||
|
strs := make([]string, 0, len(sb.strs))
|
||||||
|
for s := range sb.strs {
|
||||||
|
strs = append(strs, s)
|
||||||
|
}
|
||||||
|
sort.Strings(strs)
|
||||||
|
return strs
|
||||||
|
}
|
||||||
|
|
||||||
|
func (si *platformStringInfo) convertToPlatforms() {
|
||||||
|
switch si.set {
|
||||||
|
case genericSet:
|
||||||
|
log.Panic("cannot convert generic string to platforms")
|
||||||
|
case platformSet:
|
||||||
|
return
|
||||||
|
case osSet:
|
||||||
|
si.set = platformSet
|
||||||
|
si.platforms = make(map[rule.Platform]bool)
|
||||||
|
for os := range si.oss {
|
||||||
|
for _, arch := range rule.KnownOSArchs[os] {
|
||||||
|
si.platforms[rule.Platform{OS: os, Arch: arch}] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
si.oss = nil
|
||||||
|
case archSet:
|
||||||
|
si.set = platformSet
|
||||||
|
si.platforms = make(map[rule.Platform]bool)
|
||||||
|
for arch := range si.archs {
|
||||||
|
for _, os := range rule.KnownArchOSs[arch] {
|
||||||
|
si.platforms[rule.Platform{OS: os, Arch: arch}] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
si.archs = nil
|
||||||
|
}
|
||||||
|
}
306
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/go/resolve.go
generated
vendored
Normal file
@@ -0,0 +1,306 @@
|
|||||||
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package golang
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"go/build"
|
||||||
|
"log"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/label"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/repos"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/resolve"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (_ *goLang) Imports(_ *config.Config, r *rule.Rule, f *rule.File) []resolve.ImportSpec {
|
||||||
|
if !isGoLibrary(r.Kind()) {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if importPath := r.AttrString("importpath"); importPath == "" {
|
||||||
|
return []resolve.ImportSpec{}
|
||||||
|
} else {
|
||||||
|
return []resolve.ImportSpec{{goName, importPath}}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func (_ *goLang) Embeds(r *rule.Rule, from label.Label) []label.Label {
|
||||||
|
embedStrings := r.AttrStrings("embed")
|
||||||
|
if isGoProtoLibrary(r.Kind()) {
|
||||||
|
embedStrings = append(embedStrings, r.AttrString("proto"))
|
||||||
|
}
|
||||||
|
embedLabels := make([]label.Label, 0, len(embedStrings))
|
||||||
|
for _, s := range embedStrings {
|
||||||
|
l, err := label.Parse(s)
|
||||||
|
if err != nil {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
l = l.Abs(from.Repo, from.Pkg)
|
||||||
|
embedLabels = append(embedLabels, l)
|
||||||
|
}
|
||||||
|
return embedLabels
|
||||||
|
}
|
||||||
|
|
||||||
|
func (gl *goLang) Resolve(c *config.Config, ix *resolve.RuleIndex, rc *repos.RemoteCache, r *rule.Rule, from label.Label) {
|
||||||
|
importsRaw := r.PrivateAttr(config.GazelleImportsKey)
|
||||||
|
if importsRaw == nil {
|
||||||
|
// may not be set in tests.
|
||||||
|
return
|
||||||
|
}
|
||||||
|
imports := importsRaw.(rule.PlatformStrings)
|
||||||
|
r.DelAttr("deps")
|
||||||
|
resolve := resolveGo
|
||||||
|
if r.Kind() == "go_proto_library" {
|
||||||
|
resolve = resolveProto
|
||||||
|
}
|
||||||
|
deps, errs := imports.Map(func(imp string) (string, error) {
|
||||||
|
l, err := resolve(c, ix, rc, r, imp, from)
|
||||||
|
if err == skipImportError {
|
||||||
|
return "", nil
|
||||||
|
} else if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
for _, embed := range gl.Embeds(r, from) {
|
||||||
|
if embed.Equal(l) {
|
||||||
|
return "", nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
l = l.Rel(from.Repo, from.Pkg)
|
||||||
|
return l.String(), nil
|
||||||
|
})
|
||||||
|
for _, err := range errs {
|
||||||
|
log.Print(err)
|
||||||
|
}
|
||||||
|
if !deps.IsEmpty() {
|
||||||
|
r.SetAttr("deps", deps)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
skipImportError = errors.New("std or self import")
|
||||||
|
notFoundError = errors.New("rule not found")
|
||||||
|
)
|
||||||
|
|
||||||
|
func resolveGo(c *config.Config, ix *resolve.RuleIndex, rc *repos.RemoteCache, r *rule.Rule, imp string, from label.Label) (label.Label, error) {
|
||||||
|
gc := getGoConfig(c)
|
||||||
|
pc := proto.GetProtoConfig(c)
|
||||||
|
if build.IsLocalImport(imp) {
|
||||||
|
cleanRel := path.Clean(path.Join(from.Pkg, imp))
|
||||||
|
if build.IsLocalImport(cleanRel) {
|
||||||
|
return label.NoLabel, fmt.Errorf("relative import path %q from %q points outside of repository", imp, from.Pkg)
|
||||||
|
}
|
||||||
|
imp = path.Join(gc.prefix, cleanRel)
|
||||||
|
}
|
||||||
|
|
||||||
|
if isStandard(imp) {
|
||||||
|
return label.NoLabel, skipImportError
|
||||||
|
}
|
||||||
|
|
||||||
|
if pc.Mode.ShouldUseKnownImports() {
|
||||||
|
// These are commonly used libraries that depend on Well Known Types.
|
||||||
|
// They depend on the generated versions of these protos to avoid conflicts.
|
||||||
|
// However, since protoc-gen-go depends on these libraries, we generate
|
||||||
|
// its rules in disable_global mode (to avoid cyclic dependency), so the
|
||||||
|
// "go_default_library" versions of these libraries depend on the
|
||||||
|
// pre-generated versions of the proto libraries.
|
||||||
|
switch imp {
|
||||||
|
case "github.com/golang/protobuf/jsonpb":
|
||||||
|
return label.New("com_github_golang_protobuf", "jsonpb", "go_default_library_gen"), nil
|
||||||
|
case "github.com/golang/protobuf/descriptor":
|
||||||
|
return label.New("com_github_golang_protobuf", "descriptor", "go_default_library_gen"), nil
|
||||||
|
case "github.com/golang/protobuf/ptypes":
|
||||||
|
return label.New("com_github_golang_protobuf", "ptypes", "go_default_library_gen"), nil
|
||||||
|
}
|
||||||
|
if l, ok := knownGoProtoImports[imp]; ok {
|
||||||
|
return l, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if l, err := resolveWithIndexGo(ix, imp, from); err == nil || err == skipImportError {
|
||||||
|
return l, err
|
||||||
|
} else if err != notFoundError {
|
||||||
|
return label.NoLabel, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if pathtools.HasPrefix(imp, gc.prefix) {
|
||||||
|
pkg := path.Join(gc.prefixRel, pathtools.TrimPrefix(imp, gc.prefix))
|
||||||
|
return label.New("", pkg, config.DefaultLibName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
if gc.depMode == externalMode {
|
||||||
|
return resolveExternal(rc, imp)
|
||||||
|
} else {
|
||||||
|
return resolveVendored(rc, imp)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// isStandard returns whether a package is in the standard library.
|
||||||
|
func isStandard(imp string) bool {
|
||||||
|
return stdPackages[imp]
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveWithIndexGo(ix *resolve.RuleIndex, imp string, from label.Label) (label.Label, error) {
|
||||||
|
matches := ix.FindRulesByImport(resolve.ImportSpec{Lang: "go", Imp: imp}, "go")
|
||||||
|
var bestMatch resolve.FindResult
|
||||||
|
var bestMatchIsVendored bool
|
||||||
|
var bestMatchVendorRoot string
|
||||||
|
var matchError error
|
||||||
|
|
||||||
|
for _, m := range matches {
|
||||||
|
// Apply vendoring logic for Go libraries. A library in a vendor directory
|
||||||
|
// is only visible in the parent tree. Vendored libraries supersede
|
||||||
|
// non-vendored libraries, and libraries closer to from.Pkg supercede
|
||||||
|
// those further up the tree.
|
||||||
|
isVendored := false
|
||||||
|
vendorRoot := ""
|
||||||
|
parts := strings.Split(m.Label.Pkg, "/")
|
||||||
|
for i := len(parts) - 1; i >= 0; i-- {
|
||||||
|
if parts[i] == "vendor" {
|
||||||
|
isVendored = true
|
||||||
|
vendorRoot = strings.Join(parts[:i], "/")
|
||||||
|
break
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if isVendored {
|
||||||
|
}
|
||||||
|
if isVendored && !label.New(m.Label.Repo, vendorRoot, "").Contains(from) {
|
||||||
|
// vendor directory not visible
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if bestMatch.Label.Equal(label.NoLabel) || isVendored && (!bestMatchIsVendored || len(vendorRoot) > len(bestMatchVendorRoot)) {
|
||||||
|
// Current match is better
|
||||||
|
bestMatch = m
|
||||||
|
bestMatchIsVendored = isVendored
|
||||||
|
bestMatchVendorRoot = vendorRoot
|
||||||
|
matchError = nil
|
||||||
|
} else if (!isVendored && bestMatchIsVendored) || (isVendored && len(vendorRoot) < len(bestMatchVendorRoot)) {
|
||||||
|
// Current match is worse
|
||||||
|
} else {
|
||||||
|
// Match is ambiguous
|
||||||
|
matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.Label, m.Label, imp, from)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if matchError != nil {
|
||||||
|
return label.NoLabel, matchError
|
||||||
|
}
|
||||||
|
if bestMatch.Label.Equal(label.NoLabel) {
|
||||||
|
return label.NoLabel, notFoundError
|
||||||
|
}
|
||||||
|
if bestMatch.IsSelfImport(from) {
|
||||||
|
return label.NoLabel, skipImportError
|
||||||
|
}
|
||||||
|
return bestMatch.Label, nil
|
||||||
|
}
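The loop above encodes Go's vendoring visibility rule: a library under some root's vendor/ directory may only be imported by packages inside that root. The minimal sketch below checks that rule in isolation; vendorVisible is a hypothetical helper, not part of gazelle, and the paths are repository-relative package paths as in from.Pkg above.

// Minimal sketch of the vendor-visibility rule applied above: a package vendored
// under root "a/b" (i.e. a/b/vendor/...) is only visible to packages in a/b and
// below. An empty root means the top-level vendor directory, visible everywhere.
package main

import (
	"fmt"
	"strings"
)

func vendorVisible(vendorRoot, fromPkg string) bool {
	if vendorRoot == "" {
		return true // vendored at the repository root: visible everywhere
	}
	return fromPkg == vendorRoot || strings.HasPrefix(fromPkg, vendorRoot+"/")
}

func main() {
	fmt.Println(vendorVisible("", "pkg/controller"))             // true
	fmt.Println(vendorVisible("staging/foo", "staging/foo/x"))   // true
	fmt.Println(vendorVisible("staging/foo", "pkg/controller"))  // false
}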
|
||||||
|
|
||||||
|
func resolveExternal(rc *repos.RemoteCache, imp string) (label.Label, error) {
|
||||||
|
prefix, repo, err := rc.Root(imp)
|
||||||
|
if err != nil {
|
||||||
|
return label.NoLabel, err
|
||||||
|
}
|
||||||
|
|
||||||
|
var pkg string
|
||||||
|
if imp != prefix {
|
||||||
|
pkg = pathtools.TrimPrefix(imp, prefix)
|
||||||
|
}
|
||||||
|
|
||||||
|
return label.New(repo, pkg, config.DefaultLibName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveVendored(rc *repos.RemoteCache, imp string) (label.Label, error) {
|
||||||
|
return label.New("", path.Join("vendor", imp), config.DefaultLibName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveProto(c *config.Config, ix *resolve.RuleIndex, rc *repos.RemoteCache, r *rule.Rule, imp string, from label.Label) (label.Label, error) {
|
||||||
|
pc := proto.GetProtoConfig(c)
|
||||||
|
|
||||||
|
if wellKnownProtos[imp] {
|
||||||
|
return label.NoLabel, skipImportError
|
||||||
|
}
|
||||||
|
|
||||||
|
if l, ok := knownProtoImports[imp]; ok && pc.Mode.ShouldUseKnownImports() {
|
||||||
|
if l.Equal(from) {
|
||||||
|
return label.NoLabel, skipImportError
|
||||||
|
} else {
|
||||||
|
return l, nil
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if l, err := resolveWithIndexProto(ix, imp, from); err == nil || err == skipImportError {
|
||||||
|
return l, err
|
||||||
|
} else if err != notFoundError {
|
||||||
|
return label.NoLabel, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// As a fallback, guess the label based on the proto file name. We assume
|
||||||
|
// all proto files in a directory belong to the same package, and the
|
||||||
|
// package name matches the directory base name. We also assume that protos
|
||||||
|
// in the vendor directory must refer to something else in vendor.
|
||||||
|
rel := path.Dir(imp)
|
||||||
|
if rel == "." {
|
||||||
|
rel = ""
|
||||||
|
}
|
||||||
|
if from.Pkg == "vendor" || strings.HasPrefix(from.Pkg, "vendor/") {
|
||||||
|
rel = path.Join("vendor", rel)
|
||||||
|
}
|
||||||
|
return label.New("", rel, config.DefaultLibName), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// wellKnownProtos is the set of proto sets for which we don't need to add
|
||||||
|
// an explicit dependency in go_proto_library.
|
||||||
|
// TODO(jayconrod): generate from
|
||||||
|
// @io_bazel_rules_go//proto/wkt:WELL_KNOWN_TYPE_PACKAGES
|
||||||
|
var wellKnownProtos = map[string]bool{
|
||||||
|
"google/protobuf/any.proto": true,
|
||||||
|
"google/protobuf/api.proto": true,
|
||||||
|
"google/protobuf/compiler_plugin.proto": true,
|
||||||
|
"google/protobuf/descriptor.proto": true,
|
||||||
|
"google/protobuf/duration.proto": true,
|
||||||
|
"google/protobuf/empty.proto": true,
|
||||||
|
"google/protobuf/field_mask.proto": true,
|
||||||
|
"google/protobuf/source_context.proto": true,
|
||||||
|
"google/protobuf/struct.proto": true,
|
||||||
|
"google/protobuf/timestamp.proto": true,
|
||||||
|
"google/protobuf/type.proto": true,
|
||||||
|
"google/protobuf/wrappers.proto": true,
|
||||||
|
}
|
||||||
|
|
||||||
|
func resolveWithIndexProto(ix *resolve.RuleIndex, imp string, from label.Label) (label.Label, error) {
|
||||||
|
matches := ix.FindRulesByImport(resolve.ImportSpec{Lang: "proto", Imp: imp}, "go")
|
||||||
|
if len(matches) == 0 {
|
||||||
|
return label.NoLabel, notFoundError
|
||||||
|
}
|
||||||
|
if len(matches) > 1 {
|
||||||
|
return label.NoLabel, fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", matches[0].Label, matches[1].Label, imp, from)
|
||||||
|
}
|
||||||
|
if matches[0].IsSelfImport(from) {
|
||||||
|
return label.NoLabel, skipImportError
|
||||||
|
}
|
||||||
|
return matches[0].Label, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func isGoLibrary(kind string) bool {
|
||||||
|
return kind == "go_library" || isGoProtoLibrary(kind)
|
||||||
|
}
|
||||||
|
|
||||||
|
func isGoProtoLibrary(kind string) bool {
|
||||||
|
return kind == "go_proto_library" || kind == "go_grpc_library"
|
||||||
|
}
|
||||||
@@ -17,7 +17,7 @@ limitations under the License.
|
|||||||
// Generated by gen_std_package_list.go
|
// Generated by gen_std_package_list.go
|
||||||
// DO NOT EDIT
|
// DO NOT EDIT
|
||||||
|
|
||||||
package resolve
|
package golang
|
||||||
|
|
||||||
var stdPackages = map[string]bool{
|
var stdPackages = map[string]bool{
|
||||||
"archive/tar": true,
|
"archive/tar": true,
|
||||||
@@ -31,6 +31,7 @@ var stdPackages = map[string]bool{
|
|||||||
"cmd/asm/internal/asm": true,
|
"cmd/asm/internal/asm": true,
|
||||||
"cmd/asm/internal/flags": true,
|
"cmd/asm/internal/flags": true,
|
||||||
"cmd/asm/internal/lex": true,
|
"cmd/asm/internal/lex": true,
|
||||||
|
"cmd/buildid": true,
|
||||||
"cmd/cgo": true,
|
"cmd/cgo": true,
|
||||||
"cmd/compile": true,
|
"cmd/compile": true,
|
||||||
"cmd/compile/internal/amd64": true,
|
"cmd/compile/internal/amd64": true,
|
||||||
@@ -53,7 +54,7 @@ var stdPackages = map[string]bool{
|
|||||||
"cmd/go": true,
|
"cmd/go": true,
|
||||||
"cmd/go/internal/base": true,
|
"cmd/go/internal/base": true,
|
||||||
"cmd/go/internal/bug": true,
|
"cmd/go/internal/bug": true,
|
||||||
"cmd/go/internal/buildid": true,
|
"cmd/go/internal/cache": true,
|
||||||
"cmd/go/internal/cfg": true,
|
"cmd/go/internal/cfg": true,
|
||||||
"cmd/go/internal/clean": true,
|
"cmd/go/internal/clean": true,
|
||||||
"cmd/go/internal/cmdflag": true,
|
"cmd/go/internal/cmdflag": true,
|
||||||
@@ -77,7 +78,9 @@ var stdPackages = map[string]bool{
|
|||||||
"cmd/gofmt": true,
|
"cmd/gofmt": true,
|
||||||
"cmd/internal/bio": true,
|
"cmd/internal/bio": true,
|
||||||
"cmd/internal/browser": true,
|
"cmd/internal/browser": true,
|
||||||
|
"cmd/internal/buildid": true,
|
||||||
"cmd/internal/dwarf": true,
|
"cmd/internal/dwarf": true,
|
||||||
|
"cmd/internal/edit": true,
|
||||||
"cmd/internal/gcprog": true,
|
"cmd/internal/gcprog": true,
|
||||||
"cmd/internal/goobj": true,
|
"cmd/internal/goobj": true,
|
||||||
"cmd/internal/obj": true,
|
"cmd/internal/obj": true,
|
||||||
@@ -91,20 +94,27 @@ var stdPackages = map[string]bool{
|
|||||||
"cmd/internal/objfile": true,
|
"cmd/internal/objfile": true,
|
||||||
"cmd/internal/src": true,
|
"cmd/internal/src": true,
|
||||||
"cmd/internal/sys": true,
|
"cmd/internal/sys": true,
|
||||||
|
"cmd/internal/test2json": true,
|
||||||
"cmd/link": true,
|
"cmd/link": true,
|
||||||
"cmd/link/internal/amd64": true,
|
"cmd/link/internal/amd64": true,
|
||||||
"cmd/link/internal/arm": true,
|
"cmd/link/internal/arm": true,
|
||||||
"cmd/link/internal/arm64": true,
|
"cmd/link/internal/arm64": true,
|
||||||
"cmd/link/internal/ld": true,
|
"cmd/link/internal/ld": true,
|
||||||
|
"cmd/link/internal/loadelf": true,
|
||||||
|
"cmd/link/internal/loadmacho": true,
|
||||||
|
"cmd/link/internal/loadpe": true,
|
||||||
"cmd/link/internal/mips": true,
|
"cmd/link/internal/mips": true,
|
||||||
"cmd/link/internal/mips64": true,
|
"cmd/link/internal/mips64": true,
|
||||||
|
"cmd/link/internal/objfile": true,
|
||||||
"cmd/link/internal/ppc64": true,
|
"cmd/link/internal/ppc64": true,
|
||||||
"cmd/link/internal/s390x": true,
|
"cmd/link/internal/s390x": true,
|
||||||
|
"cmd/link/internal/sym": true,
|
||||||
"cmd/link/internal/x86": true,
|
"cmd/link/internal/x86": true,
|
||||||
"cmd/nm": true,
|
"cmd/nm": true,
|
||||||
"cmd/objdump": true,
|
"cmd/objdump": true,
|
||||||
"cmd/pack": true,
|
"cmd/pack": true,
|
||||||
"cmd/pprof": true,
|
"cmd/pprof": true,
|
||||||
|
"cmd/test2json": true,
|
||||||
"cmd/trace": true,
|
"cmd/trace": true,
|
||||||
"cmd/vet": true,
|
"cmd/vet": true,
|
||||||
"cmd/vet/internal/cfg": true,
|
"cmd/vet/internal/cfg": true,
|
||||||
@@ -197,11 +207,11 @@ var stdPackages = map[string]bool{
|
|||||||
"internal/poll": true,
|
"internal/poll": true,
|
||||||
"internal/race": true,
|
"internal/race": true,
|
||||||
"internal/singleflight": true,
|
"internal/singleflight": true,
|
||||||
"internal/syscall/unix": true,
|
|
||||||
"internal/syscall/windows": true,
|
"internal/syscall/windows": true,
|
||||||
"internal/syscall/windows/registry": true,
|
"internal/syscall/windows/registry": true,
|
||||||
"internal/syscall/windows/sysdll": true,
|
"internal/syscall/windows/sysdll": true,
|
||||||
"internal/testenv": true,
|
"internal/testenv": true,
|
||||||
|
"internal/testlog": true,
|
||||||
"internal/trace": true,
|
"internal/trace": true,
|
||||||
"io": true,
|
"io": true,
|
||||||
"io/ioutil": true,
|
"io/ioutil": true,
|
||||||
@@ -235,6 +245,7 @@ var stdPackages = map[string]bool{
|
|||||||
"os": true,
|
"os": true,
|
||||||
"os/exec": true,
|
"os/exec": true,
|
||||||
"os/signal": true,
|
"os/signal": true,
|
||||||
|
"os/signal/internal/pty": true,
|
||||||
"os/user": true,
|
"os/user": true,
|
||||||
"path": true,
|
"path": true,
|
||||||
"path/filepath": true,
|
"path/filepath": true,
|
||||||
96
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/lang.go
generated
vendored
Normal file
@@ -0,0 +1,96 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package language

import (
	"github.com/bazelbuild/bazel-gazelle/internal/config"
	"github.com/bazelbuild/bazel-gazelle/internal/resolve"
	"github.com/bazelbuild/bazel-gazelle/internal/rule"
)

// Language describes an extension for Gazelle that provides support for
// a set of Bazel rules.
//
// Languages are used primarily by the fix and update commands. The order
// in which languages are used matters, since languages may depend on
// one another. For example, go depends on proto, since go_proto_libraries
// are generated from metadata stored in proto_libraries.
//
// A single instance of Language is created for each fix / update run. Some
// state may be stored in this instance, but stateless behavior is encouraged,
// especially since some operations may be concurrent in the future.
//
// Tasks languages are used for
//
// * Configuration (embedded interface config.Configurer). Languages may
//   define command line flags and alter the configuration in a directory
//   based on directives in build files.
//
// * Fixing deprecated usage of rules in build files.
//
// * Generating rules from source files in a directory.
//
// * Resolving library imports (embedded interface resolve.Resolver). For
//   example, import strings like "github.com/foo/bar" in Go can be resolved
//   into Bazel labels like "@com_github_foo_bar//:go_default_library".
//
// Tasks languages support
//
// * Generating load statements: languages list files and symbols that may
//   be loaded.
//
// * Merging generated rules into existing rules: languages provide metadata
//   that helps with rule matching, merging, and deletion.
type Language interface {
	config.Configurer
	resolve.Resolver

	// Kinds returns a map of maps rule names (kinds) and information on how to
	// match and merge attributes that may be found in rules of those kinds. All
	// kinds of rules generated for this language may be found here.
	Kinds() map[string]rule.KindInfo

	// Loads returns .bzl files and symbols they define. Every rule generated by
	// GenerateRules, now or in the past, should be loadable from one of these
	// files.
	Loads() []rule.LoadInfo

	// GenerateRules extracts build metadata from source files in a directory.
	// GenerateRules is called in each directory where an update is requested
	// in depth-first post-order.
	//
	// c is the configuration for the current directory.
	// dir is the absolute path to the directory to scan.
	// rel is the relative path to the directory from the repository root.
	// f is the build file. It may be nil. It should not be modified.
	// subdirs is a list of subdirectory names.
	// regularFiles is a list of normal files in the directory.
	// genFiles is a list of generated files, found in outputs of rules.
	// otherEmpty and otherGen are lists of empty and generated rules created
	// by other languages processed before this language.
	//
	// empty is a list of empty rules that may be deleted after merge.
	// gen is a list of generated rules that may be updated or added.
	//
	// Any non-fatal errors this function encounters should be logged using
	// log.Print.
	GenerateRules(c *config.Config, dir, rel string, f *rule.File, subdirs, regularFiles, genFiles []string, otherEmpty, otherGen []*rule.Rule) (empty, gen []*rule.Rule)

	// Fix repairs deprecated usage of language-specific rules in f. This is
	// called before the file is indexed. Unless c.ShouldFix is true, fixes
	// that delete or rename rules should not be performed.
	Fix(c *config.Config, f *rule.File)
}
|
||||||
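To make the shape of such an extension concrete, here is a minimal, self-contained sketch of the same idea using simplified stand-in types. It is not the vendored API: miniLanguage, fooLang, and the trimmed-down method set are invented for illustration only.

package main

import "fmt"

// miniLanguage is a stripped-down stand-in for the Language interface above:
// an extension names the rule kinds it owns, generates rules for a directory,
// and can fix deprecated usage.
type miniLanguage interface {
    Kinds() []string
    GenerateRules(dir string, files []string) []string
    Fix(file string)
}

// fooLang is a hypothetical extension that turns .foo sources into foo_library rules.
type fooLang struct{}

func (fooLang) Kinds() []string { return []string{"foo_library"} }

func (fooLang) GenerateRules(dir string, files []string) []string {
    var rules []string
    for _, f := range files {
        rules = append(rules, fmt.Sprintf("foo_library(name = %q, srcs = [%q])", dir+"_foo", f))
    }
    return rules
}

func (fooLang) Fix(file string) {} // nothing deprecated to repair

func main() {
    var lang miniLanguage = fooLang{}
    for _, r := range lang.GenerateRules("pkg", []string{"a.foo"}) {
        fmt.Println(r)
    }
}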
42 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/BUILD generated vendored Normal file
@@ -0,0 +1,42 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "config.go",
        "constants.go",
        "fileinfo.go",
        "fix.go",
        "generate.go",
        "kinds.go",
        "known_imports.go",
        "lang.go",
        "package.go",
        "resolve.go",
    ],
    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto",
    importpath = "github.com/bazelbuild/bazel-gazelle/internal/language/proto",
    visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
    deps = [
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/language:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve:go_default_library",
        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
    ],
)

filegroup(
    name = "package-srcs",
    srcs = glob(["**"]),
    tags = ["automanaged"],
    visibility = ["//visibility:private"],
)

filegroup(
    name = "all-srcs",
    srcs = [":package-srcs"],
    tags = ["automanaged"],
    visibility = ["//visibility:public"],
)
250 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/config.go generated vendored Normal file
@@ -0,0 +1,250 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package proto

import (
    "flag"
    "fmt"
    "log"
    "path"

    "github.com/bazelbuild/bazel-gazelle/internal/config"
    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

// ProtoConfig contains configuration values related to protos.
//
// This type is public because other languages need to generate rules based
// on protos, so this configuration may be relevant to them.
type ProtoConfig struct {
    // Mode determines how rules are generated for protos.
    Mode Mode

    // ModeExplicit indicates whether the proto mode was set explicitly.
    ModeExplicit bool

    // GoPrefix is the current Go prefix (the Go extension may set this in the
    // root directory only). Used to generate proto rule names in the root
    // directory when there are no proto files or the proto package name
    // can't be determined.
    // TODO(jayconrod): deprecate and remove Go-specific behavior.
    GoPrefix string

    // groupOption is an option name that Gazelle will use to group .proto
    // files into proto_library rules. If unset, the proto package name is used.
    groupOption string
}

func GetProtoConfig(c *config.Config) *ProtoConfig {
    return c.Exts[protoName].(*ProtoConfig)
}

// Mode determines how proto rules are generated.
type Mode int

const (
    // DefaultMode generates proto_library rules. Other languages should generate
    // library rules based on these (e.g., go_proto_library) and should ignore
    // checked-in generated files (e.g., .pb.go files) when there is a .proto
    // file with a similar name.
    DefaultMode Mode = iota

    // DisableMode ignores .proto files and generates empty proto_library rules.
    // Checked-in generated files (e.g., .pb.go files) should be treated as
    // normal sources.
    DisableMode

    // DisableGlobalMode is similar to DisableMode, but it also prevents
    // the use of special cases in dependency resolution for well known types
    // and Google APIs.
    DisableGlobalMode

    // LegacyMode generates filegroups for .proto files if .pb.go files are
    // present in the same directory.
    LegacyMode

    // PackageMode generates a proto_library for each set of .proto files with
    // the same package name in each directory.
    PackageMode
)

func ModeFromString(s string) (Mode, error) {
    switch s {
    case "default":
        return DefaultMode, nil
    case "disable":
        return DisableMode, nil
    case "disable_global":
        return DisableGlobalMode, nil
    case "legacy":
        return LegacyMode, nil
    case "package":
        return PackageMode, nil
    default:
        return 0, fmt.Errorf("unrecognized proto mode: %q", s)
    }
}

func (m Mode) String() string {
    switch m {
    case DefaultMode:
        return "default"
    case DisableMode:
        return "disable"
    case DisableGlobalMode:
        return "disable_global"
    case LegacyMode:
        return "legacy"
    case PackageMode:
        return "package"
    default:
        log.Panicf("unknown mode %d", m)
        return ""
    }
}

func (m Mode) ShouldGenerateRules() bool {
    switch m {
    case DisableMode, DisableGlobalMode, LegacyMode:
        return false
    default:
        return true
    }
}

func (m Mode) ShouldIncludePregeneratedFiles() bool {
    switch m {
    case DisableMode, DisableGlobalMode, LegacyMode:
        return true
    default:
        return false
    }
}

func (m Mode) ShouldUseKnownImports() bool {
    return m != DisableGlobalMode
}

type modeFlag struct {
    mode *Mode
}

func (f *modeFlag) Set(value string) error {
    if mode, err := ModeFromString(value); err != nil {
        return err
    } else {
        *f.mode = mode
        return nil
    }
}

func (f *modeFlag) String() string {
    var mode Mode
    if f != nil && f.mode != nil {
        mode = *f.mode
    }
    return mode.String()
}

func (_ *protoLang) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {
    pc := &ProtoConfig{}
    c.Exts[protoName] = pc

    // Note: the -proto flag does not set the ModeExplicit flag. We want to
    // be able to switch to DisableMode in vendor directories, even when
    // this is set for compatibility with older versions.
    fs.Var(&modeFlag{&pc.Mode}, "proto", "default: generates a proto_library rule for one package\n\tpackage: generates a proto_library rule for for each package\n\tdisable: does not touch proto rules\n\tdisable_global: does not touch proto rules and does not use special cases for protos in dependency resolution")
    fs.StringVar(&pc.groupOption, "proto_group", "", "option name used to group .proto files into proto_library rules")
}

func (_ *protoLang) CheckFlags(fs *flag.FlagSet, c *config.Config) error {
    return nil
}

func (_ *protoLang) KnownDirectives() []string {
    return []string{"proto", "proto_group"}
}

func (_ *protoLang) Configure(c *config.Config, rel string, f *rule.File) {
    pc := &ProtoConfig{}
    *pc = *GetProtoConfig(c)
    c.Exts[protoName] = pc
    if f != nil {
        for _, d := range f.Directives {
            switch d.Key {
            case "proto":
                mode, err := ModeFromString(d.Value)
                if err != nil {
                    log.Print(err)
                    continue
                }
                pc.Mode = mode
                pc.ModeExplicit = true
            case "proto_group":
                pc.groupOption = d.Value
            }
        }
    }
    inferProtoMode(c, rel, f)
}

// inferProtoMode sets ProtoConfig.Mode based on the directory name and the
// contents of f. If the proto mode is set explicitly, this function does not
// change it. If this is a vendor directory, or go_proto_library is loaded from
// another file, proto rule generation is disabled.
//
// TODO(jayconrod): this logic is archaic, now that rules are generated by
// separate language extensions. Proto rule generation should be independent
// from Go.
func inferProtoMode(c *config.Config, rel string, f *rule.File) {
    pc := GetProtoConfig(c)
    if pc.Mode != DefaultMode || pc.ModeExplicit {
        return
    }
    if pc.GoPrefix == wellKnownTypesGoPrefix {
        pc.Mode = LegacyMode
        return
    }
    if path.Base(rel) == "vendor" {
        pc.Mode = DisableMode
        return
    }
    if f == nil {
        return
    }
    mode := DefaultMode
outer:
    for _, l := range f.Loads {
        name := l.Name()
        if name == "@io_bazel_rules_go//proto:def.bzl" {
            break
        }
        if name == "@io_bazel_rules_go//proto:go_proto_library.bzl" {
            mode = LegacyMode
            break
        }
        for _, sym := range l.Symbols() {
            if sym == "go_proto_library" {
                mode = DisableMode
                break outer
            }
        }
    }
    if mode == DefaultMode || pc.Mode == mode || c.ShouldFix && mode == LegacyMode {
        return
    }
    pc.Mode = mode
}
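The modeFlag type above is the standard flag.Value pattern: Set parses the string and writes the result through a pointer into the config struct, and String reports the current value, so an enum can be registered with fs.Var. A self-contained sketch of the same pattern follows; the level enum and the -level flag name are invented for illustration and are not part of the vendored code.

package main

import (
    "flag"
    "fmt"
)

// A hypothetical two-value enum, standing in for Mode.
type level int

const (
    normal level = iota
    verbose
)

func (l level) String() string {
    if l == verbose {
        return "verbose"
    }
    return "normal"
}

// levelFlag mirrors modeFlag: it implements flag.Value and writes the parsed
// enum through a pointer into a config struct the rest of the program reads.
type levelFlag struct{ l *level }

func (f *levelFlag) Set(value string) error {
    switch value {
    case "normal":
        *f.l = normal
    case "verbose":
        *f.l = verbose
    default:
        return fmt.Errorf("unrecognized level: %q", value)
    }
    return nil
}

func (f *levelFlag) String() string {
    if f == nil || f.l == nil {
        return normal.String()
    }
    return f.l.String()
}

func main() {
    var cfg struct{ Level level }
    fs := flag.NewFlagSet("example", flag.ContinueOnError)
    fs.Var(&levelFlag{&cfg.Level}, "level", "normal or verbose")
    _ = fs.Parse([]string{"-level", "verbose"})
    fmt.Println(cfg.Level) // verbose
}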
27 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/constants.go generated vendored Normal file
@@ -0,0 +1,27 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package proto

const (
    // PackageInfoKey is the name of a private attribute set on generated
    // proto_library rules. This attribute contains a Package record which
    // describes the library and its sources.
    PackageKey = "_package"

    // wellKnownTypesGoPrefix is the import path for the Go repository containing
    // pre-generated code for the Well Known Types.
    wellKnownTypesGoPrefix = "github.com/golang/protobuf"
)
@@ -1,4 +1,4 @@
-/* Copyright 2017 The Bazel Authors. All rights reserved.
+/* Copyright 2018 The Bazel Authors. All rights reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -13,36 +13,47 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-package packages
+package proto
 
 import (
     "bytes"
     "io/ioutil"
     "log"
-    "path"
+    "path/filepath"
     "regexp"
     "sort"
     "strconv"
     "strings"
-    "unicode"
-
-    "github.com/bazelbuild/bazel-gazelle/internal/config"
 )
 
+// FileInfo contains metadata extracted from a .proto file.
+type FileInfo struct {
+    Path, Name string
+
+    PackageName string
+
+    Options []Option
+    Imports []string
+
+    HasServices bool
+}
+
+// Option represents a top-level option statement in a .proto file. Only
+// string options are supported for now.
+type Option struct {
+    Key, Value string
+}
+
 var protoRe = buildProtoRegexp()
 
-const (
-    importSubexpIndex    = 1
-    packageSubexpIndex   = 2
-    goPackageSubexpIndex = 3
-    serviceSubexpIndex   = 4
-)
-
-func protoFileInfo(c *config.Config, dir, rel, name string) fileInfo {
-    info := fileNameInfo(dir, rel, name)
-    content, err := ioutil.ReadFile(info.path)
+func protoFileInfo(dir, name string) FileInfo {
+    info := FileInfo{
+        Path: filepath.Join(dir, name),
+        Name: name,
+    }
+    content, err := ioutil.ReadFile(info.Path)
     if err != nil {
-        log.Printf("%s: error reading proto file: %v", info.path, err)
+        log.Printf("%s: error reading proto file: %v", info.Path, err)
         return info
     }
 
@@ -50,50 +61,39 @@ func protoFileInfo(c *config.Config, dir, rel, name string) fileInfo {
         switch {
         case match[importSubexpIndex] != nil:
             imp := unquoteProtoString(match[importSubexpIndex])
-            info.imports = append(info.imports, imp)
+            info.Imports = append(info.Imports, imp)
 
         case match[packageSubexpIndex] != nil:
             pkg := string(match[packageSubexpIndex])
-            if info.packageName == "" {
-                info.packageName = strings.Replace(pkg, ".", "_", -1)
+            if info.PackageName == "" {
+                info.PackageName = pkg
             }
 
-        case match[goPackageSubexpIndex] != nil:
-            gopkg := unquoteProtoString(match[goPackageSubexpIndex])
-            // If there's no / in the package option, then it's just a
-            // simple package name, not a full import path.
-            if strings.LastIndexByte(gopkg, '/') == -1 {
-                info.packageName = gopkg
-            } else {
-                if i := strings.LastIndexByte(gopkg, ';'); i != -1 {
-                    info.importPath = gopkg[:i]
-                    info.packageName = gopkg[i+1:]
-                } else {
-                    info.importPath = gopkg
-                    info.packageName = path.Base(gopkg)
-                }
-            }
+        case match[optkeySubexpIndex] != nil:
+            key := string(match[optkeySubexpIndex])
+            value := unquoteProtoString(match[optvalSubexpIndex])
+            info.Options = append(info.Options, Option{key, value})
 
         case match[serviceSubexpIndex] != nil:
-            info.hasServices = true
+            info.HasServices = true
 
         default:
             // Comment matched. Nothing to extract.
         }
     }
-    sort.Strings(info.imports)
+    sort.Strings(info.Imports)
 
-    if info.packageName == "" {
-        stem := strings.TrimSuffix(name, ".proto")
-        fs := strings.FieldsFunc(stem, func(r rune) bool {
-            return !(unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_')
-        })
-        info.packageName = strings.Join(fs, "_")
-    }
-
     return info
 }
 
+const (
+    importSubexpIndex  = 1
+    packageSubexpIndex = 2
+    optkeySubexpIndex  = 3
+    optvalSubexpIndex  = 4
+    serviceSubexpIndex = 5
+)
+
 // Based on https://developers.google.com/protocol-buffers/docs/reference/proto3-spec
 func buildProtoRegexp() *regexp.Regexp {
     hexEscape := `\\[xX][0-9a-fA-f]{2}`
@@ -105,10 +105,10 @@ func buildProtoRegexp() *regexp.Regexp {
     fullIdent := ident + `(?:\.` + ident + `)*`
     importStmt := `\bimport\s*(?:public|weak)?\s*(?P<import>` + strLit + `)\s*;`
     packageStmt := `\bpackage\s*(?P<package>` + fullIdent + `)\s*;`
-    goPackageStmt := `\boption\s*go_package\s*=\s*(?P<go_package>` + strLit + `)\s*;`
+    optionStmt := `\boption\s*(?P<optkey>` + fullIdent + `)\s*=\s*(?P<optval>` + strLit + `)\s*;`
    serviceStmt := `(?P<service>service)`
     comment := `//[^\n]*`
-    protoReSrc := strings.Join([]string{importStmt, packageStmt, goPackageStmt, serviceStmt, comment}, "|")
+    protoReSrc := strings.Join([]string{importStmt, packageStmt, optionStmt, serviceStmt, comment}, "|")
     return regexp.MustCompile(protoReSrc)
 }
 
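The rewritten file keeps the same parsing approach: one alternation regexp over the raw .proto source, with a named capture group per statement kind, and the old go_package special case replaced by a generic option key/value capture. A rough, self-contained illustration of that technique follows; the simplified pattern below is not the vendored regexp (it skips escape sequences, comments, and services).

package main

import (
    "fmt"
    "regexp"
)

func main() {
    src := `
syntax = "proto3";
package foo.bar;
option go_package = "example.com/foo/bar;bar";
import "google/protobuf/any.proto";
`
    // One alternation with a named group per statement kind, scanned with
    // FindAllStringSubmatch, mirroring buildProtoRegexp in spirit.
    re := regexp.MustCompile(`\bimport\s*(?:public|weak)?\s*"(?P<import>[^"]+)"\s*;` +
        `|\bpackage\s+(?P<package>[\w.]+)\s*;` +
        `|\boption\s+(?P<optkey>[\w.]+)\s*=\s*"(?P<optval>[^"]*)"\s*;`)
    for _, m := range re.FindAllStringSubmatch(src, -1) {
        switch {
        case m[1] != "":
            fmt.Println("import:", m[1])
        case m[2] != "":
            fmt.Println("package:", m[2])
        case m[3] != "":
            fmt.Printf("option: %s = %q\n", m[3], m[4])
        }
    }
}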
@@ -1,4 +1,4 @@
-/* Copyright 2016 The Bazel Authors. All rights reserved.
+/* Copyright 2018 The Bazel Authors. All rights reserved.
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
@@ -13,5 +13,12 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-// Package generator provides Bazel rule generation for Go build targets.
-package generator
+package proto
+
+import (
+    "github.com/bazelbuild/bazel-gazelle/internal/config"
+    "github.com/bazelbuild/bazel-gazelle/internal/rule"
+)
+
+func (_ *protoLang) Fix(c *config.Config, f *rule.File) {
+}
276 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/generate.go generated vendored Normal file
@@ -0,0 +1,276 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package proto

import (
    "fmt"
    "log"
    "sort"
    "strings"

    "github.com/bazelbuild/bazel-gazelle/internal/config"
    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func (_ *protoLang) GenerateRules(c *config.Config, dir, rel string, f *rule.File, subdirs, regularFiles, genFiles []string, otherEmpty, otherGen []*rule.Rule) (empty, gen []*rule.Rule) {
    pc := GetProtoConfig(c)
    if !pc.Mode.ShouldGenerateRules() {
        // Don't create or delete proto rules in this mode. Any existing rules
        // are likely hand-written.
        return nil, nil
    }

    var regularProtoFiles []string
    for _, name := range regularFiles {
        if strings.HasSuffix(name, ".proto") {
            regularProtoFiles = append(regularProtoFiles, name)
        }
    }
    var genProtoFiles []string
    for _, name := range genFiles {
        if strings.HasSuffix(name, ".proto") {
            genProtoFiles = append(genFiles, name)
        }
    }
    pkgs := buildPackages(pc, dir, rel, regularProtoFiles, genProtoFiles)
    shouldSetVisibility := !hasDefaultVisibility(f)
    for _, pkg := range pkgs {
        r := generateProto(pc, rel, pkg, shouldSetVisibility)
        if r.IsEmpty(protoKinds[r.Kind()]) {
            empty = append(empty, r)
        } else {
            gen = append(gen, r)
        }
    }
    sort.SliceStable(gen, func(i, j int) bool {
        return gen[i].Name() < gen[j].Name()
    })
    empty = append(empty, generateEmpty(f, regularProtoFiles, genProtoFiles)...)
    return empty, gen
}

// RuleName returns a name for a proto_library derived from the given strings.
// For each string, RuleName will look for a non-empty suffix of identifier
// characters and then append "_proto" to that.
func RuleName(names ...string) string {
    base := "root"
    for _, name := range names {
        notIdent := func(c rune) bool {
            return !('A' <= c && c <= 'Z' ||
                'a' <= c && c <= 'z' ||
                '0' <= c && c <= '9' ||
                c == '_')
        }
        if i := strings.LastIndexFunc(name, notIdent); i >= 0 {
            name = name[i+1:]
        }
        if name != "" {
            base = name
            break
        }
    }
    return base + "_proto"
}

// buildPackage extracts metadata from the .proto files in a directory and
// constructs possibly several packages, then selects a package to generate
// a proto_library rule for.
func buildPackages(pc *ProtoConfig, dir, rel string, protoFiles, genFiles []string) []*Package {
    packageMap := make(map[string]*Package)
    for _, name := range protoFiles {
        info := protoFileInfo(dir, name)
        key := info.PackageName
        if pc.groupOption != "" {
            for _, opt := range info.Options {
                if opt.Key == pc.groupOption {
                    key = opt.Value
                    break
                }
            }
        }
        if packageMap[key] == nil {
            packageMap[key] = newPackage(info.PackageName)
        }
        packageMap[key].addFile(info)
    }

    switch pc.Mode {
    case DefaultMode:
        pkg, err := selectPackage(dir, rel, packageMap)
        if err != nil {
            log.Print(err)
        }
        if pkg == nil {
            return nil // empty rule created in generateEmpty
        }
        for _, name := range genFiles {
            pkg.addGenFile(dir, name)
        }
        return []*Package{pkg}

    case PackageMode:
        pkgs := make([]*Package, 0, len(packageMap))
        for _, pkg := range packageMap {
            pkgs = append(pkgs, pkg)
        }
        return pkgs

    default:
        return nil
    }
}

// selectPackage chooses a package to generate rules for.
func selectPackage(dir, rel string, packageMap map[string]*Package) (*Package, error) {
    if len(packageMap) == 0 {
        return nil, nil
    }
    if len(packageMap) == 1 {
        for _, pkg := range packageMap {
            return pkg, nil
        }
    }
    defaultPackageName := strings.Replace(rel, "/", "_", -1)
    for _, pkg := range packageMap {
        if pkgName := goPackageName(pkg); pkgName != "" && pkgName == defaultPackageName {
            return pkg, nil
        }
    }
    return nil, fmt.Errorf("%s: directory contains multiple proto packages. Gazelle can only generate a proto_library for one package.", dir)
}

// goPackageName guesses the identifier in package declarations at the top of
// the .pb.go files that will be generated for this package. "" is returned
// if the package name cannot be determined.
//
// TODO(jayconrod): remove all Go-specific functionality. This is here
// temporarily for compatibility.
func goPackageName(pkg *Package) string {
    if opt, ok := pkg.Options["go_package"]; ok {
        if i := strings.IndexByte(opt, ';'); i >= 0 {
            return opt[i+1:]
        } else if i := strings.LastIndexByte(opt, '/'); i >= 0 {
            return opt[i+1:]
        } else {
            return opt
        }
    }
    if pkg.Name != "" {
        return strings.Replace(pkg.Name, ".", "_", -1)
    }
    if len(pkg.Files) == 1 {
        for s := range pkg.Files {
            return strings.TrimSuffix(s, ".proto")
        }
    }
    return ""
}

// generateProto creates a new proto_library rule for a package. The rule may
// be empty if there are no sources.
func generateProto(pc *ProtoConfig, rel string, pkg *Package, shouldSetVisibility bool) *rule.Rule {
    var name string
    if pc.Mode == DefaultMode {
        name = RuleName(goPackageName(pkg), pc.GoPrefix, rel)
    } else {
        name = RuleName(pkg.Options[pc.groupOption], pkg.Name, rel)
    }
    r := rule.NewRule("proto_library", name)
    srcs := make([]string, 0, len(pkg.Files))
    for f := range pkg.Files {
        srcs = append(srcs, f)
    }
    sort.Strings(srcs)
    if len(srcs) > 0 {
        r.SetAttr("srcs", srcs)
    }
    r.SetPrivateAttr(PackageKey, *pkg)
    imports := make([]string, 0, len(pkg.Imports))
    for i := range pkg.Imports {
        imports = append(imports, i)
    }
    sort.Strings(imports)
    r.SetPrivateAttr(config.GazelleImportsKey, imports)
    for k, v := range pkg.Options {
        r.SetPrivateAttr(k, v)
    }
    if shouldSetVisibility {
        vis := checkInternalVisibility(rel, "//visibility:public")
        r.SetAttr("visibility", []string{vis})
    }
    return r
}

// generateEmpty generates a list of proto_library rules that may be deleted.
// This is generated from existing proto_library rules with srcs lists that
// don't match any static or generated files.
func generateEmpty(f *rule.File, regularFiles, genFiles []string) []*rule.Rule {
    if f == nil {
        return nil
    }
    knownFiles := make(map[string]bool)
    for _, f := range regularFiles {
        knownFiles[f] = true
    }
    for _, f := range genFiles {
        knownFiles[f] = true
    }
    var empty []*rule.Rule
outer:
    for _, r := range f.Rules {
        if r.Kind() != "proto_library" {
            continue
        }
        srcs := r.AttrStrings("srcs")
        if len(srcs) == 0 && r.Attr("srcs") != nil {
            // srcs is not a string list; leave it alone
            continue
        }
        for _, src := range r.AttrStrings("srcs") {
            if knownFiles[src] {
                continue outer
            }
        }
        empty = append(empty, rule.NewRule("proto_library", r.Name()))
    }
    return empty
}

// hasDefaultVisibility returns whether oldFile contains a "package" rule with
// a "default_visibility" attribute. Rules generated by Gazelle should not
// have their own visibility attributes if this is the case.
func hasDefaultVisibility(f *rule.File) bool {
    if f == nil {
        return false
    }
    for _, r := range f.Rules {
        if r.Kind() == "package" && r.Attr("default_visibility") != nil {
            return true
        }
    }
    return false
}

// checkInternalVisibility overrides the given visibility if the package is
// internal.
func checkInternalVisibility(rel, visibility string) string {
    if i := strings.LastIndex(rel, "/internal/"); i >= 0 {
        visibility = fmt.Sprintf("//%s:__subpackages__", rel[:i])
    } else if strings.HasPrefix(rel, "internal/") {
        visibility = "//:__subpackages__"
    }
    return visibility
}
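RuleName above reduces each candidate string to its trailing run of identifier characters and uses the first non-empty result, falling back to "root". The following stand-alone sketch copies that reduction locally to show the effect on a few inputs (ruleName here is a local illustration, not the exported function being called through the vendored package):

package main

import (
    "fmt"
    "strings"
)

// ruleName mirrors the logic above: keep the suffix of identifier characters
// from the first usable name and append "_proto".
func ruleName(names ...string) string {
    base := "root"
    for _, name := range names {
        notIdent := func(c rune) bool {
            return !('A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '_')
        }
        if i := strings.LastIndexFunc(name, notIdent); i >= 0 {
            name = name[i+1:]
        }
        if name != "" {
            base = name
            break
        }
    }
    return base + "_proto"
}

func main() {
    fmt.Println(ruleName("foo.bar"))                   // bar_proto
    fmt.Println(ruleName("", "example.com/repo", "x")) // repo_proto
    fmt.Println(ruleName(""))                          // root_proto
}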
29 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/kinds.go generated vendored Normal file
@@ -0,0 +1,29 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package proto

import "github.com/bazelbuild/bazel-gazelle/internal/rule"

var protoKinds = map[string]rule.KindInfo{
    "proto_library": {
        NonEmptyAttrs:  map[string]bool{"srcs": true},
        MergeableAttrs: map[string]bool{"srcs": true},
        ResolveAttrs:   map[string]bool{"deps": true},
    },
}

func (_ *protoLang) Kinds() map[string]rule.KindInfo { return protoKinds }
func (_ *protoLang) Loads() []rule.LoadInfo          { return nil }
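The NonEmptyAttrs entry is what lets GenerateRules above decide whether a generated proto_library is kept or offered for deletion: a rule with no srcs counts as empty. A toy stand-in for that check follows; kindInfo and toyRule are invented types for illustration and are not the rule package.

package main

import "fmt"

type kindInfo struct {
    // NonEmptyAttrs lists attributes that justify keeping a rule around.
    NonEmptyAttrs map[string]bool
}

type toyRule struct {
    kind  string
    attrs map[string][]string
}

func (r toyRule) isEmpty(info kindInfo) bool {
    for attr := range info.NonEmptyAttrs {
        if len(r.attrs[attr]) > 0 {
            return false
        }
    }
    return true
}

func main() {
    kinds := map[string]kindInfo{
        "proto_library": {NonEmptyAttrs: map[string]bool{"srcs": true}},
    }
    withSrcs := toyRule{"proto_library", map[string][]string{"srcs": {"foo.proto"}}}
    noSrcs := toyRule{"proto_library", map[string][]string{}}
    fmt.Println(withSrcs.isEmpty(kinds[withSrcs.kind])) // false -> keep / merge
    fmt.Println(noSrcs.isEmpty(kinds[noSrcs.kind]))     // true  -> candidate for deletion
}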
300 vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/known_imports.go generated vendored Normal file
@@ -0,0 +1,300 @@

// Generated by internal/language/proto/gen/gen_known_imports.go
// From internal/language/proto/proto.csv

package proto

import "github.com/bazelbuild/bazel-gazelle/internal/label"

var knownImports = map[string]label.Label{

|
"google/protobuf/any.proto": label.New("com_google_protobuf", "", "any_proto"),
|
||||||
|
"google/protobuf/api.proto": label.New("com_google_protobuf", "", "api_proto"),
|
||||||
|
"google/protobuf/compiler/plugin.proto": label.New("com_google_protobuf", "", "compiler_plugin_proto"),
|
||||||
|
"google/protobuf/descriptor.proto": label.New("com_google_protobuf", "", "descriptor_proto"),
|
||||||
|
"google/protobuf/duration.proto": label.New("com_google_protobuf", "", "duration_proto"),
|
||||||
|
"google/protobuf/empty.proto": label.New("com_google_protobuf", "", "empty_proto"),
|
||||||
|
"google/protobuf/field_mask.proto": label.New("com_google_protobuf", "", "field_mask_proto"),
|
||||||
|
"google/protobuf/source_context.proto": label.New("com_google_protobuf", "", "source_context_proto"),
|
||||||
|
"google/protobuf/struct.proto": label.New("com_google_protobuf", "", "struct_proto"),
|
||||||
|
"google/protobuf/timestamp.proto": label.New("com_google_protobuf", "", "timestamp_proto"),
|
||||||
|
"google/protobuf/type.proto": label.New("com_google_protobuf", "", "type_proto"),
|
||||||
|
"google/protobuf/wrappers.proto": label.New("com_google_protobuf", "", "wrappers_proto"),
|
||||||
|
"google/assistant/embedded/v1alpha2/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha2", "embedded_proto"),
|
||||||
|
"google/assistant/embedded/v1alpha1/embedded_assistant.proto": label.New("go_googleapis", "google/assistant/embedded/v1alpha1", "embedded_proto"),
|
||||||
|
"google/home/graph/v1/device.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_proto"),
|
||||||
|
"google/home/graph/v1/homegraph.proto": label.New("go_googleapis", "google/home/graph/v1", "graph_proto"),
|
||||||
|
"google/genomics/v1/operations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/variants.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/position.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/references.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/cigar.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/datasets.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/readalignment.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/annotations.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/reads.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/readgroup.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/readgroupset.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1/range.proto": label.New("go_googleapis", "google/genomics/v1", "genomics_proto"),
|
||||||
|
"google/genomics/v1alpha2/pipelines.proto": label.New("go_googleapis", "google/genomics/v1alpha2", "genomics_proto"),
|
||||||
|
"google/bigtable/v1/bigtable_service_messages.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_proto"),
|
||||||
|
"google/bigtable/v1/bigtable_service.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_proto"),
|
||||||
|
"google/bigtable/v1/bigtable_data.proto": label.New("go_googleapis", "google/bigtable/v1", "bigtable_proto"),
|
||||||
|
"google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_proto"),
|
||||||
|
"google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_proto"),
|
||||||
|
"google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto": label.New("go_googleapis", "google/bigtable/admin/cluster/v1", "cluster_proto"),
|
||||||
|
"google/bigtable/admin/v2/bigtable_instance_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"),
|
||||||
|
"google/bigtable/admin/v2/instance.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"),
|
||||||
|
"google/bigtable/admin/v2/table.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"),
|
||||||
|
"google/bigtable/admin/v2/bigtable_table_admin.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"),
|
||||||
|
"google/bigtable/admin/v2/common.proto": label.New("go_googleapis", "google/bigtable/admin/v2", "admin_proto"),
|
||||||
|
"google/bigtable/admin/table/v1/bigtable_table_service_messages.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_proto"),
|
||||||
|
"google/bigtable/admin/table/v1/bigtable_table_service.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_proto"),
|
||||||
|
"google/bigtable/admin/table/v1/bigtable_table_data.proto": label.New("go_googleapis", "google/bigtable/admin/table/v1", "table_proto"),
|
||||||
|
"google/bigtable/v2/bigtable.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_proto"),
|
||||||
|
"google/bigtable/v2/data.proto": label.New("go_googleapis", "google/bigtable/v2", "bigtable_proto"),
|
||||||
|
"google/privacy/dlp/v2/storage.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_proto"),
|
||||||
|
"google/privacy/dlp/v2/dlp.proto": label.New("go_googleapis", "google/privacy/dlp/v2", "dlp_proto"),
|
||||||
|
"google/watcher/v1/watch.proto": label.New("go_googleapis", "google/watcher/v1", "watcher_proto"),
|
||||||
|
"google/firestore/admin/v1beta1/firestore_admin.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_proto"),
|
||||||
|
"google/firestore/admin/v1beta1/index.proto": label.New("go_googleapis", "google/firestore/admin/v1beta1", "admin_proto"),
|
||||||
|
"google/firestore/v1beta1/write.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"),
|
||||||
|
"google/firestore/v1beta1/document.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"),
|
||||||
|
"google/firestore/v1beta1/firestore.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"),
|
||||||
|
"google/firestore/v1beta1/query.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"),
|
||||||
|
"google/firestore/v1beta1/common.proto": label.New("go_googleapis", "google/firestore/v1beta1", "firestore_proto"),
|
||||||
|
"google/example/library/v1/library.proto": label.New("go_googleapis", "google/example/library/v1", "library_proto"),
|
||||||
|
"google/appengine/v1/instance.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/audit_data.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/appengine.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/application.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/operation.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/app_yaml.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/location.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/service.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/deploy.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/v1/version.proto": label.New("go_googleapis", "google/appengine/v1", "appengine_proto"),
|
||||||
|
"google/appengine/legacy/audit_data.proto": label.New("go_googleapis", "google/appengine/legacy", "legacy_proto"),
|
||||||
|
"google/appengine/logging/v1/request_log.proto": label.New("go_googleapis", "google/appengine/logging/v1", "logging_proto"),
|
||||||
|
"google/storagetransfer/v1/transfer.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_proto"),
|
||||||
|
"google/storagetransfer/v1/transfer_types.proto": label.New("go_googleapis", "google/storagetransfer/v1", "storagetransfer_proto"),
|
||||||
|
"google/longrunning/operations.proto": label.New("go_googleapis", "google/longrunning", "longrunning_proto"),
|
||||||
|
"google/container/v1/cluster_service.proto": label.New("go_googleapis", "google/container/v1", "container_proto"),
|
||||||
|
"google/container/v1beta1/cluster_service.proto": label.New("go_googleapis", "google/container/v1beta1", "container_proto"),
|
||||||
|
"google/container/v1alpha1/cluster_service.proto": label.New("go_googleapis", "google/container/v1alpha1", "container_proto"),
|
||||||
|
"google/datastore/v1beta3/datastore.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_proto"),
|
||||||
|
"google/datastore/v1beta3/query.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_proto"),
|
||||||
|
"google/datastore/v1beta3/entity.proto": label.New("go_googleapis", "google/datastore/v1beta3", "datastore_proto"),
|
||||||
|
"google/datastore/v1/datastore.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_proto"),
|
||||||
|
"google/datastore/v1/query.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_proto"),
|
||||||
|
"google/datastore/v1/entity.proto": label.New("go_googleapis", "google/datastore/v1", "datastore_proto"),
|
||||||
|
"google/datastore/admin/v1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1", "admin_proto"),
|
||||||
|
"google/datastore/admin/v1beta1/datastore_admin.proto": label.New("go_googleapis", "google/datastore/admin/v1beta1", "admin_proto"),
|
||||||
|
"google/bytestream/bytestream.proto": label.New("go_googleapis", "google/bytestream", "bytestream_proto"),
|
||||||
|
"google/iam/v1/iam_policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_proto"),
|
||||||
|
"google/iam/v1/policy.proto": label.New("go_googleapis", "google/iam/v1", "iam_proto"),
|
||||||
|
"google/iam/v1/logging/audit_data.proto": label.New("go_googleapis", "google/iam/v1/logging", "logging_proto"),
|
||||||
|
"google/iam/admin/v1/iam.proto": label.New("go_googleapis", "google/iam/admin/v1", "admin_proto"),
|
||||||
|
"google/type/money.proto": label.New("go_googleapis", "google/type", "money_proto"),
|
||||||
|
"google/type/latlng.proto": label.New("go_googleapis", "google/type", "latlng_proto"),
|
||||||
|
"google/type/color.proto": label.New("go_googleapis", "google/type", "color_proto"),
|
||||||
|
"google/type/timeofday.proto": label.New("go_googleapis", "google/type", "timeofday_proto"),
|
||||||
|
"google/type/date.proto": label.New("go_googleapis", "google/type", "date_proto"),
|
||||||
|
"google/type/dayofweek.proto": label.New("go_googleapis", "google/type", "dayofweek_proto"),
|
||||||
|
"google/type/postal_address.proto": label.New("go_googleapis", "google/type", "postaladdress_proto"),
|
||||||
|
"google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"),
|
||||||
|
"google/devtools/clouderrorreporting/v1beta1/error_group_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"),
|
||||||
|
"google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"),
|
||||||
|
"google/devtools/clouderrorreporting/v1beta1/common.proto": label.New("go_googleapis", "google/devtools/clouderrorreporting/v1beta1", "clouderrorreporting_proto"),
|
||||||
|
"google/devtools/resultstore/v2/file.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/resultstore_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/configuration.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/action.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/resultstore_file_download.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/test_suite.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/file_set.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/coverage.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/coverage_summary.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/configured_target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/target.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/invocation.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/resultstore/v2/common.proto": label.New("go_googleapis", "google/devtools/resultstore/v2", "resultstore_proto"),
|
||||||
|
"google/devtools/source/v1/source_context.proto": label.New("go_googleapis", "google/devtools/source/v1", "source_proto"),
|
||||||
|
"google/devtools/remoteexecution/v1test/remote_execution.proto": label.New("go_googleapis", "google/devtools/remoteexecution/v1test", "remoteexecution_proto"),
|
||||||
|
"google/devtools/cloudbuild/v1/cloudbuild.proto": label.New("go_googleapis", "google/devtools/cloudbuild/v1", "cloudbuild_proto"),
|
||||||
|
"google/devtools/sourcerepo/v1/sourcerepo.proto": label.New("go_googleapis", "google/devtools/sourcerepo/v1", "sourcerepo_proto"),
|
||||||
|
"google/devtools/remoteworkers/v1test2/worker.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"),
|
||||||
|
"google/devtools/remoteworkers/v1test2/tasks.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"),
|
||||||
|
"google/devtools/remoteworkers/v1test2/bots.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"),
|
||||||
|
"google/devtools/remoteworkers/v1test2/command.proto": label.New("go_googleapis", "google/devtools/remoteworkers/v1test2", "remoteworkers_proto"),
|
||||||
|
"google/devtools/cloudtrace/v1/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v1", "cloudtrace_proto"),
|
||||||
|
"google/devtools/cloudtrace/v2/trace.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_proto"),
|
||||||
|
"google/devtools/cloudtrace/v2/tracing.proto": label.New("go_googleapis", "google/devtools/cloudtrace/v2", "cloudtrace_proto"),
|
||||||
|
"google/devtools/cloudprofiler/v2/profiler.proto": label.New("go_googleapis", "google/devtools/cloudprofiler/v2", "cloudprofiler_proto"),
|
||||||
|
"google/devtools/containeranalysis/v1alpha1/containeranalysis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"),
|
||||||
|
"google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"),
|
||||||
|
"google/devtools/containeranalysis/v1alpha1/provenance.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"),
|
||||||
|
"google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"),
|
||||||
|
"google/devtools/containeranalysis/v1alpha1/source_context.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"),
|
||||||
|
"google/devtools/containeranalysis/v1alpha1/image_basis.proto": label.New("go_googleapis", "google/devtools/containeranalysis/v1alpha1", "containeranalysis_proto"),
|
||||||
|
"google/devtools/build/v1/build_events.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_proto"),
|
||||||
|
"google/devtools/build/v1/build_status.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_proto"),
|
||||||
|
"google/devtools/build/v1/publish_build_event.proto": label.New("go_googleapis", "google/devtools/build/v1", "build_proto"),
|
||||||
|
"google/devtools/clouddebugger/v2/debugger.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_proto"),
|
||||||
|
"google/devtools/clouddebugger/v2/data.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_proto"),
|
||||||
|
"google/devtools/clouddebugger/v2/controller.proto": label.New("go_googleapis", "google/devtools/clouddebugger/v2", "clouddebugger_proto"),
|
||||||
|
"google/cloud/resourcemanager/v2/folders.proto": label.New("go_googleapis", "google/cloud/resourcemanager/v2", "resourcemanager_proto"),
|
||||||
|
"google/cloud/kms/v1/resources.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_proto"),
|
||||||
|
"google/cloud/kms/v1/service.proto": label.New("go_googleapis", "google/cloud/kms/v1", "kms_proto"),
|
||||||
|
"google/cloud/runtimeconfig/v1beta1/resources.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_proto"),
|
||||||
|
"google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto": label.New("go_googleapis", "google/cloud/runtimeconfig/v1beta1", "runtimeconfig_proto"),
|
||||||
|
"google/cloud/tasks/v2beta2/queue.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"),
|
||||||
|
"google/cloud/tasks/v2beta2/task.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"),
|
||||||
|
"google/cloud/tasks/v2beta2/target.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"),
|
||||||
|
"google/cloud/tasks/v2beta2/cloudtasks.proto": label.New("go_googleapis", "google/cloud/tasks/v2beta2", "tasks_proto"),
|
||||||
|
"google/cloud/oslogin/v1/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1", "oslogin_proto"),
|
||||||
|
"google/cloud/oslogin/v1alpha/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1alpha", "oslogin_proto"),
|
||||||
|
"google/cloud/oslogin/common/common.proto": label.New("go_googleapis", "google/cloud/oslogin/common", "common_proto"),
|
||||||
|
"google/cloud/oslogin/v1beta/oslogin.proto": label.New("go_googleapis", "google/cloud/oslogin/v1beta", "oslogin_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2beta1/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2beta1", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/context.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/session_entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/intent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/entity_type.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/webhook.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/session.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/dialogflow/v2/agent.proto": label.New("go_googleapis", "google/cloud/dialogflow/v2", "dialogflow_proto"),
|
||||||
|
"google/cloud/redis/v1beta1/cloud_redis.proto": label.New("go_googleapis", "google/cloud/redis/v1beta1", "redis_proto"),
|
||||||
|
"google/cloud/location/locations.proto": label.New("go_googleapis", "google/cloud/location", "location_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/finding.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/scan_config.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/crawled_url.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/scan_run.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/websecurityscanner/v1alpha/finding_addon.proto": label.New("go_googleapis", "google/cloud/websecurityscanner/v1alpha", "websecurityscanner_proto"),
|
||||||
|
"google/cloud/language/v1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1", "language_proto"),
|
||||||
|
"google/cloud/language/v1beta2/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta2", "language_proto"),
|
||||||
|
"google/cloud/language/v1beta1/language_service.proto": label.New("go_googleapis", "google/cloud/language/v1beta1", "language_proto"),
|
||||||
|
"google/cloud/bigquery/datatransfer/v1/transfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_proto"),
|
||||||
|
"google/cloud/bigquery/datatransfer/v1/datatransfer.proto": label.New("go_googleapis", "google/cloud/bigquery/datatransfer/v1", "datatransfer_proto"),
|
||||||
|
"google/cloud/bigquery/logging/v1/audit_data.proto": label.New("go_googleapis", "google/cloud/bigquery/logging/v1", "logging_proto"),
|
||||||
|
"google/cloud/vision/v1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p2beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p2beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p2beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p2beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p2beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p1beta1/image_annotator.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p1beta1/geometry.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p1beta1/web_detection.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"),
|
||||||
|
"google/cloud/vision/v1p1beta1/text_annotation.proto": label.New("go_googleapis", "google/cloud/vision/v1p1beta1", "vision_proto"),
|
||||||
|
"google/cloud/speech/v1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1", "speech_proto"),
|
||||||
|
"google/cloud/speech/v1beta1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1beta1", "speech_proto"),
|
||||||
|
"google/cloud/speech/v1p1beta1/cloud_speech.proto": label.New("go_googleapis", "google/cloud/speech/v1p1beta1", "speech_proto"),
|
||||||
|
"google/cloud/iot/v1/device_manager.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_proto"),
|
||||||
|
"google/cloud/iot/v1/resources.proto": label.New("go_googleapis", "google/cloud/iot/v1", "iot_proto"),
|
||||||
|
"google/cloud/videointelligence/v1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1", "videointelligence_proto"),
|
||||||
|
"google/cloud/videointelligence/v1beta2/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta2", "videointelligence_proto"),
|
||||||
|
"google/cloud/videointelligence/v1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1beta1", "videointelligence_proto"),
|
||||||
|
"google/cloud/videointelligence/v1p1beta1/video_intelligence.proto": label.New("go_googleapis", "google/cloud/videointelligence/v1p1beta1", "videointelligence_proto"),
|
||||||
|
"google/cloud/audit/audit_log.proto": label.New("go_googleapis", "google/cloud/audit", "audit_proto"),
|
||||||
|
"google/cloud/support/common.proto": label.New("go_googleapis", "google/cloud/support", "common_proto"),
|
||||||
|
"google/cloud/support/v1alpha1/cloud_support.proto": label.New("go_googleapis", "google/cloud/support/v1alpha1", "support_proto"),
|
||||||
|
"google/cloud/ml/v1/operation_metadata.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"),
|
||||||
|
"google/cloud/ml/v1/job_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"),
|
||||||
|
"google/cloud/ml/v1/prediction_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"),
|
||||||
|
"google/cloud/ml/v1/model_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"),
|
||||||
|
"google/cloud/ml/v1/project_service.proto": label.New("go_googleapis", "google/cloud/ml/v1", "ml_proto"),
|
||||||
|
"google/cloud/texttospeech/v1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1", "texttospeech_proto"),
|
||||||
|
"google/cloud/texttospeech/v1beta1/cloud_tts.proto": label.New("go_googleapis", "google/cloud/texttospeech/v1beta1", "texttospeech_proto"),
|
||||||
|
"google/cloud/functions/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_proto"),
|
||||||
|
"google/cloud/functions/v1beta2/functions.proto": label.New("go_googleapis", "google/cloud/functions/v1beta2", "functions_proto"),
|
||||||
|
"google/cloud/billing/v1/cloud_billing.proto": label.New("go_googleapis", "google/cloud/billing/v1", "billing_proto"),
|
||||||
|
"google/cloud/dataproc/v1/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/operations.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/clusters.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/jobs.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/workflow_templates.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"),
|
||||||
|
"google/cloud/dataproc/v1beta2/shared.proto": label.New("go_googleapis", "google/cloud/dataproc/v1beta2", "dataproc_proto"),
|
||||||
|
"google/api/context.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/http.proto": label.New("go_googleapis", "google/api", "annotations_proto"),
|
||||||
|
"google/api/config_change.proto": label.New("go_googleapis", "google/api", "configchange_proto"),
|
||||||
|
"google/api/system_parameter.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/monitoring.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/distribution.proto": label.New("go_googleapis", "google/api", "distribution_proto"),
|
||||||
|
"google/api/endpoint.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/usage.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/monitored_resource.proto": label.New("go_googleapis", "google/api", "monitoredres_proto"),
|
||||||
|
"google/api/annotations.proto": label.New("go_googleapis", "google/api", "annotations_proto"),
|
||||||
|
"google/api/control.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/metric.proto": label.New("go_googleapis", "google/api", "metric_proto"),
|
||||||
|
"google/api/label.proto": label.New("go_googleapis", "google/api", "label_proto"),
|
||||||
|
"google/api/consumer.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/log.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/billing.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/service.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/logging.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/documentation.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/quota.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/auth.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/backend.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/source_info.proto": label.New("go_googleapis", "google/api", "serviceconfig_proto"),
|
||||||
|
"google/api/httpbody.proto": label.New("go_googleapis", "google/api", "httpbody_proto"),
|
||||||
|
"google/api/experimental/authorization_config.proto": label.New("go_googleapis", "google/api/experimental", "api_proto"),
|
||||||
|
"google/api/experimental/experimental.proto": label.New("go_googleapis", "google/api/experimental", "api_proto"),
|
||||||
|
"google/api/servicemanagement/v1/servicemanager.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_proto"),
|
||||||
|
"google/api/servicemanagement/v1/resources.proto": label.New("go_googleapis", "google/api/servicemanagement/v1", "servicemanagement_proto"),
|
||||||
|
"google/api/servicecontrol/v1/quota_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/api/servicecontrol/v1/distribution.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/api/servicecontrol/v1/check_error.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/api/servicecontrol/v1/operation.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/api/servicecontrol/v1/metric_value.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/api/servicecontrol/v1/log_entry.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/api/servicecontrol/v1/service_controller.proto": label.New("go_googleapis", "google/api/servicecontrol/v1", "servicecontrol_proto"),
|
||||||
|
"google/pubsub/v1/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1", "pubsub_proto"),
|
||||||
|
"google/pubsub/v1beta2/pubsub.proto": label.New("go_googleapis", "google/pubsub/v1beta2", "pubsub_proto"),
|
||||||
|
"google/spanner/v1/mutation.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/v1/spanner.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/v1/transaction.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/v1/keys.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/v1/type.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/v1/query_plan.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/v1/result_set.proto": label.New("go_googleapis", "google/spanner/v1", "spanner_proto"),
|
||||||
|
"google/spanner/admin/database/v1/spanner_database_admin.proto": label.New("go_googleapis", "google/spanner/admin/database/v1", "database_proto"),
|
||||||
|
"google/spanner/admin/instance/v1/spanner_instance_admin.proto": label.New("go_googleapis", "google/spanner/admin/instance/v1", "instance_proto"),
|
||||||
|
"google/monitoring/v3/group.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/mutation_record.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/notification.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/alert_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/uptime_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/group_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/alert.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/uptime.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/metric.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/notification_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/metric_service.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/monitoring/v3/common.proto": label.New("go_googleapis", "google/monitoring/v3", "monitoring_proto"),
|
||||||
|
"google/rpc/code.proto": label.New("go_googleapis", "google/rpc", "code_proto"),
|
||||||
|
"google/rpc/status.proto": label.New("go_googleapis", "google/rpc", "status_proto"),
|
||||||
|
"google/rpc/error_details.proto": label.New("go_googleapis", "google/rpc", "errdetails_proto"),
|
||||||
|
"google/streetview/publish/v1/resources.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_proto"),
|
||||||
|
"google/streetview/publish/v1/rpcmessages.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_proto"),
|
||||||
|
"google/streetview/publish/v1/streetview_publish.proto": label.New("go_googleapis", "google/streetview/publish/v1", "publish_proto"),
|
||||||
|
"google/logging/v2/logging_metrics.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"),
|
||||||
|
"google/logging/v2/logging_config.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"),
|
||||||
|
"google/logging/v2/log_entry.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"),
|
||||||
|
"google/logging/v2/logging.proto": label.New("go_googleapis", "google/logging/v2", "logging_proto"),
|
||||||
|
"google/logging/type/log_severity.proto": label.New("go_googleapis", "google/logging/type", "ltype_proto"),
|
||||||
|
"google/logging/type/http_request.proto": label.New("go_googleapis", "google/logging/type", "ltype_proto"),
}
72
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/lang.go
generated
vendored
Normal file
@@ -0,0 +1,72 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package proto provides support for protocol buffer rules.
// It generates proto_library rules only (not go_proto_library or any other
// language-specific implementations).
//
// Configuration
//
// Configuration is largely controlled by Mode. In disable mode, proto rules are
// left alone (neither generated nor deleted). In legacy mode, filegroups are
// emitted containing protos. In default mode, proto_library rules are
// emitted. The proto mode may be set with the -proto command line flag or the
// "# gazelle:proto" directive.
//
// The configuration is largely public, and other languages may depend on it.
// For example, go uses Mode to determine whether to generate go_proto_library
// rules and ignore static .pb.go files.
//
// Rule generation
//
// Currently, Gazelle generates at most one proto_library per directory. Protos
// in the same package are grouped together into a proto_library. If there are
// sources for multiple packages, the package name that matches the directory
// name will be chosen; if there is no such package, an error will be printed.
// We expect to provide support for multiple proto_libraries in the future
// when Go has support for multiple packages and we have better rule matching.
// The generated proto_library will be named after the directory, not the
// proto or the package. For example, for foo/bar/baz.proto, a proto_library
// rule will be generated named //foo/bar:bar_proto.
//
// Dependency resolution
//
// proto_library rules are indexed by their srcs attribute. Gazelle attempts
// to resolve proto imports (e.g., import foo/bar/bar.proto) to the
// proto_library that contains the named source file
// (e.g., //foo/bar:bar_proto). If no indexed proto_library provides the source
// file, Gazelle will guess a label, following conventions.
//
// No attempt is made to resolve protos to rules in external repositories,
// since there's no indication that a proto import comes from an external
// repository. In the future, build files in external repos will be indexed,
// so we can support this (#12).
//
// Gazelle has special cases for Well Known Types (i.e., imports of the form
// google/protobuf/*.proto). These are resolved to rules in
// @com_google_protobuf.
package proto

import "github.com/bazelbuild/bazel-gazelle/internal/language"

const protoName = "proto"

type protoLang struct{}

func (_ *protoLang) Name() string { return protoName }

func New() language.Language {
	return &protoLang{}
}
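As a quick orientation to the file above (not part of the vendored source): New is the package's entry point and returns a language.Language that a Gazelle binary combines with its other language plugins, and the proto mode documented in the package comment is switched per directory with a "# gazelle:proto" directive (for example "# gazelle:proto disable") or globally with the -proto flag. A minimal, hypothetical sketch of wiring the plugin up, assuming it is built from within the bazel-gazelle module itself (the internal/ import paths are not reachable from other modules):

package main

import (
	"fmt"

	"github.com/bazelbuild/bazel-gazelle/internal/language"
	"github.com/bazelbuild/bazel-gazelle/internal/language/proto"
)

// languages lists the plugins this hypothetical binary would run;
// here only the proto plugin defined in lang.go above.
func languages() []language.Language {
	return []language.Language{proto.New()}
}

func main() {
	for _, lang := range languages() {
		fmt.Println(lang.Name()) // prints "proto"
	}
}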
55
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/package.go
generated
vendored
Normal file
@@ -0,0 +1,55 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package proto

import "path/filepath"

// Package contains metadata for a set of .proto files that have the
// same package name. This translates to a proto_library rule.
type Package struct {
	Name        string
	Files       map[string]FileInfo
	Imports     map[string]bool
	Options     map[string]string
	HasServices bool
}

func newPackage(name string) *Package {
	return &Package{
		Name:    name,
		Files:   map[string]FileInfo{},
		Imports: map[string]bool{},
		Options: map[string]string{},
	}
}

func (p *Package) addFile(info FileInfo) {
	p.Files[info.Name] = info
	for _, imp := range info.Imports {
		p.Imports[imp] = true
	}
	for _, opt := range info.Options {
		p.Options[opt.Key] = opt.Value
	}
	p.HasServices = p.HasServices || info.HasServices
}

func (p *Package) addGenFile(dir, name string) {
	p.Files[name] = FileInfo{
		Name: name,
		Path: filepath.Join(dir, filepath.FromSlash(name)),
	}
}
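Again for orientation only (not part of the diff): Package is the unit that later becomes a single proto_library rule. A sketch of how it accumulates per-file metadata follows; FileInfo is declared elsewhere in this package, so its exact field set, and Imports being a []string, are assumptions here:

// Hypothetical usage from within package proto; FileInfo fields beyond
// those referenced in package.go above are assumed for illustration.
pkg := newPackage("bar")
pkg.addFile(FileInfo{
	Name:        "baz.proto",
	Path:        "foo/bar/baz.proto",
	Imports:     []string{"google/protobuf/any.proto"},
	HasServices: true,
})
// pkg.Imports now contains "google/protobuf/any.proto" and
// pkg.HasServices is true; both feed later rule generation and
// dependency resolution for //foo/bar:bar_proto.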
293
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/proto.csv
generated
vendored
Normal file
@@ -0,0 +1,293 @@
# This file lists special protos that Gazelle knows how to import. This is used to generate
# code for proto and Go resolvers.
#
# proto name,proto label,go import path,go proto label
google/protobuf/any.proto,@com_google_protobuf//:any_proto,github.com/golang/protobuf/ptypes/any,@io_bazel_rules_go//proto/wkt:any_go_proto
|
||||||
|
google/protobuf/api.proto,@com_google_protobuf//:api_proto,google.golang.org/genproto/protobuf/api,@io_bazel_rules_go//proto/wkt:api_go_proto
|
||||||
|
google/protobuf/compiler/plugin.proto,@com_google_protobuf//:compiler_plugin_proto,github.com/golang/protobuf/protoc-gen-go/plugin,@io_bazel_rules_go//proto/wkt:compiler_plugin_go_proto
|
||||||
|
google/protobuf/descriptor.proto,@com_google_protobuf//:descriptor_proto,github.com/golang/protobuf/protoc-gen-go/descriptor,@io_bazel_rules_go//proto/wkt:descriptor_go_proto
|
||||||
|
google/protobuf/duration.proto,@com_google_protobuf//:duration_proto,github.com/golang/protobuf/ptypes/duration,@io_bazel_rules_go//proto/wkt:duration_go_proto
|
||||||
|
google/protobuf/empty.proto,@com_google_protobuf//:empty_proto,github.com/golang/protobuf/ptypes/empty,@io_bazel_rules_go//proto/wkt:empty_go_proto
|
||||||
|
google/protobuf/field_mask.proto,@com_google_protobuf//:field_mask_proto,google.golang.org/genproto/protobuf/field_mask,@io_bazel_rules_go//proto/wkt:field_mask_go_proto
|
||||||
|
google/protobuf/source_context.proto,@com_google_protobuf//:source_context_proto,google.golang.org/genproto/protobuf/source_context,@io_bazel_rules_go//proto/wkt:source_context_go_proto
|
||||||
|
google/protobuf/struct.proto,@com_google_protobuf//:struct_proto,github.com/golang/protobuf/ptypes/struct,@io_bazel_rules_go//proto/wkt:struct_go_proto
|
||||||
|
google/protobuf/timestamp.proto,@com_google_protobuf//:timestamp_proto,github.com/golang/protobuf/ptypes/timestamp,@io_bazel_rules_go//proto/wkt:timestamp_go_proto
|
||||||
|
google/protobuf/type.proto,@com_google_protobuf//:type_proto,google.golang.org/genproto/protobuf/ptype,@io_bazel_rules_go//proto/wkt:type_go_proto
|
||||||
|
google/protobuf/wrappers.proto,@com_google_protobuf//:wrappers_proto,github.com/golang/protobuf/ptypes/wrappers,@io_bazel_rules_go//proto/wkt:wrappers_go_proto
|
||||||
|
google/assistant/embedded/v1alpha2/embedded_assistant.proto,@go_googleapis//google/assistant/embedded/v1alpha2:embedded_proto,google.golang.org/genproto/googleapis/assistant/embedded/v1alpha2,@go_googleapis//google/assistant/embedded/v1alpha2:embedded_go_proto
|
||||||
|
google/assistant/embedded/v1alpha1/embedded_assistant.proto,@go_googleapis//google/assistant/embedded/v1alpha1:embedded_proto,google.golang.org/genproto/googleapis/assistant/embedded/v1alpha1,@go_googleapis//google/assistant/embedded/v1alpha1:embedded_go_proto
|
||||||
|
google/home/graph/v1/device.proto,@go_googleapis//google/home/graph/v1:graph_proto,google.golang.org/genproto/googleapis/home/graph/v1,@go_googleapis//google/home/graph/v1:graph_go_proto
|
||||||
|
google/home/graph/v1/homegraph.proto,@go_googleapis//google/home/graph/v1:graph_proto,google.golang.org/genproto/googleapis/home/graph/v1,@go_googleapis//google/home/graph/v1:graph_go_proto
|
||||||
|
google/genomics/v1/operations.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/variants.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/position.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/references.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/cigar.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/datasets.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/readalignment.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/annotations.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/reads.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/readgroup.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/readgroupset.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1/range.proto,@go_googleapis//google/genomics/v1:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1,@go_googleapis//google/genomics/v1:genomics_go_proto
|
||||||
|
google/genomics/v1alpha2/pipelines.proto,@go_googleapis//google/genomics/v1alpha2:genomics_proto,google.golang.org/genproto/googleapis/genomics/v1alpha2,@go_googleapis//google/genomics/v1alpha2:genomics_go_proto
|
||||||
|
google/bigtable/v1/bigtable_service_messages.proto,@go_googleapis//google/bigtable/v1:bigtable_proto,google.golang.org/genproto/googleapis/bigtable/v1,@go_googleapis//google/bigtable/v1:bigtable_go_proto
|
||||||
|
google/bigtable/v1/bigtable_service.proto,@go_googleapis//google/bigtable/v1:bigtable_proto,google.golang.org/genproto/googleapis/bigtable/v1,@go_googleapis//google/bigtable/v1:bigtable_go_proto
|
||||||
|
google/bigtable/v1/bigtable_data.proto,@go_googleapis//google/bigtable/v1:bigtable_proto,google.golang.org/genproto/googleapis/bigtable/v1,@go_googleapis//google/bigtable/v1:bigtable_go_proto
|
||||||
|
google/bigtable/admin/cluster/v1/bigtable_cluster_data.proto,@go_googleapis//google/bigtable/admin/cluster/v1:cluster_proto,google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1,@go_googleapis//google/bigtable/admin/cluster/v1:cluster_go_proto
|
||||||
|
google/bigtable/admin/cluster/v1/bigtable_cluster_service_messages.proto,@go_googleapis//google/bigtable/admin/cluster/v1:cluster_proto,google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1,@go_googleapis//google/bigtable/admin/cluster/v1:cluster_go_proto
|
||||||
|
google/bigtable/admin/cluster/v1/bigtable_cluster_service.proto,@go_googleapis//google/bigtable/admin/cluster/v1:cluster_proto,google.golang.org/genproto/googleapis/bigtable/admin/cluster/v1,@go_googleapis//google/bigtable/admin/cluster/v1:cluster_go_proto
|
||||||
|
google/bigtable/admin/v2/bigtable_instance_admin.proto,@go_googleapis//google/bigtable/admin/v2:admin_proto,google.golang.org/genproto/googleapis/bigtable/admin/v2,@go_googleapis//google/bigtable/admin/v2:admin_go_proto
|
||||||
|
google/bigtable/admin/v2/instance.proto,@go_googleapis//google/bigtable/admin/v2:admin_proto,google.golang.org/genproto/googleapis/bigtable/admin/v2,@go_googleapis//google/bigtable/admin/v2:admin_go_proto
|
||||||
|
google/bigtable/admin/v2/table.proto,@go_googleapis//google/bigtable/admin/v2:admin_proto,google.golang.org/genproto/googleapis/bigtable/admin/v2,@go_googleapis//google/bigtable/admin/v2:admin_go_proto
|
||||||
|
google/bigtable/admin/v2/bigtable_table_admin.proto,@go_googleapis//google/bigtable/admin/v2:admin_proto,google.golang.org/genproto/googleapis/bigtable/admin/v2,@go_googleapis//google/bigtable/admin/v2:admin_go_proto
|
||||||
|
google/bigtable/admin/v2/common.proto,@go_googleapis//google/bigtable/admin/v2:admin_proto,google.golang.org/genproto/googleapis/bigtable/admin/v2,@go_googleapis//google/bigtable/admin/v2:admin_go_proto
|
||||||
|
google/bigtable/admin/table/v1/bigtable_table_service_messages.proto,@go_googleapis//google/bigtable/admin/table/v1:table_proto,google.golang.org/genproto/googleapis/bigtable/admin/table/v1,@go_googleapis//google/bigtable/admin/table/v1:table_go_proto
|
||||||
|
google/bigtable/admin/table/v1/bigtable_table_service.proto,@go_googleapis//google/bigtable/admin/table/v1:table_proto,google.golang.org/genproto/googleapis/bigtable/admin/table/v1,@go_googleapis//google/bigtable/admin/table/v1:table_go_proto
|
||||||
|
google/bigtable/admin/table/v1/bigtable_table_data.proto,@go_googleapis//google/bigtable/admin/table/v1:table_proto,google.golang.org/genproto/googleapis/bigtable/admin/table/v1,@go_googleapis//google/bigtable/admin/table/v1:table_go_proto
|
||||||
|
google/bigtable/v2/bigtable.proto,@go_googleapis//google/bigtable/v2:bigtable_proto,google.golang.org/genproto/googleapis/bigtable/v2,@go_googleapis//google/bigtable/v2:bigtable_go_proto
|
||||||
|
google/bigtable/v2/data.proto,@go_googleapis//google/bigtable/v2:bigtable_proto,google.golang.org/genproto/googleapis/bigtable/v2,@go_googleapis//google/bigtable/v2:bigtable_go_proto
|
||||||
|
google/privacy/dlp/v2/storage.proto,@go_googleapis//google/privacy/dlp/v2:dlp_proto,google.golang.org/genproto/googleapis/privacy/dlp/v2,@go_googleapis//google/privacy/dlp/v2:dlp_go_proto
|
||||||
|
google/privacy/dlp/v2/dlp.proto,@go_googleapis//google/privacy/dlp/v2:dlp_proto,google.golang.org/genproto/googleapis/privacy/dlp/v2,@go_googleapis//google/privacy/dlp/v2:dlp_go_proto
|
||||||
|
google/watcher/v1/watch.proto,@go_googleapis//google/watcher/v1:watcher_proto,google.golang.org/genproto/googleapis/watcher/v1,@go_googleapis//google/watcher/v1:watcher_go_proto
|
||||||
|
google/firestore/admin/v1beta1/firestore_admin.proto,@go_googleapis//google/firestore/admin/v1beta1:admin_proto,google.golang.org/genproto/googleapis/firestore/admin/v1beta1,@go_googleapis//google/firestore/admin/v1beta1:admin_go_proto
|
||||||
|
google/firestore/admin/v1beta1/index.proto,@go_googleapis//google/firestore/admin/v1beta1:admin_proto,google.golang.org/genproto/googleapis/firestore/admin/v1beta1,@go_googleapis//google/firestore/admin/v1beta1:admin_go_proto
|
||||||
|
google/firestore/v1beta1/write.proto,@go_googleapis//google/firestore/v1beta1:firestore_proto,google.golang.org/genproto/googleapis/firestore/v1beta1,@go_googleapis//google/firestore/v1beta1:firestore_go_proto
|
||||||
|
google/firestore/v1beta1/document.proto,@go_googleapis//google/firestore/v1beta1:firestore_proto,google.golang.org/genproto/googleapis/firestore/v1beta1,@go_googleapis//google/firestore/v1beta1:firestore_go_proto
|
||||||
|
google/firestore/v1beta1/firestore.proto,@go_googleapis//google/firestore/v1beta1:firestore_proto,google.golang.org/genproto/googleapis/firestore/v1beta1,@go_googleapis//google/firestore/v1beta1:firestore_go_proto
|
||||||
|
google/firestore/v1beta1/query.proto,@go_googleapis//google/firestore/v1beta1:firestore_proto,google.golang.org/genproto/googleapis/firestore/v1beta1,@go_googleapis//google/firestore/v1beta1:firestore_go_proto
|
||||||
|
google/firestore/v1beta1/common.proto,@go_googleapis//google/firestore/v1beta1:firestore_proto,google.golang.org/genproto/googleapis/firestore/v1beta1,@go_googleapis//google/firestore/v1beta1:firestore_go_proto
|
||||||
|
google/example/library/v1/library.proto,@go_googleapis//google/example/library/v1:library_proto,google.golang.org/genproto/googleapis/example/library/v1,@go_googleapis//google/example/library/v1:library_go_proto
|
||||||
|
google/appengine/v1/instance.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/audit_data.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/appengine.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/application.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/operation.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/app_yaml.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/location.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/service.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/deploy.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/v1/version.proto,@go_googleapis//google/appengine/v1:appengine_proto,google.golang.org/genproto/googleapis/appengine/v1,@go_googleapis//google/appengine/v1:appengine_go_proto
|
||||||
|
google/appengine/legacy/audit_data.proto,@go_googleapis//google/appengine/legacy:legacy_proto,google.golang.org/genproto/googleapis/appengine/legacy,@go_googleapis//google/appengine/legacy:legacy_go_proto
|
||||||
|
google/appengine/logging/v1/request_log.proto,@go_googleapis//google/appengine/logging/v1:logging_proto,google.golang.org/genproto/googleapis/appengine/logging/v1,@go_googleapis//google/appengine/logging/v1:logging_go_proto
|
||||||
|
google/storagetransfer/v1/transfer.proto,@go_googleapis//google/storagetransfer/v1:storagetransfer_proto,google.golang.org/genproto/googleapis/storagetransfer/v1,@go_googleapis//google/storagetransfer/v1:storagetransfer_go_proto
|
||||||
|
google/storagetransfer/v1/transfer_types.proto,@go_googleapis//google/storagetransfer/v1:storagetransfer_proto,google.golang.org/genproto/googleapis/storagetransfer/v1,@go_googleapis//google/storagetransfer/v1:storagetransfer_go_proto
|
||||||
|
google/longrunning/operations.proto,@go_googleapis//google/longrunning:longrunning_proto,google.golang.org/genproto/googleapis/longrunning,@go_googleapis//google/longrunning:longrunning_go_proto
|
||||||
|
google/container/v1/cluster_service.proto,@go_googleapis//google/container/v1:container_proto,google.golang.org/genproto/googleapis/container/v1,@go_googleapis//google/container/v1:container_go_proto
|
||||||
|
google/container/v1beta1/cluster_service.proto,@go_googleapis//google/container/v1beta1:container_proto,google.golang.org/genproto/googleapis/container/v1beta1,@go_googleapis//google/container/v1beta1:container_go_proto
|
||||||
|
google/container/v1alpha1/cluster_service.proto,@go_googleapis//google/container/v1alpha1:container_proto,google.golang.org/genproto/googleapis/container/v1alpha1,@go_googleapis//google/container/v1alpha1:container_go_proto
|
||||||
|
google/datastore/v1beta3/datastore.proto,@go_googleapis//google/datastore/v1beta3:datastore_proto,google.golang.org/genproto/googleapis/datastore/v1beta3,@go_googleapis//google/datastore/v1beta3:datastore_go_proto
|
||||||
|
google/datastore/v1beta3/query.proto,@go_googleapis//google/datastore/v1beta3:datastore_proto,google.golang.org/genproto/googleapis/datastore/v1beta3,@go_googleapis//google/datastore/v1beta3:datastore_go_proto
|
||||||
|
google/datastore/v1beta3/entity.proto,@go_googleapis//google/datastore/v1beta3:datastore_proto,google.golang.org/genproto/googleapis/datastore/v1beta3,@go_googleapis//google/datastore/v1beta3:datastore_go_proto
|
||||||
|
google/datastore/v1/datastore.proto,@go_googleapis//google/datastore/v1:datastore_proto,google.golang.org/genproto/googleapis/datastore/v1,@go_googleapis//google/datastore/v1:datastore_go_proto
|
||||||
|
google/datastore/v1/query.proto,@go_googleapis//google/datastore/v1:datastore_proto,google.golang.org/genproto/googleapis/datastore/v1,@go_googleapis//google/datastore/v1:datastore_go_proto
|
||||||
|
google/datastore/v1/entity.proto,@go_googleapis//google/datastore/v1:datastore_proto,google.golang.org/genproto/googleapis/datastore/v1,@go_googleapis//google/datastore/v1:datastore_go_proto
|
||||||
|
google/datastore/admin/v1/datastore_admin.proto,@go_googleapis//google/datastore/admin/v1:admin_proto,google.golang.org/genproto/googleapis/datastore/admin/v1,@go_googleapis//google/datastore/admin/v1:admin_go_proto
|
||||||
|
google/datastore/admin/v1beta1/datastore_admin.proto,@go_googleapis//google/datastore/admin/v1beta1:admin_proto,google.golang.org/genproto/googleapis/datastore/admin/v1beta1,@go_googleapis//google/datastore/admin/v1beta1:admin_go_proto
|
||||||
|
google/bytestream/bytestream.proto,@go_googleapis//google/bytestream:bytestream_proto,google.golang.org/genproto/googleapis/bytestream,@go_googleapis//google/bytestream:bytestream_go_proto
|
||||||
|
google/iam/v1/iam_policy.proto,@go_googleapis//google/iam/v1:iam_proto,google.golang.org/genproto/googleapis/iam/v1,@go_googleapis//google/iam/v1:iam_go_proto
|
||||||
|
google/iam/v1/policy.proto,@go_googleapis//google/iam/v1:iam_proto,google.golang.org/genproto/googleapis/iam/v1,@go_googleapis//google/iam/v1:iam_go_proto
|
||||||
|
google/iam/v1/logging/audit_data.proto,@go_googleapis//google/iam/v1/logging:logging_proto,google.golang.org/genproto/googleapis/iam/v1/logging,@go_googleapis//google/iam/v1/logging:logging_go_proto
|
||||||
|
google/iam/admin/v1/iam.proto,@go_googleapis//google/iam/admin/v1:admin_proto,google.golang.org/genproto/googleapis/iam/admin/v1,@go_googleapis//google/iam/admin/v1:admin_go_proto
|
||||||
|
google/type/money.proto,@go_googleapis//google/type:money_proto,google.golang.org/genproto/googleapis/type/money,@go_googleapis//google/type:money_go_proto
|
||||||
|
google/type/latlng.proto,@go_googleapis//google/type:latlng_proto,google.golang.org/genproto/googleapis/type/latlng,@go_googleapis//google/type:latlng_go_proto
|
||||||
|
google/type/color.proto,@go_googleapis//google/type:color_proto,google.golang.org/genproto/googleapis/type/color,@go_googleapis//google/type:color_go_proto
|
||||||
|
google/type/timeofday.proto,@go_googleapis//google/type:timeofday_proto,google.golang.org/genproto/googleapis/type/timeofday,@go_googleapis//google/type:timeofday_go_proto
|
||||||
|
google/type/date.proto,@go_googleapis//google/type:date_proto,google.golang.org/genproto/googleapis/type/date,@go_googleapis//google/type:date_go_proto
|
||||||
|
google/type/dayofweek.proto,@go_googleapis//google/type:dayofweek_proto,google.golang.org/genproto/googleapis/type/dayofweek,@go_googleapis//google/type:dayofweek_go_proto
|
||||||
|
google/type/postal_address.proto,@go_googleapis//google/type:postaladdress_proto,google.golang.org/genproto/googleapis/type/postaladdress,@go_googleapis//google/type:postaladdress_go_proto
|
||||||
|
google/devtools/clouderrorreporting/v1beta1/report_errors_service.proto,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_proto,google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_go_proto
|
||||||
|
google/devtools/clouderrorreporting/v1beta1/error_group_service.proto,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_proto,google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_go_proto
|
||||||
|
google/devtools/clouderrorreporting/v1beta1/error_stats_service.proto,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_proto,google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_go_proto
|
||||||
|
google/devtools/clouderrorreporting/v1beta1/common.proto,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_proto,google.golang.org/genproto/googleapis/devtools/clouderrorreporting/v1beta1,@go_googleapis//google/devtools/clouderrorreporting/v1beta1:clouderrorreporting_go_proto
|
||||||
|
google/devtools/resultstore/v2/file.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/resultstore_download.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/configuration.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/action.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/resultstore_file_download.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/test_suite.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/file_set.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/coverage.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/coverage_summary.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/configured_target.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/target.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/invocation.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/resultstore/v2/common.proto,@go_googleapis//google/devtools/resultstore/v2:resultstore_proto,google.golang.org/genproto/googleapis/devtools/resultstore/v2,@go_googleapis//google/devtools/resultstore/v2:resultstore_go_proto
|
||||||
|
google/devtools/source/v1/source_context.proto,@go_googleapis//google/devtools/source/v1:source_proto,google.golang.org/genproto/googleapis/devtools/source/v1,@go_googleapis//google/devtools/source/v1:source_go_proto
|
||||||
|
google/devtools/remoteexecution/v1test/remote_execution.proto,@go_googleapis//google/devtools/remoteexecution/v1test:remoteexecution_proto,google.golang.org/genproto/googleapis/devtools/remoteexecution/v1test,@go_googleapis//google/devtools/remoteexecution/v1test:remoteexecution_go_proto
|
||||||
|
google/devtools/cloudbuild/v1/cloudbuild.proto,@go_googleapis//google/devtools/cloudbuild/v1:cloudbuild_proto,google.golang.org/genproto/googleapis/devtools/cloudbuild/v1,@go_googleapis//google/devtools/cloudbuild/v1:cloudbuild_go_proto
|
||||||
|
google/devtools/sourcerepo/v1/sourcerepo.proto,@go_googleapis//google/devtools/sourcerepo/v1:sourcerepo_proto,google.golang.org/genproto/googleapis/devtools/sourcerepo/v1,@go_googleapis//google/devtools/sourcerepo/v1:sourcerepo_go_proto
|
||||||
|
google/devtools/remoteworkers/v1test2/worker.proto,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_proto,google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_go_proto
|
||||||
|
google/devtools/remoteworkers/v1test2/tasks.proto,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_proto,google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_go_proto
|
||||||
|
google/devtools/remoteworkers/v1test2/bots.proto,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_proto,google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_go_proto
|
||||||
|
google/devtools/remoteworkers/v1test2/command.proto,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_proto,google.golang.org/genproto/googleapis/devtools/remoteworkers/v1test2,@go_googleapis//google/devtools/remoteworkers/v1test2:remoteworkers_go_proto
|
||||||
|
google/devtools/cloudtrace/v1/trace.proto,@go_googleapis//google/devtools/cloudtrace/v1:cloudtrace_proto,google.golang.org/genproto/googleapis/devtools/cloudtrace/v1,@go_googleapis//google/devtools/cloudtrace/v1:cloudtrace_go_proto
|
||||||
|
google/devtools/cloudtrace/v2/trace.proto,@go_googleapis//google/devtools/cloudtrace/v2:cloudtrace_proto,google.golang.org/genproto/googleapis/devtools/cloudtrace/v2,@go_googleapis//google/devtools/cloudtrace/v2:cloudtrace_go_proto
|
||||||
|
google/devtools/cloudtrace/v2/tracing.proto,@go_googleapis//google/devtools/cloudtrace/v2:cloudtrace_proto,google.golang.org/genproto/googleapis/devtools/cloudtrace/v2,@go_googleapis//google/devtools/cloudtrace/v2:cloudtrace_go_proto
|
||||||
|
google/devtools/cloudprofiler/v2/profiler.proto,@go_googleapis//google/devtools/cloudprofiler/v2:cloudprofiler_proto,google.golang.org/genproto/googleapis/devtools/cloudprofiler/v2,@go_googleapis//google/devtools/cloudprofiler/v2:cloudprofiler_go_proto
|
||||||
|
google/devtools/containeranalysis/v1alpha1/containeranalysis.proto,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_proto,google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_go_proto
|
||||||
|
google/devtools/containeranalysis/v1alpha1/bill_of_materials.proto,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_proto,google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_go_proto
|
||||||
|
google/devtools/containeranalysis/v1alpha1/provenance.proto,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_proto,google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_go_proto
|
||||||
|
google/devtools/containeranalysis/v1alpha1/package_vulnerability.proto,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_proto,google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_go_proto
|
||||||
|
google/devtools/containeranalysis/v1alpha1/source_context.proto,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_proto,google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_go_proto
|
||||||
|
google/devtools/containeranalysis/v1alpha1/image_basis.proto,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_proto,google.golang.org/genproto/googleapis/devtools/containeranalysis/v1alpha1,@go_googleapis//google/devtools/containeranalysis/v1alpha1:containeranalysis_go_proto
|
||||||
|
google/devtools/build/v1/build_events.proto,@go_googleapis//google/devtools/build/v1:build_proto,google.golang.org/genproto/googleapis/devtools/build/v1,@go_googleapis//google/devtools/build/v1:build_go_proto
|
||||||
|
google/devtools/build/v1/build_status.proto,@go_googleapis//google/devtools/build/v1:build_proto,google.golang.org/genproto/googleapis/devtools/build/v1,@go_googleapis//google/devtools/build/v1:build_go_proto
|
||||||
|
google/devtools/build/v1/publish_build_event.proto,@go_googleapis//google/devtools/build/v1:build_proto,google.golang.org/genproto/googleapis/devtools/build/v1,@go_googleapis//google/devtools/build/v1:build_go_proto
|
||||||
|
google/devtools/clouddebugger/v2/debugger.proto,@go_googleapis//google/devtools/clouddebugger/v2:clouddebugger_proto,google.golang.org/genproto/googleapis/devtools/clouddebugger/v2,@go_googleapis//google/devtools/clouddebugger/v2:clouddebugger_go_proto
|
||||||
|
google/devtools/clouddebugger/v2/data.proto,@go_googleapis//google/devtools/clouddebugger/v2:clouddebugger_proto,google.golang.org/genproto/googleapis/devtools/clouddebugger/v2,@go_googleapis//google/devtools/clouddebugger/v2:clouddebugger_go_proto
|
||||||
|
google/devtools/clouddebugger/v2/controller.proto,@go_googleapis//google/devtools/clouddebugger/v2:clouddebugger_proto,google.golang.org/genproto/googleapis/devtools/clouddebugger/v2,@go_googleapis//google/devtools/clouddebugger/v2:clouddebugger_go_proto
|
||||||
|
google/cloud/resourcemanager/v2/folders.proto,@go_googleapis//google/cloud/resourcemanager/v2:resourcemanager_proto,google.golang.org/genproto/googleapis/cloud/resourcemanager/v2,@go_googleapis//google/cloud/resourcemanager/v2:resourcemanager_go_proto
|
||||||
|
google/cloud/kms/v1/resources.proto,@go_googleapis//google/cloud/kms/v1:kms_proto,google.golang.org/genproto/googleapis/cloud/kms/v1,@go_googleapis//google/cloud/kms/v1:kms_go_proto
|
||||||
|
google/cloud/kms/v1/service.proto,@go_googleapis//google/cloud/kms/v1:kms_proto,google.golang.org/genproto/googleapis/cloud/kms/v1,@go_googleapis//google/cloud/kms/v1:kms_go_proto
|
||||||
|
google/cloud/runtimeconfig/v1beta1/resources.proto,@go_googleapis//google/cloud/runtimeconfig/v1beta1:runtimeconfig_proto,google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1,@go_googleapis//google/cloud/runtimeconfig/v1beta1:runtimeconfig_go_proto
|
||||||
|
google/cloud/runtimeconfig/v1beta1/runtimeconfig.proto,@go_googleapis//google/cloud/runtimeconfig/v1beta1:runtimeconfig_proto,google.golang.org/genproto/googleapis/cloud/runtimeconfig/v1beta1,@go_googleapis//google/cloud/runtimeconfig/v1beta1:runtimeconfig_go_proto
|
||||||
|
google/cloud/tasks/v2beta2/queue.proto,@go_googleapis//google/cloud/tasks/v2beta2:tasks_proto,google.golang.org/genproto/googleapis/cloud/tasks/v2beta2,@go_googleapis//google/cloud/tasks/v2beta2:tasks_go_proto
|
||||||
|
google/cloud/tasks/v2beta2/task.proto,@go_googleapis//google/cloud/tasks/v2beta2:tasks_proto,google.golang.org/genproto/googleapis/cloud/tasks/v2beta2,@go_googleapis//google/cloud/tasks/v2beta2:tasks_go_proto
|
||||||
|
google/cloud/tasks/v2beta2/target.proto,@go_googleapis//google/cloud/tasks/v2beta2:tasks_proto,google.golang.org/genproto/googleapis/cloud/tasks/v2beta2,@go_googleapis//google/cloud/tasks/v2beta2:tasks_go_proto
|
||||||
|
google/cloud/tasks/v2beta2/cloudtasks.proto,@go_googleapis//google/cloud/tasks/v2beta2:tasks_proto,google.golang.org/genproto/googleapis/cloud/tasks/v2beta2,@go_googleapis//google/cloud/tasks/v2beta2:tasks_go_proto
|
||||||
|
google/cloud/oslogin/v1/oslogin.proto,@go_googleapis//google/cloud/oslogin/v1:oslogin_proto,google.golang.org/genproto/googleapis/cloud/oslogin/v1,@go_googleapis//google/cloud/oslogin/v1:oslogin_go_proto
|
||||||
|
google/cloud/oslogin/v1alpha/oslogin.proto,@go_googleapis//google/cloud/oslogin/v1alpha:oslogin_proto,google.golang.org/genproto/googleapis/cloud/oslogin/v1alpha,@go_googleapis//google/cloud/oslogin/v1alpha:oslogin_go_proto
|
||||||
|
google/cloud/oslogin/common/common.proto,@go_googleapis//google/cloud/oslogin/common:common_proto,google.golang.org/genproto/googleapis/cloud/oslogin/common,@go_googleapis//google/cloud/oslogin/common:common_go_proto
|
||||||
|
google/cloud/oslogin/v1beta/oslogin.proto,@go_googleapis//google/cloud/oslogin/v1beta:oslogin_proto,google.golang.org/genproto/googleapis/cloud/oslogin/v1beta,@go_googleapis//google/cloud/oslogin/v1beta:oslogin_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/context.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/session_entity_type.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/intent.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/entity_type.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/webhook.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/session.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2beta1/agent.proto,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2beta1,@go_googleapis//google/cloud/dialogflow/v2beta1:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/context.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/session_entity_type.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/intent.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/entity_type.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/webhook.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/session.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/dialogflow/v2/agent.proto,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_proto,google.golang.org/genproto/googleapis/cloud/dialogflow/v2,@go_googleapis//google/cloud/dialogflow/v2:dialogflow_go_proto
|
||||||
|
google/cloud/redis/v1beta1/cloud_redis.proto,@go_googleapis//google/cloud/redis/v1beta1:redis_proto,google.golang.org/genproto/googleapis/cloud/redis/v1beta1,@go_googleapis//google/cloud/redis/v1beta1:redis_go_proto
|
||||||
|
google/cloud/location/locations.proto,@go_googleapis//google/cloud/location:location_proto,google.golang.org/genproto/googleapis/cloud/location,@go_googleapis//google/cloud/location:location_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/finding.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/finding_type_stats.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/scan_config.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/crawled_url.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/scan_run.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/websecurityscanner/v1alpha/finding_addon.proto,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_proto,google.golang.org/genproto/googleapis/cloud/websecurityscanner/v1alpha,@go_googleapis//google/cloud/websecurityscanner/v1alpha:websecurityscanner_go_proto
|
||||||
|
google/cloud/language/v1/language_service.proto,@go_googleapis//google/cloud/language/v1:language_proto,google.golang.org/genproto/googleapis/cloud/language/v1,@go_googleapis//google/cloud/language/v1:language_go_proto
|
||||||
|
google/cloud/language/v1beta2/language_service.proto,@go_googleapis//google/cloud/language/v1beta2:language_proto,google.golang.org/genproto/googleapis/cloud/language/v1beta2,@go_googleapis//google/cloud/language/v1beta2:language_go_proto
|
||||||
|
google/cloud/language/v1beta1/language_service.proto,@go_googleapis//google/cloud/language/v1beta1:language_proto,google.golang.org/genproto/googleapis/cloud/language/v1beta1,@go_googleapis//google/cloud/language/v1beta1:language_go_proto
|
||||||
|
google/cloud/bigquery/datatransfer/v1/transfer.proto,@go_googleapis//google/cloud/bigquery/datatransfer/v1:datatransfer_proto,google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1,@go_googleapis//google/cloud/bigquery/datatransfer/v1:datatransfer_go_proto
|
||||||
|
google/cloud/bigquery/datatransfer/v1/datatransfer.proto,@go_googleapis//google/cloud/bigquery/datatransfer/v1:datatransfer_proto,google.golang.org/genproto/googleapis/cloud/bigquery/datatransfer/v1,@go_googleapis//google/cloud/bigquery/datatransfer/v1:datatransfer_go_proto
|
||||||
|
google/cloud/bigquery/logging/v1/audit_data.proto,@go_googleapis//google/cloud/bigquery/logging/v1:logging_proto,google.golang.org/genproto/googleapis/cloud/bigquery/logging/v1,@go_googleapis//google/cloud/bigquery/logging/v1:logging_go_proto
|
||||||
|
google/cloud/vision/v1/image_annotator.proto,@go_googleapis//google/cloud/vision/v1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1,@go_googleapis//google/cloud/vision/v1:vision_go_proto
|
||||||
|
google/cloud/vision/v1/geometry.proto,@go_googleapis//google/cloud/vision/v1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1,@go_googleapis//google/cloud/vision/v1:vision_go_proto
|
||||||
|
google/cloud/vision/v1/web_detection.proto,@go_googleapis//google/cloud/vision/v1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1,@go_googleapis//google/cloud/vision/v1:vision_go_proto
|
||||||
|
google/cloud/vision/v1/text_annotation.proto,@go_googleapis//google/cloud/vision/v1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1,@go_googleapis//google/cloud/vision/v1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p2beta1/image_annotator.proto,@go_googleapis//google/cloud/vision/v1p2beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1,@go_googleapis//google/cloud/vision/v1p2beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p2beta1/geometry.proto,@go_googleapis//google/cloud/vision/v1p2beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1,@go_googleapis//google/cloud/vision/v1p2beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p2beta1/web_detection.proto,@go_googleapis//google/cloud/vision/v1p2beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1,@go_googleapis//google/cloud/vision/v1p2beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p2beta1/text_annotation.proto,@go_googleapis//google/cloud/vision/v1p2beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p2beta1,@go_googleapis//google/cloud/vision/v1p2beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p1beta1/image_annotator.proto,@go_googleapis//google/cloud/vision/v1p1beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1,@go_googleapis//google/cloud/vision/v1p1beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p1beta1/geometry.proto,@go_googleapis//google/cloud/vision/v1p1beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1,@go_googleapis//google/cloud/vision/v1p1beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p1beta1/web_detection.proto,@go_googleapis//google/cloud/vision/v1p1beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1,@go_googleapis//google/cloud/vision/v1p1beta1:vision_go_proto
|
||||||
|
google/cloud/vision/v1p1beta1/text_annotation.proto,@go_googleapis//google/cloud/vision/v1p1beta1:vision_proto,google.golang.org/genproto/googleapis/cloud/vision/v1p1beta1,@go_googleapis//google/cloud/vision/v1p1beta1:vision_go_proto
|
||||||
|
google/cloud/speech/v1/cloud_speech.proto,@go_googleapis//google/cloud/speech/v1:speech_proto,google.golang.org/genproto/googleapis/cloud/speech/v1,@go_googleapis//google/cloud/speech/v1:speech_go_proto
|
||||||
|
google/cloud/speech/v1beta1/cloud_speech.proto,@go_googleapis//google/cloud/speech/v1beta1:speech_proto,google.golang.org/genproto/googleapis/cloud/speech/v1beta1,@go_googleapis//google/cloud/speech/v1beta1:speech_go_proto
|
||||||
|
google/cloud/speech/v1p1beta1/cloud_speech.proto,@go_googleapis//google/cloud/speech/v1p1beta1:speech_proto,google.golang.org/genproto/googleapis/cloud/speech/v1p1beta1,@go_googleapis//google/cloud/speech/v1p1beta1:speech_go_proto
|
||||||
|
google/cloud/iot/v1/device_manager.proto,@go_googleapis//google/cloud/iot/v1:iot_proto,google.golang.org/genproto/googleapis/cloud/iot/v1,@go_googleapis//google/cloud/iot/v1:iot_go_proto
|
||||||
|
google/cloud/iot/v1/resources.proto,@go_googleapis//google/cloud/iot/v1:iot_proto,google.golang.org/genproto/googleapis/cloud/iot/v1,@go_googleapis//google/cloud/iot/v1:iot_go_proto
|
||||||
|
google/cloud/videointelligence/v1/video_intelligence.proto,@go_googleapis//google/cloud/videointelligence/v1:videointelligence_proto,google.golang.org/genproto/googleapis/cloud/videointelligence/v1,@go_googleapis//google/cloud/videointelligence/v1:videointelligence_go_proto
|
||||||
|
google/cloud/videointelligence/v1beta2/video_intelligence.proto,@go_googleapis//google/cloud/videointelligence/v1beta2:videointelligence_proto,google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta2,@go_googleapis//google/cloud/videointelligence/v1beta2:videointelligence_go_proto
|
||||||
|
google/cloud/videointelligence/v1beta1/video_intelligence.proto,@go_googleapis//google/cloud/videointelligence/v1beta1:videointelligence_proto,google.golang.org/genproto/googleapis/cloud/videointelligence/v1beta1,@go_googleapis//google/cloud/videointelligence/v1beta1:videointelligence_go_proto
|
||||||
|
google/cloud/videointelligence/v1p1beta1/video_intelligence.proto,@go_googleapis//google/cloud/videointelligence/v1p1beta1:videointelligence_proto,google.golang.org/genproto/googleapis/cloud/videointelligence/v1p1beta1,@go_googleapis//google/cloud/videointelligence/v1p1beta1:videointelligence_go_proto
|
||||||
|
google/cloud/audit/audit_log.proto,@go_googleapis//google/cloud/audit:audit_proto,google.golang.org/genproto/googleapis/cloud/audit,@go_googleapis//google/cloud/audit:audit_go_proto
|
||||||
|
google/cloud/support/common.proto,@go_googleapis//google/cloud/support:common_proto,google.golang.org/genproto/googleapis/cloud/support/common,@go_googleapis//google/cloud/support:common_go_proto
|
||||||
|
google/cloud/support/v1alpha1/cloud_support.proto,@go_googleapis//google/cloud/support/v1alpha1:support_proto,google.golang.org/genproto/googleapis/cloud/support/v1alpha1,@go_googleapis//google/cloud/support/v1alpha1:support_go_proto
|
||||||
|
google/cloud/ml/v1/operation_metadata.proto,@go_googleapis//google/cloud/ml/v1:ml_proto,google.golang.org/genproto/googleapis/cloud/ml/v1,@go_googleapis//google/cloud/ml/v1:ml_go_proto
|
||||||
|
google/cloud/ml/v1/job_service.proto,@go_googleapis//google/cloud/ml/v1:ml_proto,google.golang.org/genproto/googleapis/cloud/ml/v1,@go_googleapis//google/cloud/ml/v1:ml_go_proto
|
||||||
|
google/cloud/ml/v1/prediction_service.proto,@go_googleapis//google/cloud/ml/v1:ml_proto,google.golang.org/genproto/googleapis/cloud/ml/v1,@go_googleapis//google/cloud/ml/v1:ml_go_proto
|
||||||
|
google/cloud/ml/v1/model_service.proto,@go_googleapis//google/cloud/ml/v1:ml_proto,google.golang.org/genproto/googleapis/cloud/ml/v1,@go_googleapis//google/cloud/ml/v1:ml_go_proto
|
||||||
|
google/cloud/ml/v1/project_service.proto,@go_googleapis//google/cloud/ml/v1:ml_proto,google.golang.org/genproto/googleapis/cloud/ml/v1,@go_googleapis//google/cloud/ml/v1:ml_go_proto
|
||||||
|
google/cloud/texttospeech/v1/cloud_tts.proto,@go_googleapis//google/cloud/texttospeech/v1:texttospeech_proto,google.golang.org/genproto/googleapis/cloud/texttospeech/v1,@go_googleapis//google/cloud/texttospeech/v1:texttospeech_go_proto
|
||||||
|
google/cloud/texttospeech/v1beta1/cloud_tts.proto,@go_googleapis//google/cloud/texttospeech/v1beta1:texttospeech_proto,google.golang.org/genproto/googleapis/cloud/texttospeech/v1beta1,@go_googleapis//google/cloud/texttospeech/v1beta1:texttospeech_go_proto
|
||||||
|
google/cloud/functions/v1beta2/operations.proto,@go_googleapis//google/cloud/functions/v1beta2:functions_proto,google.golang.org/genproto/googleapis/cloud/functions/v1beta2,@go_googleapis//google/cloud/functions/v1beta2:functions_go_proto
|
||||||
|
google/cloud/functions/v1beta2/functions.proto,@go_googleapis//google/cloud/functions/v1beta2:functions_proto,google.golang.org/genproto/googleapis/cloud/functions/v1beta2,@go_googleapis//google/cloud/functions/v1beta2:functions_go_proto
|
||||||
|
google/cloud/billing/v1/cloud_billing.proto,@go_googleapis//google/cloud/billing/v1:billing_proto,google.golang.org/genproto/googleapis/cloud/billing/v1,@go_googleapis//google/cloud/billing/v1:billing_go_proto
|
||||||
|
google/cloud/dataproc/v1/operations.proto,@go_googleapis//google/cloud/dataproc/v1:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1,@go_googleapis//google/cloud/dataproc/v1:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1/clusters.proto,@go_googleapis//google/cloud/dataproc/v1:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1,@go_googleapis//google/cloud/dataproc/v1:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1/jobs.proto,@go_googleapis//google/cloud/dataproc/v1:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1,@go_googleapis//google/cloud/dataproc/v1:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1beta2/operations.proto,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1beta2/clusters.proto,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1beta2/jobs.proto,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1beta2/workflow_templates.proto,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_go_proto
|
||||||
|
google/cloud/dataproc/v1beta2/shared.proto,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_proto,google.golang.org/genproto/googleapis/cloud/dataproc/v1beta2,@go_googleapis//google/cloud/dataproc/v1beta2:dataproc_go_proto
|
||||||
|
google/api/context.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/http.proto,@go_googleapis//google/api:annotations_proto,google.golang.org/genproto/googleapis/api/annotations,@go_googleapis//google/api:annotations_go_proto
|
||||||
|
google/api/config_change.proto,@go_googleapis//google/api:configchange_proto,google.golang.org/genproto/googleapis/api/configchange,@go_googleapis//google/api:configchange_go_proto
|
||||||
|
google/api/system_parameter.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/monitoring.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/distribution.proto,@go_googleapis//google/api:distribution_proto,google.golang.org/genproto/googleapis/api/distribution,@go_googleapis//google/api:distribution_go_proto
|
||||||
|
google/api/endpoint.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/usage.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/monitored_resource.proto,@go_googleapis//google/api:monitoredres_proto,google.golang.org/genproto/googleapis/api/monitoredres,@go_googleapis//google/api:monitoredres_go_proto
|
||||||
|
google/api/annotations.proto,@go_googleapis//google/api:annotations_proto,google.golang.org/genproto/googleapis/api/annotations,@go_googleapis//google/api:annotations_go_proto
|
||||||
|
google/api/control.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/metric.proto,@go_googleapis//google/api:metric_proto,google.golang.org/genproto/googleapis/api/metric,@go_googleapis//google/api:metric_go_proto
|
||||||
|
google/api/label.proto,@go_googleapis//google/api:label_proto,google.golang.org/genproto/googleapis/api/label,@go_googleapis//google/api:label_go_proto
|
||||||
|
google/api/consumer.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/log.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/billing.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/service.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/logging.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/documentation.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/quota.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/auth.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/backend.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/source_info.proto,@go_googleapis//google/api:serviceconfig_proto,google.golang.org/genproto/googleapis/api/serviceconfig,@go_googleapis//google/api:serviceconfig_go_proto
|
||||||
|
google/api/httpbody.proto,@go_googleapis//google/api:httpbody_proto,google.golang.org/genproto/googleapis/api/httpbody,@go_googleapis//google/api:httpbody_go_proto
|
||||||
|
google/api/experimental/authorization_config.proto,@go_googleapis//google/api/experimental:api_proto,google.golang.org/genproto/googleapis/api,@go_googleapis//google/api/experimental:api_go_proto
|
||||||
|
google/api/experimental/experimental.proto,@go_googleapis//google/api/experimental:api_proto,google.golang.org/genproto/googleapis/api,@go_googleapis//google/api/experimental:api_go_proto
|
||||||
|
google/api/servicemanagement/v1/servicemanager.proto,@go_googleapis//google/api/servicemanagement/v1:servicemanagement_proto,google.golang.org/genproto/googleapis/api/servicemanagement/v1,@go_googleapis//google/api/servicemanagement/v1:servicemanagement_go_proto
|
||||||
|
google/api/servicemanagement/v1/resources.proto,@go_googleapis//google/api/servicemanagement/v1:servicemanagement_proto,google.golang.org/genproto/googleapis/api/servicemanagement/v1,@go_googleapis//google/api/servicemanagement/v1:servicemanagement_go_proto
|
||||||
|
google/api/servicecontrol/v1/quota_controller.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/api/servicecontrol/v1/distribution.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/api/servicecontrol/v1/check_error.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/api/servicecontrol/v1/operation.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/api/servicecontrol/v1/metric_value.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/api/servicecontrol/v1/log_entry.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/api/servicecontrol/v1/service_controller.proto,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_proto,google.golang.org/genproto/googleapis/api/servicecontrol/v1,@go_googleapis//google/api/servicecontrol/v1:servicecontrol_go_proto
|
||||||
|
google/pubsub/v1/pubsub.proto,@go_googleapis//google/pubsub/v1:pubsub_proto,google.golang.org/genproto/googleapis/pubsub/v1,@go_googleapis//google/pubsub/v1:pubsub_go_proto
|
||||||
|
google/pubsub/v1beta2/pubsub.proto,@go_googleapis//google/pubsub/v1beta2:pubsub_proto,google.golang.org/genproto/googleapis/pubsub/v1beta2,@go_googleapis//google/pubsub/v1beta2:pubsub_go_proto
|
||||||
|
google/spanner/v1/mutation.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/v1/spanner.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/v1/transaction.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/v1/keys.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/v1/type.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/v1/query_plan.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/v1/result_set.proto,@go_googleapis//google/spanner/v1:spanner_proto,google.golang.org/genproto/googleapis/spanner/v1,@go_googleapis//google/spanner/v1:spanner_go_proto
|
||||||
|
google/spanner/admin/database/v1/spanner_database_admin.proto,@go_googleapis//google/spanner/admin/database/v1:database_proto,google.golang.org/genproto/googleapis/spanner/admin/database/v1,@go_googleapis//google/spanner/admin/database/v1:database_go_proto
|
||||||
|
google/spanner/admin/instance/v1/spanner_instance_admin.proto,@go_googleapis//google/spanner/admin/instance/v1:instance_proto,google.golang.org/genproto/googleapis/spanner/admin/instance/v1,@go_googleapis//google/spanner/admin/instance/v1:instance_go_proto
|
||||||
|
google/monitoring/v3/group.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/mutation_record.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/notification.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/alert_service.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/uptime_service.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/group_service.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/alert.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/uptime.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/metric.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/notification_service.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/metric_service.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/monitoring/v3/common.proto,@go_googleapis//google/monitoring/v3:monitoring_proto,google.golang.org/genproto/googleapis/monitoring/v3,@go_googleapis//google/monitoring/v3:monitoring_go_proto
|
||||||
|
google/rpc/code.proto,@go_googleapis//google/rpc:code_proto,google.golang.org/genproto/googleapis/rpc/code,@go_googleapis//google/rpc:code_go_proto
|
||||||
|
google/rpc/status.proto,@go_googleapis//google/rpc:status_proto,google.golang.org/genproto/googleapis/rpc/status,@go_googleapis//google/rpc:status_go_proto
|
||||||
|
google/rpc/error_details.proto,@go_googleapis//google/rpc:errdetails_proto,google.golang.org/genproto/googleapis/rpc/errdetails,@go_googleapis//google/rpc:errdetails_go_proto
|
||||||
|
google/streetview/publish/v1/resources.proto,@go_googleapis//google/streetview/publish/v1:publish_proto,google.golang.org/genproto/googleapis/streetview/publish/v1,@go_googleapis//google/streetview/publish/v1:publish_go_proto
|
||||||
|
google/streetview/publish/v1/rpcmessages.proto,@go_googleapis//google/streetview/publish/v1:publish_proto,google.golang.org/genproto/googleapis/streetview/publish/v1,@go_googleapis//google/streetview/publish/v1:publish_go_proto
|
||||||
|
google/streetview/publish/v1/streetview_publish.proto,@go_googleapis//google/streetview/publish/v1:publish_proto,google.golang.org/genproto/googleapis/streetview/publish/v1,@go_googleapis//google/streetview/publish/v1:publish_go_proto
|
||||||
|
google/logging/v2/logging_metrics.proto,@go_googleapis//google/logging/v2:logging_proto,google.golang.org/genproto/googleapis/logging/v2,@go_googleapis//google/logging/v2:logging_go_proto
|
||||||
|
google/logging/v2/logging_config.proto,@go_googleapis//google/logging/v2:logging_proto,google.golang.org/genproto/googleapis/logging/v2,@go_googleapis//google/logging/v2:logging_go_proto
|
||||||
|
google/logging/v2/log_entry.proto,@go_googleapis//google/logging/v2:logging_proto,google.golang.org/genproto/googleapis/logging/v2,@go_googleapis//google/logging/v2:logging_go_proto
|
||||||
|
google/logging/v2/logging.proto,@go_googleapis//google/logging/v2:logging_proto,google.golang.org/genproto/googleapis/logging/v2,@go_googleapis//google/logging/v2:logging_go_proto
|
||||||
|
google/logging/type/log_severity.proto,@go_googleapis//google/logging/type:ltype_proto,google.golang.org/genproto/googleapis/logging/type,@go_googleapis//google/logging/type:ltype_go_proto
|
||||||
|
google/logging/type/http_request.proto,@go_googleapis//google/logging/type:ltype_proto,google.golang.org/genproto/googleapis/logging/type,@go_googleapis//google/logging/type:ltype_go_proto
|
||||||
|
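The comma-separated rows above are the data behind Gazelle's known-import table: each row pairs a well-known .proto import path with its proto_library label, the generated Go import path, and the matching go_proto_library label. As a rough, self-contained sketch (the struct, map, and function names here are illustrative, not Gazelle's actual internals), such a table can be modelled and queried like this, using one sample entry taken from the rows above:

package main

import "fmt"

// knownProtoImport mirrors one row of the table above: the proto_library
// label, the generated Go import path, and the go_proto_library label for a
// given .proto import. The type and map names are illustrative only.
type knownProtoImport struct {
	ProtoLabel   string
	GoImportPath string
	GoProtoLabel string
}

// knownProtoImports holds one sample entry copied from the table above.
var knownProtoImports = map[string]knownProtoImport{
	"google/rpc/status.proto": {
		ProtoLabel:   "@go_googleapis//google/rpc:status_proto",
		GoImportPath: "google.golang.org/genproto/googleapis/rpc/status",
		GoProtoLabel: "@go_googleapis//google/rpc:status_go_proto",
	},
}

func main() {
	if ki, ok := knownProtoImports["google/rpc/status.proto"]; ok {
		fmt.Println(ki.GoProtoLabel) // @go_googleapis//google/rpc:status_go_proto
	}
}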
116
vendor/github.com/bazelbuild/bazel-gazelle/internal/language/proto/resolve.go
generated
vendored
Normal file
@@ -0,0 +1,116 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package proto

import (
	"errors"
	"fmt"
	"log"
	"path"
	"strings"

	"github.com/bazelbuild/bazel-gazelle/internal/config"
	"github.com/bazelbuild/bazel-gazelle/internal/label"
	"github.com/bazelbuild/bazel-gazelle/internal/repos"
	"github.com/bazelbuild/bazel-gazelle/internal/resolve"
	"github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func (_ *protoLang) Imports(c *config.Config, r *rule.Rule, f *rule.File) []resolve.ImportSpec {
	rel := f.Pkg
	srcs := r.AttrStrings("srcs")
	imports := make([]resolve.ImportSpec, len(srcs))
	for i, src := range srcs {
		imports[i] = resolve.ImportSpec{Lang: "proto", Imp: path.Join(rel, src)}
	}
	return imports
}

func (_ *protoLang) Embeds(r *rule.Rule, from label.Label) []label.Label {
	return nil
}

func (_ *protoLang) Resolve(c *config.Config, ix *resolve.RuleIndex, rc *repos.RemoteCache, r *rule.Rule, from label.Label) {
	pc := GetProtoConfig(c)
	importsRaw := r.PrivateAttr(config.GazelleImportsKey)
	if importsRaw == nil {
		// may not be set in tests.
		return
	}
	imports := importsRaw.([]string)
	r.DelAttr("deps")
	deps := make([]string, 0, len(imports))
	for _, imp := range imports {
		l, err := resolveProto(pc, ix, r, imp, from)
		if err == skipImportError {
			continue
		} else if err != nil {
			log.Print(err)
		} else {
			l = l.Rel(from.Repo, from.Pkg)
			deps = append(deps, l.String())
		}
	}
	if len(deps) > 0 {
		r.SetAttr("deps", deps)
	}
}

var (
	skipImportError = errors.New("std import")
	notFoundError   = errors.New("not found")
)

func resolveProto(pc *ProtoConfig, ix *resolve.RuleIndex, r *rule.Rule, imp string, from label.Label) (label.Label, error) {
	if !strings.HasSuffix(imp, ".proto") {
		return label.NoLabel, fmt.Errorf("can't import non-proto: %q", imp)
	}

	if l, ok := knownImports[imp]; ok && pc.Mode.ShouldUseKnownImports() {
		if l.Equal(from) {
			return label.NoLabel, skipImportError
		} else {
			return l, nil
		}
	}

	if l, err := resolveWithIndex(ix, imp, from); err == nil || err == skipImportError {
		return l, err
	} else if err != notFoundError {
		return label.NoLabel, err
	}

	rel := path.Dir(imp)
	if rel == "." {
		rel = ""
	}
	name := RuleName(rel)
	return label.New("", rel, name), nil
}

func resolveWithIndex(ix *resolve.RuleIndex, imp string, from label.Label) (label.Label, error) {
	matches := ix.FindRulesByImport(resolve.ImportSpec{Lang: "proto", Imp: imp}, "proto")
	if len(matches) == 0 {
		return label.NoLabel, notFoundError
	}
	if len(matches) > 1 {
		return label.NoLabel, fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", matches[0].Label, matches[1].Label, imp, from)
	}
	if matches[0].IsSelfImport(from) {
		return label.NoLabel, skipImportError
	}
	return matches[0].Label, nil
}
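As a quick illustration of the fallback branch at the end of resolveProto above (taken when an import is neither a known import nor found in the rule index), the label is derived purely from the import's directory. The helper below is a stand-alone approximation only; ruleName is a hypothetical stand-in for Gazelle's real RuleName helper and may not match its exact naming convention.

package main

import (
	"fmt"
	"path"
	"strings"
)

// ruleName approximates naming a proto_library after its directory; the
// real RuleName in Gazelle may use a different convention.
func ruleName(rel string) string {
	base := path.Base(rel)
	if base == "." || base == "" || base == "/" {
		base = "root"
	}
	return base + "_proto"
}

// fallbackLabel sketches the last branch of resolveProto: build an
// in-repo label from the directory of the imported .proto file.
func fallbackLabel(imp string) (string, error) {
	if !strings.HasSuffix(imp, ".proto") {
		return "", fmt.Errorf("can't import non-proto: %q", imp)
	}
	rel := path.Dir(imp)
	if rel == "." {
		rel = ""
	}
	return fmt.Sprintf("//%s:%s", rel, ruleName(rel)), nil
}

func main() {
	l, _ := fallbackLabel("google/api/http.proto")
	fmt.Println(l) // //google/api:api_proto
}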
6
vendor/github.com/bazelbuild/bazel-gazelle/internal/merger/BUILD
generated
vendored
@@ -9,11 +9,7 @@ go_library(
     importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/merger",
     importpath = "github.com/bazelbuild/bazel-gazelle/internal/merger",
     visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
-    deps = [
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
-        "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
-    ],
+    deps = ["//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library"],
 )
 
 filegroup(
955
vendor/github.com/bazelbuild/bazel-gazelle/internal/merger/fix.go
generated
vendored
File diff suppressed because it is too large
902
vendor/github.com/bazelbuild/bazel-gazelle/internal/merger/merger.go
generated
vendored
@@ -18,662 +18,94 @@ package merger
|
|||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"log"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/label"
|
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const keep = "keep" // marker in srcs or deps to tell gazelle to preserve.
|
// Phase indicates which attributes should be merged in matching rules.
|
||||||
|
//
|
||||||
|
// The pre-resolve merge is performed before rules are indexed for dependency
|
||||||
|
// resolution. All attributes not related to dependencies are merged. This
|
||||||
|
// merge must be performed indexing because attributes related to indexing
|
||||||
|
// (e.g., srcs, importpath) will be affected.
|
||||||
|
//
|
||||||
|
// The post-resolve merge is performed after rules are indexed. All attributes
|
||||||
|
// related to dependencies are merged.
|
||||||
|
type Phase int
|
||||||
|
|
||||||
// MergableAttrs is the set of attribute names for each kind of rule that
|
const (
|
||||||
// may be merged. When an attribute is mergeable, a generated value may
|
PreResolve Phase = iota
|
||||||
// replace or augment an existing value. If an attribute is not mergeable,
|
PostResolve
|
||||||
// existing values are preserved. Generated non-mergeable attributes may
|
|
||||||
// still be added to a rule if there is no corresponding existing attribute.
|
|
||||||
type MergeableAttrs map[string]map[string]bool
|
|
||||||
|
|
||||||
var (
|
|
||||||
// PreResolveAttrs is the set of attributes that should be merged before
|
|
||||||
// dependency resolution, i.e., everything except deps.
|
|
||||||
PreResolveAttrs MergeableAttrs
|
|
||||||
|
|
||||||
// PostResolveAttrs is the set of attributes that should be merged after
|
|
||||||
// dependency resolution, i.e., deps.
|
|
||||||
PostResolveAttrs MergeableAttrs
|
|
||||||
|
|
||||||
// RepoAttrs is the set of attributes that should be merged in repository
|
|
||||||
// rules in WORKSPACE.
|
|
||||||
RepoAttrs MergeableAttrs
|
|
||||||
|
|
||||||
// nonEmptyAttrs is the set of attributes that disqualify a rule from being
|
|
||||||
// deleted after merge.
|
|
||||||
nonEmptyAttrs MergeableAttrs
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func init() {
|
|
||||||
PreResolveAttrs = make(MergeableAttrs)
|
|
||||||
PostResolveAttrs = make(MergeableAttrs)
|
|
||||||
RepoAttrs = make(MergeableAttrs)
|
|
||||||
nonEmptyAttrs = make(MergeableAttrs)
|
|
||||||
for _, set := range []struct {
|
|
||||||
mergeableAttrs MergeableAttrs
|
|
||||||
kinds, attrs []string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
mergeableAttrs: PreResolveAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_library",
|
|
||||||
"go_binary",
|
|
||||||
"go_test",
|
|
||||||
"go_proto_library",
|
|
||||||
"proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"srcs",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: PreResolveAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_library",
|
|
||||||
"go_proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"importpath",
|
|
||||||
"importmap",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: PreResolveAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_library",
|
|
||||||
"go_binary",
|
|
||||||
"go_test",
|
|
||||||
"go_proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"cgo",
|
|
||||||
"clinkopts",
|
|
||||||
"copts",
|
|
||||||
"embed",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: PreResolveAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"proto",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: PostResolveAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_library",
|
|
||||||
"go_binary",
|
|
||||||
"go_test",
|
|
||||||
"go_proto_library",
|
|
||||||
"proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"deps",
|
|
||||||
config.GazelleImportsKey,
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: RepoAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_repository",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"commit",
|
|
||||||
"importpath",
|
|
||||||
"remote",
|
|
||||||
"sha256",
|
|
||||||
"strip_prefix",
|
|
||||||
"tag",
|
|
||||||
"type",
|
|
||||||
"urls",
|
|
||||||
"vcs",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: nonEmptyAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_binary",
|
|
||||||
"go_library",
|
|
||||||
"go_test",
|
|
||||||
"proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"srcs",
|
|
||||||
"deps",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: nonEmptyAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_binary",
|
|
||||||
"go_library",
|
|
||||||
"go_test",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"embed",
|
|
||||||
},
|
|
||||||
}, {
|
|
||||||
mergeableAttrs: nonEmptyAttrs,
|
|
||||||
kinds: []string{
|
|
||||||
"go_proto_library",
|
|
||||||
},
|
|
||||||
attrs: []string{
|
|
||||||
"proto",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
} {
|
|
||||||
for _, kind := range set.kinds {
|
|
||||||
if set.mergeableAttrs[kind] == nil {
|
|
||||||
set.mergeableAttrs[kind] = make(map[string]bool)
|
|
||||||
}
|
|
||||||
for _, attr := range set.attrs {
|
|
||||||
set.mergeableAttrs[kind][attr] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// MergeFile merges the rules in genRules with matching rules in f and
|
// MergeFile merges the rules in genRules with matching rules in f and
|
||||||
// adds unmatched rules to the end of the merged file. MergeFile also merges
|
// adds unmatched rules to the end of the merged file. MergeFile also merges
|
||||||
// rules in empty with matching rules in f and deletes rules that
|
// rules in empty with matching rules in f and deletes rules that
|
||||||
// are empty after merging. attrs is the set of attributes to merge. Attributes
|
// are empty after merging. attrs is the set of attributes to merge. Attributes
|
||||||
// not in this set will be left alone if they already exist.
|
// not in this set will be left alone if they already exist.
|
||||||
func MergeFile(genRules []bf.Expr, empty []bf.Expr, f *bf.File, attrs MergeableAttrs) (mergedRules []bf.Expr) {
|
func MergeFile(oldFile *rule.File, emptyRules, genRules []*rule.Rule, phase Phase, kinds map[string]rule.KindInfo) {
|
||||||
|
getMergeAttrs := func(r *rule.Rule) map[string]bool {
|
||||||
|
if phase == PreResolve {
|
||||||
|
return kinds[r.Kind()].MergeableAttrs
|
||||||
|
} else {
|
||||||
|
return kinds[r.Kind()].ResolveAttrs
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// Merge empty rules into the file and delete any rules which become empty.
|
// Merge empty rules into the file and delete any rules which become empty.
|
||||||
var deletedIndices []int
|
for _, emptyRule := range emptyRules {
|
||||||
for _, s := range empty {
|
if oldRule, _ := match(oldFile.Rules, emptyRule, kinds[emptyRule.Kind()]); oldRule != nil {
|
||||||
emptyCall := s.(*bf.CallExpr)
|
if oldRule.ShouldKeep() {
|
||||||
if oldCall, i, _ := match(f.Stmt, emptyCall); oldCall != nil {
|
continue
|
||||||
mergedRule := mergeRule(emptyCall, oldCall, attrs, f.Path)
|
}
|
||||||
if isRuleEmpty(mergedRule) {
|
rule.MergeRules(emptyRule, oldRule, getMergeAttrs(emptyRule), oldFile.Path)
|
||||||
deletedIndices = append(deletedIndices, i)
|
if oldRule.IsEmpty(kinds[oldRule.Kind()]) {
|
||||||
} else {
|
oldRule.Delete()
|
||||||
f.Stmt[i] = mergedRule
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if len(deletedIndices) > 0 {
|
oldFile.Sync()
|
||||||
sort.Ints(deletedIndices)
|
|
||||||
f.Stmt = deleteIndices(f.Stmt, deletedIndices)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Match generated rules with existing rules in the file. Keep track of
|
// Match generated rules with existing rules in the file. Keep track of
|
||||||
// rules with non-standard names.
|
// rules with non-standard names.
|
||||||
matchIndices := make([]int, len(genRules))
|
matchRules := make([]*rule.Rule, len(genRules))
|
||||||
matchErrors := make([]error, len(genRules))
|
matchErrors := make([]error, len(genRules))
|
||||||
substitutions := make(map[string]string)
|
substitutions := make(map[string]string)
|
||||||
for i, s := range genRules {
|
for i, genRule := range genRules {
|
||||||
genCall := s.(*bf.CallExpr)
|
oldRule, err := match(oldFile.Rules, genRule, kinds[genRule.Kind()])
|
||||||
oldCall, oldIndex, err := match(f.Stmt, genCall)
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// TODO(jayconrod): add a verbose mode and log errors. They are too chatty
|
// TODO(jayconrod): add a verbose mode and log errors. They are too chatty
|
||||||
// to print by default.
|
// to print by default.
|
||||||
matchErrors[i] = err
|
matchErrors[i] = err
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
matchIndices[i] = oldIndex // < 0 indicates no match
|
matchRules[i] = oldRule
|
||||||
if oldCall != nil {
|
if oldRule != nil {
|
||||||
oldRule := bf.Rule{Call: oldCall}
|
if oldRule.Name() != genRule.Name() {
|
||||||
genRule := bf.Rule{Call: genCall}
|
substitutions[genRule.Name()] = oldRule.Name()
|
||||||
oldName := oldRule.Name()
|
|
||||||
genName := genRule.Name()
|
|
||||||
if oldName != genName {
|
|
||||||
substitutions[genName] = oldName
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Rename labels in generated rules that refer to other generated rules.
|
// Rename labels in generated rules that refer to other generated rules.
|
||||||
if len(substitutions) > 0 {
|
if len(substitutions) > 0 {
|
||||||
genRules = append([]bf.Expr{}, genRules...)
|
for _, genRule := range genRules {
|
||||||
for i, s := range genRules {
|
substituteRule(genRule, substitutions, kinds[genRule.Kind()])
|
||||||
genRules[i] = substituteRule(s.(*bf.CallExpr), substitutions)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Merge generated rules with existing rules or append to the end of the file.
|
// Merge generated rules with existing rules or append to the end of the file.
|
||||||
for i := range genRules {
|
for i, genRule := range genRules {
|
||||||
if matchErrors[i] != nil {
|
if matchErrors[i] != nil {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
if matchIndices[i] < 0 {
|
if matchRules[i] == nil {
|
||||||
f.Stmt = append(f.Stmt, genRules[i])
|
genRule.Insert(oldFile)
|
||||||
mergedRules = append(mergedRules, genRules[i])
|
|
||||||
} else {
|
} else {
|
||||||
mergedRule := mergeRule(genRules[i].(*bf.CallExpr), f.Stmt[matchIndices[i]].(*bf.CallExpr), attrs, f.Path)
|
rule.MergeRules(genRule, matchRules[i], getMergeAttrs(genRule), oldFile.Path)
|
||||||
f.Stmt[matchIndices[i]] = mergedRule
|
|
||||||
mergedRules = append(mergedRules, mergedRule)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return mergedRules
|
|
||||||
}
|
|
||||||
|
|
||||||
// mergeRule combines information from gen and old and returns an updated rule.
|
|
||||||
// Both rules must be non-nil and must have the same kind and same name.
|
|
||||||
// attrs is the set of attributes which may be merged.
|
|
||||||
// If nil is returned, the rule should be deleted.
|
|
||||||
func mergeRule(gen, old *bf.CallExpr, attrs MergeableAttrs, filename string) bf.Expr {
|
|
||||||
if old != nil && shouldKeep(old) {
|
|
||||||
return old
|
|
||||||
}
|
|
||||||
|
|
||||||
genRule := bf.Rule{Call: gen}
|
|
||||||
oldRule := bf.Rule{Call: old}
|
|
||||||
merged := *old
|
|
||||||
merged.List = nil
|
|
||||||
mergedRule := bf.Rule{Call: &merged}
|
|
||||||
|
|
||||||
// Copy unnamed arguments from the old rule without merging. The only rule
|
|
||||||
// generated with unnamed arguments is go_prefix, which we currently
|
|
||||||
// leave in place.
|
|
||||||
// TODO: maybe gazelle should allow the prefix to be changed.
|
|
||||||
for _, a := range old.List {
|
|
||||||
if b, ok := a.(*bf.BinaryExpr); ok && b.Op == "=" {
|
|
||||||
break
|
|
||||||
}
|
|
||||||
merged.List = append(merged.List, a)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge attributes from the old rule. Preserve comments on old attributes.
|
|
||||||
// Assume generated attributes have no comments.
|
|
||||||
kind := oldRule.Kind()
|
|
||||||
for _, k := range oldRule.AttrKeys() {
|
|
||||||
oldAttr := oldRule.AttrDefn(k)
|
|
||||||
if !attrs[kind][k] || shouldKeep(oldAttr) {
|
|
||||||
merged.List = append(merged.List, oldAttr)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
oldExpr := oldAttr.Y
|
|
||||||
genExpr := genRule.Attr(k)
|
|
||||||
mergedExpr, err := mergeExpr(genExpr, oldExpr)
|
|
||||||
if err != nil {
|
|
||||||
start, end := oldExpr.Span()
|
|
||||||
log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
|
|
||||||
mergedExpr = oldExpr
|
|
||||||
}
|
|
||||||
if mergedExpr != nil {
|
|
||||||
mergedAttr := *oldAttr
|
|
||||||
mergedAttr.Y = mergedExpr
|
|
||||||
merged.List = append(merged.List, &mergedAttr)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge attributes from genRule that we haven't processed already.
|
|
||||||
for _, k := range genRule.AttrKeys() {
|
|
||||||
if mergedRule.Attr(k) == nil {
|
|
||||||
mergedRule.SetAttr(k, genRule.Attr(k))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return &merged
|
|
||||||
}
|
|
||||||
|
|
||||||
// mergeExpr combines information from gen and old and returns an updated
|
|
||||||
// expression. The following kinds of expressions are recognized:
|
|
||||||
//
|
|
||||||
// * nil
|
|
||||||
// * strings (can only be merged with strings)
|
|
||||||
// * lists of strings
|
|
||||||
// * a call to select with a dict argument. The dict keys must be strings,
|
|
||||||
// and the values must be lists of strings.
|
|
||||||
// * a list of strings combined with a select call using +. The list must
|
|
||||||
// be the left operand.
|
|
||||||
//
|
|
||||||
// An error is returned if the expressions can't be merged, for example
|
|
||||||
// because they are not in one of the above formats.
|
|
||||||
func mergeExpr(gen, old bf.Expr) (bf.Expr, error) {
|
|
||||||
if shouldKeep(old) {
|
|
||||||
return old, nil
|
|
||||||
}
|
|
||||||
if gen == nil && (old == nil || isScalar(old)) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
if isScalar(gen) {
|
|
||||||
return gen, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
genExprs, err := extractPlatformStringsExprs(gen)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
oldExprs, err := extractPlatformStringsExprs(old)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
mergedExprs, err := mergePlatformStringsExprs(genExprs, oldExprs)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return makePlatformStringsExpr(mergedExprs), nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// platformStringsExprs is a set of sub-expressions that match the structure
|
|
||||||
// of package.PlatformStrings. rules.Generator produces expressions that
|
|
||||||
// follow this structure for srcs, deps, and other attributes, so this matches
|
|
||||||
// all non-scalar expressions generated by Gazelle.
|
|
||||||
//
|
|
||||||
// The matched expression has the form:
|
|
||||||
//
|
|
||||||
// [] + select({}) + select({}) + select({})
|
|
||||||
//
|
|
||||||
// The four collections may appear in any order, and some or all of them may
|
|
||||||
// be omitted (all fields are nil for a nil expression).
|
|
||||||
type platformStringsExprs struct {
|
|
||||||
generic *bf.ListExpr
|
|
||||||
os, arch, platform *bf.DictExpr
|
|
||||||
}
|
|
||||||
|
|
||||||
// extractPlatformStringsExprs matches an expression and attempts to extract
|
|
||||||
// sub-expressions in platformStringsExprs. The sub-expressions can then be
|
|
||||||
// merged with corresponding sub-expressions. Any field in the returned
|
|
||||||
// structure may be nil. An error is returned if the given expression does
|
|
||||||
// not follow the pattern described by platformStringsExprs.
|
|
||||||
func extractPlatformStringsExprs(expr bf.Expr) (platformStringsExprs, error) {
|
|
||||||
var ps platformStringsExprs
|
|
||||||
if expr == nil {
|
|
||||||
return ps, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Break the expression into a sequence of expressions combined with +.
|
|
||||||
var parts []bf.Expr
|
|
||||||
for {
|
|
||||||
binop, ok := expr.(*bf.BinaryExpr)
|
|
||||||
if !ok {
|
|
||||||
parts = append(parts, expr)
|
|
||||||
break
|
|
||||||
}
|
|
||||||
parts = append(parts, binop.Y)
|
|
||||||
expr = binop.X
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process each part. They may be in any order.
|
|
||||||
for _, part := range parts {
|
|
||||||
switch part := part.(type) {
|
|
||||||
case *bf.ListExpr:
|
|
||||||
if ps.generic != nil {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: multiple list expressions")
|
|
||||||
}
|
|
||||||
ps.generic = part
|
|
||||||
|
|
||||||
case *bf.CallExpr:
|
|
||||||
x, ok := part.X.(*bf.LiteralExpr)
|
|
||||||
if !ok || x.Token != "select" || len(part.List) != 1 {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: callee other than select or wrong number of args")
|
|
||||||
}
|
|
||||||
arg, ok := part.List[0].(*bf.DictExpr)
|
|
||||||
if !ok {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: select argument not dict")
|
|
||||||
}
|
|
||||||
var dict **bf.DictExpr
|
|
||||||
for _, item := range arg.List {
|
|
||||||
kv := item.(*bf.KeyValueExpr) // parser guarantees this
|
|
||||||
k, ok := kv.Key.(*bf.StringExpr)
|
|
||||||
if !ok {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict keys are not all strings")
|
|
||||||
}
|
|
||||||
if k.Value == "//conditions:default" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
key, err := label.Parse(k.Value)
|
|
||||||
if err != nil {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict key is not label: %q", k.Value)
|
|
||||||
}
|
|
||||||
if config.KnownOSSet[key.Name] {
|
|
||||||
dict = &ps.os
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if config.KnownArchSet[key.Name] {
|
|
||||||
dict = &ps.arch
|
|
||||||
break
|
|
||||||
}
|
|
||||||
osArch := strings.Split(key.Name, "_")
|
|
||||||
if len(osArch) != 2 || !config.KnownOSSet[osArch[0]] || !config.KnownArchSet[osArch[1]] {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: dict key contains unknown platform: %q", k.Value)
|
|
||||||
}
|
|
||||||
dict = &ps.platform
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if dict == nil {
|
|
||||||
// We could not identify the dict because it's empty or only contains
|
|
||||||
// //conditions:default. We'll call it the platform dict to avoid
|
|
||||||
// dropping it.
|
|
||||||
dict = &ps.platform
|
|
||||||
}
|
|
||||||
if *dict != nil {
|
|
||||||
return platformStringsExprs{}, fmt.Errorf("expression could not be matched: multiple selects that are either os-specific, arch-specific, or platform-specific")
|
|
||||||
}
|
|
||||||
*dict = arg
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ps, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// makePlatformStringsExpr constructs a single expression from the
|
|
||||||
// sub-expressions in ps.
|
|
||||||
func makePlatformStringsExpr(ps platformStringsExprs) bf.Expr {
|
|
||||||
makeSelect := func(dict *bf.DictExpr) bf.Expr {
|
|
||||||
return &bf.CallExpr{
|
|
||||||
X: &bf.LiteralExpr{Token: "select"},
|
|
||||||
List: []bf.Expr{dict},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
forceMultiline := func(e bf.Expr) {
|
|
||||||
switch e := e.(type) {
|
|
||||||
case *bf.ListExpr:
|
|
||||||
e.ForceMultiLine = true
|
|
||||||
case *bf.CallExpr:
|
|
||||||
e.List[0].(*bf.DictExpr).ForceMultiLine = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var parts []bf.Expr
|
|
||||||
if ps.generic != nil {
|
|
||||||
parts = append(parts, ps.generic)
|
|
||||||
}
|
|
||||||
if ps.os != nil {
|
|
||||||
parts = append(parts, makeSelect(ps.os))
|
|
||||||
}
|
|
||||||
if ps.arch != nil {
|
|
||||||
parts = append(parts, makeSelect(ps.arch))
|
|
||||||
}
|
|
||||||
if ps.platform != nil {
|
|
||||||
parts = append(parts, makeSelect(ps.platform))
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(parts) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
if len(parts) == 1 {
|
|
||||||
return parts[0]
|
|
||||||
}
|
|
||||||
expr := parts[0]
|
|
||||||
forceMultiline(expr)
|
|
||||||
for _, part := range parts[1:] {
|
|
||||||
forceMultiline(part)
|
|
||||||
expr = &bf.BinaryExpr{
|
|
||||||
Op: "+",
|
|
||||||
X: expr,
|
|
||||||
Y: part,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return expr
|
|
||||||
}
|
|
||||||
|
|
||||||
func mergePlatformStringsExprs(gen, old platformStringsExprs) (platformStringsExprs, error) {
|
|
||||||
var ps platformStringsExprs
|
|
||||||
var err error
|
|
||||||
ps.generic = mergeList(gen.generic, old.generic)
|
|
||||||
if ps.os, err = mergeDict(gen.os, old.os); err != nil {
|
|
||||||
return platformStringsExprs{}, err
|
|
||||||
}
|
|
||||||
if ps.arch, err = mergeDict(gen.arch, old.arch); err != nil {
|
|
||||||
return platformStringsExprs{}, err
|
|
||||||
}
|
|
||||||
if ps.platform, err = mergeDict(gen.platform, old.platform); err != nil {
|
|
||||||
return platformStringsExprs{}, err
|
|
||||||
}
|
|
||||||
return ps, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func mergeList(gen, old *bf.ListExpr) *bf.ListExpr {
|
|
||||||
if old == nil {
|
|
||||||
return gen
|
|
||||||
}
|
|
||||||
if gen == nil {
|
|
||||||
gen = &bf.ListExpr{List: []bf.Expr{}}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build a list of strings from the gen list and keep matching strings
|
|
||||||
// in the old list. This preserves comments. Also keep anything with
|
|
||||||
// a "# keep" comment, whether or not it's in the gen list.
|
|
||||||
genSet := make(map[string]bool)
|
|
||||||
for _, v := range gen.List {
|
|
||||||
if s := stringValue(v); s != "" {
|
|
||||||
genSet[s] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var merged []bf.Expr
|
|
||||||
kept := make(map[string]bool)
|
|
||||||
keepComment := false
|
|
||||||
for _, v := range old.List {
|
|
||||||
s := stringValue(v)
|
|
||||||
if keep := shouldKeep(v); keep || genSet[s] {
|
|
||||||
keepComment = keepComment || keep
|
|
||||||
merged = append(merged, v)
|
|
||||||
if s != "" {
|
|
||||||
kept[s] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add anything in the gen list that wasn't kept.
|
|
||||||
for _, v := range gen.List {
|
|
||||||
if s := stringValue(v); kept[s] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
merged = append(merged, v)
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(merged) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return &bf.ListExpr{
|
|
||||||
List: merged,
|
|
||||||
ForceMultiLine: gen.ForceMultiLine || old.ForceMultiLine || keepComment,
|
|
||||||
}
|
|
||||||
}
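// Aside: a hypothetical, string-level sketch (not the vendored implementation)
// of the merge behavior described above: old entries survive if they are still
// generated or marked "# keep", and new generated entries are appended.
package main

import "fmt"

func mergeStrings(gen, old []string, keep map[string]bool) []string {
	genSet := make(map[string]bool)
	for _, s := range gen {
		genSet[s] = true
	}
	kept := make(map[string]bool)
	var merged []string
	for _, s := range old {
		if keep[s] || genSet[s] {
			merged = append(merged, s)
			kept[s] = true
		}
	}
	for _, s := range gen {
		if !kept[s] {
			merged = append(merged, s)
		}
	}
	return merged
}

func main() {
	gen := []string{"a.go", "b.go"}
	old := []string{"b.go", "c.go", "d.go"}
	keep := map[string]bool{"c.go": true} // pretend c.go carries a "# keep" comment
	fmt.Println(mergeStrings(gen, old, keep)) // [b.go c.go a.go]
}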
|
|
||||||
|
|
||||||
func mergeDict(gen, old *bf.DictExpr) (*bf.DictExpr, error) {
|
|
||||||
if old == nil {
|
|
||||||
return gen, nil
|
|
||||||
}
|
|
||||||
if gen == nil {
|
|
||||||
gen = &bf.DictExpr{List: []bf.Expr{}}
|
|
||||||
}
|
|
||||||
|
|
||||||
var entries []*dictEntry
|
|
||||||
entryMap := make(map[string]*dictEntry)
|
|
||||||
|
|
||||||
for _, kv := range old.List {
|
|
||||||
k, v, err := dictEntryKeyValue(kv)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
if _, ok := entryMap[k]; ok {
|
|
||||||
return nil, fmt.Errorf("old dict contains more than one case named %q", k)
|
|
||||||
}
|
|
||||||
e := &dictEntry{key: k, oldValue: v}
|
|
||||||
entries = append(entries, e)
|
|
||||||
entryMap[k] = e
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, kv := range gen.List {
|
|
||||||
k, v, err := dictEntryKeyValue(kv)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
e, ok := entryMap[k]
|
|
||||||
if !ok {
|
|
||||||
e = &dictEntry{key: k}
|
|
||||||
entries = append(entries, e)
|
|
||||||
entryMap[k] = e
|
|
||||||
}
|
|
||||||
e.genValue = v
|
|
||||||
}
|
|
||||||
|
|
||||||
keys := make([]string, 0, len(entries))
|
|
||||||
haveDefault := false
|
|
||||||
for _, e := range entries {
|
|
||||||
e.mergedValue = mergeList(e.genValue, e.oldValue)
|
|
||||||
if e.key == "//conditions:default" {
|
|
||||||
// Keep the default case, even if it's empty.
|
|
||||||
haveDefault = true
|
|
||||||
if e.mergedValue == nil {
|
|
||||||
e.mergedValue = &bf.ListExpr{}
|
|
||||||
}
|
|
||||||
} else if e.mergedValue != nil {
|
|
||||||
keys = append(keys, e.key)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(keys) == 0 && (!haveDefault || len(entryMap["//conditions:default"].mergedValue.List) == 0) {
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
sort.Strings(keys)
|
|
||||||
// Always put the default case last.
|
|
||||||
if haveDefault {
|
|
||||||
keys = append(keys, "//conditions:default")
|
|
||||||
}
|
|
||||||
|
|
||||||
mergedEntries := make([]bf.Expr, len(keys))
|
|
||||||
for i, k := range keys {
|
|
||||||
e := entryMap[k]
|
|
||||||
mergedEntries[i] = &bf.KeyValueExpr{
|
|
||||||
Key: &bf.StringExpr{Value: e.key},
|
|
||||||
Value: e.mergedValue,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return &bf.DictExpr{List: mergedEntries, ForceMultiLine: true}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
type dictEntry struct {
|
|
||||||
key string
|
|
||||||
oldValue, genValue, mergedValue *bf.ListExpr
|
|
||||||
}
|
|
||||||
|
|
||||||
func dictEntryKeyValue(e bf.Expr) (string, *bf.ListExpr, error) {
|
|
||||||
kv, ok := e.(*bf.KeyValueExpr)
|
|
||||||
if !ok {
|
|
||||||
return "", nil, fmt.Errorf("dict entry was not a key-value pair: %#v", e)
|
|
||||||
}
|
|
||||||
k, ok := kv.Key.(*bf.StringExpr)
|
|
||||||
if !ok {
|
|
||||||
return "", nil, fmt.Errorf("dict key was not string: %#v", kv.Key)
|
|
||||||
}
|
|
||||||
v, ok := kv.Value.(*bf.ListExpr)
|
|
||||||
if !ok {
|
|
||||||
return "", nil, fmt.Errorf("dict value was not list: %#v", kv.Value)
|
|
||||||
}
|
|
||||||
return k.Value, v, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// substituteAttrs contains a list of attributes for each kind that should be
|
|
||||||
// processed by substituteRule and substituteExpr. Note that "name" does not
|
|
||||||
// need to be substituted since it's not mergeable.
|
|
||||||
var substituteAttrs = map[string][]string{
|
|
||||||
"go_binary": {"embed"},
|
|
||||||
"go_library": {"embed"},
|
|
||||||
"go_test": {"embed"},
|
|
||||||
"go_proto_library": {"proto"},
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// substituteRule replaces local labels (those beginning with ":", referring to
|
// substituteRule replaces local labels (those beginning with ":", referring to
|
||||||
@@ -681,247 +113,87 @@ var substituteAttrs = map[string][]string{
|
|||||||
// to update generated rules before merging when the corresponding existing
|
// to update generated rules before merging when the corresponding existing
|
||||||
// rules have different names. If substituteRule replaces a string, it returns
|
// rules have different names. If substituteRule replaces a string, it returns
|
||||||
// a new expression; it will not modify the original expression.
|
// a new expression; it will not modify the original expression.
|
||||||
func substituteRule(call *bf.CallExpr, substitutions map[string]string) *bf.CallExpr {
|
func substituteRule(r *rule.Rule, substitutions map[string]string, info rule.KindInfo) {
|
||||||
rule := bf.Rule{Call: call}
|
for attr := range info.SubstituteAttrs {
|
||||||
attrs, ok := substituteAttrs[rule.Kind()]
|
if expr := r.Attr(attr); expr != nil {
|
||||||
if !ok {
|
expr = rule.MapExprStrings(expr, func(s string) string {
|
||||||
return call
|
if rename, ok := substitutions[strings.TrimPrefix(s, ":")]; ok {
|
||||||
}
|
return ":" + rename
|
||||||
|
} else {
|
||||||
didCopy := false
|
return s
|
||||||
for i, arg := range call.List {
|
|
||||||
kv, ok := arg.(*bf.BinaryExpr)
|
|
||||||
if !ok || kv.Op != "=" {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
key, ok := kv.X.(*bf.LiteralExpr)
|
|
||||||
shouldRename := false
|
|
||||||
for _, k := range attrs {
|
|
||||||
shouldRename = shouldRename || key.Token == k
|
|
||||||
}
|
|
||||||
if !shouldRename {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
value := substituteExpr(kv.Y, substitutions)
|
|
||||||
if value != kv.Y {
|
|
||||||
if !didCopy {
|
|
||||||
didCopy = true
|
|
||||||
callCopy := *call
|
|
||||||
callCopy.List = append([]bf.Expr{}, call.List...)
|
|
||||||
call = &callCopy
|
|
||||||
}
|
|
||||||
kvCopy := *kv
|
|
||||||
kvCopy.Y = value
|
|
||||||
call.List[i] = &kvCopy
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return call
|
|
||||||
}
|
|
||||||
|
|
||||||
// substituteExpr replaces local labels according to a substitution map.
|
|
||||||
// It only supports string and list expressions (which should be sufficient
|
|
||||||
// for generated rules). If it replaces a string, it returns a new expression;
|
|
||||||
// otherwise, it returns e.
|
|
||||||
func substituteExpr(e bf.Expr, substitutions map[string]string) bf.Expr {
|
|
||||||
switch e := e.(type) {
|
|
||||||
case *bf.StringExpr:
|
|
||||||
if rename, ok := substitutions[strings.TrimPrefix(e.Value, ":")]; ok {
|
|
||||||
return &bf.StringExpr{Value: ":" + rename}
|
|
||||||
}
|
|
||||||
case *bf.ListExpr:
|
|
||||||
var listCopy *bf.ListExpr
|
|
||||||
for i, elem := range e.List {
|
|
||||||
renamed := substituteExpr(elem, substitutions)
|
|
||||||
if renamed != elem {
|
|
||||||
if listCopy == nil {
|
|
||||||
listCopy = new(bf.ListExpr)
|
|
||||||
*listCopy = *e
|
|
||||||
listCopy.List = append([]bf.Expr{}, e.List...)
|
|
||||||
}
|
}
|
||||||
listCopy.List[i] = renamed
|
})
|
||||||
}
|
r.SetAttr(attr, expr)
|
||||||
}
|
|
||||||
if listCopy != nil {
|
|
||||||
return listCopy
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return e
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// shouldKeep returns whether an expression from the original file should be
|
// match searches for a rule that can be merged with x in rules.
|
||||||
// preserved. This is true if it has a prefix or end-of-line comment "keep".
|
|
||||||
// Note that bf.Rewrite recognizes "keep sorted" comments which are different,
|
|
||||||
// so we don't recognize comments that only start with "keep".
|
|
||||||
func shouldKeep(e bf.Expr) bool {
|
|
||||||
for _, c := range append(e.Comment().Before, e.Comment().Suffix...) {
|
|
||||||
text := strings.TrimSpace(strings.TrimPrefix(c.Token, "#"))
|
|
||||||
if text == keep {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
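// Aside: a small standalone sketch (not the vendored code) of the "# keep"
// comment check described above; only an exact "keep" token is honored.
package main

import (
	"fmt"
	"strings"
)

func keepDirective(comments []string) bool {
	for _, c := range comments {
		if strings.TrimSpace(strings.TrimPrefix(c, "#")) == "keep" {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(keepDirective([]string{"# keep"}))        // true
	fmt.Println(keepDirective([]string{"# keep sorted"})) // false: different directive
}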
|
|
||||||
|
|
||||||
// matchAttrs contains lists of attributes for each kind that are used in
|
|
||||||
// matching. For example, importpath attributes can be used to match go_library
|
|
||||||
// rules, even when the names are different.
|
|
||||||
var matchAttrs = map[string][]string{
|
|
||||||
"go_library": {"importpath"},
|
|
||||||
"go_proto_library": {"importpath"},
|
|
||||||
"go_repository": {"importpath"},
|
|
||||||
}
|
|
||||||
|
|
||||||
// matchAny is a set of kinds which may be matched regardless of attributes.
|
|
||||||
// For example, if there is only one go_binary in a package, any go_binary
|
|
||||||
// rule will match.
|
|
||||||
var matchAny = map[string]bool{"go_binary": true}
|
|
||||||
|
|
||||||
// match searches for a rule that can be merged with x in stmts.
|
|
||||||
//
|
//
|
||||||
// A rule is considered a match if its kind is equal to x's kind AND either its
|
// A rule is considered a match if its kind is equal to x's kind AND either its
|
||||||
// name is equal OR at least one of the attributes in matchAttrs is equal.
|
// name is equal OR at least one of the attributes in matchAttrs is equal.
|
||||||
//
|
//
|
||||||
// If there are no matches, nil, -1, and nil are returned.
|
// If there are no matches, nil and nil are returned.
|
||||||
//
|
//
|
||||||
// If a rule has the same name but a different kind, nil, -1, and an error
|
// If a rule has the same name but a different kind, nil and an error
|
||||||
// are returned.
|
// are returned.
|
||||||
//
|
//
|
||||||
// If there is exactly one match, the rule, its index in stmts, and nil
|
// If there is exactly one match, the rule and nil are returned.
|
||||||
// are returned.
|
|
||||||
//
|
//
|
||||||
// If there are multiple matches, match will attempt to disambiguate, based on
|
// If there are multiple matches, match will attempt to disambiguate, based on
|
||||||
// the quality of the match (name match is best, then attribute match in the
|
// the quality of the match (name match is best, then attribute match in the
|
||||||
// order that attributes are listed). If disambiguation is successful,
|
// order that attributes are listed). If disambiguation is successful,
|
||||||
// the rule, its index in stmts, and nil are returned. Otherwise, nil, -1,
|
// the rule and nil are returned. Otherwise, nil and an error are returned.
|
||||||
// and an error are returned.
|
func match(rules []*rule.Rule, x *rule.Rule, info rule.KindInfo) (*rule.Rule, error) {
|
||||||
func match(stmts []bf.Expr, x *bf.CallExpr) (*bf.CallExpr, int, error) {
|
xname := x.Name()
|
||||||
type matchInfo struct {
|
xkind := x.Kind()
|
||||||
rule bf.Rule
|
var nameMatches []*rule.Rule
|
||||||
index int
|
var kindMatches []*rule.Rule
|
||||||
}
|
for _, y := range rules {
|
||||||
|
if xname == y.Name() {
|
||||||
xr := bf.Rule{Call: x}
|
nameMatches = append(nameMatches, y)
|
||||||
xname := xr.Name()
|
|
||||||
xkind := xr.Kind()
|
|
||||||
var nameMatches []matchInfo
|
|
||||||
var kindMatches []matchInfo
|
|
||||||
for i, s := range stmts {
|
|
||||||
y, ok := s.(*bf.CallExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
yr := bf.Rule{Call: y}
|
if xkind == y.Kind() {
|
||||||
if xname == yr.Name() {
|
kindMatches = append(kindMatches, y)
|
||||||
nameMatches = append(nameMatches, matchInfo{yr, i})
|
|
||||||
}
|
|
||||||
if xkind == yr.Kind() {
|
|
||||||
kindMatches = append(kindMatches, matchInfo{yr, i})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(nameMatches) == 1 {
|
if len(nameMatches) == 1 {
|
||||||
if ykind := nameMatches[0].rule.Kind(); xkind != ykind {
|
y := nameMatches[0]
|
||||||
return nil, -1, fmt.Errorf("could not merge %s(%s): a rule of the same name has kind %s", xkind, xname, ykind)
|
if xkind != y.Kind() {
|
||||||
|
return nil, fmt.Errorf("could not merge %s(%s): a rule of the same name has kind %s", xkind, xname, y.Kind())
|
||||||
}
|
}
|
||||||
return nameMatches[0].rule.Call, nameMatches[0].index, nil
|
return y, nil
|
||||||
}
|
}
|
||||||
if len(nameMatches) > 1 {
|
if len(nameMatches) > 1 {
|
||||||
return nil, -1, fmt.Errorf("could not merge %s(%s): multiple rules have the same name", xkind, xname)
|
return nil, fmt.Errorf("could not merge %s(%s): multiple rules have the same name", xkind, xname)
|
||||||
}
|
}
|
||||||
|
|
||||||
attrs := matchAttrs[xr.Kind()]
|
for _, key := range info.MatchAttrs {
|
||||||
for _, key := range attrs {
|
var attrMatches []*rule.Rule
|
||||||
var attrMatches []matchInfo
|
xvalue := x.AttrString(key)
|
||||||
xvalue := xr.AttrString(key)
|
|
||||||
if xvalue == "" {
|
if xvalue == "" {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
for _, m := range kindMatches {
|
for _, y := range kindMatches {
|
||||||
if xvalue == m.rule.AttrString(key) {
|
if xvalue == y.AttrString(key) {
|
||||||
attrMatches = append(attrMatches, m)
|
attrMatches = append(attrMatches, y)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if len(attrMatches) == 1 {
|
if len(attrMatches) == 1 {
|
||||||
return attrMatches[0].rule.Call, attrMatches[0].index, nil
|
return attrMatches[0], nil
|
||||||
} else if len(attrMatches) > 1 {
|
} else if len(attrMatches) > 1 {
|
||||||
return nil, -1, fmt.Errorf("could not merge %s(%s): multiple rules have the same attribute %s = %q", xkind, xname, key, xvalue)
|
return nil, fmt.Errorf("could not merge %s(%s): multiple rules have the same attribute %s = %q", xkind, xname, key, xvalue)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if matchAny[xkind] {
|
if info.MatchAny {
|
||||||
if len(kindMatches) == 1 {
|
if len(kindMatches) == 1 {
|
||||||
return kindMatches[0].rule.Call, kindMatches[0].index, nil
|
return kindMatches[0], nil
|
||||||
} else if len(kindMatches) > 1 {
|
} else if len(kindMatches) > 1 {
|
||||||
return nil, -1, fmt.Errorf("could not merge %s(%s): multiple rules have the same kind but different names", xkind, xname)
|
return nil, fmt.Errorf("could not merge %s(%s): multiple rules have the same kind but different names", xkind, xname)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil, -1, nil
|
return nil, nil
|
||||||
}
|
|
||||||
|
|
||||||
func kind(c *bf.CallExpr) string {
|
|
||||||
return (&bf.Rule{Call: c}).Kind()
|
|
||||||
}
|
|
||||||
|
|
||||||
func name(c *bf.CallExpr) string {
|
|
||||||
return (&bf.Rule{Call: c}).Name()
|
|
||||||
}
|
|
||||||
|
|
||||||
// isRuleEmpty returns true if a rule cannot be built because it has no sources,
|
|
||||||
// dependencies, or embeds after merging. This is based on a per-kind whitelist
|
|
||||||
// of attributes. Other attributes, like "name" and "visibility" don't affect
|
|
||||||
// emptiness. Always returns false for expressions that aren't in the known
|
|
||||||
// set of rules.
|
|
||||||
func isRuleEmpty(e bf.Expr) bool {
|
|
||||||
c, ok := e.(*bf.CallExpr)
|
|
||||||
if !ok {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
r := bf.Rule{Call: c}
|
|
||||||
kind := r.Kind()
|
|
||||||
if nonEmptyAttrs[kind] == nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, attr := range r.AttrKeys() {
|
|
||||||
if nonEmptyAttrs[kind][attr] {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
func isScalar(e bf.Expr) bool {
|
|
||||||
switch e.(type) {
|
|
||||||
case *bf.StringExpr, *bf.LiteralExpr:
|
|
||||||
return true
|
|
||||||
default:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func stringValue(e bf.Expr) string {
|
|
||||||
s, ok := e.(*bf.StringExpr)
|
|
||||||
if !ok {
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
return s.Value
|
|
||||||
}
|
|
||||||
|
|
||||||
// deleteIndices copies a list, dropping elements at deletedIndices.
|
|
||||||
// deletedIndices must be sorted.
|
|
||||||
func deleteIndices(stmt []bf.Expr, deletedIndices []int) []bf.Expr {
|
|
||||||
if len(deletedIndices) == 0 {
|
|
||||||
return stmt
|
|
||||||
}
|
|
||||||
kept := make([]bf.Expr, 0, len(stmt)-len(deletedIndices))
|
|
||||||
di := 0
|
|
||||||
for i, s := range stmt {
|
|
||||||
if di < len(deletedIndices) && i == deletedIndices[di] {
|
|
||||||
di++
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
kept = append(kept, s)
|
|
||||||
}
|
|
||||||
return kept
|
|
||||||
}
|
}
|
||||||
|
|||||||
399 vendor/github.com/bazelbuild/bazel-gazelle/internal/packages/fileinfo.go (generated, vendored) @@ -1,399 +0,0 @@ (file deleted)
|
|||||||
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package packages
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bufio"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
// fileInfo holds information used to decide how to build a file. This
|
|
||||||
// information comes from the file's name, from package and import declarations
|
|
||||||
// (in .go files), and from +build and cgo comments.
|
|
||||||
type fileInfo struct {
|
|
||||||
path, rel, name, ext string
|
|
||||||
|
|
||||||
// packageName is the Go package name of a .go file, without the
|
|
||||||
// "_test" suffix if it was present. It is empty for non-Go files.
|
|
||||||
packageName string
|
|
||||||
|
|
||||||
// importPath is the canonical import path for this file's package.
|
|
||||||
// This may be read from a package comment (in Go) or a go_package
|
|
||||||
// option (in proto). This field is empty for files that don't specify
|
|
||||||
// an import path.
|
|
||||||
importPath string
|
|
||||||
|
|
||||||
// category is the type of file, based on extension.
|
|
||||||
category extCategory
|
|
||||||
|
|
||||||
// isTest is true if the file stem (the part before the extension)
|
|
||||||
// ends with "_test.go". This is never true for non-Go files.
|
|
||||||
isTest bool
|
|
||||||
|
|
||||||
// imports is a list of packages imported by a file. It does not include
|
|
||||||
// "C" or anything from the standard library.
|
|
||||||
imports []string
|
|
||||||
|
|
||||||
// isCgo is true for .go files that import "C".
|
|
||||||
isCgo bool
|
|
||||||
|
|
||||||
// goos and goarch contain the OS and architecture suffixes in the filename,
|
|
||||||
// if they were present.
|
|
||||||
goos, goarch string
|
|
||||||
|
|
||||||
// tags is a list of build tag lines. Each entry is the trimmed text of
|
|
||||||
// a line after a "+build" prefix.
|
|
||||||
tags []tagLine
|
|
||||||
|
|
||||||
// copts and clinkopts contain flags that are part of CFLAGS, CPPFLAGS,
|
|
||||||
// CXXFLAGS, and LDFLAGS directives in cgo comments.
|
|
||||||
copts, clinkopts []taggedOpts
|
|
||||||
|
|
||||||
// hasServices indicates whether a .proto file has service definitions.
|
|
||||||
hasServices bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// tagLine represents the space-separated disjunction of build tag groups
|
|
||||||
// in a line comment.
|
|
||||||
type tagLine []tagGroup
|
|
||||||
|
|
||||||
// check returns true if at least one of the tag groups is satisfied.
|
|
||||||
func (l tagLine) check(c *config.Config, os, arch string) bool {
|
|
||||||
if len(l) == 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, g := range l {
|
|
||||||
if g.check(c, os, arch) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
// tagGroup represents a comma-separated conjunction of build tags.
|
|
||||||
type tagGroup []string
|
|
||||||
|
|
||||||
// check returns true if all of the tags are true. Tags that start with
|
|
||||||
// "!" are negated (but "!!") is not allowed. Go release tags (e.g., "go1.8")
|
|
||||||
// are ignored. If the group contains an os or arch tag, but the os or arch
|
|
||||||
// parameters are empty, check returns false even if the tag is negated.
|
|
||||||
func (g tagGroup) check(c *config.Config, os, arch string) bool {
|
|
||||||
for _, t := range g {
|
|
||||||
if strings.HasPrefix(t, "!!") { // bad syntax, reject always
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
not := strings.HasPrefix(t, "!")
|
|
||||||
if not {
|
|
||||||
t = t[1:]
|
|
||||||
}
|
|
||||||
if isIgnoredTag(t) {
|
|
||||||
// Release tags are treated as "unknown" and are considered true,
|
|
||||||
// whether or not they are negated.
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
var match bool
|
|
||||||
if _, ok := config.KnownOSSet[t]; ok {
|
|
||||||
if os == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
match = os == t
|
|
||||||
} else if _, ok := config.KnownArchSet[t]; ok {
|
|
||||||
if arch == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
match = arch == t
|
|
||||||
} else {
|
|
||||||
match = c.GenericTags[t]
|
|
||||||
}
|
|
||||||
if not {
|
|
||||||
match = !match
|
|
||||||
}
|
|
||||||
if !match {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
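// Aside: a simplified, hypothetical sketch (not the vendored code) of the
// +build evaluation described above: a line is an OR of space-separated
// groups, each group an AND of comma-separated tags, and "!" negates a tag.
// OS/arch handling and ignored release tags are omitted for brevity.
package main

import (
	"fmt"
	"strings"
)

func lineSatisfied(line string, enabled map[string]bool) bool {
	for _, group := range strings.Fields(line) { // OR over groups
		ok := true
		for _, tag := range strings.Split(group, ",") { // AND within a group
			want := true
			if strings.HasPrefix(tag, "!") {
				want, tag = false, tag[1:]
			}
			if enabled[tag] != want {
				ok = false
				break
			}
		}
		if ok {
			return true
		}
	}
	return false
}

func main() {
	enabled := map[string]bool{"linux": true, "cgo": true}
	fmt.Println(lineSatisfied("linux,cgo darwin", enabled)) // true
	fmt.Println(lineSatisfied("!linux", enabled))           // false
}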
|
|
||||||
|
|
||||||
// taggedOpts is a list of compile or link options which should only be applied
|
|
||||||
// if the given set of build tags are satisfied. These options have already
|
|
||||||
// been tokenized using the same algorithm that "go build" uses, then joined
|
|
||||||
// with OptSeparator.
|
|
||||||
type taggedOpts struct {
|
|
||||||
tags tagLine
|
|
||||||
opts string
|
|
||||||
}
|
|
||||||
|
|
||||||
// OptSeparator is a special character inserted between options that appeared
|
|
||||||
// together in a #cgo directive. This allows options to be split, modified,
|
|
||||||
// and escaped by other packages.
|
|
||||||
//
|
|
||||||
// It's important to keep options grouped together in the same string. For
|
|
||||||
// example, if we have "-framework IOKit" together in a #cgo directive,
|
|
||||||
// "-framework" shouldn't be treated as a separate string for the purposes of
|
|
||||||
// sorting and de-duplicating.
|
|
||||||
const OptSeparator = "\x1D"
|
|
||||||
|
|
||||||
// extCategory indicates how a file should be treated, based on extension.
|
|
||||||
type extCategory int
|
|
||||||
|
|
||||||
const (
|
|
||||||
// ignoredExt is applied to files which are not part of a build.
|
|
||||||
ignoredExt extCategory = iota
|
|
||||||
|
|
||||||
// unsupportedExt is applied to files that we don't support but would be
|
|
||||||
// built with "go build".
|
|
||||||
unsupportedExt
|
|
||||||
|
|
||||||
// goExt is applied to .go files.
|
|
||||||
goExt
|
|
||||||
|
|
||||||
// cExt is applied to C and C++ files.
|
|
||||||
cExt
|
|
||||||
|
|
||||||
// hExt is applied to header files. If cgo code is present, these may be
|
|
||||||
// C or C++ headers. If not, they are treated as Go assembly headers.
|
|
||||||
hExt
|
|
||||||
|
|
||||||
// sExt is applied to Go assembly files, ending with .s.
|
|
||||||
sExt
|
|
||||||
|
|
||||||
// csExt is applied to other assembly files, ending with .S. These are built
|
|
||||||
// with the C compiler if cgo code is present.
|
|
||||||
csExt
|
|
||||||
|
|
||||||
// protoExt is applied to .proto files.
|
|
||||||
protoExt
|
|
||||||
)
|
|
||||||
|
|
||||||
// fileNameInfo returns information that can be inferred from the name of
|
|
||||||
// a file. It does not read data from the file.
|
|
||||||
func fileNameInfo(dir, rel, name string) fileInfo {
|
|
||||||
ext := path.Ext(name)
|
|
||||||
|
|
||||||
// Categorize the file based on extension. Based on go/build.Context.Import.
|
|
||||||
var category extCategory
|
|
||||||
switch ext {
|
|
||||||
case ".go":
|
|
||||||
category = goExt
|
|
||||||
case ".c", ".cc", ".cpp", ".cxx":
|
|
||||||
category = cExt
|
|
||||||
case ".h", ".hh", ".hpp", ".hxx":
|
|
||||||
category = hExt
|
|
||||||
case ".s":
|
|
||||||
category = sExt
|
|
||||||
case ".S":
|
|
||||||
category = csExt
|
|
||||||
case ".proto":
|
|
||||||
category = protoExt
|
|
||||||
case ".m", ".f", ".F", ".for", ".f90", ".swig", ".swigcxx", ".syso":
|
|
||||||
category = unsupportedExt
|
|
||||||
default:
|
|
||||||
category = ignoredExt
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine test, goos, and goarch. This is intended to match the logic
|
|
||||||
// in goodOSArchFile in go/build.
|
|
||||||
var isTest bool
|
|
||||||
var goos, goarch string
|
|
||||||
l := strings.Split(name[:len(name)-len(ext)], "_")
|
|
||||||
if len(l) >= 2 && l[len(l)-1] == "test" {
|
|
||||||
isTest = category == goExt
|
|
||||||
l = l[:len(l)-1]
|
|
||||||
}
|
|
||||||
switch {
|
|
||||||
case len(l) >= 3 && config.KnownOSSet[l[len(l)-2]] && config.KnownArchSet[l[len(l)-1]]:
|
|
||||||
goos = l[len(l)-2]
|
|
||||||
goarch = l[len(l)-1]
|
|
||||||
case len(l) >= 2 && config.KnownOSSet[l[len(l)-1]]:
|
|
||||||
goos = l[len(l)-1]
|
|
||||||
case len(l) >= 2 && config.KnownArchSet[l[len(l)-1]]:
|
|
||||||
goarch = l[len(l)-1]
|
|
||||||
}
|
|
||||||
|
|
||||||
return fileInfo{
|
|
||||||
path: filepath.Join(dir, name),
|
|
||||||
rel: rel,
|
|
||||||
name: name,
|
|
||||||
ext: ext,
|
|
||||||
category: category,
|
|
||||||
isTest: isTest,
|
|
||||||
goos: goos,
|
|
||||||
goarch: goarch,
|
|
||||||
}
|
|
||||||
}
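// Aside: an illustrative sketch (not the vendored code) of the filename suffix
// logic above, using a tiny stand-in for config.KnownOSSet and KnownArchSet.
package main

import (
	"fmt"
	"path"
	"strings"
)

var knownOS = map[string]bool{"linux": true, "darwin": true, "windows": true}
var knownArch = map[string]bool{"amd64": true, "arm64": true, "386": true}

func suffixes(name string) (isTest bool, goos, goarch string) {
	ext := path.Ext(name)
	l := strings.Split(name[:len(name)-len(ext)], "_")
	if len(l) >= 2 && l[len(l)-1] == "test" {
		isTest = ext == ".go"
		l = l[:len(l)-1]
	}
	switch {
	case len(l) >= 3 && knownOS[l[len(l)-2]] && knownArch[l[len(l)-1]]:
		goos, goarch = l[len(l)-2], l[len(l)-1]
	case len(l) >= 2 && knownOS[l[len(l)-1]]:
		goos = l[len(l)-1]
	case len(l) >= 2 && knownArch[l[len(l)-1]]:
		goarch = l[len(l)-1]
	}
	return isTest, goos, goarch
}

func main() {
	fmt.Println(suffixes("doc_linux_amd64_test.go")) // true linux amd64
	fmt.Println(suffixes("asm_arm64.s"))             // false, goos empty, goarch arm64
}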
|
|
||||||
|
|
||||||
// otherFileInfo returns information about a non-.go file. It will parse
|
|
||||||
// part of the file to determine build tags. If the file can't be read, an
|
|
||||||
// error will be logged, and partial information will be returned.
|
|
||||||
func otherFileInfo(dir, rel, name string) fileInfo {
|
|
||||||
info := fileNameInfo(dir, rel, name)
|
|
||||||
if info.category == ignoredExt {
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
if info.category == unsupportedExt {
|
|
||||||
log.Printf("%s: warning: file extension not yet supported", info.path)
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
|
|
||||||
tags, err := readTags(info.path)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("%s: error reading file: %v", info.path, err)
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
info.tags = tags
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
|
|
||||||
// readTags reads and extracts build tags from the block of comments
|
|
||||||
// and blank lines at the start of a file which is separated from the
|
|
||||||
// rest of the file by a blank line. Each string in the returned slice
|
|
||||||
// is the trimmed text of a line after a "+build" prefix.
|
|
||||||
// Based on go/build.Context.shouldBuild.
|
|
||||||
func readTags(path string) ([]tagLine, error) {
|
|
||||||
f, err := os.Open(path)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
defer f.Close()
|
|
||||||
scanner := bufio.NewScanner(f)
|
|
||||||
|
|
||||||
// Pass 1: Identify leading run of // comments and blank lines,
|
|
||||||
// which must be followed by a blank line.
|
|
||||||
var lines []string
|
|
||||||
end := 0
|
|
||||||
for scanner.Scan() {
|
|
||||||
line := strings.TrimSpace(scanner.Text())
|
|
||||||
if line == "" {
|
|
||||||
end = len(lines)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if strings.HasPrefix(line, "//") {
|
|
||||||
lines = append(lines, line[len("//"):])
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
break
|
|
||||||
}
|
|
||||||
if err := scanner.Err(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
lines = lines[:end]
|
|
||||||
|
|
||||||
// Pass 2: Process each line in the run.
|
|
||||||
var tagLines []tagLine
|
|
||||||
for _, line := range lines {
|
|
||||||
fields := strings.Fields(line)
|
|
||||||
if len(fields) > 0 && fields[0] == "+build" {
|
|
||||||
tagLines = append(tagLines, parseTagsInGroups(fields[1:]))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return tagLines, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func parseTagsInGroups(groups []string) tagLine {
|
|
||||||
var l tagLine
|
|
||||||
for _, g := range groups {
|
|
||||||
l = append(l, tagGroup(strings.Split(g, ",")))
|
|
||||||
}
|
|
||||||
return l
|
|
||||||
}
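// Aside: a tiny standalone illustration (not the vendored code) of how one
// "+build" comment line maps to the tagLine/tagGroup structure above.
package main

import (
	"fmt"
	"strings"
)

func main() {
	fields := strings.Fields("+build linux,cgo darwin")
	var groups [][]string
	for _, g := range fields[1:] { // skip the "+build" token
		groups = append(groups, strings.Split(g, ","))
	}
	fmt.Println(groups) // [[linux cgo] [darwin]]
}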
|
|
||||||
|
|
||||||
func isOSArchSpecific(info fileInfo, cgoTags tagLine) (osSpecific, archSpecific bool) {
|
|
||||||
if info.goos != "" {
|
|
||||||
osSpecific = true
|
|
||||||
}
|
|
||||||
if info.goarch != "" {
|
|
||||||
archSpecific = true
|
|
||||||
}
|
|
||||||
lines := info.tags
|
|
||||||
if len(cgoTags) > 0 {
|
|
||||||
lines = append(lines, cgoTags)
|
|
||||||
}
|
|
||||||
for _, line := range lines {
|
|
||||||
for _, group := range line {
|
|
||||||
for _, tag := range group {
|
|
||||||
if strings.HasPrefix(tag, "!") {
|
|
||||||
tag = tag[1:]
|
|
||||||
}
|
|
||||||
_, osOk := config.KnownOSSet[tag]
|
|
||||||
if osOk {
|
|
||||||
osSpecific = true
|
|
||||||
}
|
|
||||||
_, archOk := config.KnownArchSet[tag]
|
|
||||||
if archOk {
|
|
||||||
archSpecific = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return osSpecific, archSpecific
|
|
||||||
}
|
|
||||||
|
|
||||||
// checkConstraints determines whether build constraints are satisfied on
|
|
||||||
// a given platform.
|
|
||||||
//
|
|
||||||
// The first few arguments describe the platform. genericTags is the set
|
|
||||||
// of build tags that are true on all platforms. os and arch are the platform
|
|
||||||
// GOOS and GOARCH strings. If os or arch is empty, checkConstraints will
|
|
||||||
// return false in the presence of OS and architecture constraints, even
|
|
||||||
// if they are negated.
|
|
||||||
//
|
|
||||||
// The remaining arguments describe the file being tested. All of these may
|
|
||||||
// be empty or nil. osSuffix and archSuffix are filename suffixes. fileTags
|
|
||||||
// is a list of tags from +build comments found near the top of the file. cgoTags
|
|
||||||
// is an extra set of tags in a #cgo directive.
|
|
||||||
func checkConstraints(c *config.Config, os, arch, osSuffix, archSuffix string, fileTags []tagLine, cgoTags tagLine) bool {
|
|
||||||
if osSuffix != "" && osSuffix != os || archSuffix != "" && archSuffix != arch {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, l := range fileTags {
|
|
||||||
if !l.check(c, os, arch) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(cgoTags) > 0 && !cgoTags.check(c, os, arch) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
|
|
||||||
// isIgnoredTag returns whether the tag is "cgo" or is a release tag.
|
|
||||||
// Release tags match the pattern "go[0-9]\.[0-9]+".
|
|
||||||
// Gazelle won't consider whether an ignored tag is satisfied when evaluating
|
|
||||||
// build constraints for a file.
|
|
||||||
func isIgnoredTag(tag string) bool {
|
|
||||||
if tag == "cgo" {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
if len(tag) < 5 || !strings.HasPrefix(tag, "go") {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if tag[2] < '0' || tag[2] > '9' || tag[3] != '.' {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
for _, c := range tag[4:] {
|
|
||||||
if c < '0' || c > '9' {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
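// Aside: the release-tag pattern mentioned above, expressed as a regular
// expression in a standalone snippet (not part of the vendored file).
package main

import (
	"fmt"
	"regexp"
)

func main() {
	releaseTag := regexp.MustCompile(`^go[0-9]\.[0-9]+$`)
	fmt.Println(releaseTag.MatchString("go1.8"))  // true
	fmt.Println(releaseTag.MatchString("go1.10")) // true
	fmt.Println(releaseTag.MatchString("gopher")) // false
}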
|
|
||||||
274 vendor/github.com/bazelbuild/bazel-gazelle/internal/packages/fileinfo_go.go (generated, vendored) @@ -1,274 +0,0 @@ (file deleted)
|
|||||||
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package packages
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"errors"
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"log"
|
|
||||||
"path/filepath"
|
|
||||||
"strconv"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
"unicode/utf8"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
|
||||||
)
|
|
||||||
|
|
||||||
// goFileInfo returns information about a .go file. It will parse part of the
|
|
||||||
// file to determine the package name, imports, and build constraints.
|
|
||||||
// If the file can't be read, an error will be logged, and partial information
|
|
||||||
// will be returned.
|
|
||||||
// This function is intended to match go/build.Context.Import.
|
|
||||||
// TODO(#53): extract canonical import path
|
|
||||||
func goFileInfo(c *config.Config, dir, rel, name string) fileInfo {
|
|
||||||
info := fileNameInfo(dir, rel, name)
|
|
||||||
fset := token.NewFileSet()
|
|
||||||
pf, err := parser.ParseFile(fset, info.path, nil, parser.ImportsOnly|parser.ParseComments)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("%s: error reading go file: %v", info.path, err)
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
|
|
||||||
info.packageName = pf.Name.Name
|
|
||||||
if info.isTest && strings.HasSuffix(info.packageName, "_test") {
|
|
||||||
info.packageName = info.packageName[:len(info.packageName)-len("_test")]
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, decl := range pf.Decls {
|
|
||||||
d, ok := decl.(*ast.GenDecl)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
for _, dspec := range d.Specs {
|
|
||||||
spec, ok := dspec.(*ast.ImportSpec)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
quoted := spec.Path.Value
|
|
||||||
path, err := strconv.Unquote(quoted)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("%s: error reading go file: %v", info.path, err)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
if path == "C" {
|
|
||||||
if info.isTest {
|
|
||||||
log.Printf("%s: warning: use of cgo in test not supported", info.path)
|
|
||||||
}
|
|
||||||
info.isCgo = true
|
|
||||||
cg := spec.Doc
|
|
||||||
if cg == nil && len(d.Specs) == 1 {
|
|
||||||
cg = d.Doc
|
|
||||||
}
|
|
||||||
if cg != nil {
|
|
||||||
if err := saveCgo(&info, cg); err != nil {
|
|
||||||
log.Printf("%s: error reading go file: %v", info.path, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
info.imports = append(info.imports, path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tags, err := readTags(info.path)
|
|
||||||
if err != nil {
|
|
||||||
log.Printf("%s: error reading go file: %v", info.path, err)
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
info.tags = tags
|
|
||||||
|
|
||||||
return info
|
|
||||||
}
|
|
||||||
|
|
||||||
// saveCgo extracts CFLAGS, CPPFLAGS, CXXFLAGS, and LDFLAGS directives
|
|
||||||
// from a comment above a "C" import. This is intended to match logic in
|
|
||||||
// go/build.Context.saveCgo.
|
|
||||||
func saveCgo(info *fileInfo, cg *ast.CommentGroup) error {
|
|
||||||
text := cg.Text()
|
|
||||||
for _, line := range strings.Split(text, "\n") {
|
|
||||||
orig := line
|
|
||||||
|
|
||||||
// Line is
|
|
||||||
// #cgo [GOOS/GOARCH...] LDFLAGS: stuff
|
|
||||||
//
|
|
||||||
line = strings.TrimSpace(line)
|
|
||||||
if len(line) < 5 || line[:4] != "#cgo" || (line[4] != ' ' && line[4] != '\t') {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
// Split at colon.
|
|
||||||
line = strings.TrimSpace(line[4:])
|
|
||||||
i := strings.Index(line, ":")
|
|
||||||
if i < 0 {
|
|
||||||
return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
|
|
||||||
}
|
|
||||||
line, optstr := strings.TrimSpace(line[:i]), strings.TrimSpace(line[i+1:])
|
|
||||||
|
|
||||||
// Parse tags and verb.
|
|
||||||
f := strings.Fields(line)
|
|
||||||
if len(f) < 1 {
|
|
||||||
return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
|
|
||||||
}
|
|
||||||
verb := f[len(f)-1]
|
|
||||||
tags := parseTagsInGroups(f[:len(f)-1])
|
|
||||||
|
|
||||||
// Parse options.
|
|
||||||
opts, err := splitQuoted(optstr)
|
|
||||||
if err != nil {
|
|
||||||
return fmt.Errorf("%s: invalid #cgo line: %s", info.path, orig)
|
|
||||||
}
|
|
||||||
var ok bool
|
|
||||||
for i, opt := range opts {
|
|
||||||
if opt, ok = expandSrcDir(opt, info.rel); !ok {
|
|
||||||
return fmt.Errorf("%s: malformed #cgo argument: %s", info.path, orig)
|
|
||||||
}
|
|
||||||
opts[i] = opt
|
|
||||||
}
|
|
||||||
joinedStr := strings.Join(opts, OptSeparator)
|
|
||||||
|
|
||||||
// Add tags to appropriate list.
|
|
||||||
switch verb {
|
|
||||||
case "CFLAGS", "CPPFLAGS", "CXXFLAGS":
|
|
||||||
info.copts = append(info.copts, taggedOpts{tags, joinedStr})
|
|
||||||
case "LDFLAGS":
|
|
||||||
info.clinkopts = append(info.clinkopts, taggedOpts{tags, joinedStr})
|
|
||||||
case "pkg-config":
|
|
||||||
return fmt.Errorf("%s: pkg-config not supported: %s", info.path, orig)
|
|
||||||
default:
|
|
||||||
return fmt.Errorf("%s: invalid #cgo verb: %s", info.path, orig)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
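// Aside: a rough, hypothetical sketch (not the vendored implementation) of
// splitting a #cgo line into optional build tags, a verb, and options, as
// described above. Quoting (splitQuoted) and ${SRCDIR} expansion are omitted.
package main

import (
	"fmt"
	"strings"
)

func main() {
	line := "#cgo linux LDFLAGS: -lfoo -L/opt/foo/lib"
	rest := strings.TrimSpace(strings.TrimPrefix(line, "#cgo"))
	i := strings.Index(rest, ":")
	head := strings.Fields(rest[:i])
	opts := strings.Fields(rest[i+1:])
	verb, tags := head[len(head)-1], head[:len(head)-1]
	fmt.Println(tags, verb, opts) // [linux] LDFLAGS [-lfoo -L/opt/foo/lib]
}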
|
|
||||||
|
|
||||||
// splitQuoted splits the string s around each instance of one or more consecutive
|
|
||||||
// white space characters while taking into account quotes and escaping, and
|
|
||||||
// returns an array of substrings of s or an empty list if s contains only white space.
|
|
||||||
// Single quotes and double quotes are recognized to prevent splitting within the
|
|
||||||
// quoted region, and are removed from the resulting substrings. If a quote in s
|
|
||||||
// isn't closed err will be set and r will have the unclosed argument as the
|
|
||||||
// last element. The backslash is used for escaping.
|
|
||||||
//
|
|
||||||
// For example, the following string:
|
|
||||||
//
|
|
||||||
// a b:"c d" 'e''f' "g\""
|
|
||||||
//
|
|
||||||
// Would be parsed as:
|
|
||||||
//
|
|
||||||
// []string{"a", "b:c d", "ef", `g"`}
|
|
||||||
//
|
|
||||||
// Copied from go/build.splitQuoted
|
|
||||||
func splitQuoted(s string) (r []string, err error) {
|
|
||||||
var args []string
|
|
||||||
arg := make([]rune, len(s))
|
|
||||||
escaped := false
|
|
||||||
quoted := false
|
|
||||||
quote := '\x00'
|
|
||||||
i := 0
|
|
||||||
for _, rune := range s {
|
|
||||||
switch {
|
|
||||||
case escaped:
|
|
||||||
escaped = false
|
|
||||||
case rune == '\\':
|
|
||||||
escaped = true
|
|
||||||
continue
|
|
||||||
case quote != '\x00':
|
|
||||||
if rune == quote {
|
|
||||||
quote = '\x00'
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
case rune == '"' || rune == '\'':
|
|
||||||
quoted = true
|
|
||||||
quote = rune
|
|
||||||
continue
|
|
||||||
case unicode.IsSpace(rune):
|
|
||||||
if quoted || i > 0 {
|
|
||||||
quoted = false
|
|
||||||
args = append(args, string(arg[:i]))
|
|
||||||
i = 0
|
|
||||||
}
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
arg[i] = rune
|
|
||||||
i++
|
|
||||||
}
|
|
||||||
if quoted || i > 0 {
|
|
||||||
args = append(args, string(arg[:i]))
|
|
||||||
}
|
|
||||||
if quote != 0 {
|
|
||||||
err = errors.New("unclosed quote")
|
|
||||||
} else if escaped {
|
|
||||||
err = errors.New("unfinished escaping")
|
|
||||||
}
|
|
||||||
return args, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// expandSrcDir expands any occurrence of ${SRCDIR}, making sure
|
|
||||||
// the result is safe for the shell.
|
|
||||||
//
|
|
||||||
// Copied from go/build.expandSrcDir
|
|
||||||
func expandSrcDir(str string, srcdir string) (string, bool) {
|
|
||||||
// "\" delimited paths cause safeCgoName to fail
|
|
||||||
// so convert native paths with a different delimiter
|
|
||||||
// to "/" before starting (eg: on windows).
|
|
||||||
srcdir = filepath.ToSlash(srcdir)
|
|
||||||
|
|
||||||
// Spaces are tolerated in ${SRCDIR}, but not anywhere else.
|
|
||||||
chunks := strings.Split(str, "${SRCDIR}")
|
|
||||||
if len(chunks) < 2 {
|
|
||||||
return str, safeCgoName(str, false)
|
|
||||||
}
|
|
||||||
ok := true
|
|
||||||
for _, chunk := range chunks {
|
|
||||||
ok = ok && (chunk == "" || safeCgoName(chunk, false))
|
|
||||||
}
|
|
||||||
ok = ok && (srcdir == "" || safeCgoName(srcdir, true))
|
|
||||||
res := strings.Join(chunks, srcdir)
|
|
||||||
return res, ok && res != ""
|
|
||||||
}
|
|
||||||
|
|
||||||
// NOTE: $ is not safe for the shell, but it is allowed here because of linker options like -Wl,$ORIGIN.
|
|
||||||
// We never pass these arguments to a shell (just to programs we construct argv for), so this should be okay.
|
|
||||||
// See golang.org/issue/6038.
|
|
||||||
// The @ is for OS X. See golang.org/issue/13720.
|
|
||||||
// The % is for Jenkins. See golang.org/issue/16959.
|
|
||||||
const safeString = "+-.,/0123456789=ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz:$@%"
|
|
||||||
const safeSpaces = " "
|
|
||||||
|
|
||||||
var safeBytes = []byte(safeSpaces + safeString)
|
|
||||||
|
|
||||||
// Copied from go/build.safeCgoName
|
|
||||||
func safeCgoName(s string, spaces bool) bool {
|
|
||||||
if s == "" {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
safe := safeBytes
|
|
||||||
if !spaces {
|
|
||||||
safe = safe[len(safeSpaces):]
|
|
||||||
}
|
|
||||||
for i := 0; i < len(s); i++ {
|
|
||||||
if c := s[i]; c < utf8.RuneSelf && bytes.IndexByte(safe, c) < 0 {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
651 vendor/github.com/bazelbuild/bazel-gazelle/internal/packages/package.go (generated, vendored) @@ -1,651 +0,0 @@ (file deleted)
|
|||||||
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package packages
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"path"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
|
|
||||||
)
|
|
||||||
|
|
||||||
// Package contains metadata about a Go package extracted from a directory.
|
|
||||||
// It fills a similar role to go/build.Package, but it separates files by
|
|
||||||
// target instead of by type, and it supports multiple platforms.
|
|
||||||
type Package struct {
|
|
||||||
// Name is the symbol found in package declarations of the .go files in
|
|
||||||
// the package. It does not include the "_test" suffix from external tests.
|
|
||||||
Name string
|
|
||||||
|
|
||||||
// Dir is an absolute path to the directory that contains the package.
|
|
||||||
Dir string
|
|
||||||
|
|
||||||
// Rel is the relative path to the package directory from the repository
|
|
||||||
// root. If the directory is the repository root itself, Rel is empty.
|
|
||||||
// Components in Rel are separated with slashes.
|
|
||||||
Rel string
|
|
||||||
|
|
||||||
// ImportPath is the string used to import this package in Go.
|
|
||||||
ImportPath string
|
|
||||||
|
|
||||||
Library, Binary, Test GoTarget
|
|
||||||
Proto ProtoTarget
|
|
||||||
|
|
||||||
HasTestdata bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// GoTarget contains metadata about a buildable Go target in a package.
|
|
||||||
type GoTarget struct {
|
|
||||||
Sources, Imports PlatformStrings
|
|
||||||
COpts, CLinkOpts PlatformStrings
|
|
||||||
Cgo bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// ProtoTarget contains metadata about proto files in a package.
|
|
||||||
type ProtoTarget struct {
|
|
||||||
Sources, Imports PlatformStrings
|
|
||||||
HasServices bool
|
|
||||||
|
|
||||||
// HasPbGo indicates whether unexcluded .pb.go files are present in the
|
|
||||||
// same package. They will not be in this target's sources.
|
|
||||||
HasPbGo bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// PlatformStrings contains a set of strings associated with a buildable
|
|
||||||
// Go target in a package. This is used to store source file names,
|
|
||||||
// import paths, and flags.
|
|
||||||
//
|
|
||||||
// Strings are stored in four sets: generic strings, OS-specific strings,
|
|
||||||
// arch-specific strings, and OS-and-arch-specific strings. A string may not
|
|
||||||
// be duplicated within a list or across sets; however, a string may appear
|
|
||||||
// in more than one list within a set (e.g., in "linux" and "windows" within
|
|
||||||
// the OS set). Strings within each list should be sorted, though this may
|
|
||||||
// not be relied upon.
|
|
||||||
type PlatformStrings struct {
|
|
||||||
// Generic is a list of strings not specific to any platform.
|
|
||||||
Generic []string
|
|
||||||
|
|
||||||
// OS is a map from OS name (anything in config.KnownOSs) to
|
|
||||||
// OS-specific strings.
|
|
||||||
OS map[string][]string
|
|
||||||
|
|
||||||
// Arch is a map from architecture name (anything in config.KnownArchs) to
|
|
||||||
// architecture-specific strings.
|
|
||||||
Arch map[string][]string
|
|
||||||
|
|
||||||
// Platform is a map from platforms to OS and architecture-specific strings.
|
|
||||||
Platform map[config.Platform][]string
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsCommand returns true if the package name is "main".
|
|
||||||
func (p *Package) IsCommand() bool {
|
|
||||||
return p.Name == "main"
|
|
||||||
}
|
|
||||||
|
|
||||||
// EmptyPackage returns an empty package. The package name and import path
|
|
||||||
// are inferred from the directory name and configuration. This is useful
|
|
||||||
// for deleting rules in directories which no longer have source files.
|
|
||||||
func EmptyPackage(c *config.Config, dir, rel string) *Package {
|
|
||||||
packageName := pathtools.RelBaseName(rel, c.GoPrefix, c.RepoRoot)
|
|
||||||
pb := packageBuilder{
|
|
||||||
name: packageName,
|
|
||||||
dir: dir,
|
|
||||||
rel: rel,
|
|
||||||
}
|
|
||||||
pb.inferImportPath(c)
|
|
||||||
return pb.build()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *GoTarget) HasGo() bool {
|
|
||||||
return t.Sources.HasGo()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (t *ProtoTarget) HasProto() bool {
|
|
||||||
return !t.Sources.IsEmpty()
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ps *PlatformStrings) HasGo() bool {
|
|
||||||
return ps.firstGoFile() != ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ps *PlatformStrings) IsEmpty() bool {
|
|
||||||
return len(ps.Generic) == 0 && len(ps.OS) == 0 && len(ps.Arch) == 0 && len(ps.Platform) == 0
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ps *PlatformStrings) Flat() []string {
|
|
||||||
unique := make(map[string]struct{})
|
|
||||||
for _, s := range ps.Generic {
|
|
||||||
unique[s] = struct{}{}
|
|
||||||
}
|
|
||||||
for _, ss := range ps.OS {
|
|
||||||
for _, s := range ss {
|
|
||||||
unique[s] = struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, ss := range ps.Arch {
|
|
||||||
for _, s := range ss {
|
|
||||||
unique[s] = struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, ss := range ps.Platform {
|
|
||||||
for _, s := range ss {
|
|
||||||
unique[s] = struct{}{}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
flat := make([]string, 0, len(unique))
|
|
||||||
for s := range unique {
|
|
||||||
flat = append(flat, s)
|
|
||||||
}
|
|
||||||
sort.Strings(flat)
|
|
||||||
return flat
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ps *PlatformStrings) firstGoFile() string {
|
|
||||||
for _, f := range ps.Generic {
|
|
||||||
if strings.HasSuffix(f, ".go") {
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, fs := range ps.OS {
|
|
||||||
for _, f := range fs {
|
|
||||||
if strings.HasSuffix(f, ".go") {
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, fs := range ps.Arch {
|
|
||||||
for _, f := range fs {
|
|
||||||
if strings.HasSuffix(f, ".go") {
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, fs := range ps.Platform {
|
|
||||||
for _, f := range fs {
|
|
||||||
if strings.HasSuffix(f, ".go") {
|
|
||||||
return f
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
type packageBuilder struct {
|
|
||||||
name, dir, rel string
|
|
||||||
library, binary, test goTargetBuilder
|
|
||||||
proto protoTargetBuilder
|
|
||||||
hasTestdata bool
|
|
||||||
importPath, importPathFile string
|
|
||||||
}
|
|
||||||
|
|
||||||
type goTargetBuilder struct {
|
|
||||||
sources, imports, copts, clinkopts platformStringsBuilder
|
|
||||||
cgo bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type protoTargetBuilder struct {
|
|
||||||
sources, imports platformStringsBuilder
|
|
||||||
hasServices, hasPbGo bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type platformStringsBuilder struct {
|
|
||||||
strs map[string]platformStringInfo
|
|
||||||
}
|
|
||||||
|
|
||||||
type platformStringInfo struct {
|
|
||||||
set platformStringSet
|
|
||||||
oss map[string]bool
|
|
||||||
archs map[string]bool
|
|
||||||
platforms map[config.Platform]bool
|
|
||||||
}
|
|
||||||
|
|
||||||
type platformStringSet int
|
|
||||||
|
|
||||||
const (
|
|
||||||
genericSet platformStringSet = iota
|
|
||||||
osSet
|
|
||||||
archSet
|
|
||||||
platformSet
|
|
||||||
)
|
|
||||||
|
|
||||||
// addFile adds the file described by "info" to a target in the package "p" if
|
|
||||||
// the file is buildable.
|
|
||||||
//
|
|
||||||
// "cgo" tells whether any ".go" file in the package contains cgo code. This
|
|
||||||
// affects whether C files are added to targets.
|
|
||||||
//
|
|
||||||
// An error is returned if a file is buildable but invalid (for example, a
|
|
||||||
// test .go file containing cgo code). Files that are not buildable will not
|
|
||||||
// be added to any target (for example, .txt files).
|
|
||||||
func (pb *packageBuilder) addFile(c *config.Config, info fileInfo, cgo bool) error {
|
|
||||||
switch {
|
|
||||||
case info.category == ignoredExt || info.category == unsupportedExt ||
|
|
||||||
!cgo && (info.category == cExt || info.category == csExt) ||
|
|
||||||
c.ProtoMode == config.DisableProtoMode && info.category == protoExt:
|
|
||||||
return nil
|
|
||||||
case info.isTest:
|
|
||||||
if info.isCgo {
|
|
||||||
return fmt.Errorf("%s: use of cgo in test not supported", info.path)
|
|
||||||
}
|
|
||||||
pb.test.addFile(c, info)
|
|
||||||
case info.category == protoExt:
|
|
||||||
pb.proto.addFile(c, info)
|
|
||||||
default:
|
|
||||||
pb.library.addFile(c, info)
|
|
||||||
}
|
|
||||||
if strings.HasSuffix(info.name, ".pb.go") {
|
|
||||||
pb.proto.hasPbGo = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if info.importPath != "" {
|
|
||||||
if pb.importPath == "" {
|
|
||||||
pb.importPath = info.importPath
|
|
||||||
pb.importPathFile = info.path
|
|
||||||
} else if pb.importPath != info.importPath {
|
|
||||||
return fmt.Errorf("found import comments %q (%s) and %q (%s)", pb.importPath, pb.importPathFile, info.importPath, info.path)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// isBuildable returns true if anything in the package is buildable.
|
|
||||||
// This is true if the package has Go code that satisfies build constraints
|
|
||||||
// on any platform or has proto files not in legacy mode.
|
|
||||||
func (pb *packageBuilder) isBuildable(c *config.Config) bool {
|
|
||||||
return pb.firstGoFile() != "" ||
|
|
||||||
len(pb.proto.sources.strs) > 0 && c.ProtoMode == config.DefaultProtoMode
|
|
||||||
}
|
|
||||||
|
|
||||||
// firstGoFile returns the name of a .go file if the package contains at least
|
|
||||||
// one .go file, or "" otherwise.
|
|
||||||
func (pb *packageBuilder) firstGoFile() string {
|
|
||||||
goSrcs := []platformStringsBuilder{
|
|
||||||
pb.library.sources,
|
|
||||||
pb.binary.sources,
|
|
||||||
pb.test.sources,
|
|
||||||
}
|
|
||||||
for _, sb := range goSrcs {
|
|
||||||
if sb.strs != nil {
|
|
||||||
for s, _ := range sb.strs {
|
|
||||||
if strings.HasSuffix(s, ".go") {
|
|
||||||
return s
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ""
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pb *packageBuilder) inferImportPath(c *config.Config) error {
|
|
||||||
if pb.importPath != "" {
|
|
||||||
log.Panic("importPath already set")
|
|
||||||
}
|
|
||||||
if pb.rel == c.GoPrefixRel {
|
|
||||||
if c.GoPrefix == "" {
|
|
||||||
return fmt.Errorf("in directory %q, prefix is empty, so importpath would be empty for rules. Set a prefix with a '# gazelle:prefix' comment or with -go_prefix on the command line.", pb.dir)
|
|
||||||
}
|
|
||||||
pb.importPath = c.GoPrefix
|
|
||||||
} else {
|
|
||||||
fromPrefixRel := strings.TrimPrefix(pb.rel, c.GoPrefixRel+"/")
|
|
||||||
pb.importPath = path.Join(c.GoPrefix, fromPrefixRel)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (pb *packageBuilder) build() *Package {
|
|
||||||
return &Package{
|
|
||||||
Name: pb.name,
|
|
||||||
Dir: pb.dir,
|
|
||||||
Rel: pb.rel,
|
|
||||||
ImportPath: pb.importPath,
|
|
||||||
Library: pb.library.build(),
|
|
||||||
Binary: pb.binary.build(),
|
|
||||||
Test: pb.test.build(),
|
|
||||||
Proto: pb.proto.build(),
|
|
||||||
HasTestdata: pb.hasTestdata,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tb *goTargetBuilder) addFile(c *config.Config, info fileInfo) {
|
|
||||||
tb.cgo = tb.cgo || info.isCgo
|
|
||||||
add := getPlatformStringsAddFunction(c, info, nil)
|
|
||||||
add(&tb.sources, info.name)
|
|
||||||
add(&tb.imports, info.imports...)
|
|
||||||
for _, copts := range info.copts {
|
|
||||||
optAdd := add
|
|
||||||
if len(copts.tags) > 0 {
|
|
||||||
optAdd = getPlatformStringsAddFunction(c, info, copts.tags)
|
|
||||||
}
|
|
||||||
optAdd(&tb.copts, copts.opts)
|
|
||||||
}
|
|
||||||
for _, clinkopts := range info.clinkopts {
|
|
||||||
optAdd := add
|
|
||||||
if len(clinkopts.tags) > 0 {
|
|
||||||
optAdd = getPlatformStringsAddFunction(c, info, clinkopts.tags)
|
|
||||||
}
|
|
||||||
optAdd(&tb.clinkopts, clinkopts.opts)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tb *goTargetBuilder) build() GoTarget {
|
|
||||||
return GoTarget{
|
|
||||||
Sources: tb.sources.build(),
|
|
||||||
Imports: tb.imports.build(),
|
|
||||||
COpts: tb.copts.build(),
|
|
||||||
CLinkOpts: tb.clinkopts.build(),
|
|
||||||
Cgo: tb.cgo,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tb *protoTargetBuilder) addFile(c *config.Config, info fileInfo) {
|
|
||||||
add := getPlatformStringsAddFunction(c, info, nil)
|
|
||||||
add(&tb.sources, info.name)
|
|
||||||
add(&tb.imports, info.imports...)
|
|
||||||
tb.hasServices = tb.hasServices || info.hasServices
|
|
||||||
}
|
|
||||||
|
|
||||||
func (tb *protoTargetBuilder) build() ProtoTarget {
|
|
||||||
return ProtoTarget{
|
|
||||||
Sources: tb.sources.build(),
|
|
||||||
Imports: tb.imports.build(),
|
|
||||||
HasServices: tb.hasServices,
|
|
||||||
HasPbGo: tb.hasPbGo,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// getPlatformStringsAddFunction returns a function used to add strings to
|
|
||||||
// a *platformStringsBuilder under the same set of constraints. This is a
|
|
||||||
// performance optimization to avoid evaluating constraints repeatedly.
|
|
||||||
func getPlatformStringsAddFunction(c *config.Config, info fileInfo, cgoTags tagLine) func(sb *platformStringsBuilder, ss ...string) {
|
|
||||||
isOSSpecific, isArchSpecific := isOSArchSpecific(info, cgoTags)
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case !isOSSpecific && !isArchSpecific:
|
|
||||||
if checkConstraints(c, "", "", info.goos, info.goarch, info.tags, cgoTags) {
|
|
||||||
return func(sb *platformStringsBuilder, ss ...string) {
|
|
||||||
for _, s := range ss {
|
|
||||||
sb.addGenericString(s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case isOSSpecific && !isArchSpecific:
|
|
||||||
var osMatch []string
|
|
||||||
for _, os := range config.KnownOSs {
|
|
||||||
if checkConstraints(c, os, "", info.goos, info.goarch, info.tags, cgoTags) {
|
|
||||||
osMatch = append(osMatch, os)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(osMatch) > 0 {
|
|
||||||
return func(sb *platformStringsBuilder, ss ...string) {
|
|
||||||
for _, s := range ss {
|
|
||||||
sb.addOSString(s, osMatch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
case !isOSSpecific && isArchSpecific:
|
|
||||||
var archMatch []string
|
|
||||||
for _, arch := range config.KnownArchs {
|
|
||||||
if checkConstraints(c, "", arch, info.goos, info.goarch, info.tags, cgoTags) {
|
|
||||||
archMatch = append(archMatch, arch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(archMatch) > 0 {
|
|
||||||
return func(sb *platformStringsBuilder, ss ...string) {
|
|
||||||
for _, s := range ss {
|
|
||||||
sb.addArchString(s, archMatch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
var platformMatch []config.Platform
|
|
||||||
for _, platform := range config.KnownPlatforms {
|
|
||||||
if checkConstraints(c, platform.OS, platform.Arch, info.goos, info.goarch, info.tags, cgoTags) {
|
|
||||||
platformMatch = append(platformMatch, platform)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(platformMatch) > 0 {
|
|
||||||
return func(sb *platformStringsBuilder, ss ...string) {
|
|
||||||
for _, s := range ss {
|
|
||||||
sb.addPlatformString(s, platformMatch)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return func(_ *platformStringsBuilder, _ ...string) {}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sb *platformStringsBuilder) addGenericString(s string) {
|
|
||||||
if sb.strs == nil {
|
|
||||||
sb.strs = make(map[string]platformStringInfo)
|
|
||||||
}
|
|
||||||
sb.strs[s] = platformStringInfo{set: genericSet}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sb *platformStringsBuilder) addOSString(s string, oss []string) {
|
|
||||||
if sb.strs == nil {
|
|
||||||
sb.strs = make(map[string]platformStringInfo)
|
|
||||||
}
|
|
||||||
si, ok := sb.strs[s]
|
|
||||||
if !ok {
|
|
||||||
si.set = osSet
|
|
||||||
si.oss = make(map[string]bool)
|
|
||||||
}
|
|
||||||
switch si.set {
|
|
||||||
case genericSet:
|
|
||||||
return
|
|
||||||
case osSet:
|
|
||||||
for _, os := range oss {
|
|
||||||
si.oss[os] = true
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
si.convertToPlatforms()
|
|
||||||
for _, os := range oss {
|
|
||||||
for _, arch := range config.KnownOSArchs[os] {
|
|
||||||
si.platforms[config.Platform{OS: os, Arch: arch}] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
sb.strs[s] = si
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sb *platformStringsBuilder) addArchString(s string, archs []string) {
|
|
||||||
if sb.strs == nil {
|
|
||||||
sb.strs = make(map[string]platformStringInfo)
|
|
||||||
}
|
|
||||||
si, ok := sb.strs[s]
|
|
||||||
if !ok {
|
|
||||||
si.set = archSet
|
|
||||||
si.archs = make(map[string]bool)
|
|
||||||
}
|
|
||||||
switch si.set {
|
|
||||||
case genericSet:
|
|
||||||
return
|
|
||||||
case archSet:
|
|
||||||
for _, arch := range archs {
|
|
||||||
si.archs[arch] = true
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
si.convertToPlatforms()
|
|
||||||
for _, arch := range archs {
|
|
||||||
for _, os := range config.KnownArchOSs[arch] {
|
|
||||||
si.platforms[config.Platform{OS: os, Arch: arch}] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
sb.strs[s] = si
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sb *platformStringsBuilder) addPlatformString(s string, platforms []config.Platform) {
|
|
||||||
if sb.strs == nil {
|
|
||||||
sb.strs = make(map[string]platformStringInfo)
|
|
||||||
}
|
|
||||||
si, ok := sb.strs[s]
|
|
||||||
if !ok {
|
|
||||||
si.set = platformSet
|
|
||||||
si.platforms = make(map[config.Platform]bool)
|
|
||||||
}
|
|
||||||
switch si.set {
|
|
||||||
case genericSet:
|
|
||||||
return
|
|
||||||
default:
|
|
||||||
si.convertToPlatforms()
|
|
||||||
for _, p := range platforms {
|
|
||||||
si.platforms[p] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
sb.strs[s] = si
|
|
||||||
}
|
|
||||||
|
|
||||||
func (sb *platformStringsBuilder) build() PlatformStrings {
|
|
||||||
var ps PlatformStrings
|
|
||||||
for s, si := range sb.strs {
|
|
||||||
switch si.set {
|
|
||||||
case genericSet:
|
|
||||||
ps.Generic = append(ps.Generic, s)
|
|
||||||
case osSet:
|
|
||||||
if ps.OS == nil {
|
|
||||||
ps.OS = make(map[string][]string)
|
|
||||||
}
|
|
||||||
for os, _ := range si.oss {
|
|
||||||
ps.OS[os] = append(ps.OS[os], s)
|
|
||||||
}
|
|
||||||
case archSet:
|
|
||||||
if ps.Arch == nil {
|
|
||||||
ps.Arch = make(map[string][]string)
|
|
||||||
}
|
|
||||||
for arch, _ := range si.archs {
|
|
||||||
ps.Arch[arch] = append(ps.Arch[arch], s)
|
|
||||||
}
|
|
||||||
case platformSet:
|
|
||||||
if ps.Platform == nil {
|
|
||||||
ps.Platform = make(map[config.Platform][]string)
|
|
||||||
}
|
|
||||||
for p, _ := range si.platforms {
|
|
||||||
ps.Platform[p] = append(ps.Platform[p], s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
sort.Strings(ps.Generic)
|
|
||||||
if ps.OS != nil {
|
|
||||||
for _, ss := range ps.OS {
|
|
||||||
sort.Strings(ss)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if ps.Arch != nil {
|
|
||||||
for _, ss := range ps.Arch {
|
|
||||||
sort.Strings(ss)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if ps.Platform != nil {
|
|
||||||
for _, ss := range ps.Platform {
|
|
||||||
sort.Strings(ss)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return ps
|
|
||||||
}
|
|
||||||
|
|
||||||
func (si *platformStringInfo) convertToPlatforms() {
|
|
||||||
switch si.set {
|
|
||||||
case genericSet:
|
|
||||||
log.Panic("cannot convert generic string to platforms")
|
|
||||||
case platformSet:
|
|
||||||
return
|
|
||||||
case osSet:
|
|
||||||
si.set = platformSet
|
|
||||||
si.platforms = make(map[config.Platform]bool)
|
|
||||||
for os, _ := range si.oss {
|
|
||||||
for _, arch := range config.KnownOSArchs[os] {
|
|
||||||
si.platforms[config.Platform{OS: os, Arch: arch}] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
si.oss = nil
|
|
||||||
case archSet:
|
|
||||||
si.set = platformSet
|
|
||||||
si.platforms = make(map[config.Platform]bool)
|
|
||||||
for arch, _ := range si.archs {
|
|
||||||
for _, os := range config.KnownArchOSs[arch] {
|
|
||||||
si.platforms[config.Platform{OS: os, Arch: arch}] = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
si.archs = nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// MapSlice applies a function that processes slices of strings to the strings
|
|
||||||
// in "ps" and returns a new PlatformStrings with the results.
|
|
||||||
func (ps *PlatformStrings) MapSlice(f func([]string) ([]string, error)) (PlatformStrings, []error) {
|
|
||||||
var errors []error
|
|
||||||
|
|
||||||
mapSlice := func(ss []string) []string {
|
|
||||||
rs, err := f(ss)
|
|
||||||
if err != nil {
|
|
||||||
errors = append(errors, err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return rs
|
|
||||||
}
|
|
||||||
|
|
||||||
mapStringMap := func(m map[string][]string) map[string][]string {
|
|
||||||
if m == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
rm := make(map[string][]string)
|
|
||||||
for k, ss := range m {
|
|
||||||
ss = mapSlice(ss)
|
|
||||||
if len(ss) > 0 {
|
|
||||||
rm[k] = ss
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(rm) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return rm
|
|
||||||
}
|
|
||||||
|
|
||||||
mapPlatformMap := func(m map[config.Platform][]string) map[config.Platform][]string {
|
|
||||||
if m == nil {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
rm := make(map[config.Platform][]string)
|
|
||||||
for k, ss := range m {
|
|
||||||
ss = mapSlice(ss)
|
|
||||||
if len(ss) > 0 {
|
|
||||||
rm[k] = ss
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if len(rm) == 0 {
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
return rm
|
|
||||||
}
|
|
||||||
|
|
||||||
result := PlatformStrings{
|
|
||||||
Generic: mapSlice(ps.Generic),
|
|
||||||
OS: mapStringMap(ps.OS),
|
|
||||||
Arch: mapStringMap(ps.Arch),
|
|
||||||
Platform: mapPlatformMap(ps.Platform),
|
|
||||||
}
|
|
||||||
return result, errors
|
|
||||||
}
|
|
||||||
462
vendor/github.com/bazelbuild/bazel-gazelle/internal/packages/walk.go
generated
vendored
462
vendor/github.com/bazelbuild/bazel-gazelle/internal/packages/walk.go
generated
vendored
@@ -1,462 +0,0 @@
|
|||||||
/* Copyright 2016 The Bazel Authors. All rights reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
||||||
|
|
||||||
package packages
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/build"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
|
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
)
|
|
||||||
|
|
||||||
// A WalkFunc is a callback called by Walk in each visited directory.
|
|
||||||
//
|
|
||||||
// dir is the absolute file system path to the directory being visited.
|
|
||||||
//
|
|
||||||
// rel is the relative slash-separated path to the directory from the
|
|
||||||
// repository root. Will be "" for the repository root directory itself.
|
|
||||||
//
|
|
||||||
// c is the configuration for the current directory. This may have been
|
|
||||||
// modified by directives in the directory's build file.
|
|
||||||
//
|
|
||||||
// pkg contains information about how to build source code in the directory.
|
|
||||||
// Will be nil for directories that don't contain buildable code, directories
|
|
||||||
// that Gazelle was not asked update, and directories where Walk
|
|
||||||
// encountered errors.
|
|
||||||
//
|
|
||||||
// oldFile is the existing build file in the directory. Will be nil if there
|
|
||||||
// was no file.
|
|
||||||
//
|
|
||||||
// isUpdateDir is true for directories that Gazelle was asked to update.
|
|
||||||
type WalkFunc func(dir, rel string, c *config.Config, pkg *Package, oldFile *bf.File, isUpdateDir bool)
|
|
||||||
|
|
||||||
// Walk traverses a directory tree. In each directory, Walk parses existing
|
|
||||||
// build files. In directories that Gazelle was asked to update (c.Dirs), Walk
|
|
||||||
// also parses source files and infers build information.
|
|
||||||
//
|
|
||||||
// c is the base configuration for the repository. c may be copied and modified
|
|
||||||
// by directives found in build files.
|
|
||||||
//
|
|
||||||
// root is an absolute file path to the directory to traverse.
|
|
||||||
//
|
|
||||||
// f is a function that will be called for each visited directory.
|
|
||||||
func Walk(c *config.Config, root string, f WalkFunc) {
|
|
||||||
// Determine relative paths for the directories to be updated.
|
|
||||||
var updateRels []string
|
|
||||||
for _, dir := range c.Dirs {
|
|
||||||
rel, err := filepath.Rel(c.RepoRoot, dir)
|
|
||||||
if err != nil {
|
|
||||||
// This should have been verified when c was built.
|
|
||||||
log.Panicf("%s: not a subdirectory of repository root %q", dir, c.RepoRoot)
|
|
||||||
}
|
|
||||||
rel = filepath.ToSlash(rel)
|
|
||||||
if rel == "." || rel == "/" {
|
|
||||||
rel = ""
|
|
||||||
}
|
|
||||||
updateRels = append(updateRels, rel)
|
|
||||||
}
|
|
||||||
rootRel, err := filepath.Rel(c.RepoRoot, root)
|
|
||||||
if err != nil {
|
|
||||||
log.Panicf("%s: not a subdirectory of repository root %q", root, c.RepoRoot)
|
|
||||||
}
|
|
||||||
if rootRel == "." || rootRel == "/" {
|
|
||||||
rootRel = ""
|
|
||||||
}
|
|
||||||
|
|
||||||
symlinks := symlinkResolver{root: root, visited: []string{root}}
|
|
||||||
|
|
||||||
// visit walks the directory tree in post-order. It returns whether the
|
|
||||||
// given directory or any subdirectory contained a build file or buildable
|
|
||||||
// source code. This affects whether "testdata" directories are considered
|
|
||||||
// data dependencies.
|
|
||||||
var visit func(*config.Config, string, string, bool, []string) bool
|
|
||||||
visit = func(c *config.Config, dir, rel string, isUpdateDir bool, excluded []string) bool {
|
|
||||||
// Check if this directory should be updated.
|
|
||||||
if !isUpdateDir {
|
|
||||||
for _, updateRel := range updateRels {
|
|
||||||
if pathtools.HasPrefix(rel, updateRel) {
|
|
||||||
isUpdateDir = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Look for an existing BUILD file.
|
|
||||||
var oldFile *bf.File
|
|
||||||
haveError := false
|
|
||||||
for _, base := range c.ValidBuildFileNames {
|
|
||||||
oldPath := filepath.Join(dir, base)
|
|
||||||
st, err := os.Stat(oldPath)
|
|
||||||
if os.IsNotExist(err) || err == nil && st.IsDir() {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
oldData, err := ioutil.ReadFile(oldPath)
|
|
||||||
if err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
haveError = true
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if oldFile != nil {
|
|
||||||
log.Printf("in directory %s, multiple Bazel files are present: %s, %s",
|
|
||||||
dir, filepath.Base(oldFile.Path), base)
|
|
||||||
haveError = true
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
oldFile, err = bf.Parse(oldPath, oldData)
|
|
||||||
if err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
haveError = true
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process directives in the build file. If this is a vendor directory,
|
|
||||||
// set an empty prefix.
|
|
||||||
if path.Base(rel) == "vendor" {
|
|
||||||
cCopy := *c
|
|
||||||
cCopy.GoPrefix = ""
|
|
||||||
cCopy.GoPrefixRel = rel
|
|
||||||
cCopy.GoImportMapPrefix = path.Join(c.RepoName, rel)
|
|
||||||
cCopy.GoImportMapPrefixRel = rel
|
|
||||||
c = &cCopy
|
|
||||||
}
|
|
||||||
var directives []config.Directive
|
|
||||||
if oldFile != nil {
|
|
||||||
directives = config.ParseDirectives(oldFile)
|
|
||||||
c = config.ApplyDirectives(c, directives, rel)
|
|
||||||
}
|
|
||||||
c = config.InferProtoMode(c, rel, oldFile, directives)
|
|
||||||
|
|
||||||
var ignore bool
|
|
||||||
for _, d := range directives {
|
|
||||||
switch d.Key {
|
|
||||||
case "exclude":
|
|
||||||
excluded = append(excluded, d.Value)
|
|
||||||
case "ignore":
|
|
||||||
ignore = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// List files and subdirectories.
|
|
||||||
files, err := ioutil.ReadDir(dir)
|
|
||||||
if err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if c.ProtoMode == config.DefaultProtoMode {
|
|
||||||
excluded = append(excluded, findPbGoFiles(files, excluded)...)
|
|
||||||
}
|
|
||||||
|
|
||||||
var pkgFiles, otherFiles, subdirs []string
|
|
||||||
for _, f := range files {
|
|
||||||
base := f.Name()
|
|
||||||
switch {
|
|
||||||
case base == "" || base[0] == '.' || base[0] == '_' || isExcluded(excluded, base):
|
|
||||||
continue
|
|
||||||
|
|
||||||
case f.IsDir():
|
|
||||||
subdirs = append(subdirs, base)
|
|
||||||
|
|
||||||
case strings.HasSuffix(base, ".go") ||
|
|
||||||
(c.ProtoMode != config.DisableProtoMode && strings.HasSuffix(base, ".proto")):
|
|
||||||
pkgFiles = append(pkgFiles, base)
|
|
||||||
|
|
||||||
case f.Mode()&os.ModeSymlink != 0 && symlinks.follow(dir, base):
|
|
||||||
subdirs = append(subdirs, base)
|
|
||||||
|
|
||||||
default:
|
|
||||||
otherFiles = append(otherFiles, base)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
// Recurse into subdirectories.
|
|
||||||
hasTestdata := false
|
|
||||||
subdirHasPackage := false
|
|
||||||
for _, sub := range subdirs {
|
|
||||||
subdirExcluded := excludedForSubdir(excluded, sub)
|
|
||||||
hasPackage := visit(c, filepath.Join(dir, sub), path.Join(rel, sub), isUpdateDir, subdirExcluded)
|
|
||||||
if sub == "testdata" && !hasPackage {
|
|
||||||
hasTestdata = true
|
|
||||||
}
|
|
||||||
subdirHasPackage = subdirHasPackage || hasPackage
|
|
||||||
}
|
|
||||||
|
|
||||||
hasPackage := subdirHasPackage || oldFile != nil
|
|
||||||
if haveError || !isUpdateDir || ignore {
|
|
||||||
f(dir, rel, c, nil, oldFile, false)
|
|
||||||
return hasPackage
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build a package from files in this directory.
|
|
||||||
var genFiles []string
|
|
||||||
if oldFile != nil {
|
|
||||||
genFiles = findGenFiles(oldFile, excluded)
|
|
||||||
}
|
|
||||||
pkg := buildPackage(c, dir, rel, pkgFiles, otherFiles, genFiles, hasTestdata)
|
|
||||||
f(dir, rel, c, pkg, oldFile, true)
|
|
||||||
return hasPackage || pkg != nil
|
|
||||||
}
|
|
||||||
|
|
||||||
visit(c, root, rootRel, false, nil)
|
|
||||||
}
|
|
||||||
|
|
||||||
// buildPackage reads source files in a given directory and returns a Package
|
|
||||||
// containing information about those files and how to build them.
|
|
||||||
//
|
|
||||||
// If no buildable .go files are found in the directory, nil will be returned.
|
|
||||||
// If the directory contains multiple buildable packages, the package whose
|
|
||||||
// name matches the directory base name will be returned. If there is no such
|
|
||||||
// package or if an error occurs, an error will be logged, and nil will be
|
|
||||||
// returned.
|
|
||||||
func buildPackage(c *config.Config, dir, rel string, pkgFiles, otherFiles, genFiles []string, hasTestdata bool) *Package {
|
|
||||||
// Process .go and .proto files first, since these determine the package name.
|
|
||||||
packageMap := make(map[string]*packageBuilder)
|
|
||||||
cgo := false
|
|
||||||
var pkgFilesWithUnknownPackage []fileInfo
|
|
||||||
for _, f := range pkgFiles {
|
|
||||||
var info fileInfo
|
|
||||||
switch path.Ext(f) {
|
|
||||||
case ".go":
|
|
||||||
info = goFileInfo(c, dir, rel, f)
|
|
||||||
case ".proto":
|
|
||||||
info = protoFileInfo(c, dir, rel, f)
|
|
||||||
default:
|
|
||||||
log.Panicf("file cannot determine package name: %s", f)
|
|
||||||
}
|
|
||||||
if info.packageName == "" {
|
|
||||||
pkgFilesWithUnknownPackage = append(pkgFilesWithUnknownPackage, info)
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if info.packageName == "documentation" {
|
|
||||||
// go/build ignores this package
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
cgo = cgo || info.isCgo
|
|
||||||
|
|
||||||
if _, ok := packageMap[info.packageName]; !ok {
|
|
||||||
packageMap[info.packageName] = &packageBuilder{
|
|
||||||
name: info.packageName,
|
|
||||||
dir: dir,
|
|
||||||
rel: rel,
|
|
||||||
hasTestdata: hasTestdata,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if err := packageMap[info.packageName].addFile(c, info, false); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Select a package to generate rules for.
|
|
||||||
pkg, err := selectPackage(c, dir, packageMap)
|
|
||||||
if err != nil {
|
|
||||||
if _, ok := err.(*build.NoGoError); !ok {
|
|
||||||
log.Print(err)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add files with unknown packages. This happens when there are parse
|
|
||||||
// or I/O errors. We should keep the file in the srcs list and let the
|
|
||||||
// compiler deal with the error.
|
|
||||||
for _, info := range pkgFilesWithUnknownPackage {
|
|
||||||
if err := pkg.addFile(c, info, cgo); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process the other static files.
|
|
||||||
for _, file := range otherFiles {
|
|
||||||
info := otherFileInfo(dir, rel, file)
|
|
||||||
if err := pkg.addFile(c, info, cgo); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process generated files. Note that generated files may have the same names
|
|
||||||
// as static files. Bazel will use the generated files, but we will look at
|
|
||||||
// the content of static files, assuming they will be the same.
|
|
||||||
staticFiles := make(map[string]bool)
|
|
||||||
for _, f := range pkgFiles {
|
|
||||||
staticFiles[f] = true
|
|
||||||
}
|
|
||||||
for _, f := range otherFiles {
|
|
||||||
staticFiles[f] = true
|
|
||||||
}
|
|
||||||
for _, f := range genFiles {
|
|
||||||
if staticFiles[f] {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
info := fileNameInfo(dir, rel, f)
|
|
||||||
if err := pkg.addFile(c, info, cgo); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if pkg.importPath == "" {
|
|
||||||
if err := pkg.inferImportPath(c); err != nil {
|
|
||||||
log.Print(err)
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return pkg.build()
|
|
||||||
}
|
|
||||||
|
|
||||||
func selectPackage(c *config.Config, dir string, packageMap map[string]*packageBuilder) (*packageBuilder, error) {
|
|
||||||
buildablePackages := make(map[string]*packageBuilder)
|
|
||||||
for name, pkg := range packageMap {
|
|
||||||
if pkg.isBuildable(c) {
|
|
||||||
buildablePackages[name] = pkg
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(buildablePackages) == 0 {
|
|
||||||
return nil, &build.NoGoError{Dir: dir}
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(buildablePackages) == 1 {
|
|
||||||
for _, pkg := range buildablePackages {
|
|
||||||
return pkg, nil
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if pkg, ok := buildablePackages[defaultPackageName(c, dir)]; ok {
|
|
||||||
return pkg, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
err := &build.MultiplePackageError{Dir: dir}
|
|
||||||
for name, pkg := range buildablePackages {
|
|
||||||
// Add the first file for each package for the error message.
|
|
||||||
// Error() method expects these lists to be the same length. File
|
|
||||||
// lists must be non-empty. These lists are only created by
|
|
||||||
// buildPackage for packages with .go files present.
|
|
||||||
err.Packages = append(err.Packages, name)
|
|
||||||
err.Files = append(err.Files, pkg.firstGoFile())
|
|
||||||
}
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func defaultPackageName(c *config.Config, dir string) string {
|
|
||||||
if dir != c.RepoRoot {
|
|
||||||
return filepath.Base(dir)
|
|
||||||
}
|
|
||||||
name := path.Base(c.GoPrefix)
|
|
||||||
if name == "." || name == "/" {
|
|
||||||
// This can happen if go_prefix is empty or is all slashes.
|
|
||||||
return "unnamed"
|
|
||||||
}
|
|
||||||
return name
|
|
||||||
}
|
|
||||||
|
|
||||||
func findGenFiles(f *bf.File, excluded []string) []string {
|
|
||||||
var strs []string
|
|
||||||
for _, r := range f.Rules("") {
|
|
||||||
for _, key := range []string{"out", "outs"} {
|
|
||||||
switch e := r.Attr(key).(type) {
|
|
||||||
case *bf.StringExpr:
|
|
||||||
strs = append(strs, e.Value)
|
|
||||||
case *bf.ListExpr:
|
|
||||||
for _, elem := range e.List {
|
|
||||||
if s, ok := elem.(*bf.StringExpr); ok {
|
|
||||||
strs = append(strs, s.Value)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
var genFiles []string
|
|
||||||
for _, s := range strs {
|
|
||||||
if !isExcluded(excluded, s) {
|
|
||||||
genFiles = append(genFiles, s)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return genFiles
|
|
||||||
}
|
|
||||||
|
|
||||||
func findPbGoFiles(files []os.FileInfo, excluded []string) []string {
|
|
||||||
var pbGoFiles []string
|
|
||||||
for _, f := range files {
|
|
||||||
name := f.Name()
|
|
||||||
if strings.HasSuffix(name, ".proto") && !isExcluded(excluded, name) {
|
|
||||||
pbGoFiles = append(pbGoFiles, name[:len(name)-len(".proto")]+".pb.go")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return pbGoFiles
|
|
||||||
}
|
|
||||||
|
|
||||||
func isExcluded(excluded []string, base string) bool {
|
|
||||||
for _, e := range excluded {
|
|
||||||
if base == e {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
func excludedForSubdir(excluded []string, subdir string) []string {
|
|
||||||
var filtered []string
|
|
||||||
for _, e := range excluded {
|
|
||||||
i := strings.IndexByte(e, '/')
|
|
||||||
if i < 0 || i == len(e)-1 || e[:i] != subdir {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
filtered = append(filtered, e[i+1:])
|
|
||||||
}
|
|
||||||
return filtered
|
|
||||||
}
|
|
||||||
|
|
||||||
type symlinkResolver struct {
|
|
||||||
root string
|
|
||||||
visited []string
|
|
||||||
}
|
|
||||||
|
|
||||||
// Decide if symlink dir/base should be followed.
|
|
||||||
func (r *symlinkResolver) follow(dir, base string) bool {
|
|
||||||
if dir == r.root && strings.HasPrefix(base, "bazel-") {
|
|
||||||
// Links such as bazel-<workspace>, bazel-out, bazel-genfiles are created by
|
|
||||||
// Bazel to point to internal build directories.
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
// See if the symlink points to a tree that has been already visited.
|
|
||||||
fullpath := filepath.Join(dir, base)
|
|
||||||
dest, err := filepath.EvalSymlinks(fullpath)
|
|
||||||
if err != nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
if !filepath.IsAbs(dest) {
|
|
||||||
dest, err = filepath.Abs(filepath.Join(dir, dest))
|
|
||||||
if err != nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for _, p := range r.visited {
|
|
||||||
if pathtools.HasPrefix(dest, p) || pathtools.HasPrefix(p, dest) {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
r.visited = append(r.visited, dest)
|
|
||||||
stat, err := os.Stat(fullpath)
|
|
||||||
if err != nil {
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return stat.IsDir()
|
|
||||||
}
|
|
||||||
3
vendor/github.com/bazelbuild/bazel-gazelle/internal/repos/BUILD
generated
vendored
3
vendor/github.com/bazelbuild/bazel-gazelle/internal/repos/BUILD
generated
vendored
@@ -11,10 +11,9 @@ go_library(
|
|||||||
importpath = "github.com/bazelbuild/bazel-gazelle/internal/repos",
|
importpath = "github.com/bazelbuild/bazel-gazelle/internal/repos",
|
||||||
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
|
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
|
||||||
deps = [
|
deps = [
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/generator:go_default_library",
|
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
|
||||||
"//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
|
||||||
"//vendor/github.com/pelletier/go-toml:go_default_library",
|
"//vendor/github.com/pelletier/go-toml:go_default_library",
|
||||||
"//vendor/golang.org/x/tools/go/vcs:go_default_library",
|
"//vendor/golang.org/x/tools/go/vcs:go_default_library",
|
||||||
],
|
],
|
||||||
|
|||||||
7
vendor/github.com/bazelbuild/bazel-gazelle/internal/repos/remote.go
generated
vendored
7
vendor/github.com/bazelbuild/bazel-gazelle/internal/repos/remote.go
generated
vendored
@@ -272,7 +272,12 @@ func defaultHeadCmd(remote, vcs string) (string, error) {
|
|||||||
return "", nil
|
return "", nil
|
||||||
|
|
||||||
case "git":
|
case "git":
|
||||||
cmd := exec.Command("git", "ls-remote", "--", remote, "HEAD")
|
// Old versions of git ls-remote exit with code 129 when "--" is passed.
|
||||||
|
// We'll try to validate the argument here instead.
|
||||||
|
if strings.HasPrefix(remote, "-") {
|
||||||
|
return "", fmt.Errorf("remote must not start with '-': %q", remote)
|
||||||
|
}
|
||||||
|
cmd := exec.Command("git", "ls-remote", remote, "HEAD")
|
||||||
out, err := cmd.Output()
|
out, err := cmd.Output()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return "", err
|
return "", err
|
||||||
|
|||||||
32
vendor/github.com/bazelbuild/bazel-gazelle/internal/repos/repo.go
generated
vendored
32
vendor/github.com/bazelbuild/bazel-gazelle/internal/repos/repo.go
generated
vendored
@@ -22,8 +22,7 @@ import (
|
|||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/generator"
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// Repo describes an external repository rule declared in a Bazel
|
// Repo describes an external repository rule declared in a Bazel
|
||||||
@@ -72,7 +71,7 @@ var lockFileParsers = map[lockFileFormat]func(string) ([]Repo, error){
|
|||||||
// a list of equivalent repository rules that can be merged into a WORKSPACE
|
// a list of equivalent repository rules that can be merged into a WORKSPACE
|
||||||
// file. The format of the file is inferred from its basename. Currently,
|
// file. The format of the file is inferred from its basename. Currently,
|
||||||
// only Gopkg.lock is supported.
|
// only Gopkg.lock is supported.
|
||||||
func ImportRepoRules(filename string) ([]bf.Expr, error) {
|
func ImportRepoRules(filename string) ([]*rule.Rule, error) {
|
||||||
format := getLockFileFormat(filename)
|
format := getLockFileFormat(filename)
|
||||||
if format == unknownFormat {
|
if format == unknownFormat {
|
||||||
return nil, fmt.Errorf(`%s: unrecognized lock file format. Expected "Gopkg.lock"`, filename)
|
return nil, fmt.Errorf(`%s: unrecognized lock file format. Expected "Gopkg.lock"`, filename)
|
||||||
@@ -84,7 +83,7 @@ func ImportRepoRules(filename string) ([]bf.Expr, error) {
|
|||||||
}
|
}
|
||||||
sort.Stable(byName(repos))
|
sort.Stable(byName(repos))
|
||||||
|
|
||||||
rules := make([]bf.Expr, 0, len(repos))
|
rules := make([]*rule.Rule, 0, len(repos))
|
||||||
for _, repo := range repos {
|
for _, repo := range repos {
|
||||||
rules = append(rules, GenerateRule(repo))
|
rules = append(rules, GenerateRule(repo))
|
||||||
}
|
}
|
||||||
@@ -102,19 +101,17 @@ func getLockFileFormat(filename string) lockFileFormat {
|
|||||||
|
|
||||||
// GenerateRule returns a repository rule for the given repository that can
|
// GenerateRule returns a repository rule for the given repository that can
|
||||||
// be written in a WORKSPACE file.
|
// be written in a WORKSPACE file.
|
||||||
func GenerateRule(repo Repo) bf.Expr {
|
func GenerateRule(repo Repo) *rule.Rule {
|
||||||
attrs := []generator.KeyValue{
|
r := rule.NewRule("go_repository", repo.Name)
|
||||||
{Key: "name", Value: repo.Name},
|
r.SetAttr("commit", repo.Commit)
|
||||||
{Key: "commit", Value: repo.Commit},
|
r.SetAttr("importpath", repo.GoPrefix)
|
||||||
{Key: "importpath", Value: repo.GoPrefix},
|
|
||||||
}
|
|
||||||
if repo.Remote != "" {
|
if repo.Remote != "" {
|
||||||
attrs = append(attrs, generator.KeyValue{Key: "remote", Value: repo.Remote})
|
r.SetAttr("remote", repo.Remote)
|
||||||
}
|
}
|
||||||
if repo.VCS != "" {
|
if repo.VCS != "" {
|
||||||
attrs = append(attrs, generator.KeyValue{Key: "vcs", Value: repo.VCS})
|
r.SetAttr("vcs", repo.VCS)
|
||||||
}
|
}
|
||||||
return generator.NewRule("go_repository", attrs)
|
return r
|
||||||
}
|
}
|
||||||
|
|
||||||
// FindExternalRepo attempts to locate the directory where Bazel has fetched
|
// FindExternalRepo attempts to locate the directory where Bazel has fetched
|
||||||
@@ -149,14 +146,9 @@ func FindExternalRepo(repoRoot, name string) (string, error) {
|
|||||||
//
|
//
|
||||||
// The set of repositories returned is necessarily incomplete, since we don't
|
// The set of repositories returned is necessarily incomplete, since we don't
|
||||||
// evaluate the file, and repositories may be declared in macros in other files.
|
// evaluate the file, and repositories may be declared in macros in other files.
|
||||||
func ListRepositories(workspace *bf.File) []Repo {
|
func ListRepositories(workspace *rule.File) []Repo {
|
||||||
var repos []Repo
|
var repos []Repo
|
||||||
for _, e := range workspace.Stmt {
|
for _, r := range workspace.Rules {
|
||||||
call, ok := e.(*bf.CallExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
r := bf.Rule{Call: call}
|
|
||||||
name := r.Name()
|
name := r.Name()
|
||||||
if name == "" {
|
if name == "" {
|
||||||
continue
|
continue
|
||||||
|
|||||||
11
vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/BUILD
generated
vendored
11
vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/BUILD
generated
vendored
@@ -2,22 +2,15 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library")
|
|||||||
|
|
||||||
go_library(
|
go_library(
|
||||||
name = "go_default_library",
|
name = "go_default_library",
|
||||||
srcs = [
|
srcs = ["index.go"],
|
||||||
"index.go",
|
|
||||||
"resolve.go",
|
|
||||||
"resolve_external.go",
|
|
||||||
"resolve_vendored.go",
|
|
||||||
"std_package_list.go",
|
|
||||||
],
|
|
||||||
importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve",
|
importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve",
|
||||||
importpath = "github.com/bazelbuild/bazel-gazelle/internal/resolve",
|
importpath = "github.com/bazelbuild/bazel-gazelle/internal/resolve",
|
||||||
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
|
visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
|
||||||
deps = [
|
deps = [
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
|
|
||||||
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/repos:go_default_library",
|
||||||
"//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
|
"//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
404
vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/index.go
generated
vendored
404
vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/index.go
generated
vendored
@@ -1,4 +1,4 @@
|
|||||||
/* Copyright 2017 The Bazel Authors. All rights reserved.
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
you may not use this file except in compliance with the License.
|
you may not use this file except in compliance with the License.
|
||||||
@@ -16,97 +16,118 @@ limitations under the License.
|
|||||||
package resolve
|
package resolve
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
|
||||||
"log"
|
"log"
|
||||||
"path"
|
|
||||||
"path/filepath"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
"github.com/bazelbuild/bazel-gazelle/internal/label"
|
"github.com/bazelbuild/bazel-gazelle/internal/label"
|
||||||
bf "github.com/bazelbuild/buildtools/build"
|
"github.com/bazelbuild/bazel-gazelle/internal/repos"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
// ImportSpec describes a library to be imported. Imp is an import string for
|
||||||
|
// the library. Lang is the language in which the import string appears (this
|
||||||
|
// should match Resolver.Name).
|
||||||
|
type ImportSpec struct {
|
||||||
|
Lang, Imp string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Resolver is an interface that language extensions can implement to resolve
|
||||||
|
// dependencies in rules they generate.
|
||||||
|
type Resolver interface {
|
||||||
|
// Name returns the name of the language. This should be a prefix of the
|
||||||
|
// kinds of rules generated by the language, e.g., "go" for the Go extension
|
||||||
|
// since it generates "go_library" rules.
|
||||||
|
Name() string
|
||||||
|
|
||||||
|
// Imports returns a list of ImportSpecs that can be used to import the rule
|
||||||
|
// r. This is used to populate RuleIndex.
|
||||||
|
//
|
||||||
|
// If nil is returned, the rule will not be indexed. If any non-nil slice is
|
||||||
|
// returned, including an empty slice, the rule will be indexed.
|
||||||
|
Imports(c *config.Config, r *rule.Rule, f *rule.File) []ImportSpec
|
||||||
|
|
||||||
|
// Embeds returns a list of labels of rules that the given rule embeds. If
|
||||||
|
// a rule is embedded by another importable rule of the same language, only
|
||||||
|
// the embedding rule will be indexed. The embedding rule will inherit
|
||||||
|
// the imports of the embedded rule.
|
||||||
|
Embeds(r *rule.Rule, from label.Label) []label.Label
|
||||||
|
|
||||||
|
// Resolve translates imported libraries for a given rule into Bazel
|
||||||
|
// dependencies. A list of imported libraries is typically stored in a
|
||||||
|
// private attribute of the rule when it's generated (this interface doesn't
|
||||||
|
// dictate how that is stored or represented). Resolve generates a "deps"
|
||||||
|
// attribute (or the appropriate language-specific equivalent) for each
|
||||||
|
// import according to language-specific rules and heuristics.
|
||||||
|
Resolve(c *config.Config, ix *RuleIndex, rc *repos.RemoteCache, r *rule.Rule, from label.Label)
|
||||||
|
}
|
||||||
|
|
||||||
// RuleIndex is a table of rules in a workspace, indexed by label and by
|
// RuleIndex is a table of rules in a workspace, indexed by label and by
|
||||||
// import path. Used by Resolver to map import paths to labels.
|
// import path. Used by Resolver to map import paths to labels.
|
||||||
type RuleIndex struct {
|
type RuleIndex struct {
|
||||||
rules []*ruleRecord
|
rules []*ruleRecord
|
||||||
labelMap map[label.Label]*ruleRecord
|
labelMap map[label.Label]*ruleRecord
|
||||||
importMap map[importSpec][]*ruleRecord
|
importMap map[ImportSpec][]*ruleRecord
|
||||||
|
kindToResolver map[string]Resolver
|
||||||
}
|
}
|
||||||
|
|
||||||
// ruleRecord contains information about a rule relevant to import indexing.
|
// ruleRecord contains information about a rule relevant to import indexing.
|
||||||
type ruleRecord struct {
|
type ruleRecord struct {
|
||||||
rule bf.Rule
|
rule *rule.Rule
|
||||||
label label.Label
|
label label.Label
|
||||||
lang config.Language
|
|
||||||
importedAs []importSpec
|
// importedAs is a list of ImportSpecs by which this rule may be imported.
|
||||||
embedded bool
|
// Used to build a map from ImportSpecs to ruleRecords.
|
||||||
|
importedAs []ImportSpec
|
||||||
|
|
||||||
|
// embeds is the transitive closure of labels for rules that this rule embeds
|
||||||
|
// (as determined by the Embeds method). This only includes rules in the same
|
||||||
|
// language (i.e., it includes a go_library embedding a go_proto_library, but
|
||||||
|
// not a go_proto_library embedding a proto_library).
|
||||||
|
embeds []label.Label
|
||||||
|
|
||||||
|
// embedded indicates whether another rule of the same language embeds this
|
||||||
|
// rule. Embedded rules should not be indexed.
|
||||||
|
embedded bool
|
||||||
|
|
||||||
|
didCollectEmbeds bool
|
||||||
}
|
}
|
||||||
|
|
||||||
// importSpec describes a package to be imported. Language is specified, since
|
// NewRuleIndex creates a new index.
|
||||||
// different languages have different formats for their imports.
|
//
|
||||||
type importSpec struct {
|
// kindToResolver is a map from rule kinds (for example, "go_library") to
|
||||||
lang config.Language
|
// Resolvers that support those kinds.
|
||||||
imp string
|
func NewRuleIndex(kindToResolver map[string]Resolver) *RuleIndex {
|
||||||
}
|
|
||||||
|
|
||||||
func NewRuleIndex() *RuleIndex {
|
|
||||||
return &RuleIndex{
|
return &RuleIndex{
|
||||||
labelMap: make(map[label.Label]*ruleRecord),
|
labelMap: make(map[label.Label]*ruleRecord),
|
||||||
|
kindToResolver: kindToResolver,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// AddRulesFromFile adds existing rules to the index from file
|
// AddRule adds a rule r to the index. The rule will only be indexed if there
|
||||||
// (which must not be nil).
|
// is a known resolver for the rule's kind and Resolver.Imports returns a
|
||||||
func (ix *RuleIndex) AddRulesFromFile(c *config.Config, file *bf.File) {
|
// non-nil slice.
|
||||||
buildRel, err := filepath.Rel(c.RepoRoot, file.Path)
|
//
|
||||||
if err != nil {
|
// AddRule may only be called before Finish.
|
||||||
log.Panicf("file not in repo: %s", file.Path)
|
func (ix *RuleIndex) AddRule(c *config.Config, r *rule.Rule, f *rule.File) {
|
||||||
|
var imps []ImportSpec
|
||||||
|
if rslv, ok := ix.kindToResolver[r.Kind()]; ok {
|
||||||
|
imps = rslv.Imports(c, r, f)
|
||||||
}
|
}
|
||||||
buildRel = path.Dir(filepath.ToSlash(buildRel))
|
// If imps == nil, the rule is not importable. If imps is the empty slice,
|
||||||
if buildRel == "." || buildRel == "/" {
|
// it may still be importable if it embeds importable libraries.
|
||||||
buildRel = ""
|
if imps == nil {
|
||||||
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, stmt := range file.Stmt {
|
|
||||||
if call, ok := stmt.(*bf.CallExpr); ok {
|
|
||||||
ix.addRule(call, c.GoPrefix, buildRel)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ix *RuleIndex) addRule(call *bf.CallExpr, goPrefix, buildRel string) {
|
|
||||||
rule := bf.Rule{Call: call}
|
|
||||||
record := &ruleRecord{
|
record := &ruleRecord{
|
||||||
rule: rule,
|
rule: r,
|
||||||
label: label.New("", buildRel, rule.Name()),
|
label: label.New(c.RepoName, f.Pkg, r.Name()),
|
||||||
|
importedAs: imps,
|
||||||
}
|
}
|
||||||
|
|
||||||
if _, ok := ix.labelMap[record.label]; ok {
|
if _, ok := ix.labelMap[record.label]; ok {
|
||||||
log.Printf("multiple rules found with label %s", record.label)
|
log.Printf("multiple rules found with label %s", record.label)
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
kind := rule.Kind()
|
|
||||||
switch {
|
|
||||||
case isGoLibrary(kind):
|
|
||||||
record.lang = config.GoLang
|
|
||||||
if imp := rule.AttrString("importpath"); imp != "" {
|
|
||||||
record.importedAs = []importSpec{{lang: config.GoLang, imp: imp}}
|
|
||||||
}
|
|
||||||
// Additional proto imports may be added in Finish.
|
|
||||||
|
|
||||||
case kind == "proto_library":
|
|
||||||
record.lang = config.ProtoLang
|
|
||||||
for _, s := range findSources(rule, buildRel, ".proto") {
|
|
||||||
record.importedAs = append(record.importedAs, importSpec{lang: config.ProtoLang, imp: s})
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
ix.rules = append(ix.rules, record)
|
ix.rules = append(ix.rules, record)
|
||||||
ix.labelMap[record.label] = record
|
ix.labelMap[record.label] = record
|
||||||
}
|
}
|
||||||
@@ -115,232 +136,111 @@ func (ix *RuleIndex) addRule(call *bf.CallExpr, goPrefix, buildRel string) {
|
|||||||
// actions after all rules have been added. This step is necessary because
|
// actions after all rules have been added. This step is necessary because
|
||||||
// a rule may be indexed differently based on what rules are added later.
|
// a rule may be indexed differently based on what rules are added later.
|
||||||
//
|
//
|
||||||
// This function must be called after all AddRulesFromFile calls but before any
|
// Finish must be called after all AddRule calls and before any
|
||||||
// findRuleByImport calls.
|
// FindRulesByImport calls.
|
||||||
func (ix *RuleIndex) Finish() {
|
func (ix *RuleIndex) Finish() {
|
||||||
ix.skipGoEmbds()
|
for _, r := range ix.rules {
|
||||||
|
ix.collectEmbeds(r)
|
||||||
|
}
|
||||||
ix.buildImportIndex()
|
ix.buildImportIndex()
|
||||||
}
|
}
|
||||||
|
|
||||||
// skipGoEmbeds sets the embedded flag on Go library rules that are imported
|
func (ix *RuleIndex) collectEmbeds(r *ruleRecord) {
|
||||||
// by other Go library rules with the same import path. Note that embedded
|
if r.didCollectEmbeds {
|
||||||
// rules may still be imported with non-Go imports. For example, a
|
return
|
||||||
// go_proto_library may be imported with either a Go import path or a proto
|
}
|
||||||
// path. If the library is embedded, only the proto path will be indexed.
|
r.didCollectEmbeds = true
|
||||||
func (ix *RuleIndex) skipGoEmbds() {
|
embedLabels := ix.kindToResolver[r.rule.Kind()].Embeds(r.rule, r.label)
|
||||||
for _, r := range ix.rules {
|
r.embeds = embedLabels
|
||||||
if !isGoLibrary(r.rule.Kind()) {
|
for _, e := range embedLabels {
|
||||||
|
er, ok := ix.findRuleByLabel(e, r.label)
|
||||||
|
if !ok {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
importpath := r.rule.AttrString("importpath")
|
ix.collectEmbeds(er)
|
||||||
|
if ix.kindToResolver[r.rule.Kind()] == ix.kindToResolver[er.rule.Kind()] {
|
||||||
var embedLabels []label.Label
|
er.embedded = true
|
||||||
if embedList, ok := r.rule.Attr("embed").(*bf.ListExpr); ok {
|
r.embeds = append(r.embeds, er.embeds...)
|
||||||
for _, embedElem := range embedList.List {
|
|
||||||
embedStr, ok := embedElem.(*bf.StringExpr)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
embedLabel, err := label.Parse(embedStr.Value)
|
|
||||||
if err != nil {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
embedLabels = append(embedLabels, embedLabel)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if libraryStr, ok := r.rule.Attr("library").(*bf.StringExpr); ok {
|
|
||||||
if libraryLabel, err := label.Parse(libraryStr.Value); err == nil {
|
|
||||||
embedLabels = append(embedLabels, libraryLabel)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, l := range embedLabels {
|
|
||||||
embed, ok := ix.findRuleByLabel(l, r.label)
|
|
||||||
if !ok {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if embed.rule.AttrString("importpath") != importpath {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
embed.embedded = true
|
|
||||||
}
|
}
|
||||||
|
r.importedAs = append(r.importedAs, er.importedAs...)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// buildImportIndex constructs the map used by findRuleByImport.
|
// buildImportIndex constructs the map used by FindRulesByImport.
|
||||||
func (ix *RuleIndex) buildImportIndex() {
|
func (ix *RuleIndex) buildImportIndex() {
|
||||||
ix.importMap = make(map[importSpec][]*ruleRecord)
|
ix.importMap = make(map[ImportSpec][]*ruleRecord)
|
||||||
for _, r := range ix.rules {
|
for _, r := range ix.rules {
|
||||||
if isGoProtoLibrary(r.rule.Kind()) {
|
if r.embedded {
|
||||||
protoImports := findGoProtoSources(ix, r)
|
continue
|
||||||
r.importedAs = append(r.importedAs, protoImports...)
|
|
||||||
}
|
}
|
||||||
|
indexed := make(map[ImportSpec]bool)
|
||||||
for _, imp := range r.importedAs {
|
for _, imp := range r.importedAs {
|
||||||
if imp.lang == config.GoLang && r.embedded {
|
if indexed[imp] {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
indexed[imp] = true
|
||||||
ix.importMap[imp] = append(ix.importMap[imp], r)
|
ix.importMap[imp] = append(ix.importMap[imp], r)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
type ruleNotFoundError struct {
|
|
||||||
from label.Label
|
|
||||||
imp string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e ruleNotFoundError) Error() string {
|
|
||||||
return fmt.Sprintf("no rule found for import %q, needed in %s", e.imp, e.from)
|
|
||||||
}
|
|
||||||
|
|
||||||
type selfImportError struct {
|
|
||||||
from label.Label
|
|
||||||
imp string
|
|
||||||
}
|
|
||||||
|
|
||||||
func (e selfImportError) Error() string {
|
|
||||||
return fmt.Sprintf("rule %s imports itself with path %q", e.from, e.imp)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (ix *RuleIndex) findRuleByLabel(label label.Label, from label.Label) (*ruleRecord, bool) {
|
func (ix *RuleIndex) findRuleByLabel(label label.Label, from label.Label) (*ruleRecord, bool) {
|
||||||
label = label.Abs(from.Repo, from.Pkg)
|
label = label.Abs(from.Repo, from.Pkg)
|
||||||
r, ok := ix.labelMap[label]
|
r, ok := ix.labelMap[label]
|
||||||
return r, ok
|
return r, ok
|
||||||
}
|
}
|
||||||
|
|
||||||
-// findRuleByImport attempts to resolve an import string to a rule record.
+type FindResult struct {
+	// Label is the absolute label (including repository and package name) for
+	// a matched rule.
+	Label label.Label
+
+	Rule *rule.Rule
+
+	// Embeds is the transitive closure of labels for rules that the matched
+	// rule embeds. It may contains duplicates and does not include the label
+	// for the rule itself.
+	Embeds []label.Label
+}
+
+// FindRulesByImport attempts to resolve an import string to a rule record.
 // imp is the import to resolve (which includes the target language). lang is
 // the language of the rule with the dependency (for example, in
 // go_proto_library, imp will have ProtoLang and lang will be GoLang).
 // from is the rule which is doing the dependency. This is used to check
 // vendoring visibility and to check for self-imports.
 //
-// Any number of rules may provide the same import. If no rules provide the
-// import, ruleNotFoundError is returned. If a rule imports itself,
-// selfImportError is returned. If multiple rules provide the import, this
-// function will attempt to choose one based on Go vendoring logic. In
-// ambiguous cases, an error is returned.
-func (ix *RuleIndex) findRuleByImport(imp importSpec, lang config.Language, from label.Label) (*ruleRecord, error) {
+// FindRulesByImport returns a list of rules, since any number of rules may
+// provide the same import. Callers may need to resolve ambiguities using
+// language-specific heuristics.
+func (ix *RuleIndex) FindRulesByImport(imp ImportSpec, lang string) []FindResult {
 	matches := ix.importMap[imp]
-	var bestMatch *ruleRecord
-	var bestMatchIsVendored bool
-	var bestMatchVendorRoot string
-	var matchError error
+	results := make([]FindResult, 0, len(matches))
 	for _, m := range matches {
-		if m.lang != lang {
+		if ix.kindToResolver[m.rule.Kind()].Name() != lang {
 			continue
 		}
+		results = append(results, FindResult{
+			Label:  m.label,
+			Rule:   m.rule,
+			Embeds: m.embeds,
+		})
+	}
+	return results
+}
-
-		switch imp.lang {
-		case config.GoLang:
-			// Apply vendoring logic for Go libraries. A library in a vendor directory
-			// is only visible in the parent tree. Vendored libraries supercede
-			// non-vendored libraries, and libraries closer to from.Pkg supercede
-			// those further up the tree.
-			isVendored := false
-			vendorRoot := ""
-			parts := strings.Split(m.label.Pkg, "/")
-			for i := len(parts) - 1; i >= 0; i-- {
-				if parts[i] == "vendor" {
-					isVendored = true
-					vendorRoot = strings.Join(parts[:i], "/")
-					break
-				}
-			}
-			if isVendored && !label.New(m.label.Repo, vendorRoot, "").Contains(from) {
-				// vendor directory not visible
-				continue
-			}
-			if bestMatch == nil || isVendored && (!bestMatchIsVendored || len(vendorRoot) > len(bestMatchVendorRoot)) {
-				// Current match is better
-				bestMatch = m
-				bestMatchIsVendored = isVendored
-				bestMatchVendorRoot = vendorRoot
-				matchError = nil
-			} else if (!isVendored && bestMatchIsVendored) || (isVendored && len(vendorRoot) < len(bestMatchVendorRoot)) {
-				// Current match is worse
-			} else {
-				// Match is ambiguous
-				matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.label, m.label, imp.imp, from)
-			}
-
-		default:
-			if bestMatch == nil {
-				bestMatch = m
-			} else {
-				matchError = fmt.Errorf("multiple rules (%s and %s) may be imported with %q from %s", bestMatch.label, m.label, imp.imp, from)
-			}
-		}
-	}
-	if matchError != nil {
-		return nil, matchError
-	}
-	if bestMatch == nil {
-		return nil, ruleNotFoundError{from, imp.imp}
-	}
-	if bestMatch.label.Equal(from) {
-		return nil, selfImportError{from, imp.imp}
-	}
-
-	if imp.lang == config.ProtoLang && lang == config.GoLang {
-		importpath := bestMatch.rule.AttrString("importpath")
-		if betterMatch, err := ix.findRuleByImport(importSpec{config.GoLang, importpath}, config.GoLang, from); err == nil {
-			return betterMatch, nil
-		}
-	}
-
-	return bestMatch, nil
-}
-
-func (ix *RuleIndex) findLabelByImport(imp importSpec, lang config.Language, from label.Label) (label.Label, error) {
-	r, err := ix.findRuleByImport(imp, lang, from)
-	if err != nil {
-		return label.NoLabel, err
-	}
-	return r.label, nil
-}
-
-func findGoProtoSources(ix *RuleIndex, r *ruleRecord) []importSpec {
-	protoLabel, err := label.Parse(r.rule.AttrString("proto"))
-	if err != nil {
-		return nil
-	}
-	proto, ok := ix.findRuleByLabel(protoLabel, r.label)
-	if !ok {
-		return nil
-	}
-	var importedAs []importSpec
-	for _, source := range findSources(proto.rule, proto.label.Pkg, ".proto") {
-		importedAs = append(importedAs, importSpec{lang: config.ProtoLang, imp: source})
-	}
-	return importedAs
-}
-
-func findSources(r bf.Rule, buildRel, ext string) []string {
-	srcsExpr := r.Attr("srcs")
-	srcsList, ok := srcsExpr.(*bf.ListExpr)
-	if !ok {
-		return nil
-	}
-	var srcs []string
-	for _, srcExpr := range srcsList.List {
-		src, ok := srcExpr.(*bf.StringExpr)
-		if !ok {
-			continue
-		}
-		label, err := label.Parse(src.Value)
-		if err != nil || !label.Relative || !strings.HasSuffix(label.Name, ext) {
-			continue
-		}
-		srcs = append(srcs, path.Join(buildRel, label.Name))
-	}
-	return srcs
-}
-
-func isGoLibrary(kind string) bool {
-	return kind == "go_library" || isGoProtoLibrary(kind)
-}
-
-func isGoProtoLibrary(kind string) bool {
-	return kind == "go_proto_library" || kind == "go_grpc_library"
-}
+
+// IsSelfImport returns true if the result's label matches the given label
+// or the result's rule transitively embeds the rule with the given label.
+// Self imports cause cyclic dependencies, so the caller may want to omit
+// the dependency or report an error.
+func (r FindResult) IsSelfImport(from label.Label) bool {
+	if from.Equal(r.Label) {
+		return true
+	}
+	for _, e := range r.Embeds {
+		if from.Equal(e) {
+			return true
+		}
+	}
+	return false
+}
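For orientation, a minimal sketch of how a caller might use the new FindRulesByImport and IsSelfImport API shown above; the helper itself and the ImportSpec field names (Lang, Imp, carried over from the old importSpec) are assumptions for illustration, not part of the vendored diff.

package resolve

import (
	"github.com/bazelbuild/bazel-gazelle/internal/label"
)

// resolveGoImport asks the index for every rule that provides a Go import
// and returns the first match that is not a self-import. Self-imports are
// skipped because they would create a dependency cycle.
func resolveGoImport(ix *RuleIndex, imp string, from label.Label) (label.Label, bool) {
	for _, res := range ix.FindRulesByImport(ImportSpec{Lang: "go", Imp: imp}, "go") {
		if res.IsSelfImport(from) {
			continue // skip cyclic dependency
		}
		return res.Label, true
	}
	return label.NoLabel, false
}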
380 vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/resolve.go generated vendored
@@ -1,380 +0,0 @@
59 vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/resolve_external.go generated vendored
@@ -1,59 +0,0 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package resolve

import (
	"github.com/bazelbuild/bazel-gazelle/internal/label"
	"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
	"github.com/bazelbuild/bazel-gazelle/internal/repos"
)

// externalResolver resolves import paths to external repositories. It uses
// vcs to determine the prefix of the import path that corresponds to the root
// of the repository (this will perform a network fetch for unqualified paths).
// The prefix is converted to a Bazel external name repo according to the
// guidelines in http://bazel.io/docs/be/functions.html#workspace. The remaining
// portion of the import path is treated as the package name.
type externalResolver struct {
	l  *label.Labeler
	rc *repos.RemoteCache
}

var _ nonlocalResolver = (*externalResolver)(nil)

func newExternalResolver(l *label.Labeler, rc *repos.RemoteCache) *externalResolver {
	return &externalResolver{l: l, rc: rc}
}

// Resolve resolves "importPath" into a label, assuming that it is a label in an
// external repository. It also assumes that the external repository follows the
// recommended reverse-DNS form of workspace name as described in
// http://bazel.io/docs/be/functions.html#workspace.
func (r *externalResolver) resolve(importPath string) (label.Label, error) {
	prefix, repo, err := r.rc.Root(importPath)
	if err != nil {
		return label.NoLabel, err
	}

	var pkg string
	if importPath != prefix {
		pkg = pathtools.TrimPrefix(importPath, prefix)
	}

	l := r.l.LibraryLabel(pkg)
	l.Repo = repo
	return l, nil
}
35 vendor/github.com/bazelbuild/bazel-gazelle/internal/resolve/resolve_vendored.go generated vendored
@@ -1,35 +0,0 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package resolve

import (
	"github.com/bazelbuild/bazel-gazelle/internal/label"
)

// vendoredResolver resolves external packages as packages in vendor/.
type vendoredResolver struct {
	l *label.Labeler
}

var _ nonlocalResolver = (*vendoredResolver)(nil)

func newVendoredResolver(l *label.Labeler) *vendoredResolver {
	return &vendoredResolver{l}
}

func (v *vendoredResolver) resolve(importpath string) (label.Label, error) {
	return v.l.LibraryLabel("vendor/" + importpath), nil
}
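The two deleted resolvers above describe how non-local Go imports become Bazel labels: vendor mode keeps everything under the main repository's vendor/ tree, while external mode converts the repository root to a reverse-DNS workspace name and treats the remainder as the package. A small standalone sketch of those naming conventions follows; the helper names, the simplifying assumption that the repository root equals the import-path prefix, and the example paths are all invented for illustration and are not gazelle APIs.

package main

import (
	"fmt"
	"strings"
)

// vendoredLabel mirrors vendoredResolver: the import path is looked up
// under the vendor/ tree of the main repository.
func vendoredLabel(importPath string) string {
	return "//vendor/" + importPath + ":go_default_library"
}

// externalLabel mirrors externalResolver for the simple case where the
// repository root is already known: the root becomes a reverse-DNS
// workspace name and the remainder becomes the package path.
func externalLabel(repoRoot, importPath string) string {
	parts := strings.Split(repoRoot, "/")
	host := strings.Split(parts[0], ".")
	// Reverse the host components: github.com -> com_github.
	for i, j := 0, len(host)-1; i < j; i, j = i+1, j-1 {
		host[i], host[j] = host[j], host[i]
	}
	repo := strings.Join(append(host, parts[1:]...), "_")
	repo = strings.Replace(repo, "-", "_", -1)
	pkg := strings.TrimPrefix(strings.TrimPrefix(importPath, repoRoot), "/")
	return "@" + repo + "//" + pkg + ":go_default_library"
}

func main() {
	// //vendor/github.com/pkg/errors:go_default_library
	fmt.Println(vendoredLabel("github.com/pkg/errors"))
	// @org_golang_x_tools//go/ssa:go_default_library
	fmt.Println(externalLabel("golang.org/x/tools", "golang.org/x/tools/go/ssa"))
}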
@@ -3,19 +3,21 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library")
 go_library(
     name = "go_default_library",
     srcs = [
-        "construct.go",
-        "doc.go",
-        "generator.go",
+        "directives.go",
+        "expr.go",
+        "merge.go",
+        "platform.go",
+        "platform_strings.go",
+        "rule.go",
         "sort_labels.go",
+        "types.go",
+        "value.go",
     ],
-    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/generator",
-    importpath = "github.com/bazelbuild/bazel-gazelle/internal/generator",
+    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/rule",
+    importpath = "github.com/bazelbuild/bazel-gazelle/internal/rule",
     visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
     deps = [
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/label:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/packages:go_default_library",
-        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
         "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
         "//vendor/github.com/bazelbuild/buildtools/tables:go_default_library",
     ],
64 vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/directives.go generated vendored Normal file
@@ -0,0 +1,64 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package rule

import (
	"regexp"

	bzl "github.com/bazelbuild/buildtools/build"
)

// Directive is a key-value pair extracted from a top-level comment in
// a build file. Directives have the following format:
//
//     # gazelle:key value
//
// Keys may not contain spaces. Values may be empty and may contain spaces,
// but surrounding space is trimmed.
type Directive struct {
	Key, Value string
}

// TODO(jayconrod): annotation directives will apply to an individual rule.
// They must appear in the block of comments above that rule.

// ParseDirectives scans f for Gazelle directives. The full list of directives
// is returned. Errors are reported for unrecognized directives and directives
// out of place (after the first statement).
func ParseDirectives(f *bzl.File) []Directive {
	var directives []Directive
	parseComment := func(com bzl.Comment) {
		match := directiveRe.FindStringSubmatch(com.Token)
		if match == nil {
			return
		}
		key, value := match[1], match[2]
		directives = append(directives, Directive{key, value})
	}

	for _, s := range f.Stmt {
		coms := s.Comment()
		for _, com := range coms.Before {
			parseComment(com)
		}
		for _, com := range coms.After {
			parseComment(com)
		}
	}
	return directives
}

var directiveRe = regexp.MustCompile(`^#\s*gazelle:(\w+)\s*(.*?)\s*$`)
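To make the directive format concrete, a small runnable sketch that applies the same regular expression as directiveRe above to a few comment lines; the sample comments are invented, and this does not import the vendored package itself.

package main

import (
	"fmt"
	"regexp"
)

// Same pattern as directiveRe in the file above.
var directiveRe = regexp.MustCompile(`^#\s*gazelle:(\w+)\s*(.*?)\s*$`)

func main() {
	comments := []string{
		"# gazelle:prefix github.com/example/project",
		"# gazelle:proto disable",
		"# an ordinary comment, not a directive",
	}
	for _, c := range comments {
		// Only lines of the form "# gazelle:key value" match; surrounding
		// whitespace around the value is trimmed by the pattern.
		if m := directiveRe.FindStringSubmatch(c); m != nil {
			fmt.Printf("Directive{Key: %q, Value: %q}\n", m[1], m[2])
		}
	}
}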
354 vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/expr.go generated vendored Normal file
@@ -0,0 +1,354 @@
489
vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/merge.go
generated
vendored
Normal file
489
vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/merge.go
generated
vendored
Normal file
@@ -0,0 +1,489 @@
|
|||||||
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package rule
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"log"
|
||||||
|
"sort"
|
||||||
|
|
||||||
|
bzl "github.com/bazelbuild/buildtools/build"
|
||||||
|
)
|
||||||
|
|
||||||
|
// MergeRules copies information from src into dst, usually discarding
|
||||||
|
// information in dst when they have the same attributes.
|
||||||
|
//
|
||||||
|
// If dst is marked with a "# keep" comment, either above the rule or as
|
||||||
|
// a suffix, nothing will be changed.
|
||||||
|
//
|
||||||
|
// If src has an attribute that is not in dst, it will be copied into dst.
|
||||||
|
//
|
||||||
|
// If src and dst have the same attribute and the attribute is mergeable and the
|
||||||
|
// attribute in dst is not marked with a "# keep" comment, values in the dst
|
||||||
|
// attribute not marked with a "# keep" comment will be dropped, and values from
|
||||||
|
// src will be copied in.
|
||||||
|
//
|
||||||
|
// If dst has an attribute not in src, and the attribute is mergeable and not
|
||||||
|
// marked with a "# keep" comment, values in the attribute not marked with
|
||||||
|
// a "# keep" comment will be dropped. If the attribute is empty afterward,
|
||||||
|
// it will be deleted.
|
||||||
|
func MergeRules(src, dst *Rule, mergeable map[string]bool, filename string) {
|
||||||
|
if dst.ShouldKeep() {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// Process attributes that are in dst but not in src.
|
||||||
|
for key, dstAttr := range dst.attrs {
|
||||||
|
if _, ok := src.attrs[key]; ok || !mergeable[key] || ShouldKeep(dstAttr) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
dstValue := dstAttr.Y
|
||||||
|
if mergedValue, err := mergeExprs(nil, dstValue); err != nil {
|
||||||
|
start, end := dstValue.Span()
|
||||||
|
log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
|
||||||
|
} else if mergedValue == nil {
|
||||||
|
dst.DelAttr(key)
|
||||||
|
} else {
|
||||||
|
dst.SetAttr(key, mergedValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Merge attributes from src into dst.
|
||||||
|
for key, srcAttr := range src.attrs {
|
||||||
|
srcValue := srcAttr.Y
|
||||||
|
if dstAttr, ok := dst.attrs[key]; !ok {
|
||||||
|
dst.SetAttr(key, srcValue)
|
||||||
|
} else if mergeable[key] && !ShouldKeep(dstAttr) {
|
||||||
|
dstValue := dstAttr.Y
|
||||||
|
if mergedValue, err := mergeExprs(srcValue, dstValue); err != nil {
|
||||||
|
start, end := dstValue.Span()
|
||||||
|
log.Printf("%s:%d.%d-%d.%d: could not merge expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
|
||||||
|
} else {
|
||||||
|
dst.SetAttr(key, mergedValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// mergeExprs combines information from src and dst and returns a merged
|
||||||
|
// expression. dst may be modified during this process. The returned expression
|
||||||
|
// may be different from dst when a structural change is needed.
|
||||||
|
//
|
||||||
|
// The following kinds of expressions are recognized.
|
||||||
|
//
|
||||||
|
// * nil
|
||||||
|
// * strings (can only be merged with strings)
|
||||||
|
// * lists of strings
|
||||||
|
// * a call to select with a dict argument. The dict keys must be strings,
|
||||||
|
// and the values must be lists of strings.
|
||||||
|
// * a list of strings combined with a select call using +. The list must
|
||||||
|
// be the left operand.
|
||||||
|
//
|
||||||
|
// An error is returned if the expressions can't be merged, for example
|
||||||
|
// because they are not in one of the above formats.
|
||||||
|
func mergeExprs(src, dst bzl.Expr) (bzl.Expr, error) {
|
||||||
|
if ShouldKeep(dst) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if src == nil && (dst == nil || isScalar(dst)) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
if isScalar(src) {
|
||||||
|
return src, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
srcExprs, err := extractPlatformStringsExprs(src)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
dstExprs, err := extractPlatformStringsExprs(dst)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
mergedExprs, err := mergePlatformStringsExprs(srcExprs, dstExprs)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return makePlatformStringsExpr(mergedExprs), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mergePlatformStringsExprs(src, dst platformStringsExprs) (platformStringsExprs, error) {
|
||||||
|
var ps platformStringsExprs
|
||||||
|
var err error
|
||||||
|
ps.generic = mergeList(src.generic, dst.generic)
|
||||||
|
if ps.os, err = mergeDict(src.os, dst.os); err != nil {
|
||||||
|
return platformStringsExprs{}, err
|
||||||
|
}
|
||||||
|
if ps.arch, err = mergeDict(src.arch, dst.arch); err != nil {
|
||||||
|
return platformStringsExprs{}, err
|
||||||
|
}
|
||||||
|
if ps.platform, err = mergeDict(src.platform, dst.platform); err != nil {
|
||||||
|
return platformStringsExprs{}, err
|
||||||
|
}
|
||||||
|
return ps, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func mergeList(src, dst *bzl.ListExpr) *bzl.ListExpr {
|
||||||
|
if dst == nil {
|
||||||
|
return src
|
||||||
|
}
|
||||||
|
if src == nil {
|
||||||
|
src = &bzl.ListExpr{List: []bzl.Expr{}}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build a list of strings from the src list and keep matching strings
|
||||||
|
// in the dst list. This preserves comments. Also keep anything with
|
||||||
|
// a "# keep" comment, whether or not it's in the src list.
|
||||||
|
srcSet := make(map[string]bool)
|
||||||
|
for _, v := range src.List {
|
||||||
|
if s := stringValue(v); s != "" {
|
||||||
|
srcSet[s] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var merged []bzl.Expr
|
||||||
|
kept := make(map[string]bool)
|
||||||
|
keepComment := false
|
||||||
|
for _, v := range dst.List {
|
||||||
|
s := stringValue(v)
|
||||||
|
if keep := ShouldKeep(v); keep || srcSet[s] {
|
||||||
|
keepComment = keepComment || keep
|
||||||
|
merged = append(merged, v)
|
||||||
|
if s != "" {
|
||||||
|
kept[s] = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add anything in the src list that wasn't kept.
|
||||||
|
for _, v := range src.List {
|
||||||
|
if s := stringValue(v); kept[s] {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
merged = append(merged, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(merged) == 0 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
return &bzl.ListExpr{
|
||||||
|
List: merged,
|
||||||
|
ForceMultiLine: src.ForceMultiLine || dst.ForceMultiLine || keepComment,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func mergeDict(src, dst *bzl.DictExpr) (*bzl.DictExpr, error) {
|
||||||
|
if dst == nil {
|
||||||
|
return src, nil
|
||||||
|
}
|
||||||
|
if src == nil {
|
||||||
|
src = &bzl.DictExpr{List: []bzl.Expr{}}
|
||||||
|
}
|
||||||
|
|
||||||
|
var entries []*dictEntry
|
||||||
|
entryMap := make(map[string]*dictEntry)
|
||||||
|
|
||||||
|
for _, kv := range dst.List {
|
||||||
|
k, v, err := dictEntryKeyValue(kv)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
if _, ok := entryMap[k]; ok {
|
||||||
|
return nil, fmt.Errorf("dst dict contains more than one case named %q", k)
|
||||||
|
}
|
||||||
|
e := &dictEntry{key: k, dstValue: v}
|
||||||
|
entries = append(entries, e)
|
||||||
|
entryMap[k] = e
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, kv := range src.List {
|
||||||
|
k, v, err := dictEntryKeyValue(kv)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
e, ok := entryMap[k]
|
||||||
|
if !ok {
|
||||||
|
e = &dictEntry{key: k}
|
||||||
|
entries = append(entries, e)
|
||||||
|
entryMap[k] = e
|
||||||
|
}
|
||||||
|
e.srcValue = v
|
||||||
|
}
|
||||||
|
|
||||||
|
keys := make([]string, 0, len(entries))
|
||||||
|
haveDefault := false
|
||||||
|
for _, e := range entries {
|
||||||
|
e.mergedValue = mergeList(e.srcValue, e.dstValue)
|
||||||
|
if e.key == "//conditions:default" {
|
||||||
|
// Keep the default case, even if it's empty.
|
||||||
|
haveDefault = true
|
||||||
|
if e.mergedValue == nil {
|
||||||
|
e.mergedValue = &bzl.ListExpr{}
|
||||||
|
}
|
||||||
|
} else if e.mergedValue != nil {
|
||||||
|
keys = append(keys, e.key)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if len(keys) == 0 && (!haveDefault || len(entryMap["//conditions:default"].mergedValue.List) == 0) {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
sort.Strings(keys)
|
||||||
|
// Always put the default case last.
|
||||||
|
if haveDefault {
|
||||||
|
keys = append(keys, "//conditions:default")
|
||||||
|
}
|
||||||
|
|
||||||
|
mergedEntries := make([]bzl.Expr, len(keys))
|
||||||
|
for i, k := range keys {
|
||||||
|
e := entryMap[k]
|
||||||
|
mergedEntries[i] = &bzl.KeyValueExpr{
|
||||||
|
Key: &bzl.StringExpr{Value: e.key},
|
||||||
|
Value: e.mergedValue,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return &bzl.DictExpr{List: mergedEntries, ForceMultiLine: true}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
type dictEntry struct {
|
||||||
|
key string
|
||||||
|
dstValue, srcValue, mergedValue *bzl.ListExpr
|
||||||
|
}
|
||||||
|
|
||||||
|
// SquashRules copies information from src into dst without discarding
|
||||||
|
// information in dst. SquashRules detects duplicate elements in lists and
|
||||||
|
// dictionaries, but it doesn't sort elements after squashing. If squashing
|
||||||
|
// fails because the expression is not understood, an error is returned,
|
||||||
|
// and neither rule is modified.
|
||||||
|
func SquashRules(src, dst *Rule, filename string) error {
|
||||||
|
if dst.ShouldKeep() {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for key, srcAttr := range src.attrs {
|
||||||
|
srcValue := srcAttr.Y
|
||||||
|
if dstAttr, ok := dst.attrs[key]; !ok {
|
||||||
|
dst.SetAttr(key, srcValue)
|
||||||
|
} else if !ShouldKeep(dstAttr) {
|
||||||
|
dstValue := dstAttr.Y
|
||||||
|
if squashedValue, err := squashExprs(srcValue, dstValue); err != nil {
|
||||||
|
start, end := dstValue.Span()
|
||||||
|
return fmt.Errorf("%s:%d.%d-%d.%d: could not squash expression", filename, start.Line, start.LineRune, end.Line, end.LineRune)
|
||||||
|
} else {
|
||||||
|
dst.SetAttr(key, squashedValue)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dst.call.Comments.Before = append(dst.call.Comments.Before, src.call.Comments.Before...)
|
||||||
|
dst.call.Comments.Suffix = append(dst.call.Comments.Suffix, src.call.Comments.Suffix...)
|
||||||
|
dst.call.Comments.After = append(dst.call.Comments.After, src.call.Comments.After...)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func squashExprs(src, dst bzl.Expr) (bzl.Expr, error) {
|
||||||
|
if ShouldKeep(dst) {
|
||||||
|
return dst, nil
|
||||||
|
}
|
||||||
|
if isScalar(dst) {
|
||||||
|
// may lose src, but they should always be the same.
|
||||||
|
return dst, nil
|
||||||
|
}
|
||||||
|
srcExprs, err := extractPlatformStringsExprs(src)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
dstExprs, err := extractPlatformStringsExprs(dst)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
squashedExprs, err := squashPlatformStringsExprs(srcExprs, dstExprs)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return makePlatformStringsExpr(squashedExprs), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func squashPlatformStringsExprs(x, y platformStringsExprs) (platformStringsExprs, error) {
|
||||||
|
var ps platformStringsExprs
|
||||||
|
var err error
|
||||||
|
if ps.generic, err = squashList(x.generic, y.generic); err != nil {
|
||||||
|
return platformStringsExprs{}, err
|
||||||
|
}
|
||||||
|
if ps.os, err = squashDict(x.os, y.os); err != nil {
|
||||||
|
return platformStringsExprs{}, err
|
||||||
|
}
|
||||||
|
if ps.arch, err = squashDict(x.arch, y.arch); err != nil {
|
		return platformStringsExprs{}, err
	}
	if ps.platform, err = squashDict(x.platform, y.platform); err != nil {
		return platformStringsExprs{}, err
	}
	return ps, nil
}

func squashList(x, y *bzl.ListExpr) (*bzl.ListExpr, error) {
	if x == nil {
		return y, nil
	}
	if y == nil {
		return x, nil
	}

	ls := makeListSquasher()
	for _, e := range x.List {
		s, ok := e.(*bzl.StringExpr)
		if !ok {
			return nil, errors.New("could not squash non-string")
		}
		ls.add(s)
	}
	for _, e := range y.List {
		s, ok := e.(*bzl.StringExpr)
		if !ok {
			return nil, errors.New("could not squash non-string")
		}
		ls.add(s)
	}
	squashed := ls.list()
	squashed.Comments.Before = append(x.Comments.Before, y.Comments.Before...)
	squashed.Comments.Suffix = append(x.Comments.Suffix, y.Comments.Suffix...)
	squashed.Comments.After = append(x.Comments.After, y.Comments.After...)
	return squashed, nil
}

func squashDict(x, y *bzl.DictExpr) (*bzl.DictExpr, error) {
	if x == nil {
		return y, nil
	}
	if y == nil {
		return x, nil
	}

	cases := make(map[string]*bzl.KeyValueExpr)
	addCase := func(e bzl.Expr) error {
		kv := e.(*bzl.KeyValueExpr)
		key, ok := kv.Key.(*bzl.StringExpr)
		if !ok {
			return errors.New("could not squash non-string dict key")
		}
		if _, ok := kv.Value.(*bzl.ListExpr); !ok {
			return errors.New("could not squash non-list dict value")
		}
		if c, ok := cases[key.Value]; ok {
			if sq, err := squashList(kv.Value.(*bzl.ListExpr), c.Value.(*bzl.ListExpr)); err != nil {
				return err
			} else {
				c.Value = sq
			}
		} else {
			kvCopy := *kv
			cases[key.Value] = &kvCopy
		}
		return nil
	}

	for _, e := range x.List {
		if err := addCase(e); err != nil {
			return nil, err
		}
	}
	for _, e := range y.List {
		if err := addCase(e); err != nil {
			return nil, err
		}
	}

	keys := make([]string, 0, len(cases))
	haveDefault := false
	for k := range cases {
		if k == "//conditions:default" {
			haveDefault = true
			continue
		}
		keys = append(keys, k)
	}
	sort.Strings(keys)
	if haveDefault {
		keys = append(keys, "//conditions:default") // must be last
	}

	squashed := *x
	squashed.Comments.Before = append(x.Comments.Before, y.Comments.Before...)
	squashed.Comments.Suffix = append(x.Comments.Suffix, y.Comments.Suffix...)
	squashed.Comments.After = append(x.Comments.After, y.Comments.After...)
	squashed.List = make([]bzl.Expr, 0, len(cases))
	for _, k := range keys {
		squashed.List = append(squashed.List, cases[k])
	}
	return &squashed, nil
}

// listSquasher builds a sorted, deduplicated list of string expressions. If
// a string expression is added multiple times, comments are consolidated.
// The original expressions are not modified.
type listSquasher struct {
	unique       map[string]*bzl.StringExpr
	seenComments map[elemComment]bool
}

type elemComment struct {
	elem, com string
}

func makeListSquasher() listSquasher {
	return listSquasher{
		unique:       make(map[string]*bzl.StringExpr),
		seenComments: make(map[elemComment]bool),
	}
}

func (ls *listSquasher) add(s *bzl.StringExpr) {
	sCopy, ok := ls.unique[s.Value]
	if !ok {
		// Make a copy of s. We may modify it when we consolidate comments from
		// duplicate strings. We don't want to modify the original in case this
		// function fails (due to a later failed pattern match).
		sCopy = new(bzl.StringExpr)
		*sCopy = *s
		sCopy.Comments.Before = make([]bzl.Comment, 0, len(s.Comments.Before))
		sCopy.Comments.Suffix = make([]bzl.Comment, 0, len(s.Comments.Suffix))
		ls.unique[s.Value] = sCopy
	}
	for _, c := range s.Comment().Before {
		if key := (elemComment{s.Value, c.Token}); !ls.seenComments[key] {
			sCopy.Comments.Before = append(sCopy.Comments.Before, c)
			ls.seenComments[key] = true
		}
	}
	for _, c := range s.Comment().Suffix {
		if key := (elemComment{s.Value, c.Token}); !ls.seenComments[key] {
			sCopy.Comments.Suffix = append(sCopy.Comments.Suffix, c)
			ls.seenComments[key] = true
		}
	}
}

func (ls *listSquasher) list() *bzl.ListExpr {
	sortedExprs := make([]bzl.Expr, 0, len(ls.unique))
	for _, e := range ls.unique {
		sortedExprs = append(sortedExprs, e)
	}
	sort.Slice(sortedExprs, func(i, j int) bool {
		return sortedExprs[i].(*bzl.StringExpr).Value < sortedExprs[j].(*bzl.StringExpr).Value
	})
	return &bzl.ListExpr{List: sortedExprs}
}
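The squashing code above merges two Skylark list expressions into one sorted, de-duplicated list while consolidating comments. A minimal standalone sketch of the same sort-and-dedup idea on plain string slices (illustrative only; squashList itself operates on *bzl.ListExpr values and is unexported in the vendored package):

package main

import (
    "fmt"
    "sort"
)

// squashStrings merges two string lists the way squashList merges two
// Skylark lists: the union of both inputs, deduplicated and sorted.
func squashStrings(x, y []string) []string {
    unique := make(map[string]bool)
    for _, s := range append(append([]string{}, x...), y...) {
        unique[s] = true
    }
    merged := make([]string, 0, len(unique))
    for s := range unique {
        merged = append(merged, s)
    }
    sort.Strings(merged)
    return merged
}

func main() {
    oldSrcs := []string{"walk.go", "config.go"}
    genSrcs := []string{"config.go", "doc.go"}
    fmt.Println(squashStrings(oldSrcs, genSrcs)) // [config.go doc.go walk.go]
}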
@@ -13,7 +13,7 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-package config
+package rule
 
 import (
 	"sort"
192 vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/platform_strings.go generated vendored Normal file
@@ -0,0 +1,192 @@
/* Copyright 2017 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package rule

import (
    "sort"
    "strings"
)

// PlatformStrings contains a set of strings associated with a buildable
// target in a package. This is used to store source file names,
// import paths, and flags.
//
// Strings are stored in four sets: generic strings, OS-specific strings,
// arch-specific strings, and OS-and-arch-specific strings. A string may not
// be duplicated within a list or across sets; however, a string may appear
// in more than one list within a set (e.g., in "linux" and "windows" within
// the OS set). Strings within each list should be sorted, though this may
// not be relied upon.
type PlatformStrings struct {
    // Generic is a list of strings not specific to any platform.
    Generic []string

    // OS is a map from OS name (anything in KnownOSs) to
    // OS-specific strings.
    OS map[string][]string

    // Arch is a map from architecture name (anything in KnownArchs) to
    // architecture-specific strings.
    Arch map[string][]string

    // Platform is a map from platforms to OS and architecture-specific strings.
    Platform map[Platform][]string
}

// HasExt returns whether this set contains a file with the given extension.
func (ps *PlatformStrings) HasExt(ext string) bool {
    return ps.firstExtFile(ext) != ""
}

func (ps *PlatformStrings) IsEmpty() bool {
    return len(ps.Generic) == 0 && len(ps.OS) == 0 && len(ps.Arch) == 0 && len(ps.Platform) == 0
}

// Flat returns all the strings in the set, sorted and de-duplicated.
func (ps *PlatformStrings) Flat() []string {
    unique := make(map[string]struct{})
    for _, s := range ps.Generic {
        unique[s] = struct{}{}
    }
    for _, ss := range ps.OS {
        for _, s := range ss {
            unique[s] = struct{}{}
        }
    }
    for _, ss := range ps.Arch {
        for _, s := range ss {
            unique[s] = struct{}{}
        }
    }
    for _, ss := range ps.Platform {
        for _, s := range ss {
            unique[s] = struct{}{}
        }
    }
    flat := make([]string, 0, len(unique))
    for s := range unique {
        flat = append(flat, s)
    }
    sort.Strings(flat)
    return flat
}

func (ps *PlatformStrings) firstExtFile(ext string) string {
    for _, f := range ps.Generic {
        if strings.HasSuffix(f, ext) {
            return f
        }
    }
    for _, fs := range ps.OS {
        for _, f := range fs {
            if strings.HasSuffix(f, ext) {
                return f
            }
        }
    }
    for _, fs := range ps.Arch {
        for _, f := range fs {
            if strings.HasSuffix(f, ext) {
                return f
            }
        }
    }
    for _, fs := range ps.Platform {
        for _, f := range fs {
            if strings.HasSuffix(f, ext) {
                return f
            }
        }
    }
    return ""
}

// Map applies a function that processes individual strings to the strings
// in "ps" and returns a new PlatformStrings with the result. Empty strings
// returned by the function are dropped.
func (ps *PlatformStrings) Map(f func(s string) (string, error)) (PlatformStrings, []error) {
    var errors []error
    mapSlice := func(ss []string) ([]string, error) {
        rs := make([]string, 0, len(ss))
        for _, s := range ss {
            if r, err := f(s); err != nil {
                errors = append(errors, err)
            } else if r != "" {
                rs = append(rs, r)
            }
        }
        return rs, nil
    }
    result, _ := ps.MapSlice(mapSlice)
    return result, errors
}

// MapSlice applies a function that processes slices of strings to the strings
// in "ps" and returns a new PlatformStrings with the results.
func (ps *PlatformStrings) MapSlice(f func([]string) ([]string, error)) (PlatformStrings, []error) {
    var errors []error

    mapSlice := func(ss []string) []string {
        rs, err := f(ss)
        if err != nil {
            errors = append(errors, err)
            return nil
        }
        return rs
    }

    mapStringMap := func(m map[string][]string) map[string][]string {
        if m == nil {
            return nil
        }
        rm := make(map[string][]string)
        for k, ss := range m {
            ss = mapSlice(ss)
            if len(ss) > 0 {
                rm[k] = ss
            }
        }
        if len(rm) == 0 {
            return nil
        }
        return rm
    }

    mapPlatformMap := func(m map[Platform][]string) map[Platform][]string {
        if m == nil {
            return nil
        }
        rm := make(map[Platform][]string)
        for k, ss := range m {
            ss = mapSlice(ss)
            if len(ss) > 0 {
                rm[k] = ss
            }
        }
        if len(rm) == 0 {
            return nil
        }
        return rm
    }

    result := PlatformStrings{
        Generic:  mapSlice(ps.Generic),
        OS:       mapStringMap(ps.OS),
        Arch:     mapStringMap(ps.Arch),
        Platform: mapPlatformMap(ps.Platform),
    }
    return result, errors
}
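PlatformStrings groups strings by how platform-specific they are. A short usage sketch, assuming it is compiled inside the bazel-gazelle module (the internal import path does not resolve anywhere else):

package main

import (
    "fmt"

    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func main() {
    ps := rule.PlatformStrings{
        Generic: []string{"main.go"},
        OS: map[string][]string{
            "linux":   {"conn_linux.go"},
            "windows": {"conn_windows.go"},
        },
    }
    fmt.Println(ps.HasExt(".go")) // true: at least one file ends in ".go"
    fmt.Println(ps.Flat())        // sorted, de-duplicated union of all sets
}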
701 vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/rule.go generated vendored Normal file
@@ -0,0 +1,701 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Package rule provides tools for editing Bazel build files. It is intended to
// be a more powerful replacement for
// github.com/bazelbuild/buildtools/build.Rule, adapted for Gazelle's usage. It
// is language agnostic, but it may be used for language-specific rules by
// providing configuration.
//
// File is the primary interface to this package. Rule and Load are used to
// create, read, update, and delete rules. Once modifications are performed,
// File.Sync() may be called to write the changes back to the original AST,
// which may then be formatted and written back to a file.
package rule

import (
    "io/ioutil"
    "os"
    "path/filepath"
    "sort"
    "strings"

    bzl "github.com/bazelbuild/buildtools/build"
    bt "github.com/bazelbuild/buildtools/tables"
)

// File provides editing functionality on top of a Skylark syntax tree. This
// is the primary interface Gazelle uses for reading and updating build files.
// To use, create a new file with EmptyFile or wrap a syntax tree with
// LoadFile. Perform edits on Loads and Rules, then call Sync() to write
// changes back to the AST.
type File struct {
    // File is the underlying build file syntax tree. Some editing operations
    // may modify this, but editing is not complete until Sync() is called.
    File *bzl.File

    // Pkg is the Bazel package this build file defines.
    Pkg string

    // Path is the file system path to the build file (same as File.Path).
    Path string

    // Directives is a list of configuration directives found in top-level
    // comments in the file. This should not be modified after the file is read.
    Directives []Directive

    // Loads is a list of load statements within the file. This should not
    // be modified directly; use Load methods instead.
    Loads []*Load

    // Rules is a list of rules within the file (or function calls that look like
    // rules). This should not be modified directly; use Rule methods instead.
    Rules []*Rule
}

// EmptyFile creates a File wrapped around an empty syntax tree.
func EmptyFile(path, pkg string) *File {
    return &File{
        File: &bzl.File{Path: path},
        Path: path,
        Pkg:  pkg,
    }
}

// LoadFile loads a build file from disk, parses it, and scans for rules and
// load statements. The syntax tree within the returned File will be modified
// by editing methods.
//
// This function returns I/O and parse errors without modification. It's safe
// to use os.IsNotExist and similar predicates.
func LoadFile(path, pkg string) (*File, error) {
    data, err := ioutil.ReadFile(path)
    if err != nil {
        return nil, err
    }
    return LoadData(path, pkg, data)
}

// LoadData parses a build file from a byte slice and scans it for rules and
// load statements. The syntax tree within the returned File will be modified
// by editing methods.
func LoadData(path, pkg string, data []byte) (*File, error) {
    ast, err := bzl.Parse(path, data)
    if err != nil {
        return nil, err
    }
    return ScanAST(pkg, ast), nil
}

// ScanAST creates a File wrapped around the given syntax tree. This tree
// will be modified by editing methods.
func ScanAST(pkg string, bzlFile *bzl.File) *File {
    f := &File{
        File: bzlFile,
        Pkg:  pkg,
        Path: bzlFile.Path,
    }
    for i, stmt := range f.File.Stmt {
        call, ok := stmt.(*bzl.CallExpr)
        if !ok {
            continue
        }
        x, ok := call.X.(*bzl.LiteralExpr)
        if !ok {
            continue
        }
        if x.Token == "load" {
            if l := loadFromExpr(i, call); l != nil {
                f.Loads = append(f.Loads, l)
            }
        } else {
            if r := ruleFromExpr(i, call); r != nil {
                f.Rules = append(f.Rules, r)
            }
        }
    }
    f.Directives = ParseDirectives(bzlFile)
    return f
}

// MatchBuildFileName looks for a file in files that has a name from names.
// If there is at least one matching file, a path will be returned by joining
// dir and the first matching name. If there are no matching files, the
// empty string is returned.
func MatchBuildFileName(dir string, names []string, files []os.FileInfo) string {
    for _, name := range names {
        for _, fi := range files {
            if fi.Name() == name && !fi.IsDir() {
                return filepath.Join(dir, name)
            }
        }
    }
    return ""
}

// Sync writes all changes back to the wrapped syntax tree. This should be
// called after editing operations, before reading the syntax tree again.
func (f *File) Sync() {
    var inserts, deletes, stmts []*stmt
    var r, w int
    for r, w = 0, 0; r < len(f.Loads); r++ {
        s := f.Loads[r]
        s.sync()
        if s.deleted {
            deletes = append(deletes, &s.stmt)
            continue
        }
        if s.inserted {
            inserts = append(inserts, &s.stmt)
            s.inserted = false
        } else {
            stmts = append(stmts, &s.stmt)
        }
        f.Loads[w] = s
        w++
    }
    f.Loads = f.Loads[:w]
    for r, w = 0, 0; r < len(f.Rules); r++ {
        s := f.Rules[r]
        s.sync()
        if s.deleted {
            deletes = append(deletes, &s.stmt)
            continue
        }
        if s.inserted {
            inserts = append(inserts, &s.stmt)
            s.inserted = false
        } else {
            stmts = append(stmts, &s.stmt)
        }
        f.Rules[w] = s
        w++
    }
    f.Rules = f.Rules[:w]
    sort.Stable(byIndex(deletes))
    sort.Stable(byIndex(inserts))
    sort.Stable(byIndex(stmts))

    oldStmt := f.File.Stmt
    f.File.Stmt = make([]bzl.Expr, 0, len(oldStmt)-len(deletes)+len(inserts))
    var ii, di, si int
    for i, stmt := range oldStmt {
        for ii < len(inserts) && inserts[ii].index == i {
            inserts[ii].index = len(f.File.Stmt)
            f.File.Stmt = append(f.File.Stmt, inserts[ii].call)
            ii++
        }
        if di < len(deletes) && deletes[di].index == i {
            di++
            continue
        }
        if si < len(stmts) && stmts[si].call == stmt {
            stmts[si].index = len(f.File.Stmt)
            si++
        }
        f.File.Stmt = append(f.File.Stmt, stmt)
    }
    for ii < len(inserts) {
        inserts[ii].index = len(f.File.Stmt)
        f.File.Stmt = append(f.File.Stmt, inserts[ii].call)
        ii++
    }
}

// Format formats the build file in a form that can be written to disk.
// This method calls Sync internally.
func (f *File) Format() []byte {
    f.Sync()
    return bzl.Format(f.File)
}

// Save writes the build file to disk. This method calls Sync internally.
func (f *File) Save(path string) error {
    f.Sync()
    data := bzl.Format(f.File)
    return ioutil.WriteFile(path, data, 0666)
}

type stmt struct {
    index                      int
    deleted, inserted, updated bool
    call                       *bzl.CallExpr
}

// Index returns the index for this statement within the build file. For
// inserted rules, this is where the rule will be inserted (rules with the
// same index will be inserted in the order Insert was called). For existing
// rules, this is the index of the original statement.
func (s *stmt) Index() int { return s.index }

// Delete marks this statement for deletion. It will be removed from the
// syntax tree when File.Sync is called.
func (s *stmt) Delete() { s.deleted = true }

type byIndex []*stmt

func (s byIndex) Len() int {
    return len(s)
}

func (s byIndex) Less(i, j int) bool {
    return s[i].index < s[j].index
}

func (s byIndex) Swap(i, j int) {
    s[i], s[j] = s[j], s[i]
}

// Load represents a load statement within a build file.
type Load struct {
    stmt
    name    string
    symbols map[string]bzl.Expr
}

// NewLoad creates a new, empty load statement for the given file name.
func NewLoad(name string) *Load {
    return &Load{
        stmt: stmt{
            call: &bzl.CallExpr{
                X:            &bzl.LiteralExpr{Token: "load"},
                List:         []bzl.Expr{&bzl.StringExpr{Value: name}},
                ForceCompact: true,
            },
        },
        name:    name,
        symbols: make(map[string]bzl.Expr),
    }
}

func loadFromExpr(index int, call *bzl.CallExpr) *Load {
    l := &Load{
        stmt:    stmt{index: index, call: call},
        symbols: make(map[string]bzl.Expr),
    }
    if len(call.List) == 0 {
        return nil
    }
    name, ok := call.List[0].(*bzl.StringExpr)
    if !ok {
        return nil
    }
    l.name = name.Value
    for _, arg := range call.List[1:] {
        switch arg := arg.(type) {
        case *bzl.StringExpr:
            l.symbols[arg.Value] = arg
        case *bzl.BinaryExpr:
            x, ok := arg.X.(*bzl.LiteralExpr)
            if !ok {
                return nil
            }
            if _, ok := arg.Y.(*bzl.StringExpr); !ok {
                return nil
            }
            l.symbols[x.Token] = arg
        default:
            return nil
        }
    }
    return l
}

// Name returns the name of the file this statement loads.
func (l *Load) Name() string {
    return l.name
}

// Symbols returns a list of symbols this statement loads.
func (l *Load) Symbols() []string {
    syms := make([]string, 0, len(l.symbols))
    for sym := range l.symbols {
        syms = append(syms, sym)
    }
    sort.Strings(syms)
    return syms
}

// Has returns true if sym is loaded by this statement.
func (l *Load) Has(sym string) bool {
    _, ok := l.symbols[sym]
    return ok
}

// Add inserts a new symbol into the load statement. This has no effect if
// the symbol is already loaded. Symbols will be sorted, so the order
// doesn't matter.
func (l *Load) Add(sym string) {
    if _, ok := l.symbols[sym]; !ok {
        l.symbols[sym] = &bzl.StringExpr{Value: sym}
        l.updated = true
    }
}

// Remove deletes a symbol from the load statement. This has no effect if
// the symbol is not loaded.
func (l *Load) Remove(sym string) {
    if _, ok := l.symbols[sym]; ok {
        delete(l.symbols, sym)
        l.updated = true
    }
}

// IsEmpty returns whether this statement loads any symbols.
func (l *Load) IsEmpty() bool {
    return len(l.symbols) == 0
}

// Insert marks this statement for insertion at the given index. If multiple
// statements are inserted at the same index, they will be inserted in the
// order Insert is called.
func (l *Load) Insert(f *File, index int) {
    l.index = index
    l.inserted = true
    f.Loads = append(f.Loads, l)
}

func (l *Load) sync() {
    if !l.updated {
        return
    }
    l.updated = false

    args := make([]*bzl.StringExpr, 0, len(l.symbols))
    kwargs := make([]*bzl.BinaryExpr, 0, len(l.symbols))
    for _, e := range l.symbols {
        if a, ok := e.(*bzl.StringExpr); ok {
            args = append(args, a)
        } else {
            kwargs = append(kwargs, e.(*bzl.BinaryExpr))
        }
    }
    sort.Slice(args, func(i, j int) bool {
        return args[i].Value < args[j].Value
    })
    sort.Slice(kwargs, func(i, j int) bool {
        return kwargs[i].X.(*bzl.StringExpr).Value < kwargs[j].Y.(*bzl.StringExpr).Value
    })

    list := make([]bzl.Expr, 0, 1+len(l.symbols))
    list = append(list, l.call.List[0])
    for _, a := range args {
        list = append(list, a)
    }
    for _, a := range kwargs {
        list = append(list, a)
    }
    l.call.List = list
    l.call.ForceCompact = len(kwargs) == 0
}

// Rule represents a rule statement within a build file.
type Rule struct {
    stmt
    kind    string
    args    []bzl.Expr
    attrs   map[string]*bzl.BinaryExpr
    private map[string]interface{}
}

// NewRule creates a new, empty rule with the given kind and name.
func NewRule(kind, name string) *Rule {
    nameAttr := &bzl.BinaryExpr{
        X:  &bzl.LiteralExpr{Token: "name"},
        Y:  &bzl.StringExpr{Value: name},
        Op: "=",
    }
    r := &Rule{
        stmt: stmt{
            call: &bzl.CallExpr{
                X:    &bzl.LiteralExpr{Token: kind},
                List: []bzl.Expr{nameAttr},
            },
        },
        kind:    kind,
        attrs:   map[string]*bzl.BinaryExpr{"name": nameAttr},
        private: map[string]interface{}{},
    }
    return r
}

func ruleFromExpr(index int, expr bzl.Expr) *Rule {
    call, ok := expr.(*bzl.CallExpr)
    if !ok {
        return nil
    }
    x, ok := call.X.(*bzl.LiteralExpr)
    if !ok {
        return nil
    }
    kind := x.Token
    var args []bzl.Expr
    attrs := make(map[string]*bzl.BinaryExpr)
    for _, arg := range call.List {
        attr, ok := arg.(*bzl.BinaryExpr)
        if ok && attr.Op == "=" {
            key := attr.X.(*bzl.LiteralExpr) // required by parser
            attrs[key.Token] = attr
        } else {
            args = append(args, arg)
        }
    }
    return &Rule{
        stmt: stmt{
            index: index,
            call:  call,
        },
        kind:    kind,
        args:    args,
        attrs:   attrs,
        private: map[string]interface{}{},
    }
}

// ShouldKeep returns whether the rule is marked with a "# keep" comment. Rules
// that are kept should not be modified. This does not check whether
// subexpressions within the rule should be kept.
func (r *Rule) ShouldKeep() bool {
    return ShouldKeep(r.call)
}

func (r *Rule) Kind() string {
    return r.kind
}

func (r *Rule) SetKind(kind string) {
    r.kind = kind
    r.updated = true
}

func (r *Rule) Name() string {
    return r.AttrString("name")
}

func (r *Rule) SetName(name string) {
    r.SetAttr("name", name)
}

// AttrKeys returns a sorted list of attribute keys used in this rule.
func (r *Rule) AttrKeys() []string {
    keys := make([]string, 0, len(r.attrs))
    for k := range r.attrs {
        keys = append(keys, k)
    }
    sort.SliceStable(keys, func(i, j int) bool {
        if cmp := bt.NamePriority[keys[i]] - bt.NamePriority[keys[j]]; cmp != 0 {
            return cmp < 0
        }
        return keys[i] < keys[j]
    })
    return keys
}

// Attr returns the value of the named attribute. nil is returned when the
// attribute is not set.
func (r *Rule) Attr(key string) bzl.Expr {
    attr, ok := r.attrs[key]
    if !ok {
        return nil
    }
    return attr.Y
}

// AttrString returns the value of the named attribute if it is a scalar string.
// "" is returned if the attribute is not set or is not a string.
func (r *Rule) AttrString(key string) string {
    attr, ok := r.attrs[key]
    if !ok {
        return ""
    }
    str, ok := attr.Y.(*bzl.StringExpr)
    if !ok {
        return ""
    }
    return str.Value
}

// AttrStrings returns the string values of an attribute if it is a list.
// nil is returned if the attribute is not set or is not a list. Non-string
// values within the list won't be returned.
func (r *Rule) AttrStrings(key string) []string {
    attr, ok := r.attrs[key]
    if !ok {
        return nil
    }
    list, ok := attr.Y.(*bzl.ListExpr)
    if !ok {
        return nil
    }
    strs := make([]string, 0, len(list.List))
    for _, e := range list.List {
        if str, ok := e.(*bzl.StringExpr); ok {
            strs = append(strs, str.Value)
        }
    }
    return strs
}

// DelAttr removes the named attribute from the rule.
func (r *Rule) DelAttr(key string) {
    delete(r.attrs, key)
    r.updated = true
}

// SetAttr adds or replaces the named attribute with an expression produced
// by ExprFromValue.
func (r *Rule) SetAttr(key string, value interface{}) {
    y := ExprFromValue(value)
    if attr, ok := r.attrs[key]; ok {
        attr.Y = y
    } else {
        r.attrs[key] = &bzl.BinaryExpr{
            X:  &bzl.LiteralExpr{Token: key},
            Y:  y,
            Op: "=",
        }
    }
    r.updated = true
}

// PrivateAttrKeys returns a sorted list of private attribute names.
func (r *Rule) PrivateAttrKeys() []string {
    keys := make([]string, 0, len(r.private))
    for k := range r.private {
        keys = append(keys, k)
    }
    sort.Strings(keys)
    return keys
}

// PrivateAttr returns the private value associated with a key.
func (r *Rule) PrivateAttr(key string) interface{} {
    return r.private[key]
}

// SetPrivateAttr associates a value with a key. Unlike SetAttr, this value
// is not converted to a build syntax tree and will not be written to a build
// file.
func (r *Rule) SetPrivateAttr(key string, value interface{}) {
    r.private[key] = value
}

// Args returns positional arguments passed to a rule.
func (r *Rule) Args() []bzl.Expr {
    return r.args
}

// Insert marks this statement for insertion at the end of the file. Multiple
// statements will be inserted in the order Insert is called.
func (r *Rule) Insert(f *File) {
    // TODO(jayconrod): should rules always be inserted at the end? Should there
    // be some sort order?
    r.index = len(f.File.Stmt)
    r.inserted = true
    f.Rules = append(f.Rules, r)
}

// IsEmpty returns true when the rule contains none of the attributes in attrs
// for its kind. attrs should contain attributes that make the rule buildable
// like srcs or deps and not descriptive attributes like name or visibility.
func (r *Rule) IsEmpty(info KindInfo) bool {
    if info.NonEmptyAttrs == nil {
        return false
    }
    for k := range info.NonEmptyAttrs {
        if _, ok := r.attrs[k]; ok {
            return false
        }
    }
    return true
}

func (r *Rule) IsEmptyOld(attrs map[string]bool) bool {
    if attrs == nil {
        return false
    }
    for k := range attrs {
        if _, ok := r.attrs[k]; ok {
            return false
        }
    }
    return true
}

func (r *Rule) sync() {
    if !r.updated {
        return
    }
    r.updated = false

    for _, k := range []string{"srcs", "deps"} {
        if attr, ok := r.attrs[k]; ok {
            bzl.Walk(attr.Y, sortExprLabels)
        }
    }

    call := r.call
    call.X.(*bzl.LiteralExpr).Token = r.kind

    list := make([]bzl.Expr, 0, len(r.args)+len(r.attrs))
    list = append(list, r.args...)
    for _, attr := range r.attrs {
        list = append(list, attr)
    }
    sortedAttrs := list[len(r.args):]
    key := func(e bzl.Expr) string { return e.(*bzl.BinaryExpr).X.(*bzl.LiteralExpr).Token }
    sort.SliceStable(sortedAttrs, func(i, j int) bool {
        ki := key(sortedAttrs[i])
        kj := key(sortedAttrs[j])
        if cmp := bt.NamePriority[ki] - bt.NamePriority[kj]; cmp != 0 {
            return cmp < 0
        }
        return ki < kj
    })

    r.call.List = list
    r.updated = false
}

// ShouldKeep returns whether e is marked with a "# keep" comment. Kept
// expressions should not be removed or modified.
func ShouldKeep(e bzl.Expr) bool {
    for _, c := range append(e.Comment().Before, e.Comment().Suffix...) {
        text := strings.TrimSpace(strings.TrimPrefix(c.Token, "#"))
        if text == "keep" {
            return true
        }
    }
    return false
}

type byAttrName []KeyValue

var _ sort.Interface = byAttrName{}

func (s byAttrName) Len() int {
    return len(s)
}

func (s byAttrName) Less(i, j int) bool {
    if cmp := bt.NamePriority[s[i].Key] - bt.NamePriority[s[j].Key]; cmp != 0 {
        return cmp < 0
    }
    return s[i].Key < s[j].Key
}

func (s byAttrName) Swap(i, j int) {
    s[i], s[j] = s[j], s[i]
}
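The File/Rule/Load types above are the editing surface Gazelle uses for build files. A hedged sketch of creating a rule from scratch and rendering it, again assuming compilation inside the bazel-gazelle module; the attribute values are made up for illustration:

package main

import (
    "fmt"

    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func main() {
    // Start from an empty BUILD file for package "foo".
    f := rule.EmptyFile("foo/BUILD.bazel", "foo")

    // Create a rule, set a few attributes, and queue it for insertion.
    r := rule.NewRule("go_library", "go_default_library")
    r.SetAttr("srcs", []string{"foo.go"})
    r.SetAttr("importpath", "example.com/foo")
    r.SetAttr("visibility", []string{"//visibility:public"})
    r.Insert(f)

    // Format calls Sync internally and renders the syntax tree.
    fmt.Printf("%s", f.Format())
}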
@@ -13,58 +13,27 @@ See the License for the specific language governing permissions and
 limitations under the License.
 */
 
-package generator
+package rule
 
 import (
 	"sort"
 	"strings"
 
-	bf "github.com/bazelbuild/buildtools/build"
+	bzl "github.com/bazelbuild/buildtools/build"
 )
 
-var (
-	goRuleKinds = map[string]bool{
-		"cgo_library": true,
-		"go_binary":   true,
-		"go_library":  true,
-		"go_test":     true,
-	}
-	sortedAttrs = []string{"srcs", "deps"}
-)
-
-// SortLabels sorts lists of strings in "srcs" and "deps" attributes of
-// Go rules using the same order as buildifier. Buildifier also sorts string
-// lists, but not those involved with "select" expressions.
-// TODO(jayconrod): remove this when bazelbuild/buildtools#122 is fixed.
-func SortLabels(f *bf.File) {
-	for _, s := range f.Stmt {
-		c, ok := s.(*bf.CallExpr)
-		if !ok {
-			continue
-		}
-		r := bf.Rule{Call: c}
-		if !goRuleKinds[r.Kind()] {
-			continue
-		}
-		for _, key := range []string{"srcs", "deps"} {
-			attr := r.AttrDefn(key)
-			if attr == nil {
-				continue
-			}
-			bf.Walk(attr.Y, sortExprLabels)
-		}
-	}
-}
-
-func sortExprLabels(e bf.Expr, _ []bf.Expr) {
-	list, ok := e.(*bf.ListExpr)
+// sortExprLabels sorts lists of strings using the same order as buildifier.
+// Buildifier also sorts string lists, but not those involved with "select"
+// expressions. This function is intended to be used with bzl.Walk.
+func sortExprLabels(e bzl.Expr, _ []bzl.Expr) {
+	list, ok := e.(*bzl.ListExpr)
 	if !ok || len(list.List) == 0 {
 		return
 	}
 
 	keys := make([]stringSortKey, len(list.List))
 	for i, elem := range list.List {
-		s, ok := elem.(*bf.StringExpr)
+		s, ok := elem.(*bzl.StringExpr)
 		if !ok {
 			return // don't sort lists unless all elements are strings
 		}
@@ -94,10 +63,10 @@ type stringSortKey struct {
 	split    []string
 	value    string
 	original int
-	x        bf.Expr
+	x        bzl.Expr
 }
 
-func makeSortKey(index int, x *bf.StringExpr) stringSortKey {
+func makeSortKey(index int, x *bzl.StringExpr) stringSortKey {
 	key := stringSortKey{
 		value:    x.Value,
 		original: index,
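The replacement sortExprLabels is meant to be passed to bzl.Walk, which visits every expression in a parsed build file. A small sketch of that traversal pattern using the public buildtools package; here the callback only collects string literals rather than sorting them, so it is not the sorting logic itself:

package main

import (
    "fmt"
    "log"

    bzl "github.com/bazelbuild/buildtools/build"
)

func main() {
    src := []byte(`go_library(name = "lib", srcs = ["b.go", "a.go"])`)
    f, err := bzl.Parse("BUILD.bazel", src)
    if err != nil {
        log.Fatal(err)
    }
    // Walk each top-level statement; the callback has the same shape as
    // sortExprLabels above (func(bzl.Expr, []bzl.Expr)).
    var strs []string
    for _, stmt := range f.Stmt {
        bzl.Walk(stmt, func(e bzl.Expr, _ []bzl.Expr) {
            if s, ok := e.(*bzl.StringExpr); ok {
                strs = append(strs, s.Value)
            }
        })
    }
    fmt.Println(strs) // prints every string literal found in the file
}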
63 vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/types.go generated vendored Normal file
@@ -0,0 +1,63 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package rule

// MergeableAttrs is the set of attribute names for each kind of rule that
// may be merged. When an attribute is mergeable, a generated value may
// replace or augment an existing value. If an attribute is not mergeable,
// existing values are preserved. Generated non-mergeable attributes may
// still be added to a rule if there is no corresponding existing attribute.
type MergeableAttrs map[string]map[string]bool

// LoadInfo describes a file that Gazelle knows about and the symbols
// it defines.
type LoadInfo struct {
    Name    string
    Symbols []string
    After   []string
}

// KindInfo stores metadata for a kind of rule, for example, "go_library".
type KindInfo struct {
    // MatchAny is true if a rule of this kind may be matched with any rule
    // of the same kind, regardless of attributes, if exactly one rule is
    // present in a build file.
    MatchAny bool

    // MatchAttrs is a list of attributes used in matching. For example,
    // for go_library, this list contains "importpath". Attributes are matched
    // in order.
    MatchAttrs []string

    // NonEmptyAttrs is a set of attributes that, if present, disqualify a rule
    // from being deleted after merge.
    NonEmptyAttrs map[string]bool

    // SubstituteAttrs is a set of attributes that should be substituted
    // after matching and before merging. For example, suppose generated rule A
    // references B via an "embed" attribute, and B matches against rule C.
    // The label for B in A's "embed" must be substituted with a label for C.
    // "embed" would need to be in this set.
    SubstituteAttrs map[string]bool

    // MergeableAttrs is a set of attributes that should be merged before
    // dependency resolution. See rule.Merge.
    MergeableAttrs map[string]bool

    // ResolveAttrs is a set of attributes that should be merged after
    // dependency resolution. See rule.Merge.
    ResolveAttrs map[string]bool
}
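KindInfo is how a language extension tells the merger which attributes matter for matching, merging, and emptiness checks. A sketch with made-up attribute sets loosely modeled on a Go library rule (not the exact sets gazelle's Go extension registers), assuming compilation inside the bazel-gazelle module:

package main

import (
    "fmt"

    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func main() {
    // Illustrative values only.
    goLibrary := rule.KindInfo{
        MatchAttrs:      []string{"importpath"},
        NonEmptyAttrs:   map[string]bool{"srcs": true, "deps": true},
        SubstituteAttrs: map[string]bool{"embed": true},
        MergeableAttrs:  map[string]bool{"srcs": true},
        ResolveAttrs:    map[string]bool{"deps": true},
    }

    r := rule.NewRule("go_library", "go_default_library")
    fmt.Println(r.IsEmpty(goLibrary)) // true: no srcs or deps set yet
}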
184 vendor/github.com/bazelbuild/bazel-gazelle/internal/rule/value.go generated vendored Normal file
@@ -0,0 +1,184 @@
/* Copyright 2016 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package rule

import (
    "fmt"
    "log"
    "reflect"
    "sort"

    bzl "github.com/bazelbuild/buildtools/build"
)

// KeyValue represents a key-value pair. This gets converted into a
// rule attribute, i.e., a Skylark keyword argument.
type KeyValue struct {
    Key   string
    Value interface{}
}

// GlobValue represents a Bazel glob expression.
type GlobValue struct {
    Patterns []string
    Excludes []string
}

// ExprFromValue converts a value into an expression that can be written into
// a Bazel build file. The following types of values can be converted:
//
// * bools, integers, floats, strings.
// * slices, arrays (converted to lists).
// * maps (converted to select expressions; keys must be rules in
//   @io_bazel_rules_go//go/platform).
// * GlobValue (converted to glob expressions).
// * PlatformStrings (converted to a concatenation of a list and selects).
//
// Converting unsupported types will cause a panic.
func ExprFromValue(val interface{}) bzl.Expr {
    if e, ok := val.(bzl.Expr); ok {
        return e
    }

    rv := reflect.ValueOf(val)
    switch rv.Kind() {
    case reflect.Bool:
        tok := "False"
        if rv.Bool() {
            tok = "True"
        }
        return &bzl.LiteralExpr{Token: tok}

    case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
        reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
        return &bzl.LiteralExpr{Token: fmt.Sprintf("%d", val)}

    case reflect.Float32, reflect.Float64:
        return &bzl.LiteralExpr{Token: fmt.Sprintf("%f", val)}

    case reflect.String:
        return &bzl.StringExpr{Value: val.(string)}

    case reflect.Slice, reflect.Array:
        var list []bzl.Expr
        for i := 0; i < rv.Len(); i++ {
            elem := ExprFromValue(rv.Index(i).Interface())
            list = append(list, elem)
        }
        return &bzl.ListExpr{List: list}

    case reflect.Map:
        rkeys := rv.MapKeys()
        sort.Sort(byString(rkeys))
        args := make([]bzl.Expr, len(rkeys))
        for i, rk := range rkeys {
            label := fmt.Sprintf("@io_bazel_rules_go//go/platform:%s", mapKeyString(rk))
            k := &bzl.StringExpr{Value: label}
            v := ExprFromValue(rv.MapIndex(rk).Interface())
            if l, ok := v.(*bzl.ListExpr); ok {
                l.ForceMultiLine = true
            }
            args[i] = &bzl.KeyValueExpr{Key: k, Value: v}
        }
        args = append(args, &bzl.KeyValueExpr{
            Key:   &bzl.StringExpr{Value: "//conditions:default"},
            Value: &bzl.ListExpr{},
        })
        sel := &bzl.CallExpr{
            X:    &bzl.LiteralExpr{Token: "select"},
            List: []bzl.Expr{&bzl.DictExpr{List: args, ForceMultiLine: true}},
        }
        return sel

    case reflect.Struct:
        switch val := val.(type) {
        case GlobValue:
            patternsValue := ExprFromValue(val.Patterns)
            globArgs := []bzl.Expr{patternsValue}
            if len(val.Excludes) > 0 {
                excludesValue := ExprFromValue(val.Excludes)
                globArgs = append(globArgs, &bzl.KeyValueExpr{
                    Key:   &bzl.StringExpr{Value: "excludes"},
                    Value: excludesValue,
                })
            }
            return &bzl.CallExpr{
                X:    &bzl.LiteralExpr{Token: "glob"},
                List: globArgs,
            }

        case PlatformStrings:
            var pieces []bzl.Expr
            if len(val.Generic) > 0 {
                pieces = append(pieces, ExprFromValue(val.Generic))
            }
            if len(val.OS) > 0 {
                pieces = append(pieces, ExprFromValue(val.OS))
            }
            if len(val.Arch) > 0 {
                pieces = append(pieces, ExprFromValue(val.Arch))
            }
            if len(val.Platform) > 0 {
                pieces = append(pieces, ExprFromValue(val.Platform))
            }
            if len(pieces) == 0 {
                return &bzl.ListExpr{}
            } else if len(pieces) == 1 {
                return pieces[0]
            } else {
                e := pieces[0]
                if list, ok := e.(*bzl.ListExpr); ok {
                    list.ForceMultiLine = true
                }
                for _, piece := range pieces[1:] {
                    e = &bzl.BinaryExpr{X: e, Y: piece, Op: "+"}
                }
                return e
            }
        }
    }

    log.Panicf("type not supported: %T", val)
    return nil
}

func mapKeyString(k reflect.Value) string {
    switch s := k.Interface().(type) {
    case string:
        return s
    case Platform:
        return s.String()
    default:
        log.Panicf("unexpected map key: %v", k)
        return ""
    }
}

type byString []reflect.Value

var _ sort.Interface = byString{}

func (s byString) Len() int {
    return len(s)
}

func (s byString) Less(i, j int) bool {
    return mapKeyString(s[i]) < mapKeyString(s[j])
}

func (s byString) Swap(i, j int) {
    s[i], s[j] = s[j], s[i]
}
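ExprFromValue is what SetAttr uses to turn plain Go values into build-file syntax. The sketch below (same internal-module caveat as above) sets a map-valued attribute, which goes through the reflect.Map case and is rendered as a select() keyed on @io_bazel_rules_go//go/platform labels, with an empty //conditions:default branch appended:

package main

import (
    "fmt"

    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func main() {
    f := rule.EmptyFile("BUILD.bazel", "")
    r := rule.NewRule("go_library", "go_default_library")

    // The map value becomes a select() over platform condition labels.
    r.SetAttr("srcs", map[string][]string{
        "linux":   {"conn_linux.go"},
        "windows": {"conn_windows.go"},
    })
    r.Insert(f)
    fmt.Printf("%s", f.Format())
}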
@@ -3,20 +3,16 @@ load("@io_bazel_rules_go//go:def.bzl", "go_library")
 go_library(
     name = "go_default_library",
     srcs = [
-        "doc.go",
-        "fileinfo.go",
-        "fileinfo_go.go",
-        "fileinfo_proto.go",
-        "package.go",
+        "config.go",
         "walk.go",
     ],
-    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/packages",
-    importpath = "github.com/bazelbuild/bazel-gazelle/internal/packages",
+    importmap = "k8s.io/kubernetes/vendor/github.com/bazelbuild/bazel-gazelle/internal/walk",
+    importpath = "github.com/bazelbuild/bazel-gazelle/internal/walk",
     visibility = ["//vendor/github.com/bazelbuild/bazel-gazelle:__subpackages__"],
     deps = [
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/config:go_default_library",
         "//vendor/github.com/bazelbuild/bazel-gazelle/internal/pathtools:go_default_library",
-        "//vendor/github.com/bazelbuild/buildtools/build:go_default_library",
+        "//vendor/github.com/bazelbuild/bazel-gazelle/internal/rule:go_default_library",
     ],
 )
 
86 vendor/github.com/bazelbuild/bazel-gazelle/internal/walk/config.go generated vendored Normal file
@@ -0,0 +1,86 @@
/* Copyright 2018 The Bazel Authors. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package walk

import (
    "flag"
    "path"
    "strings"

    "github.com/bazelbuild/bazel-gazelle/internal/config"
    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

type walkConfig struct {
    excludes []string
    ignore   bool
}

const walkName = "_walk"

func getWalkConfig(c *config.Config) walkConfig {
    return c.Exts[walkName].(walkConfig)
}

func (wc *walkConfig) isExcluded(base string) bool {
    for _, x := range wc.excludes {
        if base == x {
            return true
        }
    }
    return false
}

type walkConfigurer struct{}

func (_ *walkConfigurer) RegisterFlags(fs *flag.FlagSet, cmd string, c *config.Config) {}

func (_ *walkConfigurer) CheckFlags(fs *flag.FlagSet, c *config.Config) error { return nil }

func (_ *walkConfigurer) KnownDirectives() []string {
    return []string{"exclude", "ignore"}
}

func (_ *walkConfigurer) Configure(c *config.Config, rel string, f *rule.File) {
    var wc walkConfig
    if raw, ok := c.Exts[walkName]; ok {
        wc = raw.(walkConfig)
        wc.ignore = false
        if rel != "" {
            prefix := path.Base(rel) + "/"
            excludes := make([]string, 0, len(wc.excludes))
            for _, x := range wc.excludes {
                if strings.HasPrefix(x, prefix) {
                    excludes = append(excludes, x[len(prefix):])
                }
            }
            wc.excludes = excludes
        }
    }

    if f != nil {
        for _, d := range f.Directives {
            switch d.Key {
            case "exclude":
                wc.excludes = append(wc.excludes, d.Value)
            case "ignore":
                wc.ignore = true
            }
        }
    }

    c.Exts[walkName] = wc
}
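The walk configurer above consumes "# gazelle:exclude" and "# gazelle:ignore" directives from build files. A sketch showing how such top-level comments surface as rule.File Directives (internal-module caveat applies; it also assumes ParseDirectives picks up these standalone comments, as the File.Directives documentation earlier describes):

package main

import (
    "fmt"
    "log"

    "github.com/bazelbuild/bazel-gazelle/internal/rule"
)

func main() {
    src := []byte("# gazelle:exclude testdata\n# gazelle:ignore\n")
    f, err := rule.LoadData("BUILD.bazel", "", src)
    if err != nil {
        log.Fatal(err)
    }
    // "exclude" adds to walkConfig.excludes; "ignore" sets walkConfig.ignore
    // for the subtree rooted at this directory.
    for _, d := range f.Directives {
        fmt.Printf("%s => %q\n", d.Key, d.Value)
    }
}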
251
vendor/github.com/bazelbuild/bazel-gazelle/internal/walk/walk.go
generated
vendored
Normal file
251
vendor/github.com/bazelbuild/bazel-gazelle/internal/walk/walk.go
generated
vendored
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
/* Copyright 2018 The Bazel Authors. All rights reserved.
|
||||||
|
|
||||||
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
you may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
|
Unless required by applicable law or agreed to in writing, software
|
||||||
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package walk
|
||||||
|
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/config"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/pathtools"
|
||||||
|
"github.com/bazelbuild/bazel-gazelle/internal/rule"
|
||||||
|
)
|
||||||
|
|
||||||
|
// WalkFunc is a callback called by Walk in each visited directory.
|
||||||
|
//
|
||||||
|
// dir is the absolute file system path to the directory being visited.
|
||||||
|
//
|
||||||
|
// rel is the relative slash-separated path to the directory from the
|
||||||
|
// repository root. Will be "" for the repository root directory itself.
|
||||||
|
//
|
||||||
|
// c is the configuration for the current directory. This may have been
|
||||||
|
// modified by directives in the directory's build file.
|
||||||
|
//
|
||||||
|
// update is true when the build file may be updated.
|
||||||
|
//
|
||||||
|
// f is the existing build file in the directory. Will be nil if there
|
||||||
|
// was no file.
|
||||||
|
//
|
||||||
|
// subdirs is a list of base names of subdirectories within dir, not
|
||||||
|
// including excluded files.
|
||||||
|
//
|
||||||
|
// regularFiles is a list of base names of regular files within dir, not
|
||||||
|
// including excluded files.
|
||||||
|
//
|
||||||
|
// genFiles is a list of names of generated files, found by reading
|
||||||
|
// "out" and "outs" attributes of rules in f.
|
||||||
|
type WalkFunc func(dir, rel string, c *config.Config, update bool, f *rule.File, subdirs, regularFiles, genFiles []string)
// Walk traverses the directory tree rooted at c.RepoRoot in depth-first order.
//
// Walk calls the Configure method on each configuration extension in cexts
// in each directory in pre-order, whether a build file is present in the
// directory or not.
//
// Walk calls the callback wf in post-order.
func Walk(c *config.Config, cexts []config.Configurer, wf WalkFunc) {
	cexts = append(cexts, &walkConfigurer{})
	knownDirectives := make(map[string]bool)
	for _, cext := range cexts {
		for _, d := range cext.KnownDirectives() {
			knownDirectives[d] = true
		}
	}

	updateRels := buildUpdateRels(c.RepoRoot, c.Dirs)
	symlinks := symlinkResolver{root: c.RepoRoot, visited: []string{c.RepoRoot}}

	var visit func(*config.Config, string, string, bool)
	visit = func(c *config.Config, dir, rel string, isUpdateDir bool) {
		haveError := false

		if !isUpdateDir {
			isUpdateDir = shouldUpdateDir(rel, updateRels)
		}

		// TODO: OPT: ReadDir stats all the files, which is slow. We just care about
		// names and modes, so we should use something like
		// golang.org/x/tools/internal/fastwalk to speed this up.
		files, err := ioutil.ReadDir(dir)
		if err != nil {
			log.Print(err)
			return
		}

		f, err := loadBuildFile(c, rel, dir, files)
		if err != nil {
			log.Print(err)
			haveError = true
		}

		c = configure(cexts, knownDirectives, c, rel, f)
		wc := getWalkConfig(c)

		var subdirs, regularFiles []string
		for _, fi := range files {
			base := fi.Name()
			switch {
			case base == "" || base[0] == '.' || base[0] == '_' || wc.isExcluded(base):
				continue

			case fi.IsDir() || fi.Mode()&os.ModeSymlink != 0 && symlinks.follow(dir, base):
				subdirs = append(subdirs, base)

			default:
				regularFiles = append(regularFiles, base)
			}
		}

		for _, sub := range subdirs {
			visit(c, filepath.Join(dir, sub), path.Join(rel, sub), isUpdateDir)
		}

		genFiles := findGenFiles(wc, f)
		update := !haveError && isUpdateDir && !wc.ignore
		wf(dir, rel, c, update, f, subdirs, regularFiles, genFiles)
	}
	visit(c, c.RepoRoot, "", false)
}
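For orientation, here is a minimal sketch of how a caller could drive Walk. It is illustrative only and not part of gazelle: the helper name is made up, the package clause assumes this file's package is named walk, and c is assumed to be a config already populated with the fields the functions above rely on (RepoRoot, Dirs, ValidBuildFileNames, and so on), as the real gazelle command would build it from flags.

// walk_sketch.go (hypothetical, same package): reports the packages Walk would update.
package walk

import (
	"log"

	"github.com/bazelbuild/bazel-gazelle/internal/config"
	"github.com/bazelbuild/bazel-gazelle/internal/rule"
)

// printUpdatablePackages is an illustrative helper, not part of this package.
// cexts would be the language/extension configurers supplied by the real caller.
func printUpdatablePackages(c *config.Config, cexts []config.Configurer) {
	Walk(c, cexts, func(dir, rel string, c *config.Config, update bool, f *rule.File, subdirs, regularFiles, genFiles []string) {
		if update {
			// rel is "" for the repository root; the counts come straight from the callback arguments.
			log.Printf("would update %q: %d regular files, %d generated files", rel, len(regularFiles), len(genFiles))
		}
	})
}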
// buildUpdateRels builds a list of relative paths from the repository root
// directory (passed as an absolute file path) to directories that Gazelle
// may update. The relative paths are slash-separated. "" represents the
// root directory itself.
func buildUpdateRels(root string, dirs []string) []string {
	var updateRels []string
	for _, dir := range dirs {
		rel, err := filepath.Rel(root, dir)
		if err != nil {
			// This should have been verified when c was built.
			log.Panicf("%s: not a subdirectory of repository root %q", dir, root)
		}
		rel = filepath.ToSlash(rel)
		if rel == "." || rel == "/" {
			rel = ""
		}
		updateRels = append(updateRels, rel)
	}
	return updateRels
}
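The mapping is mechanical, so a small test-style sketch makes it concrete. This is not part of gazelle; it assumes a Unix-style filesystem so filepath.Rel and filepath.ToSlash behave as shown.

// walk_sketch_test.go (hypothetical, same package).
package walk

import (
	"reflect"
	"testing"
)

func TestBuildUpdateRels(t *testing.T) {
	// "/repo" maps to "" (the repository root); "/repo/pkg/a" maps to "pkg/a".
	got := buildUpdateRels("/repo", []string{"/repo", "/repo/pkg/a"})
	want := []string{"", "pkg/a"}
	if !reflect.DeepEqual(got, want) {
		t.Errorf("buildUpdateRels: got %v, want %v", got, want)
	}
}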
func shouldUpdateDir(rel string, updateRels []string) bool {
	for _, r := range updateRels {
		if pathtools.HasPrefix(rel, r) {
			return true
		}
	}
	return false
}
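In other words, a directory is eligible for updates when it equals or sits below any requested directory. A hedged illustration follows; it assumes pathtools.HasPrefix performs slash-aware, path-component prefix matching in which the empty prefix matches every path, so the results in the comments hold only under that assumption.

// Hypothetical illustration (same package), not part of gazelle.
package walk

import "fmt"

func illustrateShouldUpdateDir() {
	fmt.Println(shouldUpdateDir("pkg/a/b", []string{"pkg/a"})) // true: pkg/a/b is under pkg/a
	fmt.Println(shouldUpdateDir("pkg/ab", []string{"pkg/a"}))  // false: pkg/ab is a sibling, not a subdirectory
	fmt.Println(shouldUpdateDir("pkg/a", []string{""}))        // true: "" stands for the repository root
}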
func loadBuildFile(c *config.Config, pkg, dir string, files []os.FileInfo) (*rule.File, error) {
	var err error
	readDir := dir
	readFiles := files
	if c.ReadBuildFilesDir != "" {
		readDir = filepath.Join(c.ReadBuildFilesDir, filepath.FromSlash(pkg))
		readFiles, err = ioutil.ReadDir(readDir)
		if err != nil {
			return nil, err
		}
	}
	path := rule.MatchBuildFileName(readDir, c.ValidBuildFileNames, readFiles)
	if path == "" {
		return nil, nil
	}
	return rule.LoadFile(path, pkg)
}
func configure(cexts []config.Configurer, knownDirectives map[string]bool, c *config.Config, rel string, f *rule.File) *config.Config {
	if rel != "" {
		c = c.Clone()
	}
	if f != nil {
		for _, d := range f.Directives {
			if !knownDirectives[d.Key] {
				log.Printf("%s: unknown directive: gazelle:%s", f.Path, d.Key)
			}
		}
	}
	for _, cext := range cexts {
		cext.Configure(c, rel, f)
	}
	return c
}
func findGenFiles(wc walkConfig, f *rule.File) []string {
	if f == nil {
		return nil
	}
	var strs []string
	for _, r := range f.Rules {
		for _, key := range []string{"out", "outs"} {
			if s := r.AttrString(key); s != "" {
				strs = append(strs, s)
			} else if ss := r.AttrStrings(key); len(ss) > 0 {
				strs = append(strs, ss...)
			}
		}
	}

	var genFiles []string
	for _, s := range strs {
		if !wc.isExcluded(s) {
			genFiles = append(genFiles, s)
		}
	}
	return genFiles
}
type symlinkResolver struct {
	root    string
	visited []string
}

// Decide if symlink dir/base should be followed.
func (r *symlinkResolver) follow(dir, base string) bool {
	if dir == r.root && strings.HasPrefix(base, "bazel-") {
		// Links such as bazel-<workspace>, bazel-out, bazel-genfiles are created by
		// Bazel to point to internal build directories.
		return false
	}
	// See if the symlink points to a tree that has been already visited.
	fullpath := filepath.Join(dir, base)
	dest, err := filepath.EvalSymlinks(fullpath)
	if err != nil {
		return false
	}
	if !filepath.IsAbs(dest) {
		dest, err = filepath.Abs(filepath.Join(dir, dest))
		if err != nil {
			return false
		}
	}
	for _, p := range r.visited {
		if pathtools.HasPrefix(dest, p) || pathtools.HasPrefix(p, dest) {
			return false
		}
	}
	r.visited = append(r.visited, dest)
	stat, err := os.Stat(fullpath)
	if err != nil {
		return false
	}
	return stat.IsDir()
}
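To make the symlink policy concrete, here is a hedged standalone sketch. The resolver is normally created and driven by Walk; the paths are made up, and the outcomes noted in the comments hold only under the stated filesystem assumptions, since follow calls filepath.EvalSymlinks and os.Stat on the real disk.

// Hypothetical illustration (same package), not part of gazelle.
package walk

import "fmt"

func illustrateFollow() {
	r := symlinkResolver{root: "/repo", visited: []string{"/repo"}}

	// Bazel's convenience symlinks at the repository root are never followed.
	fmt.Println(r.follow("/repo", "bazel-out")) // false

	// A symlink that resolves back into an already-visited tree (here the
	// repository root itself) is skipped to avoid visiting files twice.
	fmt.Println(r.follow("/repo/pkg", "link-to-sibling")) // false if it resolves under /repo

	// Only a symlink that resolves to a directory outside every visited tree,
	// e.g. /repo/pkg/ext -> /opt/external/src, is followed and recorded.
	fmt.Println(r.follow("/repo/pkg", "ext")) // true under that assumption
}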
vendor/github.com/container-storage-interface/spec/lib/go/csi/v0/BUILD (generated, vendored; 2 changes)
@@ -8,9 +8,9 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
+        "//vendor/github.com/golang/protobuf/ptypes/wrappers:go_default_library",
         "//vendor/golang.org/x/net/context:go_default_library",
         "//vendor/google.golang.org/grpc:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:wrappers_go_proto",
     ],
 )
vendor/github.com/coreos/etcd/pkg/fileutil/BUILD (generated, vendored; 4 changes)
@@ -25,9 +25,7 @@ go_library(
     importmap = "k8s.io/kubernetes/vendor/github.com/coreos/etcd/pkg/fileutil",
     importpath = "github.com/coreos/etcd/pkg/fileutil",
     visibility = ["//visibility:public"],
-    deps = [
-        "//vendor/github.com/coreos/pkg/capnslog:go_default_library",
-    ],
+    deps = ["//vendor/github.com/coreos/pkg/capnslog:go_default_library"],
 )
 
 filegroup(
vendor/github.com/golang/protobuf/jsonpb/BUILD (generated, vendored; 2 changes)
@@ -8,7 +8,7 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:struct_go_proto",
+        "//vendor/github.com/golang/protobuf/ptypes/struct:go_default_library",
     ],
 )
vendor/github.com/golang/protobuf/ptypes/BUILD (generated, vendored; 6 changes)
@@ -13,9 +13,9 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:any_go_proto",
-        "@io_bazel_rules_go//proto/wkt:duration_go_proto",
-        "@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
+        "//vendor/github.com/golang/protobuf/ptypes/any:go_default_library",
+        "//vendor/github.com/golang/protobuf/ptypes/duration:go_default_library",
+        "//vendor/github.com/golang/protobuf/ptypes/timestamp:go_default_library",
     ],
 )
vendor/github.com/google/certificate-transparency-go/client/configpb/BUILD (generated, vendored; 2 changes)
@@ -11,7 +11,7 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
+        "//vendor/github.com/golang/protobuf/ptypes/timestamp:go_default_library",
     ],
 )
vendor/github.com/googleapis/gnostic/OpenAPIv2/BUILD (generated, vendored; 2 changes)
@@ -11,9 +11,9 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
+        "//vendor/github.com/golang/protobuf/ptypes/any:go_default_library",
         "//vendor/github.com/googleapis/gnostic/compiler:go_default_library",
         "//vendor/gopkg.in/yaml.v2:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:any_go_proto",
     ],
 )
vendor/github.com/googleapis/gnostic/compiler/BUILD (generated, vendored; 2 changes)
@@ -15,9 +15,9 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
+        "//vendor/github.com/golang/protobuf/ptypes/any:go_default_library",
         "//vendor/github.com/googleapis/gnostic/extensions:go_default_library",
         "//vendor/gopkg.in/yaml.v2:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:any_go_proto",
     ],
 )
vendor/github.com/googleapis/gnostic/extensions/BUILD (generated, vendored; 2 changes)
@@ -12,7 +12,7 @@ go_library(
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
         "//vendor/github.com/golang/protobuf/ptypes:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:any_go_proto",
+        "//vendor/github.com/golang/protobuf/ptypes/any:go_default_library",
     ],
 )
vendor/github.com/lib/pq/BUILD (generated, vendored; 4 changes)
@@ -26,9 +26,7 @@ go_library(
     importmap = "k8s.io/kubernetes/vendor/github.com/lib/pq",
     importpath = "github.com/lib/pq",
     visibility = ["//visibility:public"],
-    deps = [
-        "//vendor/github.com/lib/pq/oid:go_default_library",
-    ],
+    deps = ["//vendor/github.com/lib/pq/oid:go_default_library"],
 )
 
 filegroup(
vendor/github.com/libopenstorage/openstorage/api/BUILD (generated, vendored; 2 changes)
@@ -12,8 +12,8 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
+        "//vendor/github.com/golang/protobuf/ptypes/timestamp:go_default_library",
         "//vendor/github.com/mohae/deepcopy:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:timestamp_go_proto",
     ],
 )
vendor/github.com/opencontainers/runc/libcontainer/seccomp/BUILD (generated, vendored; 4 changes)
@@ -9,9 +9,7 @@ go_library(
     importmap = "k8s.io/kubernetes/vendor/github.com/opencontainers/runc/libcontainer/seccomp",
     importpath = "github.com/opencontainers/runc/libcontainer/seccomp",
     visibility = ["//visibility:public"],
-    deps = [
-        "//vendor/github.com/opencontainers/runc/libcontainer/configs:go_default_library",
-    ],
+    deps = ["//vendor/github.com/opencontainers/runc/libcontainer/configs:go_default_library"],
 )
 
 filegroup(
vendor/github.com/opencontainers/runc/libcontainer/utils/BUILD (generated, vendored; 4 changes)
@@ -10,9 +10,7 @@ go_library(
     importmap = "k8s.io/kubernetes/vendor/github.com/opencontainers/runc/libcontainer/utils",
     importpath = "github.com/opencontainers/runc/libcontainer/utils",
     visibility = ["//visibility:public"],
-    deps = [
-        "//vendor/golang.org/x/sys/unix:go_default_library",
-    ],
+    deps = ["//vendor/golang.org/x/sys/unix:go_default_library"],
 )
 
 filegroup(
vendor/golang.org/x/sys/unix/BUILD (generated, vendored; 1 change)
@@ -48,6 +48,7 @@ go_library(
         "flock_linux_32bit.go",
         "openbsd_pledge.go",
         "pagesize_unix.go",
+        "race.go",
         "race0.go",
         "sockcmsg_linux.go",
         "sockcmsg_unix.go",
vendor/golang.org/x/sys/windows/BUILD (generated, vendored; 1 change)
@@ -12,6 +12,7 @@ go_library(
         "exec_windows.go",
         "memory_windows.go",
         "mksyscall.go",
+        "race.go",
         "race0.go",
         "security_windows.go",
         "service.go",
vendor/golang.org/x/tools/imports/BUILD (generated, vendored; 4 changes)
@@ -16,9 +16,7 @@ go_library(
     importmap = "k8s.io/kubernetes/vendor/golang.org/x/tools/imports",
     importpath = "golang.org/x/tools/imports",
     visibility = ["//visibility:public"],
-    deps = [
-        "//vendor/golang.org/x/tools/go/ast/astutil:go_default_library",
-    ],
+    deps = ["//vendor/golang.org/x/tools/go/ast/astutil:go_default_library"],
 )
 
 filegroup(
vendor/google.golang.org/genproto/googleapis/api/annotations/BUILD (generated, vendored; 2 changes)
@@ -11,7 +11,7 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:descriptor_go_proto",
+        "//vendor/github.com/golang/protobuf/protoc-gen-go/descriptor:go_default_library",
     ],
 )
vendor/google.golang.org/genproto/googleapis/rpc/status/BUILD (generated, vendored; 2 changes)
@@ -8,7 +8,7 @@ go_library(
     visibility = ["//visibility:public"],
     deps = [
         "//vendor/github.com/golang/protobuf/proto:go_default_library",
-        "@io_bazel_rules_go//proto/wkt:any_go_proto",
+        "//vendor/github.com/golang/protobuf/ptypes/any:go_default_library",
     ],
 )