removing unused repo-infra files; updating README.md

Signed-off-by: Mike Brown <brownwm@us.ibm.com>
Mike Brown 2017-04-21 14:11:01 -05:00
parent f9e02affcc
commit 8243c1feb7
13 changed files with 8 additions and 635 deletions

View File

@@ -1,17 +0,0 @@
language: go
go:
- master
before_install:
- go get -u github.com/alecthomas/gometalinter
install:
- gometalinter --install
build:
- true
script:
- verify/verify-boilerplate.sh --rootdir="${TRAVIS_BUILD_DIR}" -v
- verify/verify-go-src.sh --rootdir "${TRAVIS_BUILD_DIR}" -v

View File

@@ -1,61 +1,3 @@
# Kubernetes repository infrastructure
This repository contains repository infrastructure tools for use in
`kubernetes` and `kubernetes-incubator` repositories. Examples:
- Boilerplate verification
- Go source code quality verification
- Golang build infrastructure
---
## Using this repository
This repository can be used via some golang "vendoring" mechanism
(such as glide), or it can be used via
[git subtree](http://git.kernel.org/cgit/git/git.git/plain/contrib/subtree/git-subtree.txt).
### Using "vendoring"
The exact mechanism to pull in this repository will vary depending on
the tool you use. However, unless you end up having this repository
at the root of your project's repository you wll probably need to
make sure you use the `--rootdir` command line parameter to let the
`verify-boilerplate.sh` know its location, eg:
verify-boilerplate.sh --rootdir=/home/myrepo
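With glide-style vendoring, the invocation might therefore look like the sketch below (the vendor path is illustrative and depends on your vendoring tool):
```
# Run the boilerplate check from a vendored copy, pointing it at the repository root.
vendor/github.com/kubernetes/repo-infra/verify/verify-boilerplate.sh --rootdir="$(pwd)" -v
```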
### Using `git subtree`
When using the git subtree mechanism, this repository should be placed in the
top level of your project.
To add `repo-infra` to your repository, use the following commands from the
root directory of **your** repository.
First, add a git remote for the `repo-infra` repository:
```
$ git remote add repo-infra git://github.com/kubernetes/repo-infra
```
This is not strictly necessary, but reduces the typing required for subsequent
commands.
Next, use `git subtree add` to create a new subtree in the `repo-infra`
directory within your project:
```
$ git subtree add -P repo-infra repo-infra master --squash
```
After this command, you will have:
1. A `repo-infra` directory in your project containing the content of **this**
project
2. Two new commits in the active branch:
1. A commit that squashes the git history of the `repo-infra` project
2. A merge commit whose ancestors are:
1. The `HEAD` of the branch prior to when you ran `git subtree add`
2. The commit containing the squashed `repo-infra` commits
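To pick up later changes from upstream, the subtree can be refreshed with `git subtree pull` (a sketch, assuming the `repo-infra` remote added above):
```
# Fetch and squash the latest upstream commits into the repo-infra subtree.
git subtree pull -P repo-infra repo-infra master --squash
```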
This is a subtree pulled from github.com/kubernetes/repo-infra

View File

@@ -1 +0,0 @@
workspace(name = "io_kubernetes_build")

View File

@@ -1,8 +0,0 @@
package(default_visibility = ["//visibility:public"])
py_binary(
name = "gcs_uploader",
srcs = [
"gcs_uploader.py",
],
)

View File

@@ -1,119 +0,0 @@
def _gcs_upload_impl(ctx):
output_lines = []
for t in ctx.attr.data:
label = str(t.label)
upload_path=ctx.attr.upload_paths.get(label, "")
for f in t.files:
output_lines.append("%s\t%s" % (f.short_path, upload_path))
ctx.file_action(
output = ctx.outputs.targets,
content = "\n".join(output_lines),
)
ctx.file_action(
content = "%s --manifest %s --root $PWD -- $@" % (
ctx.attr.uploader.files_to_run.executable.short_path,
ctx.outputs.targets.short_path,
),
output = ctx.outputs.executable,
executable = True,
)
return struct(
runfiles = ctx.runfiles(
files = ctx.files.data + ctx.files.uploader +
[ctx.info_file, ctx.version_file, ctx.outputs.targets]
)
)
# Adds an executable rule to upload the specified artifacts to GCS.
#
# The keys in upload_paths must match the elaborated targets exactly; i.e.,
# one must specify "//foo/bar:bar" and not just "//foo/bar".
#
# Both the upload_paths and the path supplied on the commandline can include
# Python format strings which will be replaced by values from the workspace status,
# e.g. gs://my-bucket-{BUILD_USER}/stash/{STABLE_BUILD_SCM_REVISION}
gcs_upload = rule(
attrs = {
"data": attr.label_list(
mandatory = True,
allow_files = True,
),
"uploader": attr.label(
default = Label("//defs:gcs_uploader"),
allow_files = True,
),
# TODO: combine with 'data' when label_keyed_string_dict is supported in Bazel
"upload_paths": attr.string_dict(
allow_empty = True,
),
},
executable = True,
outputs = {
"targets": "%{name}-targets.txt",
},
implementation = _gcs_upload_impl,
)
# Computes the md5sum of the provided src file, saving it in a file named 'name'.
def md5sum(name, src, visibility=None):
native.genrule(
name = name + "_genmd5sum",
srcs = [src],
outs = [name],
# Currently each go_binary target has two outputs (the binary and the library),
# so we hash both but only save the hash for the binary.
cmd = "for f in $(SRCS); do if command -v md5 >/dev/null; then md5 -q $$f>$@; else md5sum $$f | awk '{print $$1}' > $@; fi; done",
message = "Computing md5sum",
visibility = visibility,
)
# Computes the sha1sum of the provided src file, saving it in a file named 'name'.
def sha1sum(name, src, visibility=None):
native.genrule(
name = name + "_gensha1sum",
srcs = [src],
outs = [name],
# Currently each go_binary target has two outputs (the binary and the library),
# so we hash both but only save the hash for the binary.
cmd = "command -v sha1sum >/dev/null && cmd=sha1sum || cmd='shasum -a1'; for f in $(SRCS); do $$cmd $$f | awk '{print $$1}' > $@; done",
message = "Computing sha1sum",
visibility = visibility,
)
# Creates 3+N rules based on the provided targets:
# * A filegroup with just the provided targets (named 'name')
# * A filegroup containing all of the md5 and sha1 hash files ('name-hashes')
# * A filegroup containing both of the above ('name-and-hashes')
# * All of the necessary md5sum and sha1sum rules
def release_filegroup(name, srcs, visibility=None):
hashes = []
for src in srcs:
parts = src.split(":")
if len(parts) > 1:
basename = parts[1]
else:
basename = src.split("/")[-1]
md5sum(name=basename + ".md5", src=src, visibility=visibility)
hashes.append(basename + ".md5")
sha1sum(name=basename + ".sha1", src=src, visibility=visibility)
hashes.append(basename + ".sha1")
native.filegroup(
name = name,
srcs = srcs,
visibility = visibility,
)
native.filegroup(
name = name + "-hashes",
srcs = hashes,
visibility = visibility,
)
native.filegroup(
name = name + "-and-hashes",
srcs = [name, name + "-hashes"],
visibility = visibility,
)
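For context, a target defined with the `gcs_upload` rule above is executable, so it can be pushed with `bazel run`, passing the destination after `--` (a sketch; the target name and bucket are hypothetical):
```
# Upload the artifacts listed in the target's manifest; format keys such as
# {BUILD_USER} are expanded from the Bazel workspace status.
bazel run //release:push-artifacts -- "gs://my-bucket-{BUILD_USER}/stash/{STABLE_BUILD_SCM_REVISION}"
```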

View File

@@ -1,34 +0,0 @@
load("@bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar", "pkg_deb")
KUBERNETES_AUTHORS = "Kubernetes Authors <kubernetes-dev+release@googlegroups.com>"
KUBERNETES_HOMEPAGE = "http://kubernetes.io"
def k8s_deb(name, depends = [], description = ""):
pkg_deb(
name = name,
architecture = "amd64",
data = name + "-data",
depends = depends,
description = description,
homepage = KUBERNETES_HOMEPAGE,
maintainer = KUBERNETES_AUTHORS,
package = name,
version = "1.6.0-alpha",
)
def deb_data(name, data = []):
deps = []
for i, info in enumerate(data):
dname = "%s-deb-data-%s" % (name, i)
deps += [dname]
pkg_tar(
name = dname,
files = info["files"],
mode = info["mode"],
package_dir = info["dir"],
)
pkg_tar(
name = name + "-data",
deps = deps,
)

View File

@@ -1,71 +0,0 @@
#!/usr/bin/env python
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import atexit
import os
import os.path
import shutil
import subprocess
import sys
import tempfile
def _workspace_status_dict(root):
d = {}
for f in ("stable-status.txt", "volatile-status.txt"):
with open(os.path.join(root, f)) as info_file:
for info_line in info_file:
info_line = info_line.strip("\n")
key, value = info_line.split(" ")
d[key] = value
return d
def main(argv):
scratch = tempfile.mkdtemp(prefix="bazel-gcs.")
atexit.register(lambda: shutil.rmtree(scratch))
workspace_status = _workspace_status_dict(argv.root)
gcs_path = argv.gcs_path.format(**workspace_status)
with open(argv.manifest) as manifest:
for artifact in manifest:
artifact = artifact.strip("\n")
src_file, dest_dir = artifact.split("\t")
dest_dir = dest_dir.format(**workspace_status)
scratch_dest_dir = os.path.join(scratch, dest_dir)
try:
os.makedirs(scratch_dest_dir)
except (OSError):
# skip directory already exists errors
pass
src = os.path.join(argv.root, src_file)
dest = os.path.join(scratch_dest_dir, os.path.basename(src_file))
os.symlink(src, dest)
ret = subprocess.call(["gsutil", "-m", "rsync", "-C", "-r", scratch, gcs_path])
print "Uploaded to %s" % gcs_path
sys.exit(ret)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Upload build targets to GCS.')
parser.add_argument("--manifest", required=True, help="path to manifest of targets")
parser.add_argument("--root", required=True, help="path to root of workspace")
parser.add_argument("gcs_path", help="path in gcs to push targets")
main(parser.parse_args())
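A direct invocation matching the argparse definition above might look like this (file paths are illustrative):
```
# --manifest lists "<source file><TAB><destination dir>" pairs; --root is where the
# workspace status files (stable-status.txt, volatile-status.txt) are read from.
python gcs_uploader.py --manifest push-build-targets.txt --root "$PWD" gs://my-bucket/builds
```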

View File

@@ -1,100 +0,0 @@
load("@io_bazel_rules_go//go:def.bzl", "go_env_attrs")
go_filetype = ["*.go"]
def _compute_genrule_variables(resolved_srcs, resolved_outs):
variables = {"SRCS": cmd_helper.join_paths(" ", resolved_srcs),
"OUTS": cmd_helper.join_paths(" ", resolved_outs)}
if len(resolved_srcs) == 1:
variables["<"] = list(resolved_srcs)[0].path
if len(resolved_outs) == 1:
variables["@"] = list(resolved_outs)[0].path
return variables
def _go_sources_aspect_impl(target, ctx):
transitive_sources = set(target.go_sources)
for dep in ctx.rule.attr.deps:
transitive_sources = transitive_sources | dep.transitive_sources
return struct(transitive_sources = transitive_sources)
go_sources_aspect = aspect(
attr_aspects = ["deps"],
implementation = _go_sources_aspect_impl,
)
def _compute_genrule_command(ctx):
cmd = [
'set -e',
# setup GOROOT
'export GOROOT=$$(pwd)/' + ctx.file.go_tool.dirname + '/..',
# setup main GOPATH
'export GOPATH=/tmp/gopath',
'export GO_WORKSPACE=$${GOPATH}/src/' + ctx.attr.go_prefix.go_prefix,
'mkdir -p $${GO_WORKSPACE%/*}',
'ln -s $$(pwd) $${GO_WORKSPACE}',
# setup genfile GOPATH
'export GENGOPATH=/tmp/gengopath',
'export GENGO_WORKSPACE=$${GENGOPATH}/src/' + ctx.attr.go_prefix.go_prefix,
'mkdir -p $${GENGO_WORKSPACE%/*}',
'ln -s $$(pwd)/$(GENDIR) $${GENGO_WORKSPACE}',
# drop into WORKSPACE
'export GOPATH=$${GOPATH}:$${GENGOPATH}',
'cd $${GO_WORKSPACE}',
# execute user command
ctx.attr.cmd.strip(' \t\n\r'),
]
return '\n'.join(cmd)
def _go_genrule_impl(ctx):
all_srcs = set(ctx.files.go_src)
label_dict = {}
for dep in ctx.attr.go_deps:
all_srcs = all_srcs | dep.transitive_sources
for dep in ctx.attr.srcs:
all_srcs = all_srcs | dep.files
label_dict[dep.label] = dep.files
cmd = _compute_genrule_command(ctx)
resolved_inputs, argv, runfiles_manifests = ctx.resolve_command(
command=cmd,
attribute="cmd",
expand_locations=True,
make_variables=_compute_genrule_variables(all_srcs, set(ctx.outputs.outs)),
tools=ctx.attr.tools,
label_dict=label_dict
)
ctx.action(
inputs = list(all_srcs) + resolved_inputs,
outputs = ctx.outputs.outs,
env = ctx.configuration.default_shell_env,
command = argv,
progress_message = "%s %s" % (ctx.attr.message, ctx),
mnemonic = "GoGenrule",
)
# We have codegen procedures that depend on the "go/*" stdlib packages
# and thus depend on executing with a valid GOROOT and GOPATH containing
# some amount of the transitive go src of dependencies. This go_genrule enables
# the creation of these sandboxes.
go_genrule = rule(
attrs = go_env_attrs + {
"srcs": attr.label_list(allow_files = True),
"tools": attr.label_list(
cfg = "host",
allow_files = True,
),
"outs": attr.output_list(mandatory = True),
"cmd": attr.string(mandatory = True),
"go_deps": attr.label_list(
aspects = [go_sources_aspect],
),
"message": attr.string(),
"executable": attr.bool(default = False),
},
output_to_genfiles = True,
implementation = _go_genrule_impl,
)

View File

@@ -4,8 +4,8 @@ Collection of scripts that verifies that a project meets requirements set for ku
The scripts are currently being migrated from the main kubernetes repository. If your project requires an additional set of verifications, consider creating an issue/PR on repo-infra to avoid code duplication across multiple projects.
If repo-infra is integrated at the root of your project as a git submodule at path `/repo-infra`,
then scripts can be invoked as `repo-infra/verify/verify-*.sh`.
If repo-infra is integrated at the root of your project as a git submodule at path `hack/repo-infra`,
then scripts can be invoked as `hack/repo-infra/verify/verify-*.sh`.
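For the `hack/repo-infra` layout, the submodule can be added with (a sketch; substitute whichever remote URL your project uses):
```
# Register repo-infra as a submodule under hack/ and record it in .gitmodules.
git submodule add https://github.com/kubernetes/repo-infra hack/repo-infra
```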
`.travis.yml` example:
@@ -20,33 +20,10 @@ language: go
go:
- 1.8
before_install:
- go get -u github.com/alecthomas/gometalinter
install:
- gometalinter --install
script:
- repo-infra/verify/verify-go-src.sh -v
- repo-infra/verify/verify-boilerplate.sh
# OR with vendoring
# - vendor/github.com/kubernetes/repo-infra/verify-go-src.sh --rootdir=$(pwd) -v
- hack/repo-infra/verify/verify-boilerplate.sh
```
## Verify boilerplate
Verifies that the boilerplate for various formats (go files, Makefile, etc.) is included in each file: `verify-boilerplate.sh`.
## Verify go source code
Runs a set of scripts on the go source code, excluding vendored files: `verify-go-src.sh`. Expects the `gometalinter` tooling to be installed (see the travis file above).
With a git submodule from your repo root: `repo-infra/verify/verify-go-src.sh -v`
With vendoring: `vendor/repo-infra/verify/verify-go-src.sh -v --rootdir $(pwd)`
Checks include:
1. gofmt
2. gometalinter
3. govet
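Put together, a local run from the repository root with the submodule layout might look like this (a sketch, assuming `gometalinter` is already installed):
```
# Check license boilerplate, then run gofmt/gometalinter/govet over non-vendored go sources.
hack/repo-infra/verify/verify-boilerplate.sh --rootdir="$(pwd)" -v
hack/repo-infra/verify/verify-go-src.sh --rootdir "$(pwd)" -v
```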

View File

@@ -1,34 +0,0 @@
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
find_files() {
find . -not \( \
\( \
-wholename '*/vendor/*' \
\) -prune \
\) -name '*.go'
}
GOFMT="gofmt -s"
bad_files=$(find_files | xargs $GOFMT -l)
if [[ -n "${bad_files}" ]]; then
echo "!!! '$GOFMT' needs to be run on the following files: "
echo "${bad_files}"
exit 1
fi
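When the check fails, the same file set can be fixed in place by rerunning gofmt with `-w` (a sketch mirroring the `find_files` filter above):
```
# Apply the -s simplifications and write the results back to the offending files.
find . -not \( \( -wholename '*/vendor/*' \) -prune \) -name '*.go' | xargs gofmt -s -w
```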

View File

@@ -1,31 +0,0 @@
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
for d in $(find . -type d -not -iwholename '*.git*' -a -not -iname '.tool' -a -not -iwholename '*vendor*'); do
gometalinter --deadline=50s --vendor \
--cyclo-over=50 --dupl-threshold=100 \
--exclude=".*should not use dot imports \(golint\)$" \
--disable-all \
--enable=vet \
--enable=deadcode \
--enable=golint \
--enable=vetshadow \
--enable=gocyclo \
--tests "${d}"
done

View File

@@ -1,20 +0,0 @@
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
go vet -v $(go list ./... | grep -v /vendor/)

View File

@@ -1,111 +0,0 @@
#!/bin/bash
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
# This script is intended to be used via subtree in a top-level directory:
# <repo>/
# repo-infra/
# verify/
# Or via vendoring, passing the root directory explicitly: vendor/repo-infra/verify-*.sh --rootdir <full path to your repo dir>
# <repo>/
# vendor/
# repo-infra/
# ...
#
SILENT=true
REPO_ROOT=$(dirname "${BASH_SOURCE}")/../..
# Convert long opts to short ones to read through getopts
for arg in "$@"; do
shift
case "$arg" in
"--rootdir") set -- "$@" "-r";;
*)
set -- "$@" "$arg"
;;
esac
done
OPTIND=1
while getopts "vr:" opt; do
case ${opt} in
v)
SILENT=false
;;
r)
REPO_ROOT=${OPTARG}
;;
\?)
echo "Invalid flag: -${OPTARG}" >&2
exit 1
;;
esac
done
shift "$(($OPTIND-1))"
echo "Working directory: ${REPO_ROOT}"
GO_TOOLS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/go-tools"
function run-cmd {
if ${SILENT}; then
"$@" &> /dev/null
else
"$@"
fi
}
# Some useful colors.
if [[ -z "${color_start-}" ]]; then
declare -r color_start="\033["
declare -r color_red="${color_start}0;31m"
declare -r color_yellow="${color_start}0;33m"
declare -r color_green="${color_start}0;32m"
declare -r color_norm="${color_start}0m"
fi
function run-checks {
local -r pattern=$1
local -r runner=$2
for t in $(ls ${pattern})
do
echo -e "Verifying ${t}"
local start=$(date +%s)
cd $REPO_ROOT && run-cmd "${runner}" "${t}" && tr=$? || tr=$?
local elapsed=$(($(date +%s) - ${start}))
if [[ ${tr} -eq 0 ]]; then
echo -e "${color_green}SUCCESS${color_norm} ${t}\t${elapsed}s"
else
echo -e "${color_red}FAILED${color_norm} ${t}\t${elapsed}s"
ret=1
fi
done
}
if ${SILENT} ; then
echo "Running in the silent mode, run with -v if you want to see script logs."
fi
ret=0
run-checks "${GO_TOOLS_DIR}/*.sh" bash
exit ${ret}
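As a usage sketch (the path is illustrative), the script can be run verbosely against a given checkout:
```
# -v shows the underlying tool output; --rootdir selects the repository to verify.
./verify-go-src.sh -v --rootdir /path/to/your/repo
```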