bazel: deps mirror (#1522)

bazel-deps-mirror is an internal tool used to upload external dependencies
that are referenced in the Bazel WORKSPACE to the Edgeless Systems' mirror.

It also normalizes deps rules.

* hack: add tool to mirror Bazel dependencies
* hack: bazel-deps-mirror tests
* bazel: add deps mirror commands
* ci: upload Bazel dependencies on renovate PRs
* update go mod
* run deps_mirror_upload


Signed-off-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
Co-authored-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
This commit is contained in:
Malte Poll 2023-03-30 09:41:56 +02:00 committed by GitHub
parent d3e2f30f7b
commit 827c4f548d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
36 changed files with 2698 additions and 529 deletions

View File

@ -12,6 +12,9 @@ jobs:
tidycheck:
name: tidy, check and generate
runs-on: ubuntu-22.04
permissions:
id-token: write
contents: read
steps:
- name: Checkout
uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # v3.4.0
@ -37,6 +40,18 @@ jobs:
with:
go-version: "1.20.2"
- name: Assume AWS role to upload Bazel dependencies to S3
if: startsWith(github.head_ref, 'renovate/')
uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0
with:
role-to-assume: arn:aws:iam::795746500882:role/GithubConstellationMirrorWrite
aws-region: eu-central-1
- name: Upload Bazel dependencies to the mirror
if: startsWith(github.head_ref, 'renovate/')
shell: bash
run: bazelisk run //bazel/ci:deps_mirror_upload
- name: Run Bazel tidy
shell: bash
run: bazelisk run //:tidy
@ -107,7 +122,7 @@ jobs:
run: bazelisk run //:check
# The following steps are only executed if the previous tidy check failed
# and the action runs on an renovat branche. In this case, we tidy all
# and the action runs on an renovate branch. In this case, we tidy all
# modules again and commit the changes, so the user doesn't need to do it.
- name: Push changes

2
.gitignore vendored
View File

@ -57,7 +57,7 @@ __pycache__/
.gitpod.yml
# Bazel
bazel-*
/bazel-*
tools/pseudo-version
.bazeloverwriterc

View File

@ -6,6 +6,7 @@ def node_maintainance_operator_deps():
http_archive(
name = "com_github_medik8s_node_maintainance_operator",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/048323ffdb55787df9b93d85be93e4730f4495fba81b440dc6fe195408ec2533",
"https://github.com/medik8s/node-maintenance-operator/archive/refs/tags/v0.14.0.tar.gz",
],
sha256 = "048323ffdb55787df9b93d85be93e4730f4495fba81b440dc6fe195408ec2533",
@ -18,4 +19,5 @@ filegroup(
visibility = ["//visibility:public"],
)
""",
type = "tar.gz",
)

View File

@ -93,7 +93,11 @@ http_archive(
name = "rules_foreign_cc",
sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
strip_prefix = "rules_foreign_cc-0.9.0",
url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
type = "tar.gz",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
"https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
],
)
load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")

View File

@ -331,6 +331,40 @@ sh_template(
template = "go_generate.sh.in",
)
# Normalizes dependency rules in the WORKSPACE/.bzl files without
# contacting the mirror (no credentials required).
repo_command(
    name = "deps_mirror_fix",
    args = [
        "fix",
        "--unauthenticated",
    ],
    command = "//hack/bazel-deps-mirror",
)

# Authenticated "fix": normalizes rules and can write to the mirror.
# Invoked by the CI step "Upload Bazel dependencies to the mirror"
# on renovate branches.
repo_command(
    name = "deps_mirror_upload",
    args = [
        "fix",
    ],
    command = "//hack/bazel-deps-mirror",
)

# Checks rule formatting and mirror URLs only; CAS consistency checks
# are skipped because neither --mirror nor --mirror-unauthenticated is set.
repo_command(
    name = "deps_mirror_check",
    args = [
        "check",
    ],
    command = "//hack/bazel-deps-mirror",
)

# Like deps_mirror_check, but additionally performs authenticated checks
# that all referenced CAS objects are consistent within the mirror.
repo_command(
    name = "deps_mirror_check_mirror",
    args = [
        "check",
        "--mirror",
    ],
    command = "//hack/bazel-deps-mirror",
)
multirun(
name = "tidy",
commands = [
@ -342,6 +376,7 @@ multirun(
":buildifier_fix",
":terraform_fmt",
":buf_fmt",
":deps_mirror_fix",
],
jobs = 1, # execute sequentially
visibility = ["//visibility:public"],
@ -358,6 +393,7 @@ multirun(
":golicenses_check",
":license_header_check",
":govulncheck",
":deps_mirror_check",
] + select({
"@io_bazel_rules_go//go/platform:darwin_arm64": [
":shellcheck_noop_warning",

View File

@ -24,6 +24,7 @@ noHeader=$(
-rL \
--include='*.go' \
--exclude-dir 3rdparty \
--exclude-dir build \
-e'SPDX-License-Identifier: AGPL-3.0-only' \
-e'DO NOT EDIT'
)

View File

@ -8,6 +8,8 @@ def buildifier_deps():
sha256 = "ae34c344514e08c23e90da0e2d6cb700fcd28e80c02e23e4d5715dddcb42f7b3",
strip_prefix = "buildtools-4.2.2",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/ae34c344514e08c23e90da0e2d6cb700fcd28e80c02e23e4d5715dddcb42f7b3",
"https://github.com/bazelbuild/buildtools/archive/refs/tags/4.2.2.tar.gz",
],
type = "tar.gz",
)

View File

@ -18,29 +18,35 @@ def _shellcheck_deps():
http_archive(
name = "com_github_koalaman_shellcheck_linux_amd64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/700324c6dd0ebea0117591c6cc9d7350d9c7c5c287acbad7630fa17b1d4d9e2f",
"https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.x86_64.tar.xz",
],
sha256 = "700324c6dd0ebea0117591c6cc9d7350d9c7c5c287acbad7630fa17b1d4d9e2f",
strip_prefix = "shellcheck-v0.9.0",
build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""",
type = "tar.xz",
)
http_archive(
name = "com_github_koalaman_shellcheck_linux_aamd64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/179c579ef3481317d130adebede74a34dbbc2df961a70916dd4039ebf0735fae",
"https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.aarch64.tar.xz",
],
sha256 = "179c579ef3481317d130adebede74a34dbbc2df961a70916dd4039ebf0735fae",
strip_prefix = "shellcheck-v0.9.0",
build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""",
type = "tar.xz",
)
http_archive(
name = "com_github_koalaman_shellcheck_darwin_amd64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/7d3730694707605d6e60cec4efcb79a0632d61babc035aa16cda1b897536acf5",
"https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.darwin.x86_64.tar.xz",
],
sha256 = "7d3730694707605d6e60cec4efcb79a0632d61babc035aa16cda1b897536acf5",
strip_prefix = "shellcheck-v0.9.0",
build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""",
type = "tar.xz",
)
def _terraform_deps():
@ -48,33 +54,41 @@ def _terraform_deps():
name = "com_github_hashicorp_terraform_linux_amd64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/9f3ca33d04f5335472829d1df7785115b60176d610ae6f1583343b0a2221a931",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_linux_amd64.zip",
],
sha256 = "9f3ca33d04f5335472829d1df7785115b60176d610ae6f1583343b0a2221a931",
type = "zip",
)
http_archive(
name = "com_github_hashicorp_terraform_linux_arm64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/39c182670c4e63e918e0a16080b1cc47bb16e158d7da96333d682d6a9cb8eb91",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_linux_arm64.zip",
],
sha256 = "39c182670c4e63e918e0a16080b1cc47bb16e158d7da96333d682d6a9cb8eb91",
type = "zip",
)
http_archive(
name = "com_github_hashicorp_terraform_darwin_amd64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/c218a6c0ef6692b25af16995c8c7bdf6739e9638fef9235c6aced3cd84afaf66",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_darwin_amd64.zip",
],
sha256 = "c218a6c0ef6692b25af16995c8c7bdf6739e9638fef9235c6aced3cd84afaf66",
type = "zip",
)
http_archive(
name = "com_github_hashicorp_terraform_darwin_arm64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/af8ff7576c8fc41496fdf97e9199b00d8d81729a6a0e821eaf4dfd08aa763540",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_darwin_arm64.zip",
],
sha256 = "af8ff7576c8fc41496fdf97e9199b00d8d81729a6a0e821eaf4dfd08aa763540",
type = "zip",
)
def _actionlint_deps():
@ -82,39 +96,48 @@ def _actionlint_deps():
name = "com_github_rhysd_actionlint_linux_amd64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/b39e7cd53f4a317aecfb09edcebcc058df9ebef967866e11aa7f0df27339af3b",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_linux_amd64.tar.gz",
],
sha256 = "b39e7cd53f4a317aecfb09edcebcc058df9ebef967866e11aa7f0df27339af3b",
type = "tar.gz",
)
http_archive(
name = "com_github_rhysd_actionlint_linux_arm64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/a36ba721621e861e900d36457836bfd6a29d6e10d9edebe547544a0e3dbf4348",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_linux_arm64.tar.gz",
],
sha256 = "a36ba721621e861e900d36457836bfd6a29d6e10d9edebe547544a0e3dbf4348",
type = "tar.gz",
)
http_archive(
name = "com_github_rhysd_actionlint_darwin_amd64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/54f000f84d3fe85012a8726cd731c4101202c787963c9f8b40d15086b003d48e",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_darwin_amd64.tar.gz",
],
sha256 = "54f000f84d3fe85012a8726cd731c4101202c787963c9f8b40d15086b003d48e",
type = "tar.gz",
)
http_archive(
name = "com_github_rhysd_actionlint_darwin_arm64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/ddd0263968f7f024e49bd8721cd2b3d27c7a4d77081b81a4b376d5053ea25cdc",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_darwin_arm64.tar.gz",
],
sha256 = "ddd0263968f7f024e49bd8721cd2b3d27c7a4d77081b81a4b376d5053ea25cdc",
type = "tar.gz",
)
def _gofumpt_deps():
http_file(
name = "com_github_mvdan_gofumpt_linux_amd64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/d3ca535e6b0b230a9c4f05a3ec54e358336b5e7474d239c15514e63a0b2a8041",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_linux_amd64",
],
executable = True,
@ -124,6 +147,7 @@ def _gofumpt_deps():
http_file(
name = "com_github_mvdan_gofumpt_linux_arm64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/186faa7b7562cc4c1a34f2cb89f9b09d9fad949bc2f3ce293ea2726b23c28695",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_linux_arm64",
],
executable = True,
@ -133,6 +157,7 @@ def _gofumpt_deps():
http_file(
name = "com_github_mvdan_gofumpt_darwin_amd64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/3f550baa6d4c071b01e9c68b9308bd2ca3bae6b3b09d203f19ed8626ee0fe487",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_darwin_amd64",
],
executable = True,
@ -142,6 +167,7 @@ def _gofumpt_deps():
http_file(
name = "com_github_mvdan_gofumpt_darwin_arm64",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/768263452749a3a3cabf412f29f8a14e8bbdc7f6c6471427e977eebc6592ddb8",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_darwin_arm64",
],
executable = True,
@ -154,33 +180,41 @@ def _tfsec_deps():
name = "com_github_aquasecurity_tfsec_linux_amd64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/57b902b31da3eed12448a4e82a8aca30477e4bcd1bf99e3f65310eae0889f88d",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_linux_amd64.tar.gz",
],
sha256 = "57b902b31da3eed12448a4e82a8aca30477e4bcd1bf99e3f65310eae0889f88d",
type = "tar.gz",
)
http_archive(
name = "com_github_aquasecurity_tfsec_linux_arm64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/20daad803d2a7a781f2ef0ee72ba4ed4ae17dcb41a43a330ae7b98347762bec9",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_linux_arm64.tar.gz",
],
sha256 = "20daad803d2a7a781f2ef0ee72ba4ed4ae17dcb41a43a330ae7b98347762bec9",
type = "tar.gz",
)
http_archive(
name = "com_github_aquasecurity_tfsec_darwin_amd64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/6d9f5a747b1fcc1b6c314d30f4ff4d753371e5690309a99a5dd653d719d20d2d",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_darwin_amd64.tar.gz",
],
sha256 = "6d9f5a747b1fcc1b6c314d30f4ff4d753371e5690309a99a5dd653d719d20d2d",
type = "tar.gz",
)
http_archive(
name = "com_github_aquasecurity_tfsec_darwin_arm64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/6d664dcdd37e2809d1b4f14b310ccda0973b4a29e4624e902286e4964d101e22",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_darwin_arm64.tar.gz",
],
sha256 = "6d664dcdd37e2809d1b4f14b310ccda0973b4a29e4624e902286e4964d101e22",
type = "tar.gz",
)
def _golangci_lint_deps():
@ -188,67 +222,91 @@ def _golangci_lint_deps():
name = "com_github_golangci_golangci_lint_linux_amd64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/4de479eb9d9bc29da51aec1834e7c255b333723d38dbd56781c68e5dddc6a90b",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-linux-amd64.tar.gz",
],
strip_prefix = "golangci-lint-1.51.2-linux-amd64",
sha256 = "4de479eb9d9bc29da51aec1834e7c255b333723d38dbd56781c68e5dddc6a90b",
type = "tar.gz",
)
http_archive(
name = "com_github_golangci_golangci_lint_linux_arm64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/9e03c47b7628d49f950445d74881a0e3cb3a1e6b3c5ac3b67672d600124c1b08",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-linux-arm64.tar.gz",
],
strip_prefix = "golangci-lint-1.51.2-linux-arm64",
sha256 = "9e03c47b7628d49f950445d74881a0e3cb3a1e6b3c5ac3b67672d600124c1b08",
type = "tar.gz",
)
http_archive(
name = "com_github_golangci_golangci_lint_darwin_amd64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/0549cbaa2df451cf3a2011a9d73a9cb127784d26749d9cd14c9f4818af104d44",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-darwin-amd64.tar.gz",
],
strip_prefix = "golangci-lint-1.51.2-darwin-amd64",
sha256 = "0549cbaa2df451cf3a2011a9d73a9cb127784d26749d9cd14c9f4818af104d44",
type = "tar.gz",
)
http_archive(
name = "com_github_golangci_golangci_lint_darwin_arm64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/36e69882205a0e42a63ad57ec3015639c11051e03f0beb9cf7949c6451408960",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-darwin-arm64.tar.gz",
],
strip_prefix = "golangci-lint-1.51.2-darwin-arm64",
sha256 = "36e69882205a0e42a63ad57ec3015639c11051e03f0beb9cf7949c6451408960",
type = "tar.gz",
)
def _buf_deps():
http_archive(
name = "com_github_bufbuild_buf_linux_amd64",
sha256 = "39b58126938e265a7dd60fc4716a4a43931896e62db3d69c704d7dd63d5889dd",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-x86_64.tar.gz",
strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/39b58126938e265a7dd60fc4716a4a43931896e62db3d69c704d7dd63d5889dd",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-x86_64.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "com_github_bufbuild_buf_linux_arm64",
sha256 = "90d8caa85b4cff1cdb6e96ee01e3f4f1a12135be3834ffd41c486f1cc03213ef",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-aarch64.tar.gz",
strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/90d8caa85b4cff1cdb6e96ee01e3f4f1a12135be3834ffd41c486f1cc03213ef",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-aarch64.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "com_github_bufbuild_buf_darwin_amd64",
sha256 = "196e75933f7c3abebf8835fdfd74c15bc953525c9250e7bbff943e3db6fb0eb1",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-x86_64.tar.gz",
strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/196e75933f7c3abebf8835fdfd74c15bc953525c9250e7bbff943e3db6fb0eb1",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-x86_64.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "com_github_bufbuild_buf_darwin_arm64",
sha256 = "f6187bbcf0718da1de38ca638038d4a707dd5b0e113e1a9e110ac8a15012505a",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-arm64.tar.gz",
strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/f6187bbcf0718da1de38ca638038d4a707dd5b0e113e1a9e110ac8a15012505a",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-arm64.tar.gz",
],
type = "tar.gz",
)
def _talos_docgen_deps():
@ -293,28 +351,44 @@ def _helm_deps():
http_archive(
name = "com_github_helm_helm_linux_amd64",
sha256 = "781d826daec584f9d50a01f0f7dadfd25a3312217a14aa2fbb85107b014ac8ca",
url = "https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz",
strip_prefix = "linux-amd64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/781d826daec584f9d50a01f0f7dadfd25a3312217a14aa2fbb85107b014ac8ca",
"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "com_github_helm_helm_linux_arm64",
sha256 = "0a60baac83c3106017666864e664f52a4e16fbd578ac009f9a85456a9241c5db",
url = "https://get.helm.sh/helm-v3.11.2-linux-arm64.tar.gz",
strip_prefix = "linux-arm64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/0a60baac83c3106017666864e664f52a4e16fbd578ac009f9a85456a9241c5db",
"https://get.helm.sh/helm-v3.11.2-linux-arm64.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "com_github_helm_helm_darwin_amd64",
sha256 = "404938fd2c6eff9e0dab830b0db943fca9e1572cd3d7ee40904705760faa390f",
url = "https://get.helm.sh/helm-v3.11.2-darwin-amd64.tar.gz",
strip_prefix = "darwin-amd64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/404938fd2c6eff9e0dab830b0db943fca9e1572cd3d7ee40904705760faa390f",
"https://get.helm.sh/helm-v3.11.2-darwin-amd64.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "com_github_helm_helm_darwin_arm64",
sha256 = "f61a3aa55827de2d8c64a2063fd744b618b443ed063871b79f52069e90813151",
url = "https://get.helm.sh/helm-v3.11.2-darwin-arm64.tar.gz",
strip_prefix = "darwin-arm64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
type = "tar.gz",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/f61a3aa55827de2d8c64a2063fd744b618b443ed063871b79f52069e90813151",
"https://get.helm.sh/helm-v3.11.2-darwin-arm64.tar.gz",
],
)

View File

@ -8,6 +8,8 @@ def dnf_deps():
sha256 = "6104de1d657ae524bef5af86b153b82f114f532fe2e7eb02beb2e950550a88fe",
strip_prefix = "bazeldnf-45f5d74ba73710b538c57c9d43d88c583aab9d3a",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/6104de1d657ae524bef5af86b153b82f114f532fe2e7eb02beb2e950550a88fe",
"https://github.com/rmohr/bazeldnf/archive/45f5d74ba73710b538c57c9d43d88c583aab9d3a.tar.gz",
],
type = "tar.gz",
)

View File

@ -815,6 +815,15 @@ def go_dependencies():
sum = "h1:UE9n9rkJF62ArLb1F3DEjRt8O3jLwMWdSoypKV4f3MU=",
version = "v0.9.0",
)
go_repository(
name = "com_github_bazelbuild_buildtools",
build_file_generation = "on",
build_file_proto_mode = "disable_global",
importpath = "github.com/bazelbuild/buildtools",
sum = "h1:XmPu4mXICgdGnC5dXGjUGbwUD/kUmS0l5Aop3LaevBM=",
version = "v0.0.0-20230317132445-9c3c1fc0106e",
)
go_repository(
name = "com_github_beeker1121_goque",
build_file_generation = "on",
@ -3559,6 +3568,14 @@ def go_dependencies():
sum = "h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8=",
version = "v1.9.0",
)
go_repository(
name = "com_github_hexops_gotextdiff",
build_file_generation = "on",
build_file_proto_mode = "disable_global",
importpath = "github.com/hexops/gotextdiff",
sum = "h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=",
version = "v1.0.3",
)
go_repository(
name = "com_github_honeycombio_beeline_go",
@ -5919,8 +5936,8 @@ def go_dependencies():
build_file_generation = "on",
build_file_proto_mode = "disable_global",
importpath = "github.com/sergi/go-diff",
sum = "h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=",
version = "v1.2.0",
sum = "h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=",
version = "v1.3.1",
)
go_repository(
name = "com_github_shibumi_go_pathspec",

View File

@ -12,16 +12,18 @@ def go_deps():
strip_prefix = "rules_go-ea3cc4f0778ba4bb35a682affc8e278551187fad",
sha256 = "9f0c386d233e7160cb752527c34654620cef1920a53617a2f1cca8d8edee5e8a",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/9f0c386d233e7160cb752527c34654620cef1920a53617a2f1cca8d8edee5e8a",
"https://github.com/bazelbuild/rules_go/archive/ea3cc4f0778ba4bb35a682affc8e278551187fad.tar.gz",
],
type = "tar.gz",
)
http_archive(
name = "bazel_gazelle",
strip_prefix = "bazel-gazelle-97a754c6e45848828b27152fa64ca5dd3003d832",
sha256 = "2591fe5c9ff639317c5144665f2b97f3e45dac7ebb0b9357f8ddb3533b60a16f",
urls = [
# Depending on main until the next release, needed change from https://github.com/bazelbuild/bazel-gazelle/pull/1432
# so that "go:embed all:" directives work.
"https://cdn.confidential.cloud/constellation/cas/sha256/2591fe5c9ff639317c5144665f2b97f3e45dac7ebb0b9357f8ddb3533b60a16f",
"https://github.com/bazelbuild/bazel-gazelle/archive/97a754c6e45848828b27152fa64ca5dd3003d832.tar.gz",
],
type = "tar.gz",
)

View File

@ -7,5 +7,9 @@ def multirun_deps():
name = "com_github_ash2k_bazel_tools",
sha256 = "0ad31a16c9e48b01a1a11daf908227a6bf6106269187cccf7398625fea2ba45a",
strip_prefix = "bazel-tools-4e045b9b4e3e613970ab68941b556a356239d433",
url = "https://github.com/ash2k/bazel-tools/archive/4e045b9b4e3e613970ab68941b556a356239d433.tar.gz",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/0ad31a16c9e48b01a1a11daf908227a6bf6106269187cccf7398625fea2ba45a",
"https://github.com/ash2k/bazel-tools/archive/4e045b9b4e3e613970ab68941b556a356239d433.tar.gz",
],
type = "tar.gz",
)

View File

@ -7,5 +7,9 @@ def oci_deps():
name = "rules_oci",
sha256 = "4f119dc9e08319a3262c04b334bda54ba0484ca34f8ead706dd2397fc11816f7",
strip_prefix = "rules_oci-0.3.3",
url = "https://github.com/bazel-contrib/rules_oci/releases/download/v0.3.3/rules_oci-v0.3.3.tar.gz",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/4f119dc9e08319a3262c04b334bda54ba0484ca34f8ead706dd2397fc11816f7",
"https://github.com/bazel-contrib/rules_oci/releases/download/v0.3.3/rules_oci-v0.3.3.tar.gz",
],
type = "tar.gz",
)

View File

@ -7,7 +7,9 @@ def pkg_deps():
name = "rules_pkg",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.8.0/rules_pkg-0.8.0.tar.gz",
"https://cdn.confidential.cloud/constellation/cas/sha256/eea0f59c28a9241156a47d7a8e32db9122f3d50b505fae0f33de6ce4d9b61834",
"https://github.com/bazelbuild/rules_pkg/releases/download/0.8.0/rules_pkg-0.8.0.tar.gz",
],
sha256 = "eea0f59c28a9241156a47d7a8e32db9122f3d50b505fae0f33de6ce4d9b61834",
type = "tar.gz",
)

View File

@ -8,6 +8,8 @@ def proto_deps():
sha256 = "17fa03f509b0d1df05c70c174a266ab211d04b9969e41924fd07a81ea171f117",
strip_prefix = "rules_proto-cda0effe6b5af095a6886c67f90c760b83f08c48",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/17fa03f509b0d1df05c70c174a266ab211d04b9969e41924fd07a81ea171f117",
"https://github.com/bazelbuild/rules_proto/archive/cda0effe6b5af095a6886c67f90c760b83f08c48.tar.gz",
],
type = "tar.gz",
)

View File

@ -10,8 +10,10 @@ def zig_cc_deps():
sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
"https://cdn.confidential.cloud/constellation/cas/sha256/74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
],
type = "tar.gz",
)
# TODO(malt3): Update to a release version once the next release is out.
@ -21,5 +23,9 @@ def zig_cc_deps():
name = "bazel-zig-cc",
sha256 = "bea372f7f9bd8541f7b0a152c76c7b9396201c36a0ed229b36c48301815c3141",
strip_prefix = "bazel-zig-cc-f3e4542bd62f4aef794a3d184140a9d30b8fadb8",
urls = ["https://github.com/uber/bazel-zig-cc/archive/f3e4542bd62f4aef794a3d184140a9d30b8fadb8.tar.gz"],
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/bea372f7f9bd8541f7b0a152c76c7b9396201c36a0ed229b36c48301815c3141",
"https://github.com/uber/bazel-zig-cc/archive/f3e4542bd62f4aef794a3d184140a9d30b8fadb8.tar.gz",
],
type = "tar.gz",
)

View File

@ -0,0 +1,28 @@
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
# Library with the CLI implementation of bazel-deps-mirror:
# the root command plus the "check" and "fix" subcommands.
go_library(
    name = "bazel-deps-mirror_lib",
    srcs = [
        "bazel-deps-mirror.go",
        "check.go",
        "fix.go",
    ],
    importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror",
    visibility = ["//visibility:private"],
    deps = [
        "//hack/bazel-deps-mirror/internal/bazelfiles",
        "//hack/bazel-deps-mirror/internal/issues",
        "//hack/bazel-deps-mirror/internal/mirror",
        "//hack/bazel-deps-mirror/internal/rules",
        "//internal/logger",
        "@com_github_bazelbuild_buildtools//build",
        "@com_github_spf13_cobra//:cobra",
        "@org_uber_go_zap//zapcore",
    ],
)

# Executable wrapper around the library; this is the target invoked by
# the //bazel/ci:deps_mirror_* commands.
go_binary(
    name = "bazel-deps-mirror",
    embed = [":bazel-deps-mirror_lib"],
    visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,9 @@
# Bazel deps mirror
This directory contains tooling to automatically mirror the dependencies of a Bazel project into the Constellation CDN at `https://cdn.confidential.cloud/`.
The tool searches for various rules in the WORKSPACE.bazel file and all loaded .bzl files.
It has the following commands:
- check: checks if the dependencies all have a mirror URL and optionally checks if the mirror really returns the expected file
- fix: mirrors all dependencies that don't have a mirror URL yet. Also normalizes the `urls` attribute of rules

View File

@ -0,0 +1,82 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// bazel-deps-mirror adds external dependencies to edgeless systems' mirror.
package main
import (
"context"
"fmt"
"os"
"os/signal"
"github.com/spf13/cobra"
)
// main is the process entrypoint. It delegates all work to execute and
// maps any returned error to a non-zero exit code.
func main() {
	err := execute()
	if err != nil {
		os.Exit(1)
	}
}
func execute() error {
rootCmd := newRootCmd()
ctx, cancel := signalContext(context.Background(), os.Interrupt)
defer cancel()
return rootCmd.ExecuteContext(ctx)
}
// newRootCmd constructs the "bazel-deps-mirror" root command with its
// persistent flags and the "check" and "fix" subcommands attached.
func newRootCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:              "bazel-deps-mirror",
		Short:            "Add external Bazel dependencies to edgeless systems' mirror",
		Long:             "Add external Bazel dependencies to edgeless systems' mirror.",
		PersistentPreRun: preRunRoot,
	}
	cmd.SetOut(os.Stdout)

	persistent := cmd.PersistentFlags()
	persistent.Bool("verbose", false, "Enable verbose output")
	persistent.String("region", "eu-central-1", "AWS region of the API S3 bucket")
	persistent.String("bucket", "cdn-constellation-backend", "S3 bucket name of the API")
	persistent.String("mirror-base-url", "https://cdn.confidential.cloud", "Base URL of the public mirror endpoint")

	for _, sub := range []*cobra.Command{newCheckCmd(), newFixCmd()} {
		cmd.AddCommand(sub)
	}
	return cmd
}
// signalContext returns a context that is canceled on the handed signal.
// The signal isn't watched after its first occurrence. Call the cancel
// function to ensure the internal goroutine is stopped and the signal isn't
// watched any longer.
func signalContext(ctx context.Context, sig os.Signal) (context.Context, context.CancelFunc) {
sigCtx, stop := signal.NotifyContext(ctx, sig)
done := make(chan struct{}, 1)
stopDone := make(chan struct{}, 1)
go func() {
defer func() { stopDone <- struct{}{} }()
defer stop()
select {
case <-sigCtx.Done():
fmt.Println(" Signal caught. Press ctrl+c again to terminate the program immediately.")
case <-done:
}
}()
cancelFunc := func() {
done <- struct{}{}
<-stopDone
}
return sigCtx, cancelFunc
}
// preRunRoot is registered as PersistentPreRun on the root command.
// It suppresses cobra's usage text when a command returns an error, so
// failures print only the error itself.
func preRunRoot(cmd *cobra.Command, _ []string) {
	cmd.SilenceUsage = true
}

View File

@ -0,0 +1,184 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package main
import (
"context"
"errors"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules"
"github.com/edgelesssys/constellation/v2/internal/logger"
"github.com/spf13/cobra"
"go.uber.org/zap/zapcore"
)
// newCheckCmd creates the "check" subcommand, which verifies that all Bazel
// dependencies are mirrored and that the corresponding rules are well formed.
func newCheckCmd() *cobra.Command {
	checkCmd := &cobra.Command{
		Use:   "check",
		Short: "Check if all Bazel dependencies are mirrored and the corresponding rules are properly formatted.",
		RunE:  runCheck,
	}
	checkCmd.Flags().Bool("mirror", false, "Performs authenticated checks to validate if all referenced CAS objects are still consistent within the mirror.")
	checkCmd.Flags().Bool("mirror-unauthenticated", false, "Performs unauthenticated, slow checks to validate if all referenced CAS objects are still consistent within the mirror. Doesn't require authentication to the mirror but may be inefficient.")
	// The two consistency modes are alternatives; allow at most one.
	checkCmd.MarkFlagsMutuallyExclusive("mirror", "mirror-unauthenticated")
	return checkCmd
}
// runCheck is the cobra entrypoint for "check". It scans the WORKSPACE and
// all loaded .bzl files for supported dependency rules, validates them, and
// returns an error if any issues were found.
func runCheck(cmd *cobra.Command, _ []string) error {
	flags, err := parseCheckFlags(cmd)
	if err != nil {
		return err
	}
	log := logger.New(logger.PlainLog, flags.logLevel)
	log.Debugf("Parsed flags: %+v", flags)

	filesHelper, err := bazelfiles.New()
	if err != nil {
		return err
	}

	log.Debugf("Searching for Bazel files in the current WORKSPACE and all subdirectories...")
	bazelFiles, err := filesHelper.FindFiles()
	if err != nil {
		return err
	}

	// Pick the consistency checker matching the requested flags; by default
	// a no-op checker is used and CAS consistency checks are skipped.
	var mirrorCheck mirrorChecker = &noOpMirrorChecker{}
	if flags.mirrorUnauthenticated {
		log.Debugf("Checking consistency of all referenced CAS objects without authentication.")
		mirrorCheck = mirror.NewUnauthenticated(flags.mirrorBaseURL, mirror.Run, log)
	} else if flags.mirror {
		log.Debugf("Checking consistency of all referenced CAS objects using AWS S3.")
		mirrorCheck, err = mirror.New(cmd.Context(), flags.region, flags.bucket, flags.mirrorBaseURL, mirror.Run, log)
		if err != nil {
			return err
		}
	}

	iss := issues.New()
	for _, bazelFile := range bazelFiles {
		issByFile, err := checkBazelFile(cmd.Context(), filesHelper, mirrorCheck, bazelFile, log)
		if err != nil {
			return err
		}
		if len(issByFile) > 0 {
			iss.Set(bazelFile.AbsPath, issByFile)
		}
	}

	if len(iss) > 0 {
		log.Infof("Found issues in rules")
		iss.Report(cmd.OutOrStdout())
		return errors.New("found issues in rules")
	}
	log.Infof("No issues found 🦭")
	return nil
}
// checkBazelFile validates all supported dependency rules of a single Bazel
// file and returns the issues found in it, keyed by rule name.
func checkBazelFile(ctx context.Context, fileHelper *bazelfiles.Helper, mirrorCheck mirrorChecker, bazelFile bazelfiles.BazelFile, log *logger.Logger) (issByFile issues.ByFile, err error) {
	log.Debugf("Checking file: %s", bazelFile.RelPath)
	issByFile = issues.NewByFile()
	buildfile, err := fileHelper.LoadFile(bazelFile)
	if err != nil {
		return nil, err
	}
	found := rules.Rules(buildfile, rules.SupportedRules)
	if len(found) == 0 {
		log.Debugf("No rules found in file: %s", bazelFile.RelPath)
		return
	}
	log.Debugf("Found %d rules in file: %s", len(found), bazelFile.RelPath)
	for _, rule := range found {
		log.Debugf("Checking rule: %s", rule.Name())
		// check if the rule is a valid pinned dependency rule (has all required attributes)
		if issues := rules.ValidatePinned(rule); len(issues) > 0 {
			issByFile.Add(rule.Name(), issues...)
			continue
		}
		// check if the rule is a valid mirror rule
		if issues := rules.Check(rule); len(issues) > 0 {
			issByFile.Add(rule.Name(), issues...)
		}
		// check if the referenced CAS object is still consistent
		// may be a no-op if neither --mirror nor --mirror-unauthenticated is set
		expectedHash, expectedHashErr := rules.GetHash(rule)
		if expectedHashErr == nil && rules.HasMirrorURL(rule) {
			if issue := mirrorCheck.Check(ctx, expectedHash); issue != nil {
				issByFile.Add(rule.Name(), issue)
			}
		}
	}
	return
}
// checkFlags are the parsed command line flags of the check command.
type checkFlags struct {
	mirrorUnauthenticated bool          // check CAS objects via the public HTTP endpoint
	mirror                bool          // check CAS objects via the authenticated S3 API
	region                string        // AWS region of the mirror bucket
	bucket                string        // name of the S3 mirror bucket
	mirrorBaseURL         string        // base URL of the public mirror endpoint
	logLevel              zapcore.Level // verbosity of the logger
}
// parseCheckFlags reads all flags relevant for the check command from the
// cobra command into a checkFlags struct.
func parseCheckFlags(cmd *cobra.Command) (checkFlags, error) {
	var (
		flags checkFlags
		err   error
	)
	if flags.mirrorUnauthenticated, err = cmd.Flags().GetBool("mirror-unauthenticated"); err != nil {
		return checkFlags{}, err
	}
	if flags.mirror, err = cmd.Flags().GetBool("mirror"); err != nil {
		return checkFlags{}, err
	}
	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return checkFlags{}, err
	}
	// --verbose raises the log level from info to debug.
	flags.logLevel = zapcore.InfoLevel
	if verbose {
		flags.logLevel = zapcore.DebugLevel
	}
	if flags.region, err = cmd.Flags().GetString("region"); err != nil {
		return checkFlags{}, err
	}
	if flags.bucket, err = cmd.Flags().GetString("bucket"); err != nil {
		return checkFlags{}, err
	}
	if flags.mirrorBaseURL, err = cmd.Flags().GetString("mirror-base-url"); err != nil {
		return checkFlags{}, err
	}
	return flags, nil
}
// mirrorChecker checks if the CAS object with the given hash is present and
// consistent in the mirror.
type mirrorChecker interface {
	Check(ctx context.Context, expectedHash string) error
}
type noOpMirrorChecker struct{}
func (m *noOpMirrorChecker) Check(ctx context.Context, expectedHash string) error {
return nil
}

View File

@ -0,0 +1,230 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package main
import (
"context"
"errors"
"github.com/bazelbuild/buildtools/build"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules"
"github.com/edgelesssys/constellation/v2/internal/logger"
"github.com/spf13/cobra"
"go.uber.org/zap/zapcore"
)
// newFixCmd returns the cobra command "fix", which normalizes dependency
// rules and uploads missing artifacts to the mirror.
func newFixCmd() *cobra.Command {
	fixCmd := &cobra.Command{
		Use:   "fix",
		Short: "fix all Bazel dependency rules by uploading artifacts to the mirror (if needed) and formatting the rules.",
		RunE:  runFix,
	}
	fixCmd.Flags().Bool("dry-run", false, "Don't actually change files or upload anything.")
	fixCmd.Flags().Bool("unauthenticated", false, "Doesn't require authentication to the mirror but cannot upload files.")
	return fixCmd
}
// runFix is the entry point of the fix command. It locates all Bazel files in
// the workspace, fixes the supported dependency rules in each of them, and
// returns an error if any unfixable issues remain.
func runFix(cmd *cobra.Command, _ []string) error {
	flags, err := parseFixFlags(cmd)
	if err != nil {
		return err
	}
	log := logger.New(logger.PlainLog, flags.logLevel)
	log.Debugf("Parsed flags: %+v", flags)
	fileHelper, err := bazelfiles.New()
	if err != nil {
		return err
	}
	log.Debugf("Searching for Bazel files in the current WORKSPACE and all subdirectories...")
	bazelFiles, err := fileHelper.FindFiles()
	if err != nil {
		return err
	}
	// Select how artifacts are uploaded: unauthenticated mode cannot upload
	// and will fail for artifacts that are not yet mirrored.
	var mirrorUpload mirrorUploader
	switch {
	case flags.unauthenticated:
		log.Warnf("Fixing rules without authentication for AWS S3. If artifacts are not yet mirrored, this will fail.")
		mirrorUpload = mirror.NewUnauthenticated(flags.mirrorBaseURL, flags.dryRun, log)
	default:
		log.Debugf("Fixing rules with authentication for AWS S3.")
		mirrorUpload, err = mirror.New(cmd.Context(), flags.region, flags.bucket, flags.mirrorBaseURL, flags.dryRun, log)
		if err != nil {
			return err
		}
	}
	// Named "iss" (not "issues") to avoid shadowing the imported issues
	// package; this also matches the naming in runCheck.
	iss := issues.New()
	for _, bazelFile := range bazelFiles {
		fileIssues, err := fixBazelFile(cmd.Context(), fileHelper, mirrorUpload, bazelFile, flags.dryRun, log)
		if err != nil {
			return err
		}
		if len(fileIssues) > 0 {
			iss.Set(bazelFile.AbsPath, fileIssues)
		}
	}
	if len(iss) > 0 {
		log.Warnf("Found %d unfixable issues in rules", len(iss))
		iss.Report(cmd.OutOrStdout())
		return errors.New("found issues in rules")
	}
	log.Infof("No unfixable issues found")
	return nil
}
// fixBazelFile fixes all supported dependency rules of a single Bazel file,
// uploading artifacts to the mirror where needed, and writes the file back if
// anything changed. Unfixable problems are returned as issues keyed by rule name.
func fixBazelFile(ctx context.Context, fileHelper *bazelfiles.Helper, mirrorUpload mirrorUploader, bazelFile bazelfiles.BazelFile, dryRun bool, log *logger.Logger) (iss issues.ByFile, err error) {
	iss = issues.NewByFile()
	var changed bool // true if any rule in this file was changed
	log.Infof("Checking file: %s", bazelFile.RelPath)
	buildfile, err := fileHelper.LoadFile(bazelFile)
	if err != nil {
		return iss, err
	}
	found := rules.Rules(buildfile, rules.SupportedRules)
	if len(found) == 0 {
		log.Debugf("No rules found in file: %s", bazelFile.RelPath)
		return iss, nil
	}
	log.Debugf("Found %d rules in file: %s", len(found), bazelFile.RelPath)
	for _, rule := range found {
		changedRule, ruleIssues := fixRule(ctx, mirrorUpload, rule, log)
		if len(ruleIssues) > 0 {
			iss.Add(rule.Name(), ruleIssues...)
		}
		changed = changed || changedRule
	}
	// Files with issues are never written back, so they can be inspected untouched.
	if len(iss) > 0 {
		log.Warnf("File %s has issues. Not saving!", bazelFile.RelPath)
		return iss, nil
	}
	if !changed {
		log.Debugf("No changes to file: %s", bazelFile.RelPath)
		return iss, nil
	}
	// In dry-run mode only print the diff instead of writing the file.
	if dryRun {
		diff, err := fileHelper.Diff(bazelFile, buildfile)
		if err != nil {
			return iss, err
		}
		log.Infof("Dry run: would save updated file %s with diff:\n%s", bazelFile.RelPath, diff)
		return iss, nil
	}
	log.Infof("Saving updated file: %s", bazelFile.RelPath)
	if err := fileHelper.WriteFile(bazelFile, buildfile); err != nil {
		return iss, err
	}
	return iss, nil
}
// fixRule fixes a single dependency rule: it uploads the referenced artifact
// to the mirror if it is not mirrored yet, adds the mirror URL to the rule,
// and normalizes the rule. It reports whether the rule was changed and any
// unfixable issues.
func fixRule(ctx context.Context, mirrorUpload mirrorUploader, rule *build.Rule, log *logger.Logger) (changed bool, iss []error) {
	log.Debugf("Fixing rule: %s", rule.Name())
	// check if the rule is a valid pinned dependency rule (has all required attributes)
	issue := rules.ValidatePinned(rule)
	if issue != nil {
		// don't try to fix the rule if it's invalid
		iss = append(iss, issue...)
		return
	}
	// the hash is needed to address the artifact in the CAS mirror
	expectedHash, expectedHashErr := rules.GetHash(rule)
	if expectedHashErr != nil {
		// don't try to fix the rule if the hash is missing
		iss = append(iss,
			errors.New("hash attribute is missing. unable to check if the artifact is already mirrored or upload it"))
		return
	}
	// rules that already reference the mirror only need normalization
	if rules.HasMirrorURL(rule) {
		changed = rules.Normalize(rule)
		return
	}
	log.Infof("Artifact %s with hash %s is not yet mirrored. Uploading...", rule.Name(), expectedHash)
	if uploadErr := mirrorUpload.Mirror(ctx, expectedHash, rules.GetURLs(rule)); uploadErr != nil {
		// don't try to fix the rule if the upload failed
		iss = append(iss, uploadErr)
		return
	}
	// now the artifact is mirrored (if it wasn't already) and we can fix the rule
	mirrorURL, err := mirrorUpload.MirrorURL(expectedHash)
	if err != nil {
		iss = append(iss, err)
		return
	}
	rules.AddURLs(rule, []string{mirrorURL})
	// normalize the rule
	rules.Normalize(rule)
	return true, iss
}
// fixFlags are the parsed command line flags of the fix command.
type fixFlags struct {
	unauthenticated bool          // don't authenticate to the mirror; uploads will fail
	dryRun          bool          // don't write files or upload anything
	region          string        // AWS region of the mirror bucket
	bucket          string        // name of the S3 mirror bucket
	mirrorBaseURL   string        // base URL of the public mirror endpoint
	logLevel        zapcore.Level // verbosity of the logger
}
// parseFixFlags reads all flags relevant for the fix command from the cobra
// command into a fixFlags struct.
func parseFixFlags(cmd *cobra.Command) (fixFlags, error) {
	var parsed fixFlags
	var err error
	if parsed.unauthenticated, err = cmd.Flags().GetBool("unauthenticated"); err != nil {
		return fixFlags{}, err
	}
	if parsed.dryRun, err = cmd.Flags().GetBool("dry-run"); err != nil {
		return fixFlags{}, err
	}
	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return fixFlags{}, err
	}
	// --verbose raises the log level from info to debug.
	parsed.logLevel = zapcore.InfoLevel
	if verbose {
		parsed.logLevel = zapcore.DebugLevel
	}
	if parsed.region, err = cmd.Flags().GetString("region"); err != nil {
		return fixFlags{}, err
	}
	if parsed.bucket, err = cmd.Flags().GetString("bucket"); err != nil {
		return fixFlags{}, err
	}
	if parsed.mirrorBaseURL, err = cmd.Flags().GetString("mirror-base-url"); err != nil {
		return fixFlags{}, err
	}
	return parsed, nil
}
// mirrorUploader checks the consistency of mirrored artifacts, uploads
// missing artifacts to the CAS mirror, and resolves hashes to mirror URLs.
type mirrorUploader interface {
	Check(ctx context.Context, expectedHash string) error
	Mirror(ctx context.Context, hash string, urls []string) error
	MirrorURL(hash string) (string, error)
}

View File

@ -0,0 +1,30 @@
# Build targets for the bazelfiles package of the bazel-deps-mirror tool.
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")

go_library(
    name = "bazelfiles",
    srcs = ["files.go"],
    importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles",
    visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
    deps = [
        "@com_github_bazelbuild_buildtools//build",
        "@com_github_hexops_gotextdiff//:gotextdiff",
        "@com_github_hexops_gotextdiff//myers",
        "@com_github_hexops_gotextdiff//span",
        "@com_github_spf13_afero//:afero",
    ],
)

go_test(
    name = "bazelfiles_test",
    srcs = ["files_test.go"],
    embed = [":bazelfiles"],
    deps = [
        "@com_github_bazelbuild_buildtools//build",
        "@com_github_bazelbuild_buildtools//edit",
        "@com_github_spf13_afero//:afero",
        "@com_github_stretchr_testify//assert",
        "@com_github_stretchr_testify//require",
        "@org_uber_go_goleak//:goleak",
    ],
)

View File

@ -0,0 +1,158 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// package bazelfiles is used to find and handle Bazel WORKSPACE and bzl files.
package bazelfiles
import (
"fmt"
"os"
"path/filepath"
"github.com/bazelbuild/buildtools/build"
"github.com/hexops/gotextdiff"
"github.com/hexops/gotextdiff/myers"
"github.com/hexops/gotextdiff/span"
"github.com/spf13/afero"
)
// Helper is used to find and handle Bazel WORKSPACE and bzl files.
type Helper struct {
	fs            afero.Fs // filesystem rooted at the workspace root
	workspaceRoot string   // absolute path of the Bazel workspace root
}
// New creates a new Helper whose file access is rooted at the Bazel
// workspace root (taken from the BUILD_WORKSPACE_DIRECTORY environment variable).
func New() (*Helper, error) {
	root, err := findWorkspaceRoot(os.LookupEnv)
	if err != nil {
		return nil, err
	}
	helper := &Helper{
		fs:            afero.NewBasePathFs(afero.NewOsFs(), root),
		workspaceRoot: root,
	}
	return helper, nil
}
// FindFiles returns the paths to all Bazel files in the Bazel workspace:
// every .bzl file plus the WORKSPACE(.bazel) file.
func (h *Helper) FindFiles() ([]BazelFile, error) {
	workspace, err := h.findWorkspaceFile()
	if err != nil {
		return nil, err
	}
	files, err := h.findBzlFiles()
	if err != nil {
		return nil, err
	}
	files = append(files, workspace)
	return files, nil
}
// findWorkspaceFile returns the path to the Bazel WORKSPACE.bazel file (or
// WORKSPACE if the former doesn't exist).
func (h *Helper) findWorkspaceFile() (BazelFile, error) {
	// WORKSPACE.bazel takes precedence over WORKSPACE, matching Bazel's own lookup order.
	for _, candidate := range []string{"WORKSPACE.bazel", "WORKSPACE"} {
		if _, err := h.fs.Stat(candidate); err != nil {
			continue
		}
		return BazelFile{
			RelPath: candidate,
			AbsPath: filepath.Join(h.workspaceRoot, candidate),
			Type:    BazelFileTypeWorkspace,
		}, nil
	}
	return BazelFile{}, fmt.Errorf("failed to find Bazel WORKSPACE file")
}
// findBzlFiles returns the paths to all .bzl files in the Bazel workspace.
func (h *Helper) findBzlFiles() ([]BazelFile, error) {
	var found []BazelFile
	// Walk the whole workspace and collect every regular file with a .bzl extension.
	collect := func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		if info.IsDir() || filepath.Ext(path) != ".bzl" {
			return nil
		}
		found = append(found, BazelFile{
			RelPath: path,
			AbsPath: filepath.Join(h.workspaceRoot, path),
			Type:    BazelFileTypeBzl,
		})
		return nil
	}
	if err := afero.Walk(h.fs, ".", collect); err != nil {
		return nil, err
	}
	return found, nil
}
// LoadFile reads a Bazel file from disk and parses it according to its type.
func (h *Helper) LoadFile(bf BazelFile) (*build.File, error) {
	contents, err := afero.ReadFile(h.fs, bf.RelPath)
	if err != nil {
		return nil, err
	}
	// Pick the parser matching the file type.
	var parse func(string, []byte) (*build.File, error)
	switch bf.Type {
	case BazelFileTypeBzl:
		parse = build.ParseBzl
	case BazelFileTypeWorkspace:
		parse = build.ParseWorkspace
	default:
		return nil, fmt.Errorf("unknown Bazel file type: %d", bf.Type)
	}
	return parse(bf.AbsPath, contents)
}
// WriteFile writes (updates) a Bazel file, formatting the in-memory
// representation with the buildtools canonical formatter.
func (h *Helper) WriteFile(bf BazelFile, buildfile *build.File) error {
	return afero.WriteFile(h.fs, bf.RelPath, build.Format(buildfile), 0o644)
}
// Diff returns a unified diff between the on-disk and the updated (in-memory)
// version of a Bazel file.
func (h *Helper) Diff(bf BazelFile, buildfile *build.File) (string, error) {
	onDisk, err := afero.ReadFile(h.fs, bf.RelPath)
	if err != nil {
		return "", err
	}
	before := string(onDisk)
	after := string(build.Format(buildfile))
	edits := myers.ComputeEdits(span.URIFromPath(bf.RelPath), before, after)
	unified := gotextdiff.ToUnified("a/"+bf.RelPath, "b/"+bf.RelPath, before, edits)
	return fmt.Sprint(unified), nil
}
// findWorkspaceRoot returns the path to the Bazel workspace root, which Bazel
// exposes via the BUILD_WORKSPACE_DIRECTORY environment variable under "bazel run".
func findWorkspaceRoot(lookupEnv LookupEnv) (string, error) {
	if root, ok := lookupEnv("BUILD_WORKSPACE_DIRECTORY"); ok {
		return root, nil
	}
	return "", fmt.Errorf("failed to find Bazel workspace root: not executed via \"bazel run\" and BUILD_WORKSPACE_DIRECTORY not set")
}
// BazelFile is a reference (path) to a Bazel file.
type BazelFile struct {
	RelPath string        // path relative to the workspace root
	AbsPath string        // absolute path on the host filesystem
	Type    BazelFileType // kind of Bazel file (bzl or WORKSPACE)
}
// BazelFileType is the type of a Bazel file.
type BazelFileType int
const (
BazelFileTypeBzl = iota // BazelFileTypeBzl is a .bzl file
BazelFileTypeWorkspace // BazelFileTypeWorkspace is a WORKSPACE or WORKSPACE.bazel file
)
// LookupEnv can be the real os.LookupEnv or a mock for testing.
// It returns the value of the named variable and whether it is set.
type LookupEnv func(key string) (string, bool)

View File

@ -0,0 +1,259 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package bazelfiles
import (
"testing"
"github.com/bazelbuild/buildtools/build"
"github.com/bazelbuild/buildtools/edit"
"github.com/spf13/afero"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
)
// TestMain verifies that the tests in this package leak no goroutines.
func TestMain(m *testing.M) {
	goleak.VerifyTestMain(m)
}
// TestFindFiles checks that FindFiles discovers the WORKSPACE(.bazel) file and
// all .bzl files in an in-memory workspace, and fails when no WORKSPACE exists.
func TestFindFiles(t *testing.T) {
	testCases := map[string]struct {
		files     []string    // files created in the fake workspace
		wantFiles []BazelFile // expected discovery result
		wantErr   bool
	}{
		"no WORKSPACE file": {
			files:     []string{},
			wantFiles: []BazelFile{},
			wantErr:   true,
		},
		"only WORKSPACE file": {
			files: []string{"WORKSPACE"},
			wantFiles: []BazelFile{
				{
					RelPath: "WORKSPACE",
					AbsPath: "/WORKSPACE",
					Type:    BazelFileTypeWorkspace,
				},
			},
		},
		"only WORKSPACE.bazel file": {
			files: []string{"WORKSPACE.bazel"},
			wantFiles: []BazelFile{
				{
					RelPath: "WORKSPACE.bazel",
					AbsPath: "/WORKSPACE.bazel",
					Type:    BazelFileTypeWorkspace,
				},
			},
		},
		// WORKSPACE.bazel takes precedence when both exist.
		"both WORKSPACE and WORKSPACE.bazel files": {
			files: []string{"WORKSPACE", "WORKSPACE.bazel"},
			wantFiles: []BazelFile{
				{
					RelPath: "WORKSPACE.bazel",
					AbsPath: "/WORKSPACE.bazel",
					Type:    BazelFileTypeWorkspace,
				},
			},
		},
		"only .bzl file": {
			files:   []string{"foo.bzl"},
			wantErr: true,
		},
		"all kinds": {
			files: []string{"WORKSPACE", "WORKSPACE.bazel", "foo.bzl", "bar.bzl", "unused.txt", "folder/baz.bzl"},
			wantFiles: []BazelFile{
				{
					RelPath: "WORKSPACE.bazel",
					AbsPath: "/WORKSPACE.bazel",
					Type:    BazelFileTypeWorkspace,
				},
				{
					RelPath: "foo.bzl",
					AbsPath: "/foo.bzl",
					Type:    BazelFileTypeBzl,
				},
				{
					RelPath: "bar.bzl",
					AbsPath: "/bar.bzl",
					Type:    BazelFileTypeBzl,
				},
				{
					RelPath: "folder/baz.bzl",
					AbsPath: "/folder/baz.bzl",
					Type:    BazelFileTypeBzl,
				},
			},
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			assert := assert.New(t)
			require := require.New(t)
			// Build an in-memory filesystem containing the test files.
			fs := afero.NewMemMapFs()
			for _, file := range tc.files {
				_, err := fs.Create(file)
				assert.NoError(err)
			}
			helper := Helper{
				fs:            fs,
				workspaceRoot: "/",
			}
			gotFiles, err := helper.FindFiles()
			if tc.wantErr {
				assert.Error(err)
				return
			}
			require.NoError(err)
			assert.ElementsMatch(tc.wantFiles, gotFiles)
		})
	}
}
// TestLoadFile checks that LoadFile parses bzl and WORKSPACE files and fails
// for missing files and unknown file types.
func TestLoadFile(t *testing.T) {
	testCases := map[string]struct {
		file     BazelFile
		contents string // file contents written before loading; empty means file absent
		wantErr  bool
	}{
		"file does not exist": {
			file: BazelFile{
				RelPath: "foo.bzl",
				AbsPath: "/foo.bzl",
				Type:    BazelFileTypeBzl,
			},
			wantErr: true,
		},
		"file has unknown type": {
			file: BazelFile{
				RelPath: "foo.txt",
				AbsPath: "/foo.txt",
				Type:    BazelFileType(999),
			},
			contents: "foo",
			wantErr:  true,
		},
		"file is a bzl file": {
			file: BazelFile{
				RelPath: "foo.bzl",
				AbsPath: "/foo.bzl",
				Type:    BazelFileTypeBzl,
			},
			contents: "load(\"bar.bzl\", \"bar\")",
		},
		"file is a workspace file": {
			file: BazelFile{
				RelPath: "WORKSPACE",
				AbsPath: "/WORKSPACE",
				Type:    BazelFileTypeWorkspace,
			},
			contents: "workspace(name = \"foo\")",
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			assert := assert.New(t)
			require := require.New(t)
			fs := afero.NewMemMapFs()
			if tc.contents != "" {
				err := afero.WriteFile(fs, tc.file.RelPath, []byte(tc.contents), 0o644)
				require.NoError(err)
			}
			helper := Helper{
				fs:            fs,
				workspaceRoot: "/",
			}
			_, err := helper.LoadFile(tc.file)
			if tc.wantErr {
				assert.Error(err)
				return
			}
			require.NoError(err)
		})
	}
}
// TestReadWriteFile checks that a file loaded with LoadFile can be written
// back under a different path with identical, formatted contents.
func TestReadWriteFile(t *testing.T) {
	assert := assert.New(t)
	require := require.New(t)
	fs := afero.NewMemMapFs()
	err := afero.WriteFile(fs, "a.bzl", []byte("load(\"bar.bzl\", \"bar\")\n"), 0o644)
	require.NoError(err)
	helper := Helper{
		fs:            fs,
		workspaceRoot: "/",
	}
	bf, err := helper.LoadFile(BazelFile{
		RelPath: "a.bzl",
		AbsPath: "/a.bzl",
		Type:    BazelFileTypeBzl,
	})
	require.NoError(err)
	// Write the parsed file out under a new name.
	err = helper.WriteFile(BazelFile{
		RelPath: "b.bzl",
		AbsPath: "/b.bzl",
		Type:    BazelFileTypeBzl,
	}, bf)
	require.NoError(err)
	_, err = fs.Stat("b.bzl")
	assert.NoError(err)
	contents, err := afero.ReadFile(fs, "b.bzl")
	assert.NoError(err)
	assert.Equal("load(\"bar.bzl\", \"bar\")\n", string(contents))
}
// TestDiff checks that Diff is empty for an unchanged file, shows a unified
// diff after an in-memory edit, and is empty again after writing the file.
func TestDiff(t *testing.T) {
	assert := assert.New(t)
	require := require.New(t)
	fs := afero.NewMemMapFs()
	err := afero.WriteFile(fs, "WORKSPACE.bazel", []byte(""), 0o644)
	require.NoError(err)
	helper := Helper{
		fs:            fs,
		workspaceRoot: "/",
	}
	fileRef := BazelFile{
		RelPath: "WORKSPACE.bazel",
		AbsPath: "/WORKSPACE.bazel",
		Type:    BazelFileTypeWorkspace,
	}
	bf, err := helper.LoadFile(fileRef)
	require.NoError(err)
	// Unchanged file: no diff.
	diff, err := helper.Diff(fileRef, bf)
	require.NoError(err)
	assert.Empty(diff)
	// Append a workspace(name = "foo") statement in memory.
	bf.Stmt = edit.InsertAtEnd(
		bf.Stmt,
		&build.CallExpr{
			X: &build.Ident{Name: "workspace"},
			List: []build.Expr{
				&build.AssignExpr{
					LHS: &build.Ident{Name: "name"},
					Op:  "=",
					RHS: &build.StringExpr{Value: "foo"},
				},
			},
		},
	)
	diff, err = helper.Diff(fileRef, bf)
	require.NoError(err)
	assert.Equal("--- a/WORKSPACE.bazel\n+++ b/WORKSPACE.bazel\n@@ -1 +1 @@\n+workspace(name = \"foo\")\n", diff)
	// After saving, disk and memory agree again: no diff.
	err = helper.WriteFile(fileRef, bf)
	require.NoError(err)
	contents, err := afero.ReadFile(fs, "WORKSPACE.bazel")
	assert.NoError(err)
	assert.Equal("workspace(name = \"foo\")\n", string(contents))
	diff, err = helper.Diff(fileRef, bf)
	require.NoError(err)
	assert.Empty(diff)
}

View File

@ -0,0 +1,19 @@
# Build targets for the issues package of the bazel-deps-mirror tool.
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")

go_library(
    name = "issues",
    srcs = ["issues.go"],
    importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues",
    visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
)

go_test(
    name = "issues_test",
    srcs = ["issues_test.go"],
    embed = [":issues"],
    deps = [
        "@com_github_stretchr_testify//assert",
        "@org_uber_go_goleak//:goleak",
    ],
)

View File

@ -0,0 +1,88 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// package issues can store and report issues found during the bazel-deps-mirror process.
package issues
import (
"fmt"
"io"
"sort"
)
// Map is a map of issues arranged by path => rulename => issues.
type Map map[string]map[string][]error
// New creates a new Map.
func New() Map {
return make(map[string]map[string][]error)
}
// Set sets all issues for a file.
func (m Map) Set(file string, issues ByFile) {
m[file] = issues
}
// Report prints all issues to a writer in a human-readable format.
func (m Map) Report(w io.Writer) {
files := make([]string, 0, len(m))
for f := range m {
files = append(files, f)
}
sort.Strings(files)
for _, file := range files {
rules := make([]string, 0, len(m[file]))
for r := range m[file] {
rules = append(rules, r)
}
sort.Strings(rules)
fmt.Fprintf(w, "File %s (%d issues total):\n", file, m.IssuesPerFile(file))
for _, rule := range rules {
ruleIssues := m[file][rule]
if len(ruleIssues) == 0 {
continue
}
fmt.Fprintf(w, " Rule %s (%d issues total):\n", rule, m.IssuesPerRule(file, rule))
for _, issue := range ruleIssues {
fmt.Fprintf(w, " %s\n", issue)
}
}
}
}
// FileHasIssues returns true if the file has any issues.
func (m Map) FileHasIssues(file string) bool {
return m[file] != nil
}
// IssuesPerFile returns the number of issues for a file.
func (m Map) IssuesPerFile(file string) int {
sum := 0
for _, ruleIssues := range m[file] {
sum += len(ruleIssues)
}
return sum
}
// IssuesPerRule returns the number of issues for a rule.
func (m Map) IssuesPerRule(file string, rule string) int {
return len(m[file][rule])
}
// ByFile is a map of issues belonging to one file arranged by rulename => issues.
type ByFile map[string][]error
// NewByFile creates a new ByFile.
func NewByFile() ByFile {
return make(map[string][]error)
}
// Add adds one or more issues belonging to a rule.
func (m ByFile) Add(rule string, issues ...error) {
m[rule] = append(m[rule], issues...)
}

View File

@ -0,0 +1,46 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package issues
import (
"bytes"
"errors"
"testing"
"github.com/stretchr/testify/assert"
"go.uber.org/goleak"
)
// TestMain verifies that the tests in this package leak no goroutines.
func TestMain(m *testing.M) {
	goleak.VerifyTestMain(m)
}
// TestMap checks issue counting, the FileHasIssues predicate, and the
// human-readable report format of an issue Map.
func TestMap(t *testing.T) {
	assert := assert.New(t)
	m := New()
	assert.Equal(0, len(m))
	assert.False(m.FileHasIssues("file1"))
	m.Set("file1", map[string][]error{
		"rule1": {errors.New("r1_issue1"), errors.New("r1_issue2")},
		"rule2": {errors.New("r2_issue1")},
	})
	assert.Equal(3, m.IssuesPerFile("file1"))
	assert.True(m.FileHasIssues("file1"))
	// let report write to a buffer
	b := new(bytes.Buffer)
	m.Report(b)
	rep := b.String()
	assert.Equal(rep, `File file1 (3 issues total):
 Rule rule1 (2 issues total):
 r1_issue1
 r1_issue2
 Rule rule2 (1 issues total):
 r2_issue1
`)
}

View File

@ -0,0 +1,30 @@
# Build targets for the mirror package of the bazel-deps-mirror tool.
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")

go_library(
    name = "mirror",
    srcs = ["mirror.go"],
    importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror",
    visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
    deps = [
        "//internal/logger",
        "@com_github_aws_aws_sdk_go_v2_config//:config",
        "@com_github_aws_aws_sdk_go_v2_feature_s3_manager//:manager",
        "@com_github_aws_aws_sdk_go_v2_service_s3//:s3",
        "@com_github_aws_aws_sdk_go_v2_service_s3//types",
    ],
)

go_test(
    name = "mirror_test",
    srcs = ["mirror_test.go"],
    embed = [":mirror"],
    deps = [
        "//internal/logger",
        "@com_github_aws_aws_sdk_go_v2_feature_s3_manager//:manager",
        "@com_github_aws_aws_sdk_go_v2_service_s3//:s3",
        "@com_github_aws_aws_sdk_go_v2_service_s3//types",
        "@com_github_stretchr_testify//assert",
        "@org_uber_go_goleak//:goleak",
    ],
)

View File

@ -0,0 +1,270 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// package mirror is used upload and download Bazel dependencies to and from a mirror.
package mirror
import (
"context"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"path"
awsconfig "github.com/aws/aws-sdk-go-v2/config"
s3manager "github.com/aws/aws-sdk-go-v2/feature/s3/manager"
"github.com/aws/aws-sdk-go-v2/service/s3"
s3types "github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/edgelesssys/constellation/v2/internal/logger"
)
// Maintainer can upload and download files to and from a CAS mirror.
type Maintainer struct {
	objectStorageClient objectStorageClient // authenticated S3 access for reading object attributes
	uploadClient        uploadClient        // authenticated S3 access for uploading objects
	httpClient          httpClient          // plain HTTP access for the public endpoint and upstream downloads
	// bucket is the name of the S3 bucket to use.
	bucket string
	// mirrorBaseURL is the base URL of the public CAS http endpoint.
	mirrorBaseURL string
	unauthenticated bool // if set, only the public HTTP endpoint is used; uploads fail
	dryRun          bool // if set, no objects are uploaded
	log             *logger.Logger
}
// NewUnauthenticated creates a new Maintainer that does not require
// authentication and can only download (not upload) files from a CAS mirror.
func NewUnauthenticated(mirrorBaseURL string, dryRun bool, log *logger.Logger) *Maintainer {
	return &Maintainer{
		httpClient:      http.DefaultClient,
		mirrorBaseURL:   mirrorBaseURL,
		unauthenticated: true,
		dryRun:          dryRun,
		log:             log,
	}
}
// New creates a new Maintainer that can upload and download files to and from
// a CAS mirror, using the default AWS credential chain for authentication.
func New(ctx context.Context, region, bucket, mirrorBaseURL string, dryRun bool, log *logger.Logger) (*Maintainer, error) {
	cfg, err := awsconfig.LoadDefaultConfig(ctx, awsconfig.WithRegion(region))
	if err != nil {
		return nil, err
	}
	s3Client := s3.NewFromConfig(cfg)
	maintainer := &Maintainer{
		objectStorageClient: s3Client,
		uploadClient:        s3manager.NewUploader(s3Client),
		httpClient:          http.DefaultClient,
		bucket:              bucket,
		mirrorBaseURL:       mirrorBaseURL,
		dryRun:              dryRun,
		log:                 log,
	}
	return maintainer, nil
}
// MirrorURL returns the public URL of a file in the CAS mirror, given its
// hex-encoded SHA-256 hash.
func (m *Maintainer) MirrorURL(hash string) (string, error) {
	// Reject anything that is not valid hex before building a URL from it.
	if _, err := hex.DecodeString(hash); err != nil {
		return "", fmt.Errorf("invalid hash %q: %w", hash, err)
	}
	base, err := url.Parse(m.mirrorBaseURL)
	if err != nil {
		return "", err
	}
	base.Path = path.Join(base.Path, keyBase, hash)
	return base.String(), nil
}
// Mirror downloads a file from one of the existing (non-mirror) urls and uploads it to the CAS mirror.
// It also calculates the hash of the file during streaming and checks if it matches the expected hash.
// Urls are tried in order; download and upload failures move on to the next url,
// while a hash mismatch aborts immediately.
func (m *Maintainer) Mirror(ctx context.Context, hash string, urls []string) error {
	if m.unauthenticated {
		return errors.New("cannot upload in unauthenticated mode")
	}
	for _, url := range urls {
		m.log.Debugf("Mirroring file with hash %v from %q", hash, url)
		// Per-url work lives in a helper so each response body is closed as
		// soon as the attempt finishes (a defer in this loop would keep every
		// body open until Mirror returns).
		retry, err := m.mirrorFromURL(ctx, hash, url)
		if err == nil {
			return nil
		}
		if !retry {
			return err
		}
	}
	return fmt.Errorf("failed to download / reupload file with hash %v from any of the urls: %v", hash, urls)
}

// mirrorFromURL streams a single upstream url to the mirror and verifies the
// hash of the streamed bytes. It reports whether the caller should retry with
// the next url (download/upload failures) or abort (hash mismatch or URL errors).
func (m *Maintainer) mirrorFromURL(ctx context.Context, hash, url string) (retry bool, retErr error) {
	body, err := m.downloadFromUpstream(ctx, url)
	if err != nil {
		m.log.Debugf("Failed to download file from %q: %v", url, err)
		return true, err
	}
	defer body.Close()
	// Hash the bytes as they are streamed to the mirror.
	streamedHash := sha256.New()
	tee := io.TeeReader(body, streamedHash)
	if err := m.put(ctx, hash, tee); err != nil {
		m.log.Warnf("Failed to stream file from upstream %q to mirror: %v.. Trying next url.", url, err)
		return true, err
	}
	actualHash := hex.EncodeToString(streamedHash.Sum(nil))
	if actualHash != hash {
		return false, fmt.Errorf("hash mismatch while streaming file to mirror: expected %v, got %v", hash, actualHash)
	}
	pubURL, err := m.MirrorURL(hash)
	if err != nil {
		return false, err
	}
	m.log.Debugf("File uploaded successfully to mirror from %q as %q", url, pubURL)
	return false, nil
}
// Check checks if a file is present and has the correct hash in the CAS mirror.
// It dispatches to the public HTTP endpoint or the authenticated S3 API,
// depending on how the Maintainer was constructed.
func (m *Maintainer) Check(ctx context.Context, expectedHash string) error {
	m.log.Debugf("Checking consistency of object with hash %v", expectedHash)
	if m.unauthenticated {
		return m.checkUnauthenticated(ctx, expectedHash)
	}
	return m.checkAuthenticated(ctx, expectedHash)
}
// checkAuthenticated checks if a file is present and has the correct hash in the CAS mirror.
// It uses the authenticated CAS s3 endpoint to download the file metadata.
func (m *Maintainer) checkAuthenticated(ctx context.Context, expectedHash string) error {
	key := path.Join(keyBase, expectedHash)
	m.log.Debugf("Check: s3 getObjectAttributes {Bucket: %v, Key: %v}", m.bucket, key)
	attributes, err := m.objectStorageClient.GetObjectAttributes(ctx, &s3.GetObjectAttributesInput{
		Bucket:           &m.bucket,
		Key:              &key,
		ObjectAttributes: []s3types.ObjectAttributes{s3types.ObjectAttributesChecksum, s3types.ObjectAttributesObjectParts},
	})
	if err != nil {
		return err
	}
	hasChecksum := attributes.Checksum != nil && attributes.Checksum.ChecksumSHA256 != nil && len(*attributes.Checksum.ChecksumSHA256) > 0
	isSinglePart := attributes.ObjectParts == nil || attributes.ObjectParts.TotalPartsCount == 1
	if !hasChecksum || !isSinglePart {
		// checksums are not guaranteed to be present
		// and if present, they are only meaningful for single part objects
		// fallback if checksum cannot be verified from attributes
		m.log.Debugf("S3 object attributes cannot be used to verify key %v. Falling back to download.", key)
		return m.checkUnauthenticated(ctx, expectedHash)
	}
	// S3 reports the SHA-256 checksum base64-encoded.
	actualHash, err := base64.StdEncoding.DecodeString(*attributes.Checksum.ChecksumSHA256)
	if err != nil {
		return err
	}
	return compareHashes(expectedHash, actualHash)
}
// checkUnauthenticated checks if a file is present and has the correct hash in the CAS mirror.
// It uses the public CAS http endpoint to download the file and hashes the full body.
func (m *Maintainer) checkUnauthenticated(ctx context.Context, expectedHash string) error {
	pubURL, err := m.MirrorURL(expectedHash)
	if err != nil {
		return err
	}
	m.log.Debugf("Check: http get {Url: %v}", pubURL)
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, pubURL, http.NoBody)
	if err != nil {
		return err
	}
	resp, err := m.httpClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status code %v", resp.StatusCode)
	}
	actualHash := sha256.New()
	if _, err := io.Copy(actualHash, resp.Body); err != nil {
		return err
	}
	return compareHashes(expectedHash, actualHash.Sum(nil))
}
// put uploads a file to the CAS mirror under the key derived from its hash.
// In dry-run mode it only logs what would be uploaded.
func (m *Maintainer) put(ctx context.Context, hash string, data io.Reader) error {
	if m.unauthenticated {
		return errors.New("cannot upload in unauthenticated mode")
	}
	key := path.Join(keyBase, hash)
	if m.dryRun {
		m.log.Debugf("DryRun: s3 put object {Bucket: %v, Key: %v}", m.bucket, key)
		return nil
	}
	m.log.Debugf("Uploading object with hash %v to s3://%v/%v", hash, m.bucket, key)
	input := &s3.PutObjectInput{
		Bucket:            &m.bucket,
		Key:               &key,
		Body:              data,
		ChecksumAlgorithm: s3types.ChecksumAlgorithmSha256,
	}
	_, err := m.uploadClient.Upload(ctx, input)
	return err
}
// downloadFromUpstream downloads a file from one of the existing (non-mirror)
// urls and returns its body. The caller must close the returned ReadCloser.
func (m *Maintainer) downloadFromUpstream(ctx context.Context, url string) (io.ReadCloser, error) {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, http.NoBody)
	if err != nil {
		return nil, err
	}
	resp, err := m.httpClient.Do(req)
	if err != nil {
		return nil, err
	}
	if resp.StatusCode != http.StatusOK {
		// The body is only leaked to the caller on success.
		resp.Body.Close()
		return nil, fmt.Errorf("unexpected status code %v", resp.StatusCode)
	}
	return resp.Body, nil
}
func compareHashes(expectedHash string, actualHash []byte) error {
if len(actualHash) != sha256.Size {
return fmt.Errorf("actual hash should to be %v bytes, got %v", sha256.Size, len(actualHash))
}
if len(expectedHash) != hex.EncodedLen(sha256.Size) {
return fmt.Errorf("expected hash should be %v bytes, got %v", hex.EncodedLen(sha256.Size), len(expectedHash))
}
actualHashStr := hex.EncodeToString(actualHash)
if expectedHash != actualHashStr {
return fmt.Errorf("expected hash %v, mirror returned %v", expectedHash, actualHashStr)
}
return nil
}
// objectStorageClient is the subset of the AWS S3 API used to read object
// attributes (checksums, parts) from the mirror bucket.
type objectStorageClient interface {
	GetObjectAttributes(ctx context.Context, params *s3.GetObjectAttributesInput, optFns ...func(*s3.Options)) (*s3.GetObjectAttributesOutput, error)
}

// uploadClient is the subset of the AWS S3 upload manager API used to write
// objects to the mirror bucket.
type uploadClient interface {
	Upload(ctx context.Context, input *s3.PutObjectInput, opts ...func(*s3manager.Uploader)) (*s3manager.UploadOutput, error)
}

// httpClient is the http client interface used to talk to upstream servers
// and the public mirror endpoint.
type httpClient interface {
	Get(url string) (*http.Response, error)
	Do(req *http.Request) (*http.Response, error)
}

const (
	// DryRun is a flag to enable dry run mode.
	DryRun = true
	// Run is a flag to perform actual operations.
	Run = false
	// keyBase is the key prefix under which all mirrored objects are stored.
	keyBase = "constellation/cas/sha256"
)

View File

@ -0,0 +1,285 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package mirror
import (
"bytes"
"context"
"io"
"log"
"net/http"
"testing"
s3manager "github.com/aws/aws-sdk-go-v2/feature/s3/manager"
"github.com/aws/aws-sdk-go-v2/service/s3"
"github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/edgelesssys/constellation/v2/internal/logger"
"github.com/stretchr/testify/assert"
"go.uber.org/goleak"
)
// TestMain runs the package tests and verifies that no goroutines are leaked.
func TestMain(m *testing.M) {
	goleak.VerifyTestMain(m)
}
func TestMirrorURL(t *testing.T) {
testCases := map[string]struct {
hash string
wantURL string
wantErr bool
}{
"empty hash": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
wantURL: "https://example.com/constellation/cas/sha256/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
},
"other hash": {
hash: "0000000000000000000000000000000000000000000000000000000000000000",
wantURL: "https://example.com/constellation/cas/sha256/0000000000000000000000000000000000000000000000000000000000000000",
},
"invalid hash": {
hash: "\x00",
wantErr: true,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
m := Maintainer{
mirrorBaseURL: "https://example.com/",
}
url, err := m.MirrorURL(tc.hash)
if tc.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
assert.Equal(t, tc.wantURL, url)
})
}
}
func TestMirror(t *testing.T) {
testCases := map[string]struct {
unauthenticated bool
hash string
data []byte
upstreamURL string
statusCode int
failUpload bool
wantErr bool
}{
"cannot upload in unauthenticated mode": {
unauthenticated: true,
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
wantErr: true,
},
"http error": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusNotFound,
wantErr: true,
},
"hash mismatch": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte("evil"),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
wantErr: true,
},
"upload error": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
failUpload: true,
wantErr: true,
},
"success": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
},
"success with different hash": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
data: []byte("foo"),
upstreamURL: "https://example.com/foo",
statusCode: http.StatusOK,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
m := Maintainer{
httpClient: &http.Client{
Transport: &stubUpstream{
statusCode: tc.statusCode,
body: tc.data,
},
},
uploadClient: &stubUploadClient{
uploadErr: func() error {
if tc.failUpload {
return assert.AnError
}
return nil
}(),
},
unauthenticated: tc.unauthenticated,
log: logger.NewTest(t),
}
err := m.Mirror(context.Background(), tc.hash, []string{tc.upstreamURL})
if tc.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
// TestCheck exercises Check in both unauthenticated mode (public http GET and
// hash comparison) and authenticated mode (S3 GetObjectAttributes checksum),
// including the fallback from authenticated to unauthenticated checking for
// multipart objects.
func TestCheck(t *testing.T) {
	testCases := map[string]struct {
		hash                      string
		unauthenticatedResponse   []byte
		unauthenticatedStatusCode int
		authenticatedResponse     *s3.GetObjectAttributesOutput
		authenticatedErr          error
		wantErr                   bool
	}{
		"unauthenticated mode, http error": {
			hash:                      "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
			unauthenticatedResponse:   []byte("foo"), // ignored
			unauthenticatedStatusCode: http.StatusNotFound,
			wantErr:                   true,
		},
		"unauthenticated mode, hash mismatch": {
			hash:                      "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
			unauthenticatedResponse:   []byte("foo"),
			unauthenticatedStatusCode: http.StatusOK,
			wantErr:                   true,
		},
		"unauthenticated mode, success": {
			hash:                      "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
			unauthenticatedResponse:   []byte("foo"),
			unauthenticatedStatusCode: http.StatusOK,
		},
		"authenticated mode, get attributes fails": {
			hash:             "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
			authenticatedErr: assert.AnError,
			wantErr:          true,
		},
		"authenticated mode, hash mismatch": {
			hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
			authenticatedResponse: &s3.GetObjectAttributesOutput{
				Checksum: &types.Checksum{
					// base64-encoded sha256 that does not match the expected hash above.
					ChecksumSHA256: toPtr("tcH7Lvxta0Z0wv3MSM4BtDo7fAN2PAwzVd4Ame4PjHM="),
				},
				ObjectParts: &types.GetObjectAttributesParts{
					TotalPartsCount: 1,
				},
			},
			wantErr: true,
		},
		"authenticated mode, success": {
			hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
			authenticatedResponse: &s3.GetObjectAttributesOutput{
				Checksum: &types.Checksum{
					// base64 encoding of the raw bytes of the expected hex hash above.
					ChecksumSHA256: toPtr("LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564="),
				},
				ObjectParts: &types.GetObjectAttributesParts{
					TotalPartsCount: 1,
				},
			},
		},
		"authenticated mode, fallback to unauthenticated": {
			hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
			// NOTE(review): more than one object part means the S3 checksum is not a
			// plain sha256, so Check presumably falls back to the public endpoint.
			authenticatedResponse: &s3.GetObjectAttributesOutput{
				ObjectParts: &types.GetObjectAttributesParts{
					TotalPartsCount: 2,
				},
			},
			unauthenticatedResponse:   []byte("foo"),
			unauthenticatedStatusCode: http.StatusOK,
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			m := Maintainer{
				unauthenticated: (tc.authenticatedResponse == nil),
				httpClient: &http.Client{
					Transport: &stubUpstream{
						statusCode: tc.unauthenticatedStatusCode,
						body:       tc.unauthenticatedResponse,
					},
				},
				objectStorageClient: &stubObjectStorageClient{
					response: tc.authenticatedResponse,
					err:      tc.authenticatedErr,
				},
				log: logger.NewTest(t),
			}
			err := m.Check(context.Background(), tc.hash)
			if tc.wantErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
// stubUpstream implements http.RoundTripper and returns a canned response.
type stubUpstream struct {
statusCode int
body []byte
}
func (s *stubUpstream) RoundTrip(req *http.Request) (*http.Response, error) {
log.Printf("stubUpstream: %s %s -> %q\n", req.Method, req.URL, string(s.body))
return &http.Response{
StatusCode: s.statusCode,
Body: io.NopCloser(bytes.NewReader(s.body)),
}, nil
}
// stubUploadClient records uploaded object data and returns a configurable error.
type stubUploadClient struct {
	uploadErr    error
	uploadedData []byte
}

// Upload consumes the input body into uploadedData and returns the configured
// error. It panics if the body cannot be read, since that indicates a broken
// test setup rather than a behavior under test.
func (s *stubUploadClient) Upload(
	_ context.Context, input *s3.PutObjectInput,
	_ ...func(*s3manager.Uploader),
) (*s3manager.UploadOutput, error) {
	data, err := io.ReadAll(input.Body)
	if err != nil {
		panic(err)
	}
	s.uploadedData = data
	return nil, s.uploadErr
}
// toPtr returns a pointer to a copy of the given value.
func toPtr[T any](v T) *T {
	out := v
	return &out
}
// stubObjectStorageClient returns canned GetObjectAttributes responses.
type stubObjectStorageClient struct {
	response *s3.GetObjectAttributesOutput
	err      error
}

// GetObjectAttributes returns the configured response and error, ignoring all inputs.
func (s *stubObjectStorageClient) GetObjectAttributes(
	_ context.Context, _ *s3.GetObjectAttributesInput, _ ...func(*s3.Options),
) (*s3.GetObjectAttributesOutput, error) {
	return s.response, s.err
}

View File

@ -0,0 +1,25 @@
# Build targets for the rules package of the bazel-deps-mirror tool.
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")

go_library(
    name = "rules",
    srcs = ["rules.go"],
    importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules",
    visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
    deps = [
        "@com_github_bazelbuild_buildtools//build",
        "@org_golang_x_exp//slices",
    ],
)

go_test(
    name = "rules_test",
    srcs = ["rules_test.go"],
    embed = [":rules"],
    deps = [
        "@com_github_bazelbuild_buildtools//build",
        "@com_github_stretchr_testify//assert",
        "@com_github_stretchr_testify//require",
        "@org_uber_go_goleak//:goleak",
    ],
)

View File

@ -0,0 +1,297 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// package rules is used find and modify Bazel rules in WORKSPACE and bzl files.
package rules
import (
"errors"
"fmt"
"sort"
"strings"
"github.com/bazelbuild/buildtools/build"
"golang.org/x/exp/slices"
)
// Rules is used to find and modify Bazel rules of a set of rule kinds in WORKSPACE and .bzl files.
// Filter is a list of rule kinds to consider.
// If filter is empty, all rules are considered.
// Rules is used to find and modify Bazel rules of a set of rule kinds in WORKSPACE and .bzl files.
// Filter is a list of rule kinds to consider.
// If filter is empty, all rules are considered.
func Rules(file *build.File, filter []string) (rules []*build.Rule) {
	allRules := file.Rules("")
	if len(filter) == 0 {
		return allRules
	}
	for _, rule := range allRules {
		if slices.Contains(filter, rule.Kind()) {
			rules = append(rules, rule)
		}
	}
	return rules
}
// ValidatePinned checks if the given rule is a pinned dependency rule.
// That is, if it has a name, either a url or urls attribute, and a sha256 attribute.
func ValidatePinned(rule *build.Rule) (validationErrs []error) {
	addErr := func(msg string) {
		validationErrs = append(validationErrs, errors.New(msg))
	}

	if rule.Name() == "" {
		addErr("rule has no name")
	}

	hasURL := rule.Attr("url") != nil
	hasURLs := rule.Attr("urls") != nil
	switch {
	case !hasURL && !hasURLs:
		addErr("rule has no url or urls attribute")
	case hasURL && hasURLs:
		addErr("rule has both url and urls attribute")
	}

	if hasURL && rule.AttrString("url") == "" {
		addErr("rule has empty url attribute")
	}
	if hasURLs {
		urls := rule.AttrStrings("urls")
		if len(urls) == 0 {
			addErr("rule has empty urls list attribute")
		}
		// An empty string inside a non-empty list is reported per occurrence.
		for _, url := range urls {
			if url == "" {
				addErr("rule has empty url in urls attribute")
			}
		}
	}

	switch {
	case rule.Attr("sha256") == nil:
		addErr("rule has no sha256 attribute")
	case rule.AttrString("sha256") == "":
		addErr("rule has empty sha256 attribute")
	}
	return validationErrs
}
// Check checks if a dependency rule is normalized and contains a mirror url.
// All errors reported by this function can be fixed by calling AddURLs and Normalize.
func Check(rule *build.Rule) (validationErrs []error) {
	if rule.Attr("url") != nil {
		validationErrs = append(validationErrs, errors.New("rule has url (singular) attribute"))
	}

	urls := rule.AttrStrings("urls")
	sorted := append([]string{}, urls...)
	sortURLs(sorted)
	if !slices.Equal(urls, sorted) {
		validationErrs = append(validationErrs, errors.New("rule has unsorted urls attributes"))
	}

	if !HasMirrorURL(rule) {
		validationErrs = append(validationErrs, errors.New("rule is not mirrored"))
	}
	if rule.Kind() == "http_archive" && rule.Attr("type") == nil {
		validationErrs = append(validationErrs, errors.New("http_archive rule has no type attribute"))
	}
	// rpm rules must have exactly one url (the edgeless mirror).
	if rule.Kind() == "rpm" && len(urls) != 1 {
		validationErrs = append(validationErrs, errors.New("rpm rule has unstable urls that are not the edgeless mirror"))
	}
	return validationErrs
}
// Normalize normalizes a rule and returns true if the rule was changed.
// Normalization adds a type attribute to http_archive rules (when derivable),
// restricts rpm rules to their mirror url, and sorts and deduplicates urls.
func Normalize(rule *build.Rule) (changed bool) {
	changed = addTypeAttribute(rule)
	urls := GetURLs(rule)
	normalizedURLS := append([]string{}, urls...)
	// rpm rules must have exactly one url (the edgeless mirror)
	if mirrorU, err := mirrorURL(rule); rule.Kind() == "rpm" && err == nil {
		normalizedURLS = []string{mirrorU}
	}
	sortURLs(normalizedURLS)
	normalizedURLS = deduplicateURLs(normalizedURLS)
	// If the url list is already normalized and there is no singular url
	// attribute to fold in, keep whatever addTypeAttribute reported.
	if slices.Equal(urls, normalizedURLS) && rule.Attr("url") == nil {
		return
	}
	setURLs(rule, normalizedURLS)
	changed = true
	return
}
// AddURLs merges the given urls into the rule's url list,
// sorting and deduplicating the combined result.
func AddURLs(rule *build.Rule, urls []string) {
	combined := append(GetURLs(rule), urls...)
	sortURLs(combined)
	setURLs(rule, deduplicateURLs(combined))
}
// GetHash returns the sha256 hash of a rule.
func GetHash(rule *build.Rule) (string, error) {
	if hash := rule.AttrString("sha256"); hash != "" {
		return hash, nil
	}
	return "", fmt.Errorf("rule %s has empty or missing sha256 attribute", rule.Name())
}
// GetURLs returns the urls of a rule: the "urls" list attribute,
// followed by the singular "url" attribute if it is set.
func GetURLs(rule *build.Rule) []string {
	urls := rule.AttrStrings("urls")
	if u := rule.AttrString("url"); u != "" {
		urls = append(urls, u)
	}
	return urls
}
// HasMirrorURL returns true if the rule has a url from the Edgeless mirror.
func HasMirrorURL(rule *build.Rule) bool {
	if _, err := mirrorURL(rule); err != nil {
		return false
	}
	return true
}
func deduplicateURLs(urls []string) (deduplicated []string) {
seen := make(map[string]bool)
for _, url := range urls {
if !seen[url] {
deduplicated = append(deduplicated, url)
seen[url] = true
}
}
return
}
// addTypeAttribute adds the type attribute to http_archive rules if it is
// missing, deriving it from the file extension of one of the rule's urls.
// It returns true if the rule was changed, and false if the rule is not an
// http_archive, already has a type, or no url has a known archive suffix.
// (The previous doc comment claimed an error return; this function only
// returns a bool.)
func addTypeAttribute(rule *build.Rule) bool {
	// only http_archive rules have a type attribute
	if rule.Kind() != "http_archive" {
		return false
	}
	if rule.Attr("type") != nil {
		return false
	}
	// Known archive suffixes mapped to their Bazel archive type, ordered so
	// that longer suffixes (".tar.gz") are tried before shorter ones (".tar").
	suffixToType := []struct{ suffix, typ string }{
		{".aar", "aar"},
		{".ar", "ar"},
		{".deb", "deb"},
		{".jar", "jar"},
		{".tar.bz2", "tar.bz2"},
		{".tar.gz", "tar.gz"},
		{".tar.xz", "tar.xz"},
		{".tar.zst", "tar.zst"},
		{".tar", "tar"},
		{".tgz", "tgz"},
		{".txz", "txz"},
		{".tzst", "tzst"},
		{".war", "war"},
		{".zip", "zip"},
	}
	// Use the first url that carries a known archive suffix.
	for _, url := range GetURLs(rule) {
		for _, st := range suffixToType {
			if strings.HasSuffix(url, st.suffix) {
				rule.SetAttr("type", &build.StringExpr{Value: st.typ})
				return true
			}
		}
	}
	// Not enough information to derive the archive type.
	return false
}
// mirrorURL returns the first url of the rule that points at the Edgeless mirror.
func mirrorURL(rule *build.Rule) (string, error) {
	for _, url := range GetURLs(rule) {
		if strings.HasPrefix(url, edgelessMirrorPrefix) {
			return url, nil
		}
	}
	return "", fmt.Errorf("rule %s has no mirror url", rule.Name())
}
// setURLs replaces the rule's url attributes with the given url list.
func setURLs(rule *build.Rule, urls []string) {
	// delete single url attribute if it exists
	rule.DelAttr("url")
	exprs := make([]build.Expr, 0, len(urls))
	for _, url := range urls {
		exprs = append(exprs, &build.StringExpr{Value: url})
	}
	rule.SetAttr("urls", &build.ListExpr{List: exprs, ForceMultiLine: true})
}
// sortURLs sorts urls in place: Bazel mirror urls first, Edgeless mirror urls
// second, all other urls last. Urls of the same rank are sorted alphabetically.
func sortURLs(urls []string) {
	rank := func(url string) int {
		switch {
		case strings.HasPrefix(url, bazelMirrorPrefix):
			return 0
		case strings.HasPrefix(url, edgelessMirrorPrefix):
			return 1
		default:
			return 2
		}
	}
	sort.Slice(urls, func(i, j int) bool {
		ri, rj := rank(urls[i]), rank(urls[j])
		if ri != rj {
			return ri < rj
		}
		return urls[i] < urls[j]
	})
}
// SupportedRules is a list of all rules that can be mirrored.
var SupportedRules = []string{
	"http_archive",
	"http_file",
	"rpm",
}

const (
	// bazelMirrorPrefix identifies urls served by the public Bazel mirror.
	bazelMirrorPrefix = "https://mirror.bazel.build/"
	// edgelessMirrorPrefix identifies urls served by the Edgeless Systems CAS mirror.
	edgelessMirrorPrefix = "https://cdn.confidential.cloud/constellation/cas/sha256/"
)

View File

@ -0,0 +1,450 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package rules
import (
"testing"
"github.com/bazelbuild/buildtools/build"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
)
// TestMain runs the package tests and verifies that no goroutines are leaked.
func TestMain(m *testing.M) {
	goleak.VerifyTestMain(m)
}
// TestRules checks that Rules returns the expected rules from a parsed Bazel
// file, both with the SupportedRules filter and with no filter at all.
func TestRules(t *testing.T) {
	assert := assert.New(t)
	const bzlFileContents = `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
load("@bazeldnf//:deps.bzl", "rpm")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://example.com/foo.tar.gz"],
)

http_file(
	name = "bar_file",
	sha256 = "fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9",
	urls = ["https://example.com/bar"],
)

rpm(
	name = "baz_rpm",
	sha256 = "9e7ab438597fee20e16e8e441bed0ce966bd59e0fb993fa7c94be31fb1384d88",
	urls = ["https://example.com/baz.rpm"],
)

git_repository(
	name = "qux_git",
	remote = "https://example.com/qux.git",
	commit = "1234567890abcdef",
)
`
	bf, err := build.Parse("foo.bzl", []byte(bzlFileContents))
	if err != nil {
		t.Fatal(err)
	}

	// With the filter, only the mirrorable rule kinds are returned.
	rules := Rules(bf, SupportedRules)
	assert.Len(rules, 3)
	expectedNames := []string{"foo_archive", "bar_file", "baz_rpm"}
	for i, rule := range rules {
		assert.Equal(expectedNames[i], rule.Name())
	}

	// Without a filter, all rules are returned, including git_repository.
	allRules := Rules(bf, nil)
	assert.Len(allRules, 4)
	expectedNames = []string{"foo_archive", "bar_file", "baz_rpm", "qux_git"}
	for i, rule := range allRules {
		assert.Equal(expectedNames[i], rule.Name())
	}
}
// TestValidatePinned checks that ValidatePinned reports the expected number of
// issues for rules with valid, missing, duplicated, and empty attributes.
func TestValidatePinned(t *testing.T) {
	testCases := map[string]struct {
		rule               string
		expectedIssueCount int
	}{
		"no issues, singular url": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	url = "https://example.com/foo.tar.gz",
)
`,
			expectedIssueCount: 0,
		},
		"no issues, url list": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://example.com/foo.tar.gz"],
)
`,
			expectedIssueCount: 0,
		},
		"no issues, url list with multiple urls": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://example.com/foo.tar.gz", "https://example.com/foo2.tar.gz"],
)
`,
			expectedIssueCount: 0,
		},
		"missing name": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	url = "https://example.com/foo.tar.gz",
)
`,
			expectedIssueCount: 1,
		},
		"missing sha256 attribute": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	url = "https://example.com/foo.tar.gz",
)
`,
			expectedIssueCount: 1,
		},
		"missing url attribute": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
)
`,
			expectedIssueCount: 1,
		},
		"url and urls attribute given": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	url = "https://example.com/foo.tar.gz",
	urls = ["https://example.com/foo.tar.gz"],
)
`,
			expectedIssueCount: 1,
		},
		"empty url attribute": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	url = "",
)
`,
			expectedIssueCount: 1,
		},
		"empty urls attribute": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = [],
)
`,
			expectedIssueCount: 1,
		},
		"empty url in urls attribute": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = [""],
)
`,
			expectedIssueCount: 1,
		},
		"empty sha256 attribute": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "",
	url = "https://example.com/foo.tar.gz",
)
`,
			expectedIssueCount: 1,
		},
		"missing all attributes": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
)
`,
			expectedIssueCount: 2,
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			assert := assert.New(t)
			require := require.New(t)
			bf, err := build.Parse("foo.bzl", []byte(tc.rule))
			if err != nil {
				t.Fatal(err)
			}
			rules := Rules(bf, SupportedRules)
			require.Len(rules, 1)
			issues := ValidatePinned(rules[0])
			// A fully valid rule must produce a nil (not just empty) issue slice.
			if tc.expectedIssueCount == 0 {
				assert.Nil(issues)
				return
			}
			assert.Len(issues, tc.expectedIssueCount)
		})
	}
}
// TestCheckNormalize checks that Check flags non-normalized rules, and that
// Normalize fixes every reported issue except the ones marked cannotFix
// (a missing mirror url cannot be invented locally).
func TestCheckNormalize(t *testing.T) {
	testCases := map[string]struct {
		rule               string
		expectedIssueCount int
		cannotFix          bool
	}{
		"rule with single url": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	url = "https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	type = "tar.gz",
)
`,
			expectedIssueCount: 1,
		},
		"rule with unsorted urls": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = [
		"https://example.com/a/foo.tar.gz",
		"https://example.com/b/foo.tar.gz",
		"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
		"https://mirror.bazel.build/example.com/a/foo.tar.gz",
	],
	type = "tar.gz",
)
`,
			expectedIssueCount: 1,
		},
		"rule that is not mirrored": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://example.com/foo.tar.gz"],
	type = "tar.gz",
)
`,
			expectedIssueCount: 1,
			cannotFix:          true,
		},
		"http_archive with no type": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = [
		"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
		"https://example.com/foo.tar.gz",
	],
)
`,
			expectedIssueCount: 1,
		},
		"rpm rule with urls that are not the mirror": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

rpm(
	name = "foo_rpm",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = [
		"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
		"https://example.com/foo.rpm",
	],
)
`,
			expectedIssueCount: 1,
		},
		"http_archive rule that is correct": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"],
	type = "tar.gz",
)
`,
			expectedIssueCount: 0,
		},
		"rpm rule that is correct": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

rpm(
	name = "foo_rpm",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"],
)
`,
			expectedIssueCount: 0,
		},
		"http_file rule that is correct": {
			rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")

http_file(
	name = "foo_file",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
	urls = ["https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"],
)
`,
			expectedIssueCount: 0,
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			assert := assert.New(t)
			require := require.New(t)
			bf, err := build.Parse("foo.bzl", []byte(tc.rule))
			if err != nil {
				t.Fatal(err)
			}
			rules := Rules(bf, SupportedRules)
			require.Len(rules, 1)
			issues := Check(rules[0])
			if tc.expectedIssueCount == 0 {
				assert.Nil(issues)
				return
			}
			// NOTE(review): testify's assert.Equal takes (expected, actual);
			// the arguments here are swapped. Harmless for an equality check,
			// but the failure message would label the values backwards.
			assert.Equal(len(issues), tc.expectedIssueCount)

			// Normalize must report a change exactly when it can fix the issues.
			changed := Normalize(rules[0])
			if tc.expectedIssueCount > 0 && !tc.cannotFix {
				assert.True(changed)
			} else {
				assert.False(changed)
			}

			if tc.cannotFix {
				assert.NotNil(Check(rules[0]))
			} else {
				assert.Nil(Check(rules[0]))
			}
		})
	}
}
// TestAddURLs checks that AddURLs stores the given urls on a rule that
// previously had none.
func TestAddURLs(t *testing.T) {
	assert := assert.New(t)
	require := require.New(t)
	rule := `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
)
`
	bf, err := build.Parse("foo.bzl", []byte(rule))
	if err != nil {
		t.Fatal(err)
	}
	rules := Rules(bf, SupportedRules)
	require.Len(rules, 1)

	newURLs := []string{"https://example.com/a", "https://example.com/b"}
	AddURLs(rules[0], newURLs)
	assert.Equal([]string{"https://example.com/a", "https://example.com/b"}, GetURLs(rules[0]))
}
// TestGetHash checks that GetHash returns the sha256 attribute of a rule and
// errors when the attribute is missing.
func TestGetHash(t *testing.T) {
	assert := assert.New(t)
	require := require.New(t)
	rule := `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")

http_archive(
	name = "foo_archive",
	sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
)

http_archive(
	name = "bar_archive",
)
`
	bf, err := build.Parse("foo.bzl", []byte(rule))
	if err != nil {
		t.Fatal(err)
	}
	rules := Rules(bf, SupportedRules)
	require.Len(rules, 2)

	// First rule has a sha256 attribute.
	hash, err := GetHash(rules[0])
	assert.NoError(err)
	assert.Equal("2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae", hash)

	// Second rule has none and must error.
	_, err = GetHash(rules[1])
	assert.Error(err)
}

View File

@ -37,11 +37,19 @@ replace (
)
require (
github.com/aws/aws-sdk-go-v2/config v1.18.19
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59
github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0
github.com/bazelbuild/buildtools v0.0.0-20230317132445-9c3c1fc0106e
github.com/edgelesssys/constellation/v2 v2.6.0
github.com/go-git/go-git/v5 v5.5.2
github.com/hexops/gotextdiff v1.0.3
github.com/spf13/afero v1.9.5
github.com/spf13/cobra v1.6.1
github.com/stretchr/testify v1.8.2
go.uber.org/goleak v1.2.1
go.uber.org/zap v1.24.0
golang.org/x/exp v0.0.0-20220823124025-807a23277127
golang.org/x/mod v0.8.0
gopkg.in/square/go-jose.v2 v2.6.0
libvirt.org/go/libvirt v1.8010.0
@ -80,10 +88,8 @@ require (
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
github.com/aws/aws-sdk-go-v2 v1.17.7 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect
github.com/aws/aws-sdk-go-v2/config v1.18.19 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.13.18 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 // indirect
@ -94,7 +100,6 @@ require (
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.12.6 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.18.7 // indirect
@ -239,14 +244,13 @@ require (
github.com/sassoftware/relic v0.0.0-20210427151427-dfb082b79b74 // indirect
github.com/schollz/progressbar/v3 v3.13.1 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.5.0 // indirect
github.com/sergi/go-diff v1.2.0 // indirect
github.com/sergi/go-diff v1.3.1 // indirect
github.com/shopspring/decimal v1.3.1 // indirect
github.com/siderolabs/talos/pkg/machinery v1.3.2 // indirect
github.com/sigstore/rekor v1.0.1 // indirect
github.com/sigstore/sigstore v1.6.0 // indirect
github.com/sirupsen/logrus v1.9.0 // indirect
github.com/skeema/knownhosts v1.1.0 // indirect
github.com/spf13/afero v1.9.5 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/tent/canonical-json-go v0.0.0-20130607151641-96e4ba3a7613 // indirect
@ -265,7 +269,6 @@ require (
go.uber.org/atomic v1.10.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/crypto v0.6.0 // indirect
golang.org/x/exp v0.0.0-20220823124025-807a23277127 // indirect
golang.org/x/net v0.8.0 // indirect
golang.org/x/oauth2 v0.6.0 // indirect
golang.org/x/sync v0.1.0 // indirect

View File

@ -257,6 +257,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.18.7/go.mod h1:JuTnSoeePXmMVe9G8Ncjj
github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8=
github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/bazelbuild/buildtools v0.0.0-20230317132445-9c3c1fc0106e h1:XmPu4mXICgdGnC5dXGjUGbwUD/kUmS0l5Aop3LaevBM=
github.com/bazelbuild/buildtools v0.0.0-20230317132445-9c3c1fc0106e/go.mod h1:689QdV3hBP7Vo9dJMmzhoYIyo/9iMhEmHkJcnaPRCbo=
github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
@ -809,6 +811,8 @@ github.com/hashicorp/terraform-exec v0.17.3 h1:MX14Kvnka/oWGmIkyuyvL6POx25ZmKrjl
github.com/hashicorp/terraform-exec v0.17.3/go.mod h1:+NELG0EqQekJzhvikkeQsOAZpsw0cv/03rbeQJqscAI=
github.com/hashicorp/terraform-json v0.14.0 h1:sh9iZ1Y8IFJLx+xQiKHGud6/TSUCM0N8e17dKDpqV7s=
github.com/hashicorp/terraform-json v0.14.0/go.mod h1:5A9HIWPkk4e5aeeXIBbkcOvaZbIYnAIkEyqP2pNSckM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/honeycombio/beeline-go v1.10.0 h1:cUDe555oqvw8oD76BQJ8alk7FP0JZ/M/zXpNvOEDLDc=
github.com/honeycombio/libhoney-go v1.16.0 h1:kPpqoz6vbOzgp7jC6SR7SkNj7rua7rgxvznI6M3KdHc=
github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c/go.mod h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs=
@ -1208,8 +1212,9 @@ github.com/secure-systems-lab/go-securesystemslib v0.5.0 h1:oTiNu0QnulMQgN/hLK12
github.com/secure-systems-lab/go-securesystemslib v0.5.0/go.mod h1:uoCqUC0Ap7jrBSEanxT+SdACYJTVplRXWLkGMuDjXqk=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
@ -1424,6 +1429,7 @@ go.opentelemetry.io/otel/sdk/export/metric v0.20.0/go.mod h1:h7RBNMsDJ5pmI1zExLi
go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4/0TjTXukfxjzSTpHE=
go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.starlark.net v0.0.0-20210223155950-e043a3d3c984/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0=
go.starlark.net v0.0.0-20220223235035-243c74974e97 h1:ghIB+2LQvihWROIGpcAVPq/ce5O2uMQersgxXiOeTS4=
go.starlark.net v0.0.0-20220223235035-243c74974e97/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@ -1435,6 +1441,7 @@ go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ=
go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=

File diff suppressed because it is too large Load Diff