Compare commits

...

6 Commits

Author SHA1 Message Date
edgelessci
cf3fb3a725
deps: update apk package hashes (#1559)
Co-authored-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
2023-03-30 16:03:29 +02:00
renovate[bot]
b49ca67add
deps: update alpine Docker tag to v3.17.3 (#1558)
Co-authored-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
2023-03-30 15:36:57 +02:00
Paul Meyer
399b052f9e
bazel: add protoc codegen to //:generate target (#1554)
Signed-off-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
2023-03-30 14:47:29 +02:00
Moritz Sanft
1f7acf8dfb
docs: list minimal permissions for Constellation setup (#1442)
* add required Azure perms

* add minimal aws permissions

* add minimal gcp permissions

* [wip] split Azure perms by iam create/create step

* Update docs/docs/getting-started/install.md

Co-authored-by: Nils Hanke <Nirusu@users.noreply.github.com>

* Update docs/docs/getting-started/install.md

Co-authored-by: Nils Hanke <Nirusu@users.noreply.github.com>

* minimal gcp permissions for iam create/create step

* escape footnote bracket

Co-authored-by: Thomas Tendyck <51411342+thomasten@users.noreply.github.com>

* active voice

Co-authored-by: Thomas Tendyck <51411342+thomasten@users.noreply.github.com>

* link to config step

Co-authored-by: Thomas Tendyck <51411342+thomasten@users.noreply.github.com>

* add predefined roles for Azure

Co-authored-by: Thomas Tendyck <51411342+thomasten@users.noreply.github.com>

* add AWS and GCP predefined min roles

* add Azure attestationprovider perm

* footnote for attestation mode

* Update docs/docs/getting-started/install.md

Co-authored-by: Thomas Tendyck <51411342+thomasten@users.noreply.github.com>

* accept superset

* fix negation

Co-authored-by: Nils Hanke <Nirusu@users.noreply.github.com>

* update footnote

---------

Co-authored-by: Nils Hanke <Nirusu@users.noreply.github.com>
Co-authored-by: Thomas Tendyck <51411342+thomasten@users.noreply.github.com>
2023-03-30 10:16:57 +02:00
Otto Bittner
ef5d64b170
ci: set correct fromVersion in upgrade test (#1535) 2023-03-30 09:46:41 +02:00
Malte Poll
827c4f548d
bazel: deps mirror (#1522)
bazel-deps-mirror is an internal tool used to upload external dependencies
that are referenced in the Bazel WORKSPACE to Edgeless Systems' mirror.

It also normalizes deps rules.

* hack: add tool to mirror Bazel dependencies
* hack: bazel-deps-mirror tests
* bazel: add deps mirror commands
* ci: upload Bazel dependencies on renovate PRs
* update go mod
* run deps_mirror_upload


Signed-off-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
Co-authored-by: Paul Meyer <49727155+katexochen@users.noreply.github.com>
2023-03-30 09:41:56 +02:00
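The "normalizes deps rules" step mentioned in the deps mirror commit is visible throughout the diffs below: each http_archive/http_file gains the Edgeless Systems CDN as an additional, content-addressed URL (keyed by the artifact's sha256), keeps the upstream URL as a fallback, and pins an explicit archive type. A minimal illustrative sketch of the normalized shape, taken from the rules_foreign_cc entry in this changeset:

http_archive(
    name = "rules_foreign_cc",
    sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
    strip_prefix = "rules_foreign_cc-0.9.0",
    # explicit archive type, since the content-addressed mirror URL carries no file extension
    type = "tar.gz",
    urls = [
        # mirror first, keyed by the same sha256 that Bazel verifies
        "https://cdn.confidential.cloud/constellation/cas/sha256/2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
        # upstream as fallback
        "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
    ],
)

On renovate branches, CI then uploads newly referenced archives to the mirror via bazelisk run //bazel/ci:deps_mirror_upload (see the test-tidy workflow change below).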
74 changed files with 4571 additions and 2356 deletions


@@ -232,7 +232,7 @@ jobs:
       max-parallel: 1
       matrix:
         fromVersion:
-          ["2.6"]
+          ["v2.6.0"]
         cloudProvider: ["gcp", "azure"]
     name: Run upgrade tests
     secrets: inherit


@ -1,35 +0,0 @@
name: Proto generate check
on:
workflow_dispatch:
push:
branches:
- main
- "release/**"
paths:
- "**.proto"
- ".github/workflows/test-proto.yml"
- "proto/Dockerfile.gen-proto"
pull_request:
paths:
- "**.proto"
- ".github/workflows/test-proto.yml"
- "proto/Dockerfile.gen-proto"
jobs:
go-generate:
runs-on: ubuntu-22.04
steps:
- name: Checkout
uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # v3.4.0
with:
ref: ${{ !github.event.pull_request.head.repo.fork && github.head_ref || '' }}
- name: Generate proto
shell: bash
working-directory: proto
env:
DOCKER_BUILDKIT: 1
run: |
docker build -o .. -f Dockerfile.gen-proto ..
git diff --exit-code


@@ -12,6 +12,9 @@ jobs:
   tidycheck:
     name: tidy, check and generate
     runs-on: ubuntu-22.04
+    permissions:
+      id-token: write
+      contents: read
     steps:
       - name: Checkout
         uses: actions/checkout@24cb9080177205b6e8c946b17badbe402adc938f # v3.4.0
@@ -37,6 +40,18 @@
         with:
           go-version: "1.20.2"
+      - name: Assume AWS role to upload Bazel dependencies to S3
+        if: startsWith(github.head_ref, 'renovate/')
+        uses: aws-actions/configure-aws-credentials@e1e17a757e536f70e52b5a12b2e8d1d1c60e04ef # v2.0.0
+        with:
+          role-to-assume: arn:aws:iam::795746500882:role/GithubConstellationMirrorWrite
+          aws-region: eu-central-1
+      - name: Upload Bazel dependencies to the mirror
+        if: startsWith(github.head_ref, 'renovate/')
+        shell: bash
+        run: bazelisk run //bazel/ci:deps_mirror_upload
       - name: Run Bazel tidy
         shell: bash
         run: bazelisk run //:tidy
@@ -107,7 +122,7 @@
         run: bazelisk run //:check
       # The following steps are only executed if the previous tidy check failed
-      # and the action runs on an renovat branche. In this case, we tidy all
+      # and the action runs on an renovate branch. In this case, we tidy all
       # modules again and commit the changes, so the user doesn't need to do it.
       - name: Push changes

.gitignore (vendored), 2 lines changed

@@ -57,7 +57,7 @@ __pycache__/
 .gitpod.yml
 # Bazel
-bazel-*
+/bazel-*
 tools/pseudo-version
 .bazeloverwriterc


@@ -6,6 +6,7 @@ def node_maintainance_operator_deps():
     http_archive(
         name = "com_github_medik8s_node_maintainance_operator",
         urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/048323ffdb55787df9b93d85be93e4730f4495fba81b440dc6fe195408ec2533",
             "https://github.com/medik8s/node-maintenance-operator/archive/refs/tags/v0.14.0.tar.gz",
         ],
         sha256 = "048323ffdb55787df9b93d85be93e4730f4495fba81b440dc6fe195408ec2533",
@@ -18,4 +19,5 @@ filegroup(
     visibility = ["//visibility:public"],
 )
 """,
+        type = "tar.gz",
     )


@@ -93,7 +93,11 @@ http_archive(
     name = "rules_foreign_cc",
     sha256 = "2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
     strip_prefix = "rules_foreign_cc-0.9.0",
-    url = "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
+    type = "tar.gz",
+    urls = [
+        "https://cdn.confidential.cloud/constellation/cas/sha256/2a4d07cd64b0719b39a7c12218a3e507672b82a97b98c6a89d38565894cf7c51",
+        "https://github.com/bazelbuild/rules_foreign_cc/archive/refs/tags/0.9.0.tar.gz",
+    ],
 )
 load("@rules_foreign_cc//foreign_cc:repositories.bzl", "rules_foreign_cc_dependencies")


@ -1,6 +1,7 @@
948af973885559d30e7af8d435a5270f873160104ae6eb8578f0af1d74645b1f v3.17/main/x86_64/krb5-conf-1.0-r2.apk 948af973885559d30e7af8d435a5270f873160104ae6eb8578f0af1d74645b1f v3.17/main/x86_64/krb5-conf-1.0-r2.apk
95f83a74e872b62360f6c6623a773e07a864e3313a127f57910caba368fed04b v3.17/community/x86_64/libvirt-libs-8.9.0-r4.apk 95f83a74e872b62360f6c6623a773e07a864e3313a127f57910caba368fed04b v3.17/community/x86_64/libvirt-libs-8.9.0-r4.apk
42f2618b35e9404d64f752c22f9cd2cb7a7d72328ceff4292b0a2a6be355fdc6 v3.17/main/x86_64/libffi-3.4.4-r0.apk 42f2618b35e9404d64f752c22f9cd2cb7a7d72328ceff4292b0a2a6be355fdc6 v3.17/main/x86_64/libffi-3.4.4-r0.apk
2698e84ad84aa587bcd1745175f1744a10ea74685b30209845db89ecf2365b85 v3.17/main/x86_64/libssl3-3.0.8-r3.apk
ecca312cb85b634352aef41f1561f3f3f262e85b57a620859df2d0cbe6972ded v3.17/main/x86_64/libtasn1-4.19.0-r0.apk ecca312cb85b634352aef41f1561f3f3f262e85b57a620859df2d0cbe6972ded v3.17/main/x86_64/libtasn1-4.19.0-r0.apk
f9585399e58c15da6324f92e7ad92a757c01edb560e9c362ab4587c6158cd8e4 v3.17/main/x86_64/libverto-glib-0.3.2-r1.apk f9585399e58c15da6324f92e7ad92a757c01edb560e9c362ab4587c6158cd8e4 v3.17/main/x86_64/libverto-glib-0.3.2-r1.apk
455c58e9b66da6d7fe4b86cd9bab830e3963008b58bd87fe0e6b7aa05907af4c v3.17/main/x86_64/pkgconf-1.9.4-r0.apk 455c58e9b66da6d7fe4b86cd9bab830e3963008b58bd87fe0e6b7aa05907af4c v3.17/main/x86_64/pkgconf-1.9.4-r0.apk
@ -30,6 +31,7 @@ bdc90400c34b17772e2713154c3e4c34a8db37edace1e6dc8f07329eb09f4ac9 v3.17/main/x86
ede0b4fa32c44ed13ef23616856f173d6f9fd7de1787426e8009cbd04f03802d v3.17/main/x86_64/libnl3-3.7.0-r0.apk ede0b4fa32c44ed13ef23616856f173d6f9fd7de1787426e8009cbd04f03802d v3.17/main/x86_64/libnl3-3.7.0-r0.apk
b3ad8d88fdae82cb1bd350f84298059ac8287a2855136580b2828f75ef846c4b v3.17/main/x86_64/scanelf-1.3.5-r1.apk b3ad8d88fdae82cb1bd350f84298059ac8287a2855136580b2828f75ef846c4b v3.17/main/x86_64/scanelf-1.3.5-r1.apk
a51399a9415101a98ffee5921fdf3fc24308c37e30cb4afe3c89ef9cf1da9bc7 v3.17/main/x86_64/krb5-libs-1.20.1-r0.apk a51399a9415101a98ffee5921fdf3fc24308c37e30cb4afe3c89ef9cf1da9bc7 v3.17/main/x86_64/krb5-libs-1.20.1-r0.apk
64337f9c3fe1cd25d0863a00e6fd2329f8e0976f2d6fb0210391de9593602585 v3.17/main/x86_64/openssl-3.0.8-r3.apk
5a0730375e1f6d2f70d4385f9b63b0957024bd6e2a80dc784d066cf714362b07 v3.17/main/x86_64/libevent-2.1.12-r5.apk 5a0730375e1f6d2f70d4385f9b63b0957024bd6e2a80dc784d066cf714362b07 v3.17/main/x86_64/libevent-2.1.12-r5.apk
9a60fb5126f84fabb1097bcb6d01bc0d298df8c362a69cd83178ae20d971cc38 v3.17/main/x86_64/attr-2.5.1-r2.apk 9a60fb5126f84fabb1097bcb6d01bc0d298df8c362a69cd83178ae20d971cc38 v3.17/main/x86_64/attr-2.5.1-r2.apk
981ccb518411d2e9f04249f6fe40568ce41e320d23a9550647852417be58bec6 v3.17/main/x86_64/tar-1.34-r2.apk 981ccb518411d2e9f04249f6fe40568ce41e320d23a9550647852417be58bec6 v3.17/main/x86_64/tar-1.34-r2.apk
@ -43,6 +45,7 @@ b1b3ac001c198712c2798ec70b8bb6245b06cdee342a4622f371f7df043ab82c v3.17/main/x86
2a46230e00ba2e1c59c4d3dfc4bd74135d034191dc9fdf6606b3021c00efb5d3 v3.17/main/x86_64/mdev-conf-4.3-r0.apk 2a46230e00ba2e1c59c4d3dfc4bd74135d034191dc9fdf6606b3021c00efb5d3 v3.17/main/x86_64/mdev-conf-4.3-r0.apk
afcc0a285b823f73526c1995cf9ce71f91fc99ce0969a3494926df94e2589e68 v3.17/main/x86_64/ca-certificates-bundle-20220614-r4.apk afcc0a285b823f73526c1995cf9ce71f91fc99ce0969a3494926df94e2589e68 v3.17/main/x86_64/ca-certificates-bundle-20220614-r4.apk
83dd5cc59510198067ba0e4db76208f669218469417b909f82c2f9fbb1e1f20a v3.17/main/x86_64/p11-kit-0.24.1-r1.apk 83dd5cc59510198067ba0e4db76208f669218469417b909f82c2f9fbb1e1f20a v3.17/main/x86_64/p11-kit-0.24.1-r1.apk
8c1086f697257360b6cc28816c2544b6fbc428d2419f4d78eb3d0598897a3d47 v3.17/main/x86_64/alpine-base-3.17.3-r0.apk
c0e98093cbf8c824ff490cad1a4ea0037c1ff6b0bcb7c7069acb03e4aaf021d3 v3.17/main/x86_64/abuild-3.10.0-r0.apk c0e98093cbf8c824ff490cad1a4ea0037c1ff6b0bcb7c7069acb03e4aaf021d3 v3.17/main/x86_64/abuild-3.10.0-r0.apk
2f380042d7e80f124291ffaeed21700af13fbf112866a4caa663226cc9ba3468 v3.17/main/x86_64/musl-utils-1.2.3-r4.apk 2f380042d7e80f124291ffaeed21700af13fbf112866a4caa663226cc9ba3468 v3.17/main/x86_64/musl-utils-1.2.3-r4.apk
ff98cab12123a0729fb1f2815bfa9e1611e5567f06d85b340b85f55d89ffa9c6 v3.17/main/x86_64/alpine-baselayout-data-3.4.0-r0.apk ff98cab12123a0729fb1f2815bfa9e1611e5567f06d85b340b85f55d89ffa9c6 v3.17/main/x86_64/alpine-baselayout-data-3.4.0-r0.apk
@ -50,9 +53,10 @@ ff98cab12123a0729fb1f2815bfa9e1611e5567f06d85b340b85f55d89ffa9c6 v3.17/main/x86
1ab16d81c9e7b59c51692626ac58b55d779f40bca4313be3d591d56a873c9434 v3.17/main/x86_64/util-linux-dev-2.38.1-r1.apk 1ab16d81c9e7b59c51692626ac58b55d779f40bca4313be3d591d56a873c9434 v3.17/main/x86_64/util-linux-dev-2.38.1-r1.apk
c3687cf0c19f8d1fbad0d2a9e49e0f1cea7f10b67f961f17b0f0173b56161d2f v3.17/main/x86_64/e2fsprogs-libs-1.46.6-r0.apk c3687cf0c19f8d1fbad0d2a9e49e0f1cea7f10b67f961f17b0f0173b56161d2f v3.17/main/x86_64/e2fsprogs-libs-1.46.6-r0.apk
ceff279c448e9987e70a97a77fe57d84ff1eefd428345c525f2e21a00d1a54b4 v3.17/main/x86_64/ssl_client-1.35.0-r29.apk ceff279c448e9987e70a97a77fe57d84ff1eefd428345c525f2e21a00d1a54b4 v3.17/main/x86_64/ssl_client-1.35.0-r29.apk
d4ff9adf2dbceac313b6d464588be5dd17d1c1d5bd8b644bffc546da76635c1c v3.17/main/x86_64/alpine-release-3.17.3-r0.apk
0c083d8c3d2511e8387e487c83aaa429de5a76fbf0219404c3afde63b715d2a4 v3.17/main/x86_64/libfdisk-2.38.1-r1.apk 0c083d8c3d2511e8387e487c83aaa429de5a76fbf0219404c3afde63b715d2a4 v3.17/main/x86_64/libfdisk-2.38.1-r1.apk
1e4149304c4acc0e93c72aadf8df0f4643aee35f0294bf2deae019cca1bf5085 v3.17/main/x86_64/pcre2-10.42-r0.apk 1e4149304c4acc0e93c72aadf8df0f4643aee35f0294bf2deae019cca1bf5085 v3.17/main/x86_64/pcre2-10.42-r0.apk
e439017b79d69e2ede0b7a74bf7c9cfdedc3f59b2581f1c4913931e255736a52 v3.17/main/x86_64/alpine-base-3.17.2-r0.apk 68a074d18ded51e1953bf3c97ea66877f9e6703fe357a315daac63b91c5ce8d9 v3.17/main/x86_64/libcrypto3-3.0.8-r3.apk
dc35929a53b3abaecb69b18dca79af25e38b8ab906aec5a912ec120b2cb4b731 v3.17/main/x86_64/zlib-1.2.13-r0.apk dc35929a53b3abaecb69b18dca79af25e38b8ab906aec5a912ec120b2cb4b731 v3.17/main/x86_64/zlib-1.2.13-r0.apk
d43569a2293a79ae7b7ee7d36f14b3f9893301a971e8534d104fa51a160b9607 v3.17/main/x86_64/libunistring-1.1-r0.apk d43569a2293a79ae7b7ee7d36f14b3f9893301a971e8534d104fa51a160b9607 v3.17/main/x86_64/libunistring-1.1-r0.apk
82874c31d2fc4aa5bb2c3e7240d419643c20c5740e1f2c91099b6f04aad200ad v3.17/main/x86_64/nghttp2-libs-1.51.0-r0.apk 82874c31d2fc4aa5bb2c3e7240d419643c20c5740e1f2c91099b6f04aad200ad v3.17/main/x86_64/nghttp2-libs-1.51.0-r0.apk
@ -61,9 +65,6 @@ ac29bb040470e672d186c62bd9db5b7f0d29336b5992f024098a951754f43a22 v3.17/main/x86
baa3e5a7f248f0e34bcaa07b2c5dfbe39641e52feb878518cd6a7f6c579590e9 v3.17/main/x86_64/patch-2.7.6-r9.apk baa3e5a7f248f0e34bcaa07b2c5dfbe39641e52feb878518cd6a7f6c579590e9 v3.17/main/x86_64/patch-2.7.6-r9.apk
adfebf5fc4004f1460f5971913fcca3ea3d6fa56412d32ffc48f191e336a1cc5 v3.17/main/x86_64/libcap-ng-0.8.3-r1.apk adfebf5fc4004f1460f5971913fcca3ea3d6fa56412d32ffc48f191e336a1cc5 v3.17/main/x86_64/libcap-ng-0.8.3-r1.apk
a1060409c38e4d67e6ce67001108a35c2ade5a50cdff9c62fc555ef9a08717b9 v3.17/main/x86_64/libverto-libev-0.3.2-r1.apk a1060409c38e4d67e6ce67001108a35c2ade5a50cdff9c62fc555ef9a08717b9 v3.17/main/x86_64/libverto-libev-0.3.2-r1.apk
9c75bacd5d9b5c41c451813afd7aa35e209537d44d6b0b944a5738a83d088934 v3.17/main/x86_64/libssl3-3.0.8-r2.apk
6643e122290e4485cfbb7dae52d11ad1d48c19f5f34a6549af0da1ce9974dfd1 v3.17/main/x86_64/alpine-release-3.17.2-r0.apk
74b244c1baaa58f1c41519aea3025a696dbaaac5d912c1bc3029437bea9b2b38 v3.17/main/x86_64/openssl-3.0.8-r2.apk
862e8d30f9be1a41632c7c575fbc8f81199a5fda650bc47384422bc017e09c4d v3.17/main/x86_64/keyutils-libs-1.6.3-r1.apk 862e8d30f9be1a41632c7c575fbc8f81199a5fda650bc47384422bc017e09c4d v3.17/main/x86_64/keyutils-libs-1.6.3-r1.apk
935589dfe902b26cdbe09f54eb399ce2f5d6b5e13eb994de36abb495e4843df5 v3.17/main/x86_64/yajl-2.1.0-r5.apk 935589dfe902b26cdbe09f54eb399ce2f5d6b5e13eb994de36abb495e4843df5 v3.17/main/x86_64/yajl-2.1.0-r5.apk
fee7860a5a1cb324bfe5ee4b5a68e834d57862743f062183681443e3387951da v3.17/main/x86_64/libverto-libevent-0.3.2-r1.apk fee7860a5a1cb324bfe5ee4b5a68e834d57862743f062183681443e3387951da v3.17/main/x86_64/libverto-libevent-0.3.2-r1.apk
@ -86,4 +87,3 @@ b413e1c8b38b53fb83ecc7b75a227aa7b520a9dac80f0d7c1fc912bc56416c2a v3.17/main/x86
2c0282ec5c2d78fe94b1e0ab676d6fe675e6656796b8a92e29ce4b17234add6a v3.17/main/x86_64/libgcc-12.2.1_git20220924-r4.apk 2c0282ec5c2d78fe94b1e0ab676d6fe675e6656796b8a92e29ce4b17234add6a v3.17/main/x86_64/libgcc-12.2.1_git20220924-r4.apk
736d8808f17603015b7766e0f88c703451cba97d987dfd1c92ceed7b55ecf24d v3.17/main/x86_64/ifupdown-ng-0.12.1-r1.apk 736d8808f17603015b7766e0f88c703451cba97d987dfd1c92ceed7b55ecf24d v3.17/main/x86_64/ifupdown-ng-0.12.1-r1.apk
f401d78b65a5067ef396c93a56950a87fa1b1fe3e1770489021f5924db7b10b0 v3.17/main/x86_64/libverto-0.3.2-r1.apk f401d78b65a5067ef396c93a56950a87fa1b1fe3e1770489021f5924db7b10b0 v3.17/main/x86_64/libverto-0.3.2-r1.apk
2384fa191f9a2c676dc2d898f8a8f64fd62dac8ab2dbe00c515b2165120a82a1 v3.17/main/x86_64/libcrypto3-3.0.8-r2.apk


@ -1,6 +1,7 @@
load("@bazel_gazelle//:def.bzl", "gazelle") load("@bazel_gazelle//:def.bzl", "gazelle")
load("@com_github_ash2k_bazel_tools//multirun:def.bzl", "multirun") load("@com_github_ash2k_bazel_tools//multirun:def.bzl", "multirun")
load("@com_github_bazelbuild_buildtools//buildifier:def.bzl", "buildifier", "buildifier_test") load("@com_github_bazelbuild_buildtools//buildifier:def.bzl", "buildifier", "buildifier_test")
load("//bazel/ci:proto_targets.bzl", "proto_targets")
load("//bazel/sh:def.bzl", "noop_warn", "repo_command", "sh_template") load("//bazel/sh:def.bzl", "noop_warn", "repo_command", "sh_template")
required_tags = ["e2e"] required_tags = ["e2e"]
@ -331,6 +332,55 @@ sh_template(
template = "go_generate.sh.in", template = "go_generate.sh.in",
) )
repo_command(
name = "deps_mirror_fix",
args = [
"fix",
"--unauthenticated",
],
command = "//hack/bazel-deps-mirror",
)
repo_command(
name = "deps_mirror_upload",
args = [
"fix",
],
command = "//hack/bazel-deps-mirror",
)
repo_command(
name = "deps_mirror_check",
args = [
"check",
],
command = "//hack/bazel-deps-mirror",
)
repo_command(
name = "deps_mirror_check_mirror",
args = [
"check",
"--mirror",
],
command = "//hack/bazel-deps-mirror",
)
sh_template(
name = "proto_targets_check",
data = [],
substitutions = {
"@@PROTO_TARGETS@@": " ".join(proto_targets()),
},
template = "proto_targets_check.sh.in",
)
multirun(
name = "proto_generate",
commands = proto_targets(),
jobs = 0, # execute concurrently
)
multirun( multirun(
name = "tidy", name = "tidy",
commands = [ commands = [
@ -342,6 +392,7 @@ multirun(
":buildifier_fix", ":buildifier_fix",
":terraform_fmt", ":terraform_fmt",
":buf_fmt", ":buf_fmt",
":deps_mirror_fix",
], ],
jobs = 1, # execute sequentially jobs = 1, # execute sequentially
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
@ -358,6 +409,8 @@ multirun(
":golicenses_check", ":golicenses_check",
":license_header_check", ":license_header_check",
":govulncheck", ":govulncheck",
":deps_mirror_check",
":proto_targets_check",
] + select({ ] + select({
"@io_bazel_rules_go//go/platform:darwin_arm64": [ "@io_bazel_rules_go//go/platform:darwin_arm64": [
":shellcheck_noop_warning", ":shellcheck_noop_warning",
@ -378,6 +431,7 @@ multirun(
":terraform_gen", ":terraform_gen",
"//3rdparty/bazel/com_github_medik8s_node_maintainance_operator:pull_files", "//3rdparty/bazel/com_github_medik8s_node_maintainance_operator:pull_files",
":go_generate", ":go_generate",
":proto_generate",
], ],
jobs = 1, # execute sequentially jobs = 1, # execute sequentially
visibility = ["//visibility:public"], visibility = ["//visibility:public"],


@@ -24,6 +24,7 @@ noHeader=$(
   -rL \
   --include='*.go' \
   --exclude-dir 3rdparty \
+  --exclude-dir build \
   -e'SPDX-License-Identifier: AGPL-3.0-only' \
   -e'DO NOT EDIT'
 )


@ -0,0 +1,12 @@
"""Proto targets"""
def proto_targets():
return [
"//joinservice/joinproto:write_generated_protos",
"//bootstrapper/initproto:write_generated_protos",
"//debugd/service:write_generated_protos",
"//disk-mapper/recoverproto:write_generated_protos",
"//keyservice/keyserviceproto:write_generated_protos",
"//upgrade-agent/upgradeproto:write_generated_protos",
"//verify/verifyproto:write_generated_protos",
]


@ -0,0 +1,85 @@
#!/usr/bin/env bash
###### script header ######
lib=$(realpath @@BASE_LIB@@) || exit 1
stat "${lib}" >> /dev/null || exit 1
# shellcheck source=../sh/lib.bash
if ! source "${lib}"; then
echo "Error: could not find import"
exit 1
fi
protoTargets=(@@PROTO_TARGETS@@)
cd "${BUILD_WORKSPACE_DIRECTORY}"
###### script body ######
exitCode=0
writeGoProtoFindingsStr=$(
grep \
-rw . \
-e "write_go_proto_srcs(" \
--include=*.bazel
)
readarray -t <<< "${writeGoProtoFindingsStr}"
writeGoProtoFindings=("${MAPFILE[@]}")
echo "Checking that all proto files have a 'write_go_proto_srcs' in the BUILD.bazel file of that package..."
protoFilesStr=$(find . -type f -name "*.proto")
readarray -t <<< "${protoFilesStr}"
protoFiles=("${MAPFILE[@]}")
protoFilePaths=()
for protoFile in "${protoFiles[@]}"; do
protoFilePaths+=("$(dirname "${protoFile}")")
done
writeGoProtoPaths=()
for writeGoProtoFinding in "${writeGoProtoFindings[@]}"; do
writeGoProtoPaths+=("${writeGoProtoFinding%/*}") # remove everything after the last slash
done
protoFilePathsSorted=$(printf '%s\n' "${protoFilePaths[@]}" | sort)
writeGoProtoPathsSorted=$(printf '%s\n' "${writeGoProtoPaths[@]}" | sort)
diff=$(diff <(echo "${protoFilePathsSorted}") <(echo "${writeGoProtoPathsSorted}") || true) # don't let diff fail
if [[ -n ${diff} ]]; then
echo "Mismatch between proto files and 'write_go_proto_srcs' calls:"
# shellcheck disable=SC2001
echo "${diff}" | sed -e 's/^/ /'
exitCode=1
fi
echo "Checking that all 'write_go_proto_srcs' calls and targets in bazel/ci/proto_targets.bzl match..."
writeGoProtoFindingsCleaned=()
for protoFinding in "${writeGoProtoFindings[@]}"; do
findingCleaned=$(
echo "${protoFinding}" |
sed \
-e 's$/BUILD.bazel$$' \
-e 's/write_go_proto_srcs(/write_generated_protos/g' \
-e 's$./$//$'
)
writeGoProtoFindingsCleaned+=("${findingCleaned}")
done
writeGoProtoFindingsSorted=$(printf '%s\n' "${writeGoProtoFindingsCleaned[@]}" | sort)
protoTargetsSorted=$(printf '%s\n' "${protoTargets[@]}" | sort)
diff=$(diff <(echo "${writeGoProtoFindingsSorted}") <(echo "${protoTargetsSorted}") || true) # don't let diff fail
if [[ -n ${diff} ]]; then
echo "Mismatch between 'write_go_proto_srcs' calls and targets listed in bazel/ci/proto_targets.bzl:"
# shellcheck disable=SC2001
echo "${diff}" | sed -e 's/^/ /'
exitCode=1
fi
exit "${exitCode}"

bazel/proto/BUILD.bazel (new file)

bazel/proto/rules.bzl (new file, 87 lines)

@ -0,0 +1,87 @@
"""
Rules for generating Go source files from proto files.
based on https://github.com/bazelbuild/rules_go/issues/2111#issuecomment-1355927231
"""
load("@aspect_bazel_lib//lib:write_source_files.bzl", "write_source_files")
load("@io_bazel_rules_go//go:def.bzl", "GoLibrary", "go_context")
load("@io_bazel_rules_go//proto:compiler.bzl", "GoProtoCompiler")
def _output_go_library_srcs_impl(ctx):
go = go_context(ctx)
srcs_of_library = []
importpath = ""
for src in ctx.attr.deps:
lib = src[GoLibrary]
go_src = go.library_to_source(go, ctx.attr, lib, False)
if importpath and lib.importpath != importpath:
fail(
"importpath of all deps must match, got {} and {}",
importpath,
lib.importpath,
)
importpath = lib.importpath
srcs_of_library.extend(go_src.srcs)
if len(srcs_of_library) != 1:
fail("expected exactly one src for library, got {}", len(srcs_of_library))
if not ctx.attr.out:
fail("must specify out for now")
# Run a command to copy the src file to the out location.
_copy(ctx, srcs_of_library[0], ctx.outputs.out)
def _copy(ctx, in_file, out_file):
# based on https://github.com/bazelbuild/examples/blob/main/rules/shell_command/rules.bzl
ctx.actions.run_shell(
# Input files visible to the action.
inputs = [in_file],
# Output files that must be created by the action.
outputs = [out_file],
progress_message = "Copying {} to {}".format(in_file.path, out_file.path),
arguments = [in_file.path, out_file.path],
command = """cp "$1" "$2" """,
)
output_go_library_srcs = rule(
implementation = _output_go_library_srcs_impl,
attrs = {
"compiler": attr.label(
providers = [GoProtoCompiler],
default = "@io_bazel_rules_go//proto:go_proto",
),
"deps": attr.label_list(
providers = [GoLibrary],
aspects = [],
),
"out": attr.output(
doc = ("Name of output .go file. If not specified, the file name " +
"of the generated source file will be used."),
mandatory = False,
),
"_go_context_data": attr.label(
default = "@io_bazel_rules_go//:go_context_data",
),
},
toolchains = ["@io_bazel_rules_go//go:toolchain"],
)
def write_go_proto_srcs(name, go_proto_library, src, visibility = None):
generated_src = "__generated_" + src
output_go_library_srcs(
name = name + "_generated",
deps = [go_proto_library],
out = generated_src,
visibility = ["//visibility:private"],
)
write_source_files(
name = name,
files = {
src: generated_src,
},
visibility = visibility,
)


@@ -8,6 +8,8 @@ def buildifier_deps():
         sha256 = "ae34c344514e08c23e90da0e2d6cb700fcd28e80c02e23e4d5715dddcb42f7b3",
         strip_prefix = "buildtools-4.2.2",
         urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/ae34c344514e08c23e90da0e2d6cb700fcd28e80c02e23e4d5715dddcb42f7b3",
             "https://github.com/bazelbuild/buildtools/archive/refs/tags/4.2.2.tar.gz",
         ],
+        type = "tar.gz",
     )


@ -18,29 +18,35 @@ def _shellcheck_deps():
http_archive( http_archive(
name = "com_github_koalaman_shellcheck_linux_amd64", name = "com_github_koalaman_shellcheck_linux_amd64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/700324c6dd0ebea0117591c6cc9d7350d9c7c5c287acbad7630fa17b1d4d9e2f",
"https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.x86_64.tar.xz", "https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.x86_64.tar.xz",
], ],
sha256 = "700324c6dd0ebea0117591c6cc9d7350d9c7c5c287acbad7630fa17b1d4d9e2f", sha256 = "700324c6dd0ebea0117591c6cc9d7350d9c7c5c287acbad7630fa17b1d4d9e2f",
strip_prefix = "shellcheck-v0.9.0", strip_prefix = "shellcheck-v0.9.0",
build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""",
type = "tar.xz",
) )
http_archive( http_archive(
name = "com_github_koalaman_shellcheck_linux_aamd64", name = "com_github_koalaman_shellcheck_linux_aamd64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/179c579ef3481317d130adebede74a34dbbc2df961a70916dd4039ebf0735fae",
"https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.aarch64.tar.xz", "https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.linux.aarch64.tar.xz",
], ],
sha256 = "179c579ef3481317d130adebede74a34dbbc2df961a70916dd4039ebf0735fae", sha256 = "179c579ef3481317d130adebede74a34dbbc2df961a70916dd4039ebf0735fae",
strip_prefix = "shellcheck-v0.9.0", strip_prefix = "shellcheck-v0.9.0",
build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""",
type = "tar.xz",
) )
http_archive( http_archive(
name = "com_github_koalaman_shellcheck_darwin_amd64", name = "com_github_koalaman_shellcheck_darwin_amd64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/7d3730694707605d6e60cec4efcb79a0632d61babc035aa16cda1b897536acf5",
"https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.darwin.x86_64.tar.xz", "https://github.com/koalaman/shellcheck/releases/download/v0.9.0/shellcheck-v0.9.0.darwin.x86_64.tar.xz",
], ],
sha256 = "7d3730694707605d6e60cec4efcb79a0632d61babc035aa16cda1b897536acf5", sha256 = "7d3730694707605d6e60cec4efcb79a0632d61babc035aa16cda1b897536acf5",
strip_prefix = "shellcheck-v0.9.0", strip_prefix = "shellcheck-v0.9.0",
build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["shellcheck"], visibility = ["//visibility:public"])""",
type = "tar.xz",
) )
def _terraform_deps(): def _terraform_deps():
@ -48,33 +54,41 @@ def _terraform_deps():
name = "com_github_hashicorp_terraform_linux_amd64", name = "com_github_hashicorp_terraform_linux_amd64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/9f3ca33d04f5335472829d1df7785115b60176d610ae6f1583343b0a2221a931",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_linux_amd64.zip", "https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_linux_amd64.zip",
], ],
sha256 = "9f3ca33d04f5335472829d1df7785115b60176d610ae6f1583343b0a2221a931", sha256 = "9f3ca33d04f5335472829d1df7785115b60176d610ae6f1583343b0a2221a931",
type = "zip",
) )
http_archive( http_archive(
name = "com_github_hashicorp_terraform_linux_arm64", name = "com_github_hashicorp_terraform_linux_arm64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/39c182670c4e63e918e0a16080b1cc47bb16e158d7da96333d682d6a9cb8eb91",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_linux_arm64.zip", "https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_linux_arm64.zip",
], ],
sha256 = "39c182670c4e63e918e0a16080b1cc47bb16e158d7da96333d682d6a9cb8eb91", sha256 = "39c182670c4e63e918e0a16080b1cc47bb16e158d7da96333d682d6a9cb8eb91",
type = "zip",
) )
http_archive( http_archive(
name = "com_github_hashicorp_terraform_darwin_amd64", name = "com_github_hashicorp_terraform_darwin_amd64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/c218a6c0ef6692b25af16995c8c7bdf6739e9638fef9235c6aced3cd84afaf66",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_darwin_amd64.zip", "https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_darwin_amd64.zip",
], ],
sha256 = "c218a6c0ef6692b25af16995c8c7bdf6739e9638fef9235c6aced3cd84afaf66", sha256 = "c218a6c0ef6692b25af16995c8c7bdf6739e9638fef9235c6aced3cd84afaf66",
type = "zip",
) )
http_archive( http_archive(
name = "com_github_hashicorp_terraform_darwin_arm64", name = "com_github_hashicorp_terraform_darwin_arm64",
build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["terraform"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/af8ff7576c8fc41496fdf97e9199b00d8d81729a6a0e821eaf4dfd08aa763540",
"https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_darwin_arm64.zip", "https://releases.hashicorp.com/terraform/1.4.2/terraform_1.4.2_darwin_arm64.zip",
], ],
sha256 = "af8ff7576c8fc41496fdf97e9199b00d8d81729a6a0e821eaf4dfd08aa763540", sha256 = "af8ff7576c8fc41496fdf97e9199b00d8d81729a6a0e821eaf4dfd08aa763540",
type = "zip",
) )
def _actionlint_deps(): def _actionlint_deps():
@ -82,39 +96,48 @@ def _actionlint_deps():
name = "com_github_rhysd_actionlint_linux_amd64", name = "com_github_rhysd_actionlint_linux_amd64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/b39e7cd53f4a317aecfb09edcebcc058df9ebef967866e11aa7f0df27339af3b",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_linux_amd64.tar.gz", "https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_linux_amd64.tar.gz",
], ],
sha256 = "b39e7cd53f4a317aecfb09edcebcc058df9ebef967866e11aa7f0df27339af3b", sha256 = "b39e7cd53f4a317aecfb09edcebcc058df9ebef967866e11aa7f0df27339af3b",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_rhysd_actionlint_linux_arm64", name = "com_github_rhysd_actionlint_linux_arm64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/a36ba721621e861e900d36457836bfd6a29d6e10d9edebe547544a0e3dbf4348",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_linux_arm64.tar.gz", "https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_linux_arm64.tar.gz",
], ],
sha256 = "a36ba721621e861e900d36457836bfd6a29d6e10d9edebe547544a0e3dbf4348", sha256 = "a36ba721621e861e900d36457836bfd6a29d6e10d9edebe547544a0e3dbf4348",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_rhysd_actionlint_darwin_amd64", name = "com_github_rhysd_actionlint_darwin_amd64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/54f000f84d3fe85012a8726cd731c4101202c787963c9f8b40d15086b003d48e",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_darwin_amd64.tar.gz", "https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_darwin_amd64.tar.gz",
], ],
sha256 = "54f000f84d3fe85012a8726cd731c4101202c787963c9f8b40d15086b003d48e", sha256 = "54f000f84d3fe85012a8726cd731c4101202c787963c9f8b40d15086b003d48e",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_rhysd_actionlint_darwin_arm64", name = "com_github_rhysd_actionlint_darwin_arm64",
build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["actionlint"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/ddd0263968f7f024e49bd8721cd2b3d27c7a4d77081b81a4b376d5053ea25cdc",
"https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_darwin_arm64.tar.gz", "https://github.com/rhysd/actionlint/releases/download/v1.6.23/actionlint_1.6.23_darwin_arm64.tar.gz",
], ],
sha256 = "ddd0263968f7f024e49bd8721cd2b3d27c7a4d77081b81a4b376d5053ea25cdc", sha256 = "ddd0263968f7f024e49bd8721cd2b3d27c7a4d77081b81a4b376d5053ea25cdc",
type = "tar.gz",
) )
def _gofumpt_deps(): def _gofumpt_deps():
http_file( http_file(
name = "com_github_mvdan_gofumpt_linux_amd64", name = "com_github_mvdan_gofumpt_linux_amd64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/d3ca535e6b0b230a9c4f05a3ec54e358336b5e7474d239c15514e63a0b2a8041",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_linux_amd64", "https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_linux_amd64",
], ],
executable = True, executable = True,
@ -124,6 +147,7 @@ def _gofumpt_deps():
http_file( http_file(
name = "com_github_mvdan_gofumpt_linux_arm64", name = "com_github_mvdan_gofumpt_linux_arm64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/186faa7b7562cc4c1a34f2cb89f9b09d9fad949bc2f3ce293ea2726b23c28695",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_linux_arm64", "https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_linux_arm64",
], ],
executable = True, executable = True,
@ -133,6 +157,7 @@ def _gofumpt_deps():
http_file( http_file(
name = "com_github_mvdan_gofumpt_darwin_amd64", name = "com_github_mvdan_gofumpt_darwin_amd64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/3f550baa6d4c071b01e9c68b9308bd2ca3bae6b3b09d203f19ed8626ee0fe487",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_darwin_amd64", "https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_darwin_amd64",
], ],
executable = True, executable = True,
@ -142,6 +167,7 @@ def _gofumpt_deps():
http_file( http_file(
name = "com_github_mvdan_gofumpt_darwin_arm64", name = "com_github_mvdan_gofumpt_darwin_arm64",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/768263452749a3a3cabf412f29f8a14e8bbdc7f6c6471427e977eebc6592ddb8",
"https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_darwin_arm64", "https://github.com/mvdan/gofumpt/releases/download/v0.4.0/gofumpt_v0.4.0_darwin_arm64",
], ],
executable = True, executable = True,
@ -154,33 +180,41 @@ def _tfsec_deps():
name = "com_github_aquasecurity_tfsec_linux_amd64", name = "com_github_aquasecurity_tfsec_linux_amd64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/57b902b31da3eed12448a4e82a8aca30477e4bcd1bf99e3f65310eae0889f88d",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_linux_amd64.tar.gz", "https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_linux_amd64.tar.gz",
], ],
sha256 = "57b902b31da3eed12448a4e82a8aca30477e4bcd1bf99e3f65310eae0889f88d", sha256 = "57b902b31da3eed12448a4e82a8aca30477e4bcd1bf99e3f65310eae0889f88d",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_aquasecurity_tfsec_linux_arm64", name = "com_github_aquasecurity_tfsec_linux_arm64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/20daad803d2a7a781f2ef0ee72ba4ed4ae17dcb41a43a330ae7b98347762bec9",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_linux_arm64.tar.gz", "https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_linux_arm64.tar.gz",
], ],
sha256 = "20daad803d2a7a781f2ef0ee72ba4ed4ae17dcb41a43a330ae7b98347762bec9", sha256 = "20daad803d2a7a781f2ef0ee72ba4ed4ae17dcb41a43a330ae7b98347762bec9",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_aquasecurity_tfsec_darwin_amd64", name = "com_github_aquasecurity_tfsec_darwin_amd64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/6d9f5a747b1fcc1b6c314d30f4ff4d753371e5690309a99a5dd653d719d20d2d",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_darwin_amd64.tar.gz", "https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_darwin_amd64.tar.gz",
], ],
sha256 = "6d9f5a747b1fcc1b6c314d30f4ff4d753371e5690309a99a5dd653d719d20d2d", sha256 = "6d9f5a747b1fcc1b6c314d30f4ff4d753371e5690309a99a5dd653d719d20d2d",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_aquasecurity_tfsec_darwin_arm64", name = "com_github_aquasecurity_tfsec_darwin_arm64",
build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["tfsec"], visibility = ["//visibility:public"])""",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/6d664dcdd37e2809d1b4f14b310ccda0973b4a29e4624e902286e4964d101e22",
"https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_darwin_arm64.tar.gz", "https://github.com/aquasecurity/tfsec/releases/download/v1.28.1/tfsec_1.28.1_darwin_arm64.tar.gz",
], ],
sha256 = "6d664dcdd37e2809d1b4f14b310ccda0973b4a29e4624e902286e4964d101e22", sha256 = "6d664dcdd37e2809d1b4f14b310ccda0973b4a29e4624e902286e4964d101e22",
type = "tar.gz",
) )
def _golangci_lint_deps(): def _golangci_lint_deps():
@ -188,67 +222,91 @@ def _golangci_lint_deps():
name = "com_github_golangci_golangci_lint_linux_amd64", name = "com_github_golangci_golangci_lint_linux_amd64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel", build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/4de479eb9d9bc29da51aec1834e7c255b333723d38dbd56781c68e5dddc6a90b",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-linux-amd64.tar.gz", "https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-linux-amd64.tar.gz",
], ],
strip_prefix = "golangci-lint-1.51.2-linux-amd64", strip_prefix = "golangci-lint-1.51.2-linux-amd64",
sha256 = "4de479eb9d9bc29da51aec1834e7c255b333723d38dbd56781c68e5dddc6a90b", sha256 = "4de479eb9d9bc29da51aec1834e7c255b333723d38dbd56781c68e5dddc6a90b",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_golangci_golangci_lint_linux_arm64", name = "com_github_golangci_golangci_lint_linux_arm64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel", build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/9e03c47b7628d49f950445d74881a0e3cb3a1e6b3c5ac3b67672d600124c1b08",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-linux-arm64.tar.gz", "https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-linux-arm64.tar.gz",
], ],
strip_prefix = "golangci-lint-1.51.2-linux-arm64", strip_prefix = "golangci-lint-1.51.2-linux-arm64",
sha256 = "9e03c47b7628d49f950445d74881a0e3cb3a1e6b3c5ac3b67672d600124c1b08", sha256 = "9e03c47b7628d49f950445d74881a0e3cb3a1e6b3c5ac3b67672d600124c1b08",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_golangci_golangci_lint_darwin_amd64", name = "com_github_golangci_golangci_lint_darwin_amd64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel", build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/0549cbaa2df451cf3a2011a9d73a9cb127784d26749d9cd14c9f4818af104d44",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-darwin-amd64.tar.gz", "https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-darwin-amd64.tar.gz",
], ],
strip_prefix = "golangci-lint-1.51.2-darwin-amd64", strip_prefix = "golangci-lint-1.51.2-darwin-amd64",
sha256 = "0549cbaa2df451cf3a2011a9d73a9cb127784d26749d9cd14c9f4818af104d44", sha256 = "0549cbaa2df451cf3a2011a9d73a9cb127784d26749d9cd14c9f4818af104d44",
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_golangci_golangci_lint_darwin_arm64", name = "com_github_golangci_golangci_lint_darwin_arm64",
build_file = "//bazel/toolchains:BUILD.golangci.bazel", build_file = "//bazel/toolchains:BUILD.golangci.bazel",
urls = [ urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/36e69882205a0e42a63ad57ec3015639c11051e03f0beb9cf7949c6451408960",
"https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-darwin-arm64.tar.gz", "https://github.com/golangci/golangci-lint/releases/download/v1.51.2/golangci-lint-1.51.2-darwin-arm64.tar.gz",
], ],
strip_prefix = "golangci-lint-1.51.2-darwin-arm64", strip_prefix = "golangci-lint-1.51.2-darwin-arm64",
sha256 = "36e69882205a0e42a63ad57ec3015639c11051e03f0beb9cf7949c6451408960", sha256 = "36e69882205a0e42a63ad57ec3015639c11051e03f0beb9cf7949c6451408960",
type = "tar.gz",
) )
def _buf_deps(): def _buf_deps():
http_archive( http_archive(
name = "com_github_bufbuild_buf_linux_amd64", name = "com_github_bufbuild_buf_linux_amd64",
sha256 = "39b58126938e265a7dd60fc4716a4a43931896e62db3d69c704d7dd63d5889dd", sha256 = "39b58126938e265a7dd60fc4716a4a43931896e62db3d69c704d7dd63d5889dd",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-x86_64.tar.gz",
strip_prefix = "buf/bin", strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/39b58126938e265a7dd60fc4716a4a43931896e62db3d69c704d7dd63d5889dd",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-x86_64.tar.gz",
],
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_bufbuild_buf_linux_arm64", name = "com_github_bufbuild_buf_linux_arm64",
sha256 = "90d8caa85b4cff1cdb6e96ee01e3f4f1a12135be3834ffd41c486f1cc03213ef", sha256 = "90d8caa85b4cff1cdb6e96ee01e3f4f1a12135be3834ffd41c486f1cc03213ef",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-aarch64.tar.gz",
strip_prefix = "buf/bin", strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/90d8caa85b4cff1cdb6e96ee01e3f4f1a12135be3834ffd41c486f1cc03213ef",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Linux-aarch64.tar.gz",
],
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_bufbuild_buf_darwin_amd64", name = "com_github_bufbuild_buf_darwin_amd64",
sha256 = "196e75933f7c3abebf8835fdfd74c15bc953525c9250e7bbff943e3db6fb0eb1", sha256 = "196e75933f7c3abebf8835fdfd74c15bc953525c9250e7bbff943e3db6fb0eb1",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-x86_64.tar.gz",
strip_prefix = "buf/bin", strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/196e75933f7c3abebf8835fdfd74c15bc953525c9250e7bbff943e3db6fb0eb1",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-x86_64.tar.gz",
],
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_bufbuild_buf_darwin_arm64", name = "com_github_bufbuild_buf_darwin_arm64",
sha256 = "f6187bbcf0718da1de38ca638038d4a707dd5b0e113e1a9e110ac8a15012505a", sha256 = "f6187bbcf0718da1de38ca638038d4a707dd5b0e113e1a9e110ac8a15012505a",
url = "https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-arm64.tar.gz",
strip_prefix = "buf/bin", strip_prefix = "buf/bin",
build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["buf"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/f6187bbcf0718da1de38ca638038d4a707dd5b0e113e1a9e110ac8a15012505a",
"https://github.com/bufbuild/buf/releases/download/v1.15.1/buf-Darwin-arm64.tar.gz",
],
type = "tar.gz",
) )
def _talos_docgen_deps(): def _talos_docgen_deps():
@ -293,28 +351,44 @@ def _helm_deps():
http_archive( http_archive(
name = "com_github_helm_helm_linux_amd64", name = "com_github_helm_helm_linux_amd64",
sha256 = "781d826daec584f9d50a01f0f7dadfd25a3312217a14aa2fbb85107b014ac8ca", sha256 = "781d826daec584f9d50a01f0f7dadfd25a3312217a14aa2fbb85107b014ac8ca",
url = "https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz",
strip_prefix = "linux-amd64", strip_prefix = "linux-amd64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/781d826daec584f9d50a01f0f7dadfd25a3312217a14aa2fbb85107b014ac8ca",
"https://get.helm.sh/helm-v3.11.2-linux-amd64.tar.gz",
],
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_helm_helm_linux_arm64", name = "com_github_helm_helm_linux_arm64",
sha256 = "0a60baac83c3106017666864e664f52a4e16fbd578ac009f9a85456a9241c5db", sha256 = "0a60baac83c3106017666864e664f52a4e16fbd578ac009f9a85456a9241c5db",
url = "https://get.helm.sh/helm-v3.11.2-linux-arm64.tar.gz",
strip_prefix = "linux-arm64", strip_prefix = "linux-arm64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/0a60baac83c3106017666864e664f52a4e16fbd578ac009f9a85456a9241c5db",
"https://get.helm.sh/helm-v3.11.2-linux-arm64.tar.gz",
],
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_helm_helm_darwin_amd64", name = "com_github_helm_helm_darwin_amd64",
sha256 = "404938fd2c6eff9e0dab830b0db943fca9e1572cd3d7ee40904705760faa390f", sha256 = "404938fd2c6eff9e0dab830b0db943fca9e1572cd3d7ee40904705760faa390f",
url = "https://get.helm.sh/helm-v3.11.2-darwin-amd64.tar.gz",
strip_prefix = "darwin-amd64", strip_prefix = "darwin-amd64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/404938fd2c6eff9e0dab830b0db943fca9e1572cd3d7ee40904705760faa390f",
"https://get.helm.sh/helm-v3.11.2-darwin-amd64.tar.gz",
],
type = "tar.gz",
) )
http_archive( http_archive(
name = "com_github_helm_helm_darwin_arm64", name = "com_github_helm_helm_darwin_arm64",
sha256 = "f61a3aa55827de2d8c64a2063fd744b618b443ed063871b79f52069e90813151", sha256 = "f61a3aa55827de2d8c64a2063fd744b618b443ed063871b79f52069e90813151",
url = "https://get.helm.sh/helm-v3.11.2-darwin-arm64.tar.gz",
strip_prefix = "darwin-arm64", strip_prefix = "darwin-arm64",
build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""", build_file_content = """exports_files(["helm"], visibility = ["//visibility:public"])""",
type = "tar.gz",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/f61a3aa55827de2d8c64a2063fd744b618b443ed063871b79f52069e90813151",
"https://get.helm.sh/helm-v3.11.2-darwin-arm64.tar.gz",
],
) )


@@ -8,6 +8,8 @@ def dnf_deps():
         sha256 = "6104de1d657ae524bef5af86b153b82f114f532fe2e7eb02beb2e950550a88fe",
         strip_prefix = "bazeldnf-45f5d74ba73710b538c57c9d43d88c583aab9d3a",
         urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/6104de1d657ae524bef5af86b153b82f114f532fe2e7eb02beb2e950550a88fe",
             "https://github.com/rmohr/bazeldnf/archive/45f5d74ba73710b538c57c9d43d88c583aab9d3a.tar.gz",
         ],
+        type = "tar.gz",
     )


@@ -815,6 +815,15 @@ def go_dependencies():
         sum = "h1:UE9n9rkJF62ArLb1F3DEjRt8O3jLwMWdSoypKV4f3MU=",
         version = "v0.9.0",
     )
+    go_repository(
+        name = "com_github_bazelbuild_buildtools",
+        build_file_generation = "on",
+        build_file_proto_mode = "disable_global",
+        importpath = "github.com/bazelbuild/buildtools",
+        sum = "h1:XmPu4mXICgdGnC5dXGjUGbwUD/kUmS0l5Aop3LaevBM=",
+        version = "v0.0.0-20230317132445-9c3c1fc0106e",
+    )
     go_repository(
         name = "com_github_beeker1121_goque",
         build_file_generation = "on",
@@ -3559,6 +3568,14 @@
         sum = "h1:ab7dI6W8DuCY7yCU8blo0UCYl2oHre/dloCmzMWg9w8=",
         version = "v1.9.0",
     )
+    go_repository(
+        name = "com_github_hexops_gotextdiff",
+        build_file_generation = "on",
+        build_file_proto_mode = "disable_global",
+        importpath = "github.com/hexops/gotextdiff",
+        sum = "h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=",
+        version = "v1.0.3",
+    )
     go_repository(
         name = "com_github_honeycombio_beeline_go",
@@ -5919,8 +5936,8 @@
         build_file_generation = "on",
         build_file_proto_mode = "disable_global",
         importpath = "github.com/sergi/go-diff",
-        sum = "h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=",
-        version = "v1.2.0",
+        sum = "h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=",
+        version = "v1.3.1",
     )
     go_repository(
         name = "com_github_shibumi_go_pathspec",


@@ -12,16 +12,18 @@ def go_deps():
         strip_prefix = "rules_go-ea3cc4f0778ba4bb35a682affc8e278551187fad",
         sha256 = "9f0c386d233e7160cb752527c34654620cef1920a53617a2f1cca8d8edee5e8a",
         urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/9f0c386d233e7160cb752527c34654620cef1920a53617a2f1cca8d8edee5e8a",
             "https://github.com/bazelbuild/rules_go/archive/ea3cc4f0778ba4bb35a682affc8e278551187fad.tar.gz",
         ],
+        type = "tar.gz",
     )
     http_archive(
         name = "bazel_gazelle",
         strip_prefix = "bazel-gazelle-97a754c6e45848828b27152fa64ca5dd3003d832",
         sha256 = "2591fe5c9ff639317c5144665f2b97f3e45dac7ebb0b9357f8ddb3533b60a16f",
         urls = [
-            # Depending on main until the next release, needed change from https://github.com/bazelbuild/bazel-gazelle/pull/1432
-            # so that "go:embed all:" directives work.
+            "https://cdn.confidential.cloud/constellation/cas/sha256/2591fe5c9ff639317c5144665f2b97f3e45dac7ebb0b9357f8ddb3533b60a16f",
             "https://github.com/bazelbuild/bazel-gazelle/archive/97a754c6e45848828b27152fa64ca5dd3003d832.tar.gz",
         ],
+        type = "tar.gz",
     )


@@ -7,5 +7,9 @@ def multirun_deps():
         name = "com_github_ash2k_bazel_tools",
         sha256 = "0ad31a16c9e48b01a1a11daf908227a6bf6106269187cccf7398625fea2ba45a",
         strip_prefix = "bazel-tools-4e045b9b4e3e613970ab68941b556a356239d433",
-        url = "https://github.com/ash2k/bazel-tools/archive/4e045b9b4e3e613970ab68941b556a356239d433.tar.gz",
+        urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/0ad31a16c9e48b01a1a11daf908227a6bf6106269187cccf7398625fea2ba45a",
+            "https://github.com/ash2k/bazel-tools/archive/4e045b9b4e3e613970ab68941b556a356239d433.tar.gz",
+        ],
+        type = "tar.gz",
     )


@@ -7,5 +7,9 @@ def oci_deps():
         name = "rules_oci",
         sha256 = "4f119dc9e08319a3262c04b334bda54ba0484ca34f8ead706dd2397fc11816f7",
         strip_prefix = "rules_oci-0.3.3",
-        url = "https://github.com/bazel-contrib/rules_oci/releases/download/v0.3.3/rules_oci-v0.3.3.tar.gz",
+        urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/4f119dc9e08319a3262c04b334bda54ba0484ca34f8ead706dd2397fc11816f7",
+            "https://github.com/bazel-contrib/rules_oci/releases/download/v0.3.3/rules_oci-v0.3.3.tar.gz",
+        ],
+        type = "tar.gz",
     )


@@ -7,7 +7,9 @@ def pkg_deps():
         name = "rules_pkg",
         urls = [
             "https://mirror.bazel.build/github.com/bazelbuild/rules_pkg/releases/download/0.8.0/rules_pkg-0.8.0.tar.gz",
+            "https://cdn.confidential.cloud/constellation/cas/sha256/eea0f59c28a9241156a47d7a8e32db9122f3d50b505fae0f33de6ce4d9b61834",
             "https://github.com/bazelbuild/rules_pkg/releases/download/0.8.0/rules_pkg-0.8.0.tar.gz",
         ],
         sha256 = "eea0f59c28a9241156a47d7a8e32db9122f3d50b505fae0f33de6ce4d9b61834",
+        type = "tar.gz",
     )


@@ -8,6 +8,8 @@ def proto_deps():
         sha256 = "17fa03f509b0d1df05c70c174a266ab211d04b9969e41924fd07a81ea171f117",
         strip_prefix = "rules_proto-cda0effe6b5af095a6886c67f90c760b83f08c48",
         urls = [
+            "https://cdn.confidential.cloud/constellation/cas/sha256/17fa03f509b0d1df05c70c174a266ab211d04b9969e41924fd07a81ea171f117",
             "https://github.com/bazelbuild/rules_proto/archive/cda0effe6b5af095a6886c67f90c760b83f08c48.tar.gz",
         ],
+        type = "tar.gz",
     )

View File

@ -10,8 +10,10 @@ def zig_cc_deps():
sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506", sha256 = "74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
urls = [ urls = [
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz", "https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
"https://cdn.confidential.cloud/constellation/cas/sha256/74d544d96f4a5bb630d465ca8bbcfe231e3594e5aae57e1edbf17a6eb3ca2506",
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz", "https://github.com/bazelbuild/bazel-skylib/releases/download/1.3.0/bazel-skylib-1.3.0.tar.gz",
], ],
type = "tar.gz",
) )
# TODO(malt3): Update to a release version once the next release is out. # TODO(malt3): Update to a release version once the next release is out.
@ -21,5 +23,9 @@ def zig_cc_deps():
name = "bazel-zig-cc", name = "bazel-zig-cc",
sha256 = "bea372f7f9bd8541f7b0a152c76c7b9396201c36a0ed229b36c48301815c3141", sha256 = "bea372f7f9bd8541f7b0a152c76c7b9396201c36a0ed229b36c48301815c3141",
strip_prefix = "bazel-zig-cc-f3e4542bd62f4aef794a3d184140a9d30b8fadb8", strip_prefix = "bazel-zig-cc-f3e4542bd62f4aef794a3d184140a9d30b8fadb8",
urls = ["https://github.com/uber/bazel-zig-cc/archive/f3e4542bd62f4aef794a3d184140a9d30b8fadb8.tar.gz"], urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/bea372f7f9bd8541f7b0a152c76c7b9396201c36a0ed229b36c48301815c3141",
"https://github.com/uber/bazel-zig-cc/archive/f3e4542bd62f4aef794a3d184140a9d30b8fadb8.tar.gz",
],
type = "tar.gz",
) )

View File

@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "initproto_proto", name = "initproto_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/bootstrapper/initproto", importpath = "github.com/edgelesssys/constellation/v2/bootstrapper/initproto",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "init.pb.go",
go_proto_library = ":initproto_go_proto",
visibility = ["//visibility:public"],
)

View File

@ -1,12 +1,16 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.1 // protoc-gen-go v1.29.1
// protoc v3.21.8 // protoc v4.22.1
// source: init.proto // source: bootstrapper/initproto/init.proto
package initproto package initproto
import ( import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" reflect "reflect"
@ -25,29 +29,21 @@ type InitRequest struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// repeated string autoscaling_node_groups = 1; removed KmsUri string `protobuf:"bytes,3,opt,name=kms_uri,json=kmsUri,proto3" json:"kms_uri,omitempty"`
// bytes master_secret = 2; removed StorageUri string `protobuf:"bytes,4,opt,name=storage_uri,json=storageUri,proto3" json:"storage_uri,omitempty"`
KmsUri string `protobuf:"bytes,3,opt,name=kms_uri,json=kmsUri,proto3" json:"kms_uri,omitempty"` CloudServiceAccountUri string `protobuf:"bytes,7,opt,name=cloud_service_account_uri,json=cloudServiceAccountUri,proto3" json:"cloud_service_account_uri,omitempty"`
StorageUri string `protobuf:"bytes,4,opt,name=storage_uri,json=storageUri,proto3" json:"storage_uri,omitempty"` KubernetesVersion string `protobuf:"bytes,8,opt,name=kubernetes_version,json=kubernetesVersion,proto3" json:"kubernetes_version,omitempty"`
// string key_encryption_key_id = 5; removed HelmDeployments []byte `protobuf:"bytes,11,opt,name=helm_deployments,json=helmDeployments,proto3" json:"helm_deployments,omitempty"`
// bool use_existing_kek = 6; removed ConformanceMode bool `protobuf:"varint,14,opt,name=conformance_mode,json=conformanceMode,proto3" json:"conformance_mode,omitempty"`
CloudServiceAccountUri string `protobuf:"bytes,7,opt,name=cloud_service_account_uri,json=cloudServiceAccountUri,proto3" json:"cloud_service_account_uri,omitempty"` KubernetesComponents []*KubernetesComponent `protobuf:"bytes,15,rep,name=kubernetes_components,json=kubernetesComponents,proto3" json:"kubernetes_components,omitempty"`
KubernetesVersion string `protobuf:"bytes,8,opt,name=kubernetes_version,json=kubernetesVersion,proto3" json:"kubernetes_version,omitempty"` InitSecret []byte `protobuf:"bytes,16,opt,name=init_secret,json=initSecret,proto3" json:"init_secret,omitempty"`
// repeated SSHUserKey ssh_user_keys = 9; removed ClusterName string `protobuf:"bytes,17,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
// bytes salt = 10; removed
HelmDeployments []byte `protobuf:"bytes,11,opt,name=helm_deployments,json=helmDeployments,proto3" json:"helm_deployments,omitempty"`
// repeated uint32 enforced_pcrs = 12; removed
// bool enforce_idkeydigest = 13; removed
ConformanceMode bool `protobuf:"varint,14,opt,name=conformance_mode,json=conformanceMode,proto3" json:"conformance_mode,omitempty"`
KubernetesComponents []*KubernetesComponent `protobuf:"bytes,15,rep,name=kubernetes_components,json=kubernetesComponents,proto3" json:"kubernetes_components,omitempty"`
InitSecret []byte `protobuf:"bytes,16,opt,name=init_secret,json=initSecret,proto3" json:"init_secret,omitempty"`
ClusterName string `protobuf:"bytes,17,opt,name=cluster_name,json=clusterName,proto3" json:"cluster_name,omitempty"`
} }
func (x *InitRequest) Reset() { func (x *InitRequest) Reset() {
*x = InitRequest{} *x = InitRequest{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_init_proto_msgTypes[0] mi := &file_bootstrapper_initproto_init_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -60,7 +56,7 @@ func (x *InitRequest) String() string {
func (*InitRequest) ProtoMessage() {} func (*InitRequest) ProtoMessage() {}
func (x *InitRequest) ProtoReflect() protoreflect.Message { func (x *InitRequest) ProtoReflect() protoreflect.Message {
mi := &file_init_proto_msgTypes[0] mi := &file_bootstrapper_initproto_init_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -73,7 +69,7 @@ func (x *InitRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use InitRequest.ProtoReflect.Descriptor instead. // Deprecated: Use InitRequest.ProtoReflect.Descriptor instead.
func (*InitRequest) Descriptor() ([]byte, []int) { func (*InitRequest) Descriptor() ([]byte, []int) {
return file_init_proto_rawDescGZIP(), []int{0} return file_bootstrapper_initproto_init_proto_rawDescGZIP(), []int{0}
} }
func (x *InitRequest) GetKmsUri() string { func (x *InitRequest) GetKmsUri() string {
@ -152,7 +148,7 @@ type InitResponse struct {
func (x *InitResponse) Reset() { func (x *InitResponse) Reset() {
*x = InitResponse{} *x = InitResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_init_proto_msgTypes[1] mi := &file_bootstrapper_initproto_init_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -165,7 +161,7 @@ func (x *InitResponse) String() string {
func (*InitResponse) ProtoMessage() {} func (*InitResponse) ProtoMessage() {}
func (x *InitResponse) ProtoReflect() protoreflect.Message { func (x *InitResponse) ProtoReflect() protoreflect.Message {
mi := &file_init_proto_msgTypes[1] mi := &file_bootstrapper_initproto_init_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -178,7 +174,7 @@ func (x *InitResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use InitResponse.ProtoReflect.Descriptor instead. // Deprecated: Use InitResponse.ProtoReflect.Descriptor instead.
func (*InitResponse) Descriptor() ([]byte, []int) { func (*InitResponse) Descriptor() ([]byte, []int) {
return file_init_proto_rawDescGZIP(), []int{1} return file_bootstrapper_initproto_init_proto_rawDescGZIP(), []int{1}
} }
func (x *InitResponse) GetKubeconfig() []byte { func (x *InitResponse) GetKubeconfig() []byte {
@ -216,7 +212,7 @@ type KubernetesComponent struct {
func (x *KubernetesComponent) Reset() { func (x *KubernetesComponent) Reset() {
*x = KubernetesComponent{} *x = KubernetesComponent{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_init_proto_msgTypes[2] mi := &file_bootstrapper_initproto_init_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -229,7 +225,7 @@ func (x *KubernetesComponent) String() string {
func (*KubernetesComponent) ProtoMessage() {} func (*KubernetesComponent) ProtoMessage() {}
func (x *KubernetesComponent) ProtoReflect() protoreflect.Message { func (x *KubernetesComponent) ProtoReflect() protoreflect.Message {
mi := &file_init_proto_msgTypes[2] mi := &file_bootstrapper_initproto_init_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -242,7 +238,7 @@ func (x *KubernetesComponent) ProtoReflect() protoreflect.Message {
// Deprecated: Use KubernetesComponent.ProtoReflect.Descriptor instead. // Deprecated: Use KubernetesComponent.ProtoReflect.Descriptor instead.
func (*KubernetesComponent) Descriptor() ([]byte, []int) { func (*KubernetesComponent) Descriptor() ([]byte, []int) {
return file_init_proto_rawDescGZIP(), []int{2} return file_bootstrapper_initproto_init_proto_rawDescGZIP(), []int{2}
} }
func (x *KubernetesComponent) GetUrl() string { func (x *KubernetesComponent) GetUrl() string {
@ -273,80 +269,81 @@ func (x *KubernetesComponent) GetExtract() bool {
return false return false
} }
var File_init_proto protoreflect.FileDescriptor var File_bootstrapper_initproto_init_proto protoreflect.FileDescriptor
var file_init_proto_rawDesc = []byte{ var file_bootstrapper_initproto_init_proto_rawDesc = []byte{
0x0a, 0x0a, 0x69, 0x6e, 0x69, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x69, 0x6e, 0x0a, 0x21, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x2f, 0x69,
0x69, 0x74, 0x22, 0x9b, 0x03, 0x0a, 0x0b, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x6e, 0x69, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x69, 0x6e, 0x69, 0x74, 0x2e, 0x70, 0x72,
0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x6b, 0x6d, 0x73, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x69, 0x6e, 0x69, 0x74, 0x22, 0x9b, 0x03, 0x0a, 0x0b, 0x49, 0x6e,
0x01, 0x28, 0x09, 0x52, 0x06, 0x6b, 0x6d, 0x73, 0x55, 0x72, 0x69, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x17, 0x0a, 0x07, 0x6b, 0x6d, 0x73,
0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6b, 0x6d, 0x73, 0x55,
0x52, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x55, 0x72, 0x69, 0x12, 0x39, 0x0a, 0x19, 0x72, 0x69, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x75, 0x72,
0x63, 0x6c, 0x6f, 0x75, 0x64, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x61, 0x63, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65,
0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x55, 0x72, 0x69, 0x12, 0x39, 0x0a, 0x19, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x5f, 0x73, 0x65, 0x72,
0x16, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x76, 0x69, 0x63, 0x65, 0x5f, 0x61, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x75, 0x72, 0x69,
0x6f, 0x75, 0x6e, 0x74, 0x55, 0x72, 0x69, 0x12, 0x2d, 0x0a, 0x12, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x16, 0x63, 0x6c, 0x6f, 0x75, 0x64, 0x53, 0x65, 0x72,
0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x76, 0x69, 0x63, 0x65, 0x41, 0x63, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x55, 0x72, 0x69, 0x12, 0x2d,
0x01, 0x28, 0x09, 0x52, 0x11, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x56, 0x0a, 0x12, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x65, 0x72,
0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a, 0x10, 0x68, 0x65, 0x6c, 0x6d, 0x5f, 0x64, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x6b, 0x75, 0x62, 0x65,
0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x29, 0x0a,
0x52, 0x0f, 0x68, 0x65, 0x6c, 0x6d, 0x44, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x10, 0x68, 0x65, 0x6c, 0x6d, 0x5f, 0x64, 0x65, 0x70, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74,
0x73, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x6e, 0x63, 0x65, 0x73, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x68, 0x65, 0x6c, 0x6d, 0x44, 0x65, 0x70,
0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x0e, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0f, 0x63, 0x6f, 0x6e, 0x6c, 0x6f, 0x79, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x29, 0x0a, 0x10, 0x63, 0x6f, 0x6e, 0x66,
0x66, 0x6f, 0x72, 0x6d, 0x61, 0x6e, 0x63, 0x65, 0x4d, 0x6f, 0x64, 0x65, 0x12, 0x4e, 0x0a, 0x15, 0x6f, 0x72, 0x6d, 0x61, 0x6e, 0x63, 0x65, 0x5f, 0x6d, 0x6f, 0x64, 0x65, 0x18, 0x0e, 0x20, 0x01,
0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x28, 0x08, 0x52, 0x0f, 0x63, 0x6f, 0x6e, 0x66, 0x6f, 0x72, 0x6d, 0x61, 0x6e, 0x63, 0x65, 0x4d,
0x6e, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x6e, 0x6f, 0x64, 0x65, 0x12, 0x4e, 0x0a, 0x15, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65,
0x69, 0x74, 0x2e, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0f, 0x20, 0x03,
0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x69, 0x6e, 0x69, 0x74, 0x2e, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e,
0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x6b,
0x69, 0x6e, 0x69, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65,
0x0c, 0x52, 0x0a, 0x69, 0x6e, 0x69, 0x74, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x21, 0x0a, 0x6e, 0x74, 0x73, 0x12, 0x1f, 0x0a, 0x0b, 0x69, 0x6e, 0x69, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x72,
0x0c, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x11, 0x20, 0x65, 0x74, 0x18, 0x10, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x69, 0x6e, 0x69, 0x74, 0x53, 0x65,
0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f,
0x22, 0x68, 0x0a, 0x0c, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x11, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x63, 0x6c, 0x75, 0x73,
0x12, 0x1e, 0x0a, 0x0a, 0x6b, 0x75, 0x62, 0x65, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x74, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x68, 0x0a, 0x0c, 0x49, 0x6e, 0x69, 0x74, 0x52,
0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x6b, 0x75, 0x62, 0x65, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1e, 0x0a, 0x0a, 0x6b, 0x75, 0x62, 0x65, 0x63,
0x12, 0x19, 0x0a, 0x08, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0a, 0x6b, 0x75, 0x62,
0x28, 0x0c, 0x52, 0x07, 0x6f, 0x77, 0x6e, 0x65, 0x72, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x65, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x19, 0x0a, 0x08, 0x6f, 0x77, 0x6e, 0x65, 0x72,
0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x6f, 0x77, 0x6e, 0x65, 0x72,
0x09, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x49, 0x64, 0x22, 0x78, 0x0a, 0x13, 0x4b, 0x75, 0x49, 0x64, 0x12, 0x1d, 0x0a, 0x0a, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x5f, 0x69, 0x64,
0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x09, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x49,
0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x64, 0x22, 0x78, 0x0a, 0x13, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43,
0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18,
0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61,
0x6c, 0x6c, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x21,
0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x78, 0x0a, 0x0c, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03,
0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x78, 0x74, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x50, 0x61, 0x74,
0x72, 0x61, 0x63, 0x74, 0x32, 0x34, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x2d, 0x0a, 0x04, 0x49, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01,
0x6e, 0x69, 0x74, 0x12, 0x11, 0x2e, 0x69, 0x6e, 0x69, 0x74, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x28, 0x08, 0x52, 0x07, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x32, 0x34, 0x0a, 0x03, 0x41,
0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e, 0x69, 0x6e, 0x69, 0x74, 0x2e, 0x49, 0x6e, 0x50, 0x49, 0x12, 0x2d, 0x0a, 0x04, 0x49, 0x6e, 0x69, 0x74, 0x12, 0x11, 0x2e, 0x69, 0x6e, 0x69,
0x69, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x40, 0x5a, 0x3e, 0x67, 0x69, 0x74, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x12, 0x2e,
0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x69, 0x6e, 0x69, 0x74, 0x2e, 0x49, 0x6e, 0x69, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x65, 0x42, 0x40, 0x5a, 0x3e, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f,
0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x62, 0x6f, 0x6f, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x70, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73,
0x65, 0x72, 0x2f, 0x69, 0x6e, 0x69, 0x74, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x62, 0x6f, 0x6f,
0x6f, 0x74, 0x6f, 0x33, 0x74, 0x73, 0x74, 0x72, 0x61, 0x70, 0x70, 0x65, 0x72, 0x2f, 0x69, 0x6e, 0x69, 0x74, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
} }
var ( var (
file_init_proto_rawDescOnce sync.Once file_bootstrapper_initproto_init_proto_rawDescOnce sync.Once
file_init_proto_rawDescData = file_init_proto_rawDesc file_bootstrapper_initproto_init_proto_rawDescData = file_bootstrapper_initproto_init_proto_rawDesc
) )
func file_init_proto_rawDescGZIP() []byte { func file_bootstrapper_initproto_init_proto_rawDescGZIP() []byte {
file_init_proto_rawDescOnce.Do(func() { file_bootstrapper_initproto_init_proto_rawDescOnce.Do(func() {
file_init_proto_rawDescData = protoimpl.X.CompressGZIP(file_init_proto_rawDescData) file_bootstrapper_initproto_init_proto_rawDescData = protoimpl.X.CompressGZIP(file_bootstrapper_initproto_init_proto_rawDescData)
}) })
return file_init_proto_rawDescData return file_bootstrapper_initproto_init_proto_rawDescData
} }
var file_init_proto_msgTypes = make([]protoimpl.MessageInfo, 3) var file_bootstrapper_initproto_init_proto_msgTypes = make([]protoimpl.MessageInfo, 3)
var file_init_proto_goTypes = []interface{}{ var file_bootstrapper_initproto_init_proto_goTypes = []interface{}{
(*InitRequest)(nil), // 0: init.InitRequest (*InitRequest)(nil), // 0: init.InitRequest
(*InitResponse)(nil), // 1: init.InitResponse (*InitResponse)(nil), // 1: init.InitResponse
(*KubernetesComponent)(nil), // 2: init.KubernetesComponent (*KubernetesComponent)(nil), // 2: init.KubernetesComponent
} }
var file_init_proto_depIdxs = []int32{ var file_bootstrapper_initproto_init_proto_depIdxs = []int32{
2, // 0: init.InitRequest.kubernetes_components:type_name -> init.KubernetesComponent 2, // 0: init.InitRequest.kubernetes_components:type_name -> init.KubernetesComponent
0, // 1: init.API.Init:input_type -> init.InitRequest 0, // 1: init.API.Init:input_type -> init.InitRequest
1, // 2: init.API.Init:output_type -> init.InitResponse 1, // 2: init.API.Init:output_type -> init.InitResponse
@ -357,13 +354,13 @@ var file_init_proto_depIdxs = []int32{
0, // [0:1] is the sub-list for field type_name 0, // [0:1] is the sub-list for field type_name
} }
func init() { file_init_proto_init() } func init() { file_bootstrapper_initproto_init_proto_init() }
func file_init_proto_init() { func file_bootstrapper_initproto_init_proto_init() {
if File_init_proto != nil { if File_bootstrapper_initproto_init_proto != nil {
return return
} }
if !protoimpl.UnsafeEnabled { if !protoimpl.UnsafeEnabled {
file_init_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { file_bootstrapper_initproto_init_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InitRequest); i { switch v := v.(*InitRequest); i {
case 0: case 0:
return &v.state return &v.state
@ -375,7 +372,7 @@ func file_init_proto_init() {
return nil return nil
} }
} }
file_init_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { file_bootstrapper_initproto_init_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*InitResponse); i { switch v := v.(*InitResponse); i {
case 0: case 0:
return &v.state return &v.state
@ -387,7 +384,7 @@ func file_init_proto_init() {
return nil return nil
} }
} }
file_init_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { file_bootstrapper_initproto_init_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*KubernetesComponent); i { switch v := v.(*KubernetesComponent); i {
case 0: case 0:
return &v.state return &v.state
@ -404,18 +401,98 @@ func file_init_proto_init() {
out := protoimpl.TypeBuilder{ out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_init_proto_rawDesc, RawDescriptor: file_bootstrapper_initproto_init_proto_rawDesc,
NumEnums: 0, NumEnums: 0,
NumMessages: 3, NumMessages: 3,
NumExtensions: 0, NumExtensions: 0,
NumServices: 1, NumServices: 1,
}, },
GoTypes: file_init_proto_goTypes, GoTypes: file_bootstrapper_initproto_init_proto_goTypes,
DependencyIndexes: file_init_proto_depIdxs, DependencyIndexes: file_bootstrapper_initproto_init_proto_depIdxs,
MessageInfos: file_init_proto_msgTypes, MessageInfos: file_bootstrapper_initproto_init_proto_msgTypes,
}.Build() }.Build()
File_init_proto = out.File File_bootstrapper_initproto_init_proto = out.File
file_init_proto_rawDesc = nil file_bootstrapper_initproto_init_proto_rawDesc = nil
file_init_proto_goTypes = nil file_bootstrapper_initproto_init_proto_goTypes = nil
file_init_proto_depIdxs = nil file_bootstrapper_initproto_init_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type APIClient interface {
Init(ctx context.Context, in *InitRequest, opts ...grpc.CallOption) (*InitResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) Init(ctx context.Context, in *InitRequest, opts ...grpc.CallOption) (*InitResponse, error) {
out := new(InitResponse)
err := c.cc.Invoke(ctx, "/init.API/Init", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
type APIServer interface {
Init(context.Context, *InitRequest) (*InitResponse, error)
}
// UnimplementedAPIServer can be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (*UnimplementedAPIServer) Init(context.Context, *InitRequest) (*InitResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Init not implemented")
}
func RegisterAPIServer(s *grpc.Server, srv APIServer) {
s.RegisterService(&_API_serviceDesc, srv)
}
func _API_Init_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(InitRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).Init(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/init.API/Init",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).Init(ctx, req.(*InitRequest))
}
return interceptor(ctx, in, info, handler)
}
var _API_serviceDesc = grpc.ServiceDesc{
ServiceName: "init.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Init",
Handler: _API_Init_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "bootstrapper/initproto/init.proto",
} }
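
The regenerated file above still exposes a single unary Init RPC on the init.API service, now registered through RegisterAPIServer against a *grpc.Server. As a minimal, illustrative sketch (the server struct, listener address, and log line are assumptions, not code from the repository), an implementation of the generated APIServer interface could be wired up like this:

```go
// Minimal sketch using only symbols visible in the generated code above;
// initServer, the address, and the log output are placeholders.
package main

import (
	"context"
	"log"
	"net"

	"google.golang.org/grpc"

	"github.com/edgelesssys/constellation/v2/bootstrapper/initproto"
)

// initServer embeds UnimplementedAPIServer so unimplemented methods
// return codes.Unimplemented instead of breaking compilation.
type initServer struct {
	initproto.UnimplementedAPIServer
}

// Init handles the single unary RPC of the init.API service.
func (s *initServer) Init(ctx context.Context, req *initproto.InitRequest) (*initproto.InitResponse, error) {
	log.Printf("init requested, KMS URI: %q", req.GetKmsUri())
	return &initproto.InitResponse{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":9000") // placeholder address
	if err != nil {
		log.Fatal(err)
	}
	srv := grpc.NewServer()
	initproto.RegisterAPIServer(srv, &initServer{})
	if err := srv.Serve(lis); err != nil {
		log.Fatal(err)
	}
}
```

Embedding UnimplementedAPIServer keeps the implementation forward compatible when methods are added, matching the comment in the generated code above.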

View File

@ -1,105 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: init.proto
package initproto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type APIClient interface {
Init(ctx context.Context, in *InitRequest, opts ...grpc.CallOption) (*InitResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) Init(ctx context.Context, in *InitRequest, opts ...grpc.CallOption) (*InitResponse, error) {
out := new(InitResponse)
err := c.cc.Invoke(ctx, "/init.API/Init", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
// All implementations must embed UnimplementedAPIServer
// for forward compatibility
type APIServer interface {
Init(context.Context, *InitRequest) (*InitResponse, error)
mustEmbedUnimplementedAPIServer()
}
// UnimplementedAPIServer must be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (UnimplementedAPIServer) Init(context.Context, *InitRequest) (*InitResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Init not implemented")
}
func (UnimplementedAPIServer) mustEmbedUnimplementedAPIServer() {}
// UnsafeAPIServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to APIServer will
// result in compilation errors.
type UnsafeAPIServer interface {
mustEmbedUnimplementedAPIServer()
}
func RegisterAPIServer(s grpc.ServiceRegistrar, srv APIServer) {
s.RegisterService(&API_ServiceDesc, srv)
}
func _API_Init_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(InitRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).Init(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/init.API/Init",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).Init(ctx, req.(*InitRequest))
}
return interceptor(ctx, in, info, handler)
}
// API_ServiceDesc is the grpc.ServiceDesc for API service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var API_ServiceDesc = grpc.ServiceDesc{
ServiceName: "init.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Init",
Handler: _API_Init_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "init.proto",
}

View File

@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "service_proto", name = "service_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/debugd/service", importpath = "github.com/edgelesssys/constellation/v2/debugd/service",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "debugd.pb.go",
go_proto_library = ":service_go_proto",
visibility = ["//visibility:public"],
)

File diff suppressed because it is too large

View File

@ -1,311 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: debugd.proto
package service
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// DebugdClient is the client API for Debugd service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type DebugdClient interface {
SetInfo(ctx context.Context, in *SetInfoRequest, opts ...grpc.CallOption) (*SetInfoResponse, error)
GetInfo(ctx context.Context, in *GetInfoRequest, opts ...grpc.CallOption) (*GetInfoResponse, error)
UploadFiles(ctx context.Context, opts ...grpc.CallOption) (Debugd_UploadFilesClient, error)
DownloadFiles(ctx context.Context, in *DownloadFilesRequest, opts ...grpc.CallOption) (Debugd_DownloadFilesClient, error)
UploadSystemServiceUnits(ctx context.Context, in *UploadSystemdServiceUnitsRequest, opts ...grpc.CallOption) (*UploadSystemdServiceUnitsResponse, error)
}
type debugdClient struct {
cc grpc.ClientConnInterface
}
func NewDebugdClient(cc grpc.ClientConnInterface) DebugdClient {
return &debugdClient{cc}
}
func (c *debugdClient) SetInfo(ctx context.Context, in *SetInfoRequest, opts ...grpc.CallOption) (*SetInfoResponse, error) {
out := new(SetInfoResponse)
err := c.cc.Invoke(ctx, "/debugd.Debugd/SetInfo", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *debugdClient) GetInfo(ctx context.Context, in *GetInfoRequest, opts ...grpc.CallOption) (*GetInfoResponse, error) {
out := new(GetInfoResponse)
err := c.cc.Invoke(ctx, "/debugd.Debugd/GetInfo", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *debugdClient) UploadFiles(ctx context.Context, opts ...grpc.CallOption) (Debugd_UploadFilesClient, error) {
stream, err := c.cc.NewStream(ctx, &Debugd_ServiceDesc.Streams[0], "/debugd.Debugd/UploadFiles", opts...)
if err != nil {
return nil, err
}
x := &debugdUploadFilesClient{stream}
return x, nil
}
type Debugd_UploadFilesClient interface {
Send(*FileTransferMessage) error
CloseAndRecv() (*UploadFilesResponse, error)
grpc.ClientStream
}
type debugdUploadFilesClient struct {
grpc.ClientStream
}
func (x *debugdUploadFilesClient) Send(m *FileTransferMessage) error {
return x.ClientStream.SendMsg(m)
}
func (x *debugdUploadFilesClient) CloseAndRecv() (*UploadFilesResponse, error) {
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
m := new(UploadFilesResponse)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func (c *debugdClient) DownloadFiles(ctx context.Context, in *DownloadFilesRequest, opts ...grpc.CallOption) (Debugd_DownloadFilesClient, error) {
stream, err := c.cc.NewStream(ctx, &Debugd_ServiceDesc.Streams[1], "/debugd.Debugd/DownloadFiles", opts...)
if err != nil {
return nil, err
}
x := &debugdDownloadFilesClient{stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
return x, nil
}
type Debugd_DownloadFilesClient interface {
Recv() (*FileTransferMessage, error)
grpc.ClientStream
}
type debugdDownloadFilesClient struct {
grpc.ClientStream
}
func (x *debugdDownloadFilesClient) Recv() (*FileTransferMessage, error) {
m := new(FileTransferMessage)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func (c *debugdClient) UploadSystemServiceUnits(ctx context.Context, in *UploadSystemdServiceUnitsRequest, opts ...grpc.CallOption) (*UploadSystemdServiceUnitsResponse, error) {
out := new(UploadSystemdServiceUnitsResponse)
err := c.cc.Invoke(ctx, "/debugd.Debugd/UploadSystemServiceUnits", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// DebugdServer is the server API for Debugd service.
// All implementations must embed UnimplementedDebugdServer
// for forward compatibility
type DebugdServer interface {
SetInfo(context.Context, *SetInfoRequest) (*SetInfoResponse, error)
GetInfo(context.Context, *GetInfoRequest) (*GetInfoResponse, error)
UploadFiles(Debugd_UploadFilesServer) error
DownloadFiles(*DownloadFilesRequest, Debugd_DownloadFilesServer) error
UploadSystemServiceUnits(context.Context, *UploadSystemdServiceUnitsRequest) (*UploadSystemdServiceUnitsResponse, error)
mustEmbedUnimplementedDebugdServer()
}
// UnimplementedDebugdServer must be embedded to have forward compatible implementations.
type UnimplementedDebugdServer struct {
}
func (UnimplementedDebugdServer) SetInfo(context.Context, *SetInfoRequest) (*SetInfoResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method SetInfo not implemented")
}
func (UnimplementedDebugdServer) GetInfo(context.Context, *GetInfoRequest) (*GetInfoResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetInfo not implemented")
}
func (UnimplementedDebugdServer) UploadFiles(Debugd_UploadFilesServer) error {
return status.Errorf(codes.Unimplemented, "method UploadFiles not implemented")
}
func (UnimplementedDebugdServer) DownloadFiles(*DownloadFilesRequest, Debugd_DownloadFilesServer) error {
return status.Errorf(codes.Unimplemented, "method DownloadFiles not implemented")
}
func (UnimplementedDebugdServer) UploadSystemServiceUnits(context.Context, *UploadSystemdServiceUnitsRequest) (*UploadSystemdServiceUnitsResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method UploadSystemServiceUnits not implemented")
}
func (UnimplementedDebugdServer) mustEmbedUnimplementedDebugdServer() {}
// UnsafeDebugdServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to DebugdServer will
// result in compilation errors.
type UnsafeDebugdServer interface {
mustEmbedUnimplementedDebugdServer()
}
func RegisterDebugdServer(s grpc.ServiceRegistrar, srv DebugdServer) {
s.RegisterService(&Debugd_ServiceDesc, srv)
}
func _Debugd_SetInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(SetInfoRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DebugdServer).SetInfo(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/debugd.Debugd/SetInfo",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DebugdServer).SetInfo(ctx, req.(*SetInfoRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Debugd_GetInfo_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetInfoRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DebugdServer).GetInfo(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/debugd.Debugd/GetInfo",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DebugdServer).GetInfo(ctx, req.(*GetInfoRequest))
}
return interceptor(ctx, in, info, handler)
}
func _Debugd_UploadFiles_Handler(srv interface{}, stream grpc.ServerStream) error {
return srv.(DebugdServer).UploadFiles(&debugdUploadFilesServer{stream})
}
type Debugd_UploadFilesServer interface {
SendAndClose(*UploadFilesResponse) error
Recv() (*FileTransferMessage, error)
grpc.ServerStream
}
type debugdUploadFilesServer struct {
grpc.ServerStream
}
func (x *debugdUploadFilesServer) SendAndClose(m *UploadFilesResponse) error {
return x.ServerStream.SendMsg(m)
}
func (x *debugdUploadFilesServer) Recv() (*FileTransferMessage, error) {
m := new(FileTransferMessage)
if err := x.ServerStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
func _Debugd_DownloadFiles_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(DownloadFilesRequest)
if err := stream.RecvMsg(m); err != nil {
return err
}
return srv.(DebugdServer).DownloadFiles(m, &debugdDownloadFilesServer{stream})
}
type Debugd_DownloadFilesServer interface {
Send(*FileTransferMessage) error
grpc.ServerStream
}
type debugdDownloadFilesServer struct {
grpc.ServerStream
}
func (x *debugdDownloadFilesServer) Send(m *FileTransferMessage) error {
return x.ServerStream.SendMsg(m)
}
func _Debugd_UploadSystemServiceUnits_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(UploadSystemdServiceUnitsRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DebugdServer).UploadSystemServiceUnits(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/debugd.Debugd/UploadSystemServiceUnits",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DebugdServer).UploadSystemServiceUnits(ctx, req.(*UploadSystemdServiceUnitsRequest))
}
return interceptor(ctx, in, info, handler)
}
// Debugd_ServiceDesc is the grpc.ServiceDesc for Debugd service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Debugd_ServiceDesc = grpc.ServiceDesc{
ServiceName: "debugd.Debugd",
HandlerType: (*DebugdServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "SetInfo",
Handler: _Debugd_SetInfo_Handler,
},
{
MethodName: "GetInfo",
Handler: _Debugd_GetInfo_Handler,
},
{
MethodName: "UploadSystemServiceUnits",
Handler: _Debugd_UploadSystemServiceUnits_Handler,
},
},
Streams: []grpc.StreamDesc{
{
StreamName: "UploadFiles",
Handler: _Debugd_UploadFiles_Handler,
ClientStreams: true,
},
{
StreamName: "DownloadFiles",
Handler: _Debugd_DownloadFiles_Handler,
ServerStreams: true,
},
},
Metadata: "debugd.proto",
}

View File

@ -47,12 +47,6 @@ This projects uses [golangci-lint](https://golangci-lint.run/) for linting.
You can [install golangci-lint](https://golangci-lint.run/usage/install/#linux-and-windows) locally, You can [install golangci-lint](https://golangci-lint.run/usage/install/#linux-and-windows) locally,
but there is also a CI action to ensure compliance. but there is also a CI action to ensure compliance.
To locally run all configured linters, execute
```sh
golangci-lint run ./...
```
It is also recommended to use golangci-lint (and [gofumpt](https://github.com/mvdan/gofumpt) as formatter) in your IDE, by adding the recommended VS Code Settings or by [configuring it yourself](https://golangci-lint.run/usage/integrations/#editor-integration) It is also recommended to use golangci-lint (and [gofumpt](https://github.com/mvdan/gofumpt) as formatter) in your IDE, by adding the recommended VS Code Settings or by [configuring it yourself](https://golangci-lint.run/usage/integrations/#editor-integration)
## Logging ## Logging

View File

@ -14,7 +14,6 @@ Development components:
* [3rdparty](/3rdparty): Contains the third party dependencies used by Constellation * [3rdparty](/3rdparty): Contains the third party dependencies used by Constellation
* [debugd](/debugd): Debug daemon and client * [debugd](/debugd): Debug daemon and client
* [hack](/hack): Development tools * [hack](/hack): Development tools
* [proto](/proto): Proto files generator
Additional repositories: Additional repositories:

View File

@ -127,6 +127,7 @@ Whenever Go code is changed, you will have to run `bazel run //:tidy` to regener
* `bazel test //...` - run all tests * `bazel test //...` - run all tests
* `bazel run //:tidy` - tidy, format and generate * `bazel run //:tidy` - tidy, format and generate
* `bazel run //:check` - execute checks and linters * `bazel run //:check` - execute checks and linters
* `bazel run //:generate` - execute code generation
## Editor integration ## Editor integration

View File

@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "recoverproto_proto", name = "recoverproto_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/disk-mapper/recoverproto", importpath = "github.com/edgelesssys/constellation/v2/disk-mapper/recoverproto",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "recover.pb.go",
go_proto_library = ":recoverproto_go_proto",
visibility = ["//visibility:public"],
)

View File

@ -1,12 +1,16 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.1 // protoc-gen-go v1.29.1
// protoc v3.21.8 // protoc v4.22.1
// source: recover.proto // source: disk-mapper/recoverproto/recover.proto
package recoverproto package recoverproto
import ( import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" reflect "reflect"
@ -25,18 +29,14 @@ type RecoverMessage struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// bytes state_disk_key = 1; removed KmsUri string `protobuf:"bytes,3,opt,name=kms_uri,json=kmsUri,proto3" json:"kms_uri,omitempty"`
// bytes measurement_secret = 2; removed
// kms_uri is the URI of the KMS the recoveryserver should use to decrypt DEKs.
KmsUri string `protobuf:"bytes,3,opt,name=kms_uri,json=kmsUri,proto3" json:"kms_uri,omitempty"`
// storage_uri is the URI of the storage location the recoveryserver should use to fetch DEKs.
StorageUri string `protobuf:"bytes,4,opt,name=storage_uri,json=storageUri,proto3" json:"storage_uri,omitempty"` StorageUri string `protobuf:"bytes,4,opt,name=storage_uri,json=storageUri,proto3" json:"storage_uri,omitempty"`
} }
func (x *RecoverMessage) Reset() { func (x *RecoverMessage) Reset() {
*x = RecoverMessage{} *x = RecoverMessage{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_recover_proto_msgTypes[0] mi := &file_disk_mapper_recoverproto_recover_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -49,7 +49,7 @@ func (x *RecoverMessage) String() string {
func (*RecoverMessage) ProtoMessage() {} func (*RecoverMessage) ProtoMessage() {}
func (x *RecoverMessage) ProtoReflect() protoreflect.Message { func (x *RecoverMessage) ProtoReflect() protoreflect.Message {
mi := &file_recover_proto_msgTypes[0] mi := &file_disk_mapper_recoverproto_recover_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -62,7 +62,7 @@ func (x *RecoverMessage) ProtoReflect() protoreflect.Message {
// Deprecated: Use RecoverMessage.ProtoReflect.Descriptor instead. // Deprecated: Use RecoverMessage.ProtoReflect.Descriptor instead.
func (*RecoverMessage) Descriptor() ([]byte, []int) { func (*RecoverMessage) Descriptor() ([]byte, []int) {
return file_recover_proto_rawDescGZIP(), []int{0} return file_disk_mapper_recoverproto_recover_proto_rawDescGZIP(), []int{0}
} }
func (x *RecoverMessage) GetKmsUri() string { func (x *RecoverMessage) GetKmsUri() string {
@ -88,7 +88,7 @@ type RecoverResponse struct {
func (x *RecoverResponse) Reset() { func (x *RecoverResponse) Reset() {
*x = RecoverResponse{} *x = RecoverResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_recover_proto_msgTypes[1] mi := &file_disk_mapper_recoverproto_recover_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -101,7 +101,7 @@ func (x *RecoverResponse) String() string {
func (*RecoverResponse) ProtoMessage() {} func (*RecoverResponse) ProtoMessage() {}
func (x *RecoverResponse) ProtoReflect() protoreflect.Message { func (x *RecoverResponse) ProtoReflect() protoreflect.Message {
mi := &file_recover_proto_msgTypes[1] mi := &file_disk_mapper_recoverproto_recover_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -114,50 +114,52 @@ func (x *RecoverResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use RecoverResponse.ProtoReflect.Descriptor instead. // Deprecated: Use RecoverResponse.ProtoReflect.Descriptor instead.
func (*RecoverResponse) Descriptor() ([]byte, []int) { func (*RecoverResponse) Descriptor() ([]byte, []int) {
return file_recover_proto_rawDescGZIP(), []int{1} return file_disk_mapper_recoverproto_recover_proto_rawDescGZIP(), []int{1}
} }
var File_recover_proto protoreflect.FileDescriptor var File_disk_mapper_recoverproto_recover_proto protoreflect.FileDescriptor
var file_recover_proto_rawDesc = []byte{ var file_disk_mapper_recoverproto_recover_proto_rawDesc = []byte{
0x0a, 0x0d, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x26, 0x64, 0x69, 0x73, 0x6b, 0x2d, 0x6d, 0x61, 0x70, 0x70, 0x65, 0x72, 0x2f, 0x72, 0x65,
0x0c, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4a, 0x0a, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x72, 0x65, 0x63, 0x6f, 0x76,
0x0e, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x65, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0c, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65,
0x17, 0x0a, 0x07, 0x6b, 0x6d, 0x73, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x4a, 0x0a, 0x0e, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65,
0x52, 0x06, 0x6b, 0x6d, 0x73, 0x55, 0x72, 0x69, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x6f, 0x72, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x12, 0x17, 0x0a, 0x07, 0x6b, 0x6d, 0x73, 0x5f,
0x61, 0x67, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x75, 0x72, 0x69, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x6b, 0x6d, 0x73, 0x55, 0x72,
0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x55, 0x72, 0x69, 0x22, 0x11, 0x0a, 0x0f, 0x52, 0x65, 0x63, 0x69, 0x12, 0x1f, 0x0a, 0x0b, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x5f, 0x75, 0x72, 0x69,
0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x4f, 0x0a, 0x03, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x55,
0x41, 0x50, 0x49, 0x12, 0x48, 0x0a, 0x07, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x12, 0x1c, 0x72, 0x69, 0x22, 0x11, 0x0a, 0x0f, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x73,
0x2e, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x4f, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x48, 0x0a, 0x07,
0x63, 0x6f, 0x76, 0x65, 0x72, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x1a, 0x1d, 0x2e, 0x72, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x12, 0x1c, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65,
0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x4d, 0x65,
0x76, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x42, 0x5a, 0x73, 0x73, 0x61, 0x67, 0x65, 0x1a, 0x1d, 0x2e, 0x72, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x52, 0x65, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x52, 0x65, 0x73, 0x70,
0x6f, 0x6e, 0x73, 0x65, 0x22, 0x00, 0x42, 0x42, 0x5a, 0x40, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73,
0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76,
0x32, 0x2f, 0x64, 0x69, 0x73, 0x6b, 0x2d, 0x6d, 0x61, 0x70, 0x70, 0x65, 0x72, 0x2f, 0x72, 0x65,
0x63, 0x6f, 0x76, 0x65, 0x72, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
}
var (
file_disk_mapper_recoverproto_recover_proto_rawDescOnce sync.Once
file_disk_mapper_recoverproto_recover_proto_rawDescData = file_disk_mapper_recoverproto_recover_proto_rawDesc
)
func file_disk_mapper_recoverproto_recover_proto_rawDescGZIP() []byte {
file_disk_mapper_recoverproto_recover_proto_rawDescOnce.Do(func() {
file_disk_mapper_recoverproto_recover_proto_rawDescData = protoimpl.X.CompressGZIP(file_disk_mapper_recoverproto_recover_proto_rawDescData)
})
return file_disk_mapper_recoverproto_recover_proto_rawDescData
}
var file_disk_mapper_recoverproto_recover_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_disk_mapper_recoverproto_recover_proto_goTypes = []interface{}{
(*RecoverMessage)(nil), // 0: recoverproto.RecoverMessage
(*RecoverResponse)(nil), // 1: recoverproto.RecoverResponse
}
var file_disk_mapper_recoverproto_recover_proto_depIdxs = []int32{
0, // 0: recoverproto.API.Recover:input_type -> recoverproto.RecoverMessage
1, // 1: recoverproto.API.Recover:output_type -> recoverproto.RecoverResponse
1, // [1:2] is the sub-list for method output_type
@ -167,13 +169,13 @@ var file_recover_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for field type_name
}
func init() { file_disk_mapper_recoverproto_recover_proto_init() }
func file_disk_mapper_recoverproto_recover_proto_init() {
if File_disk_mapper_recoverproto_recover_proto != nil {
return
}
if !protoimpl.UnsafeEnabled {
file_disk_mapper_recoverproto_recover_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*RecoverMessage); i {
case 0:
return &v.state
@ -185,7 +187,7 @@ func file_recover_proto_init() {
return nil
}
}
file_disk_mapper_recoverproto_recover_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*RecoverResponse); i {
case 0:
return &v.state
@ -202,18 +204,98 @@ func file_recover_proto_init() {
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_disk_mapper_recoverproto_recover_proto_rawDesc,
NumEnums: 0,
NumMessages: 2,
NumExtensions: 0,
NumServices: 1,
},
GoTypes: file_disk_mapper_recoverproto_recover_proto_goTypes,
DependencyIndexes: file_disk_mapper_recoverproto_recover_proto_depIdxs,
MessageInfos: file_disk_mapper_recoverproto_recover_proto_msgTypes,
}.Build()
File_disk_mapper_recoverproto_recover_proto = out.File
file_disk_mapper_recoverproto_recover_proto_rawDesc = nil
file_disk_mapper_recoverproto_recover_proto_goTypes = nil
file_disk_mapper_recoverproto_recover_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type APIClient interface {
Recover(ctx context.Context, in *RecoverMessage, opts ...grpc.CallOption) (*RecoverResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) Recover(ctx context.Context, in *RecoverMessage, opts ...grpc.CallOption) (*RecoverResponse, error) {
out := new(RecoverResponse)
err := c.cc.Invoke(ctx, "/recoverproto.API/Recover", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
type APIServer interface {
Recover(context.Context, *RecoverMessage) (*RecoverResponse, error)
}
// UnimplementedAPIServer can be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (*UnimplementedAPIServer) Recover(context.Context, *RecoverMessage) (*RecoverResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Recover not implemented")
}
func RegisterAPIServer(s *grpc.Server, srv APIServer) {
s.RegisterService(&_API_serviceDesc, srv)
}
func _API_Recover_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(RecoverMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).Recover(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/recoverproto.API/Recover",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).Recover(ctx, req.(*RecoverMessage))
}
return interceptor(ctx, in, info, handler)
}
var _API_serviceDesc = grpc.ServiceDesc{
ServiceName: "recoverproto.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Recover",
Handler: _API_Recover_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "disk-mapper/recoverproto/recover.proto",
}

View File

@ -1,107 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: recover.proto
package recoverproto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type APIClient interface {
// Recover sends the necessary information to the recoveryserver to initiate recovery of a node.
Recover(ctx context.Context, in *RecoverMessage, opts ...grpc.CallOption) (*RecoverResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) Recover(ctx context.Context, in *RecoverMessage, opts ...grpc.CallOption) (*RecoverResponse, error) {
out := new(RecoverResponse)
err := c.cc.Invoke(ctx, "/recoverproto.API/Recover", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
// All implementations must embed UnimplementedAPIServer
// for forward compatibility
type APIServer interface {
// Recover sends the necessary information to the recoveryserver to initiate recovery of a node.
Recover(context.Context, *RecoverMessage) (*RecoverResponse, error)
mustEmbedUnimplementedAPIServer()
}
// UnimplementedAPIServer must be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (UnimplementedAPIServer) Recover(context.Context, *RecoverMessage) (*RecoverResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Recover not implemented")
}
func (UnimplementedAPIServer) mustEmbedUnimplementedAPIServer() {}
// UnsafeAPIServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to APIServer will
// result in compilation errors.
type UnsafeAPIServer interface {
mustEmbedUnimplementedAPIServer()
}
func RegisterAPIServer(s grpc.ServiceRegistrar, srv APIServer) {
s.RegisterService(&API_ServiceDesc, srv)
}
func _API_Recover_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(RecoverMessage)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).Recover(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/recoverproto.API/Recover",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).Recover(ctx, req.(*RecoverMessage))
}
return interceptor(ctx, in, info, handler)
}
// API_ServiceDesc is the grpc.ServiceDesc for API service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var API_ServiceDesc = grpc.ServiceDesc{
ServiceName: "recoverproto.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "Recover",
Handler: _API_Recover_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "recover.proto",
}

View File

@ -115,17 +115,39 @@ The following [resource providers need to be registered](https://learn.microsoft
* `Microsoft.Compute`
* `Microsoft.ManagedIdentity`
* `Microsoft.Network`
* `Microsoft.Insights`
* `Microsoft.Attestation` \[2]
By default, Constellation tries to register these automatically if they haven't been registered before.
To [create the IAM configuration](../workflows/config.md#creating-an-iam-configuration) for Constellation, you need the following permissions:
* `Microsoft.Authorization/roleDefinitions/*`
* `Microsoft.Authorization/roleAssignments/*`
* `*/register/action` \[1]
* `Microsoft.ManagedIdentity/userAssignedIdentities/*`
* `Microsoft.Resources/subscriptions/resourcegroups/*`
The built-in `Owner` role is a superset of these permissions.
To [create a Constellation cluster](../workflows/create.md#the-create-step), you need the following permissions:
* `Microsoft.Insights/components/*`
* `Microsoft.Network/publicIPAddresses/*`
* `Microsoft.Network/virtualNetworks/*`
* `Microsoft.Network/loadBalancers/*`
* `Microsoft.Network/networkSecurityGroups/*`
* `Microsoft.Network/loadBalancers/backendAddressPools/*`
* `Microsoft.Network/virtualNetworks/subnets/*`
* `Microsoft.Compute/virtualMachineScaleSets/*`
* `Microsoft.ManagedIdentity/userAssignedIdentities/*`
* `Microsoft.Attestation/attestationProviders/*` \[2]
The built-in `Contributor` role is a superset of these permissions.
Follow Microsoft's guide on [understanding](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-definitions) and [assigning roles](https://learn.microsoft.com/en-us/azure/role-based-access-control/role-assignments).
1: You can omit `*/register/Action` if the resource providers mentioned above are already registered and the `ARM_SKIP_PROVIDER_REGISTRATION` environment variable is set to `true` when creating the IAM configuration.
2: You can omit `Microsoft.Attestation/attestationProviders/*` and the registration of `Microsoft.Attestation` if `EnforceIDKeyDigest` isn't set to `MAAFallback` in the [config file](../workflows/config.md#configure-your-cluster).
</tabItem>
<tabItem value="gcp" label="GCP">
@ -133,10 +155,78 @@ Your user account needs the `Contributor` permission scoped to this resource gro
Create a new project for Constellation or use an existing one.
Enable the [Compute Engine API](https://console.cloud.google.com/apis/library/compute.googleapis.com) on it.
To [create the IAM configuration](../workflows/config.md#creating-an-iam-configuration) for Constellation, you need the following permissions:
* `iam.serviceAccountKeys.create`
* `iam.serviceAccountKeys.delete`
* `iam.serviceAccountKeys.get`
* `iam.serviceAccounts.create`
* `iam.serviceAccounts.delete`
* `iam.serviceAccounts.get`
* `resourcemanager.projects.getIamPolicy`
* `resourcemanager.projects.setIamPolicy`
Together, the built-in roles `roles/editor` and `roles/resourcemanager.projectIamAdmin` form a superset of these permissions.
To [create a Constellation cluster](../workflows/create.md#the-create-step), you need the following permissions:
* `compute.addresses.createInternal`
* `compute.addresses.deleteInternal`
* `compute.addresses.get`
* `compute.addresses.useInternal`
* `compute.backendServices.create`
* `compute.backendServices.delete`
* `compute.backendServices.get`
* `compute.backendServices.use`
* `compute.disks.create`
* `compute.firewalls.create`
* `compute.firewalls.delete`
* `compute.firewalls.get`
* `compute.globalAddresses.create`
* `compute.globalAddresses.delete`
* `compute.globalAddresses.get`
* `compute.globalAddresses.use`
* `compute.globalForwardingRules.create`
* `compute.globalForwardingRules.delete`
* `compute.globalForwardingRules.get`
* `compute.globalForwardingRules.setLabels`
* `compute.globalOperations.get`
* `compute.healthChecks.create`
* `compute.healthChecks.delete`
* `compute.healthChecks.get`
* `compute.healthChecks.useReadOnly`
* `compute.instanceGroupManagers.create`
* `compute.instanceGroupManagers.delete`
* `compute.instanceGroupManagers.get`
* `compute.instanceGroups.create`
* `compute.instanceGroups.delete`
* `compute.instanceGroups.get`
* `compute.instanceGroups.use`
* `compute.instanceTemplates.create`
* `compute.instanceTemplates.delete`
* `compute.instanceTemplates.get`
* `compute.instanceTemplates.useReadOnly`
* `compute.instances.create`
* `compute.instances.setLabels`
* `compute.instances.setMetadata`
* `compute.instances.setTags`
* `compute.networks.create`
* `compute.networks.delete`
* `compute.networks.get`
* `compute.networks.updatePolicy`
* `compute.routers.create`
* `compute.routers.delete`
* `compute.routers.get`
* `compute.routers.update`
* `compute.subnetworks.create`
* `compute.subnetworks.delete`
* `compute.subnetworks.get`
* `compute.subnetworks.use`
* `compute.targetTcpProxies.create`
* `compute.targetTcpProxies.delete`
* `compute.targetTcpProxies.get`
* `compute.targetTcpProxies.use`
* `iam.serviceAccounts.actAs`
Together, the built-in roles `roles/editor`, `roles/compute.instanceAdmin` and `roles/resourcemanager.projectIamAdmin` form a superset of these permissions.
Follow Google's guide on [understanding](https://cloud.google.com/iam/docs/understanding-roles) and [assigning roles](https://cloud.google.com/iam/docs/granting-changing-revoking-access).
@ -145,7 +235,7 @@ Follow Google's guide on [understanding](https://cloud.google.com/iam/docs/under
To set up a Constellation cluster, you need to perform two tasks that require permissions: create the infrastructure and create roles for cluster nodes. Both of these actions can be performed by different users, e.g., an administrator to create roles and a DevOps engineer to create the infrastructure.
To [create the IAM configuration](../workflows/config.md#creating-an-iam-configuration) for Constellation, you need the following permissions:
```json
{
@ -182,8 +272,9 @@ To create the AWS IAM policies, your user requires the following minimal set of
}
```
The built-in `AdministratorAccess` policy is a superset of these permissions.
To [create a Constellation cluster](../workflows/create.md#the-create-step), you need the following permissions:
```json
{
@ -269,6 +360,8 @@ such as `PowerUserAccess`, or use the following minimal set of permissions:
}
```
The built-in `PowerUserAccess` policy is a superset of these permissions.
Follow Amazon's guide on [understanding](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies.html) and [managing policies](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_policies_managed-vs-inline.html).
</tabItem>

View File

@ -82,7 +82,7 @@ If you haven't generated a configuration file yet, you can do so by adding the `
<tabs groupId="csp">
<tabItem value="azure" label="Azure">
You must be authenticated with the [Azure CLI](https://learn.microsoft.com/en-us/cli/azure/install-azure-cli) in the shell session with a user that has the [required permissions for IAM creation](../getting-started/install.md#set-up-cloud-credentials).
```bash
constellation iam create azure --region=westus --resourceGroup=constellTest --servicePrincipal=spTest
@ -105,7 +105,7 @@ Since `clientSecretValue` is a sensitive value, you can leave it empty in the co
</tabItem>
<tabItem value="gcp" label="GCP">
You must be authenticated with the [GCP CLI](https://cloud.google.com/sdk/gcloud) in the shell session with a user that has the [required permissions for IAM creation](../getting-started/install.md#set-up-cloud-credentials).
```bash
constellation iam create gcp --projectID=yourproject-12345 --zone=europe-west2-a --serviceAccountID=constell-test
@ -120,7 +120,7 @@ Paste the output into the corresponding fields of the `constellation-conf.yaml`
</tabItem>
<tabItem value="aws" label="AWS">
You must be authenticated with the [AWS CLI](https://aws.amazon.com/en/cli/) in the shell session with a user that has the [required permissions for IAM creation](../getting-started/install.md#set-up-cloud-credentials).
```bash
constellation iam create aws --zone=eu-central-1a --prefix=constellTest

View File

@ -55,3 +55,4 @@ whitepaper
WireGuard
Xeon
xsltproc
[Ss]uperset

View File

@ -0,0 +1,28 @@
load("@io_bazel_rules_go//go:def.bzl", "go_binary", "go_library")
go_library(
name = "bazel-deps-mirror_lib",
srcs = [
"bazel-deps-mirror.go",
"check.go",
"fix.go",
],
importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror",
visibility = ["//visibility:private"],
deps = [
"//hack/bazel-deps-mirror/internal/bazelfiles",
"//hack/bazel-deps-mirror/internal/issues",
"//hack/bazel-deps-mirror/internal/mirror",
"//hack/bazel-deps-mirror/internal/rules",
"//internal/logger",
"@com_github_bazelbuild_buildtools//build",
"@com_github_spf13_cobra//:cobra",
"@org_uber_go_zap//zapcore",
],
)
go_binary(
name = "bazel-deps-mirror",
embed = [":bazel-deps-mirror_lib"],
visibility = ["//visibility:public"],
)

View File

@ -0,0 +1,9 @@
# Bazel deps mirror
This directory contains tooling to automatically mirror the dependencies of a Bazel project into the Constellation CDN at `https://cdn.confidential.cloud/`.
The tool searches for various rules in the WORKSPACE.bazel file and all loaded .bzl files.
It has the following commands:
- check: checks if the dependencies all have a mirror URL and optionally checks if the mirror really returns the expected file
- fix: mirrors all dependencies that don't have a mirror URL yet and normalizes the `urls` attribute of rules
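Both commands read the `BUILD_WORKSPACE_DIRECTORY` environment variable to locate the workspace root, and Bazel only sets that variable for `bazel run`, so the tool is intended to be invoked through `bazel run` (for example `bazel run //hack/bazel-deps-mirror -- check`; the exact target label is an assumption based on the `go_binary` defined in this directory's BUILD file).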

View File

@ -0,0 +1,82 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// bazel-deps-mirror adds external dependencies to Edgeless Systems' mirror.
package main
import (
"context"
"fmt"
"os"
"os/signal"
"github.com/spf13/cobra"
)
func main() {
if err := execute(); err != nil {
os.Exit(1)
}
}
func execute() error {
rootCmd := newRootCmd()
ctx, cancel := signalContext(context.Background(), os.Interrupt)
defer cancel()
return rootCmd.ExecuteContext(ctx)
}
func newRootCmd() *cobra.Command {
rootCmd := &cobra.Command{
Use: "bazel-deps-mirror",
Short: "Add external Bazel dependencies to edgeless systems' mirror",
Long: "Add external Bazel dependencies to edgeless systems' mirror.",
PersistentPreRun: preRunRoot,
}
rootCmd.SetOut(os.Stdout)
rootCmd.PersistentFlags().Bool("verbose", false, "Enable verbose output")
rootCmd.PersistentFlags().String("region", "eu-central-1", "AWS region of the API S3 bucket")
rootCmd.PersistentFlags().String("bucket", "cdn-constellation-backend", "S3 bucket name of the API")
rootCmd.PersistentFlags().String("mirror-base-url", "https://cdn.confidential.cloud", "Base URL of the public mirror endpoint")
rootCmd.AddCommand(newCheckCmd())
rootCmd.AddCommand(newFixCmd())
return rootCmd
}
// signalContext returns a context that is canceled on the handed signal.
// The signal isn't watched after its first occurrence. Call the cancel
// function to ensure the internal goroutine is stopped and the signal isn't
// watched any longer.
func signalContext(ctx context.Context, sig os.Signal) (context.Context, context.CancelFunc) {
sigCtx, stop := signal.NotifyContext(ctx, sig)
done := make(chan struct{}, 1)
stopDone := make(chan struct{}, 1)
go func() {
defer func() { stopDone <- struct{}{} }()
defer stop()
select {
case <-sigCtx.Done():
fmt.Println(" Signal caught. Press ctrl+c again to terminate the program immediately.")
case <-done:
}
}()
cancelFunc := func() {
done <- struct{}{}
<-stopDone
}
return sigCtx, cancelFunc
}
func preRunRoot(cmd *cobra.Command, _ []string) {
cmd.SilenceUsage = true
}
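As a minimal sketch of how `signalContext` is meant to be used (this mirrors `execute` above; `doWork` is a hypothetical long-running task, not part of the tool):
```go
// Sketch only, same package as signalContext above.
// The first Ctrl+C cancels ctx so doWork can shut down gracefully; because the
// signal is no longer watched afterwards, a second Ctrl+C terminates the
// process through Go's default signal handling.
func runWithInterrupt(doWork func(context.Context) error) error {
	ctx, cancel := signalContext(context.Background(), os.Interrupt)
	defer cancel() // stops the watcher goroutine if doWork returns first
	return doWork(ctx)
}
```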

View File

@ -0,0 +1,184 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package main
import (
"context"
"errors"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules"
"github.com/edgelesssys/constellation/v2/internal/logger"
"github.com/spf13/cobra"
"go.uber.org/zap/zapcore"
)
func newCheckCmd() *cobra.Command {
cmd := &cobra.Command{
Use: "check",
Short: "Check if all Bazel dependencies are mirrored and the corresponding rules are properly formatted.",
RunE: runCheck,
}
cmd.Flags().Bool("mirror", false, "Performs authenticated checks to validate if all referenced CAS objects are still consistent within the mirror.")
cmd.Flags().Bool("mirror-unauthenticated", false, "Performs unauthenticated, slow checks to validate if all referenced CAS objects are still consistent within the mirror. Doesn't require authentication to the mirror but may be inefficient.")
cmd.MarkFlagsMutuallyExclusive("mirror", "mirror-unauthenticated")
return cmd
}
func runCheck(cmd *cobra.Command, _ []string) error {
flags, err := parseCheckFlags(cmd)
if err != nil {
return err
}
log := logger.New(logger.PlainLog, flags.logLevel)
log.Debugf("Parsed flags: %+v", flags)
filesHelper, err := bazelfiles.New()
if err != nil {
return err
}
log.Debugf("Searching for Bazel files in the current WORKSPACE and all subdirectories...")
bazelFiles, err := filesHelper.FindFiles()
if err != nil {
return err
}
var mirrorCheck mirrorChecker
switch {
case flags.mirrorUnauthenticated:
log.Debugf("Checking consistency of all referenced CAS objects without authentication.")
mirrorCheck = mirror.NewUnauthenticated(flags.mirrorBaseURL, mirror.Run, log)
case flags.mirror:
log.Debugf("Checking consistency of all referenced CAS objects using AWS S3.")
mirrorCheck, err = mirror.New(cmd.Context(), flags.region, flags.bucket, flags.mirrorBaseURL, mirror.Run, log)
if err != nil {
return err
}
default:
mirrorCheck = &noOpMirrorChecker{}
}
iss := issues.New()
for _, bazelFile := range bazelFiles {
issByFile, err := checkBazelFile(cmd.Context(), filesHelper, mirrorCheck, bazelFile, log)
if err != nil {
return err
}
if len(issByFile) > 0 {
iss.Set(bazelFile.AbsPath, issByFile)
}
}
if len(iss) > 0 {
log.Infof("Found issues in rules")
iss.Report(cmd.OutOrStdout())
return errors.New("found issues in rules")
}
log.Infof("No issues found 🦭")
return nil
}
func checkBazelFile(ctx context.Context, fileHelper *bazelfiles.Helper, mirrorCheck mirrorChecker, bazelFile bazelfiles.BazelFile, log *logger.Logger) (issByFile issues.ByFile, err error) {
log.Debugf("Checking file: %s", bazelFile.RelPath)
issByFile = issues.NewByFile()
buildfile, err := fileHelper.LoadFile(bazelFile)
if err != nil {
return nil, err
}
found := rules.Rules(buildfile, rules.SupportedRules)
if len(found) == 0 {
log.Debugf("No rules found in file: %s", bazelFile.RelPath)
return
}
log.Debugf("Found %d rules in file: %s", len(found), bazelFile.RelPath)
for _, rule := range found {
log.Debugf("Checking rule: %s", rule.Name())
// check if the rule is a valid pinned dependency rule (has all required attributes)
if issues := rules.ValidatePinned(rule); len(issues) > 0 {
issByFile.Add(rule.Name(), issues...)
continue
}
// check if the rule is a valid mirror rule
if issues := rules.Check(rule); len(issues) > 0 {
issByFile.Add(rule.Name(), issues...)
}
// check if the referenced CAS object is still consistent
// may be a no-op if neither --mirror nor --mirror-unauthenticated is set
expectedHash, expectedHashErr := rules.GetHash(rule)
if expectedHashErr == nil && rules.HasMirrorURL(rule) {
if issue := mirrorCheck.Check(ctx, expectedHash); issue != nil {
issByFile.Add(rule.Name(), issue)
}
}
}
return
}
type checkFlags struct {
mirrorUnauthenticated bool
mirror bool
region string
bucket string
mirrorBaseURL string
logLevel zapcore.Level
}
func parseCheckFlags(cmd *cobra.Command) (checkFlags, error) {
mirrorUnauthenticated, err := cmd.Flags().GetBool("mirror-unauthenticated")
if err != nil {
return checkFlags{}, err
}
mirror, err := cmd.Flags().GetBool("mirror")
if err != nil {
return checkFlags{}, err
}
verbose, err := cmd.Flags().GetBool("verbose")
if err != nil {
return checkFlags{}, err
}
logLevel := zapcore.InfoLevel
if verbose {
logLevel = zapcore.DebugLevel
}
region, err := cmd.Flags().GetString("region")
if err != nil {
return checkFlags{}, err
}
bucket, err := cmd.Flags().GetString("bucket")
if err != nil {
return checkFlags{}, err
}
mirrorBaseURL, err := cmd.Flags().GetString("mirror-base-url")
if err != nil {
return checkFlags{}, err
}
return checkFlags{
mirrorUnauthenticated: mirrorUnauthenticated,
mirror: mirror,
region: region,
bucket: bucket,
mirrorBaseURL: mirrorBaseURL,
logLevel: logLevel,
}, nil
}
type mirrorChecker interface {
Check(ctx context.Context, expectedHash string) error
}
type noOpMirrorChecker struct{}
func (m *noOpMirrorChecker) Check(ctx context.Context, expectedHash string) error {
return nil
}

View File

@ -0,0 +1,230 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package main
import (
"context"
"errors"
"github.com/bazelbuild/buildtools/build"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror"
"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules"
"github.com/edgelesssys/constellation/v2/internal/logger"
"github.com/spf13/cobra"
"go.uber.org/zap/zapcore"
)
func newFixCmd() *cobra.Command {
cmd := &cobra.Command{
Use: "fix",
Short: "fix all Bazel dependency rules by uploading artifacts to the mirror (if needed) and formatting the rules.",
RunE: runFix,
}
cmd.Flags().Bool("unauthenticated", false, "Doesn't require authentication to the mirror but cannot upload files.")
cmd.Flags().Bool("dry-run", false, "Don't actually change files or upload anything.")
return cmd
}
func runFix(cmd *cobra.Command, _ []string) error {
flags, err := parseFixFlags(cmd)
if err != nil {
return err
}
log := logger.New(logger.PlainLog, flags.logLevel)
log.Debugf("Parsed flags: %+v", flags)
fileHelper, err := bazelfiles.New()
if err != nil {
return err
}
log.Debugf("Searching for Bazel files in the current WORKSPACE and all subdirectories...")
bazelFiles, err := fileHelper.FindFiles()
if err != nil {
return err
}
var mirrorUpload mirrorUploader
switch {
case flags.unauthenticated:
log.Warnf("Fixing rules without authentication for AWS S3. If artifacts are not yet mirrored, this will fail.")
mirrorUpload = mirror.NewUnauthenticated(flags.mirrorBaseURL, flags.dryRun, log)
default:
log.Debugf("Fixing rules with authentication for AWS S3.")
mirrorUpload, err = mirror.New(cmd.Context(), flags.region, flags.bucket, flags.mirrorBaseURL, flags.dryRun, log)
if err != nil {
return err
}
}
issues := issues.New()
for _, bazelFile := range bazelFiles {
fileIssues, err := fixBazelFile(cmd.Context(), fileHelper, mirrorUpload, bazelFile, flags.dryRun, log)
if err != nil {
return err
}
if len(fileIssues) > 0 {
issues.Set(bazelFile.AbsPath, fileIssues)
}
}
if len(issues) > 0 {
log.Warnf("Found %d unfixable issues in rules", len(issues))
issues.Report(cmd.OutOrStdout())
return errors.New("found issues in rules")
}
log.Infof("No unfixable issues found")
return nil
}
func fixBazelFile(ctx context.Context, fileHelper *bazelfiles.Helper, mirrorUpload mirrorUploader, bazelFile bazelfiles.BazelFile, dryRun bool, log *logger.Logger) (iss issues.ByFile, err error) {
iss = issues.NewByFile()
var changed bool // true if any rule in this file was changed
log.Infof("Checking file: %s", bazelFile.RelPath)
buildfile, err := fileHelper.LoadFile(bazelFile)
if err != nil {
return iss, err
}
found := rules.Rules(buildfile, rules.SupportedRules)
if len(found) == 0 {
log.Debugf("No rules found in file: %s", bazelFile.RelPath)
return iss, nil
}
log.Debugf("Found %d rules in file: %s", len(found), bazelFile.RelPath)
for _, rule := range found {
changedRule, ruleIssues := fixRule(ctx, mirrorUpload, rule, log)
if len(ruleIssues) > 0 {
iss.Add(rule.Name(), ruleIssues...)
}
changed = changed || changedRule
}
if len(iss) > 0 {
log.Warnf("File %s has issues. Not saving!", bazelFile.RelPath)
return iss, nil
}
if !changed {
log.Debugf("No changes to file: %s", bazelFile.RelPath)
return iss, nil
}
if dryRun {
diff, err := fileHelper.Diff(bazelFile, buildfile)
if err != nil {
return iss, err
}
log.Infof("Dry run: would save updated file %s with diff:\n%s", bazelFile.RelPath, diff)
return iss, nil
}
log.Infof("Saving updated file: %s", bazelFile.RelPath)
if err := fileHelper.WriteFile(bazelFile, buildfile); err != nil {
return iss, err
}
return iss, nil
}
func fixRule(ctx context.Context, mirrorUpload mirrorUploader, rule *build.Rule, log *logger.Logger) (changed bool, iss []error) {
log.Debugf("Fixing rule: %s", rule.Name())
// check if the rule is a valid pinned dependency rule (has all required attributes)
issue := rules.ValidatePinned(rule)
if issue != nil {
// don't try to fix the rule if it's invalid
iss = append(iss, issue...)
return
}
// check if the referenced CAS object exists in the mirror and is consistent
expectedHash, expectedHashErr := rules.GetHash(rule)
if expectedHashErr != nil {
// don't try to fix the rule if the hash is missing
iss = append(iss,
errors.New("hash attribute is missing. unable to check if the artifact is already mirrored or upload it"))
return
}
if rules.HasMirrorURL(rule) {
changed = rules.Normalize(rule)
return
}
log.Infof("Artifact %s with hash %s is not yet mirrored. Uploading...", rule.Name(), expectedHash)
if uploadErr := mirrorUpload.Mirror(ctx, expectedHash, rules.GetURLs(rule)); uploadErr != nil {
// don't try to fix the rule if the upload failed
iss = append(iss, uploadErr)
return
}
// now the artifact is mirrored (if it wasn't already) and we can fix the rule
mirrorURL, err := mirrorUpload.MirrorURL(expectedHash)
if err != nil {
iss = append(iss, err)
return
}
rules.AddURLs(rule, []string{mirrorURL})
// normalize the rule
rules.Normalize(rule)
return true, iss
}
type fixFlags struct {
unauthenticated bool
dryRun bool
region string
bucket string
mirrorBaseURL string
logLevel zapcore.Level
}
func parseFixFlags(cmd *cobra.Command) (fixFlags, error) {
unauthenticated, err := cmd.Flags().GetBool("unauthenticated")
if err != nil {
return fixFlags{}, err
}
dryRun, err := cmd.Flags().GetBool("dry-run")
if err != nil {
return fixFlags{}, err
}
verbose, err := cmd.Flags().GetBool("verbose")
if err != nil {
return fixFlags{}, err
}
logLevel := zapcore.InfoLevel
if verbose {
logLevel = zapcore.DebugLevel
}
region, err := cmd.Flags().GetString("region")
if err != nil {
return fixFlags{}, err
}
bucket, err := cmd.Flags().GetString("bucket")
if err != nil {
return fixFlags{}, err
}
mirrorBaseURL, err := cmd.Flags().GetString("mirror-base-url")
if err != nil {
return fixFlags{}, err
}
return fixFlags{
unauthenticated: unauthenticated,
dryRun: dryRun,
region: region,
bucket: bucket,
mirrorBaseURL: mirrorBaseURL,
logLevel: logLevel,
}, nil
}
type mirrorUploader interface {
Check(ctx context.Context, expectedHash string) error
Mirror(ctx context.Context, hash string, urls []string) error
MirrorURL(hash string) (string, error)
}
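Because `fixBazelFile` and `fixRule` only depend on this small `mirrorUploader` interface, they can be exercised without AWS credentials. A hedged sketch of a test stub (hypothetical, analogous to the stubs used in this package's tests):
```go
// stubUploader satisfies mirrorUploader for unit tests; every method returns the configured error.
type stubUploader struct {
	err error
}

func (s *stubUploader) Check(_ context.Context, _ string) error { return s.err }

func (s *stubUploader) Mirror(_ context.Context, _ string, _ []string) error { return s.err }

func (s *stubUploader) MirrorURL(hash string) (string, error) {
	// cdn.example.com is a placeholder; the real Maintainer derives the URL from its base URL and the hash.
	return "https://cdn.example.com/" + hash, s.err
}
```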

View File

@ -0,0 +1,30 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")
go_library(
name = "bazelfiles",
srcs = ["files.go"],
importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles",
visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
deps = [
"@com_github_bazelbuild_buildtools//build",
"@com_github_hexops_gotextdiff//:gotextdiff",
"@com_github_hexops_gotextdiff//myers",
"@com_github_hexops_gotextdiff//span",
"@com_github_spf13_afero//:afero",
],
)
go_test(
name = "bazelfiles_test",
srcs = ["files_test.go"],
embed = [":bazelfiles"],
deps = [
"@com_github_bazelbuild_buildtools//build",
"@com_github_bazelbuild_buildtools//edit",
"@com_github_spf13_afero//:afero",
"@com_github_stretchr_testify//assert",
"@com_github_stretchr_testify//require",
"@org_uber_go_goleak//:goleak",
],
)

View File

@ -0,0 +1,158 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// Package bazelfiles is used to find and handle Bazel WORKSPACE and .bzl files.
package bazelfiles
import (
"fmt"
"os"
"path/filepath"
"github.com/bazelbuild/buildtools/build"
"github.com/hexops/gotextdiff"
"github.com/hexops/gotextdiff/myers"
"github.com/hexops/gotextdiff/span"
"github.com/spf13/afero"
)
// Helper is used to find and handle Bazel WORKSPACE and bzl files.
type Helper struct {
fs afero.Fs
workspaceRoot string
}
// New creates a new Helper.
func New() (*Helper, error) {
workspaceRoot, err := findWorkspaceRoot(os.LookupEnv)
if err != nil {
return nil, err
}
return &Helper{
fs: afero.NewBasePathFs(afero.NewOsFs(), workspaceRoot),
workspaceRoot: workspaceRoot,
}, nil
}
// FindFiles returns the paths to all Bazel files in the Bazel workspace.
func (h *Helper) FindFiles() ([]BazelFile, error) {
workspaceFile, err := h.findWorkspaceFile()
if err != nil {
return nil, err
}
bzlFiles, err := h.findBzlFiles()
if err != nil {
return nil, err
}
return append(bzlFiles, workspaceFile), nil
}
// findWorkspaceFile returns the path to the Bazel WORKSPACE.bazel file (or WORKSPACE if the former doesn't exist).
func (h *Helper) findWorkspaceFile() (BazelFile, error) {
if _, err := h.fs.Stat("WORKSPACE.bazel"); err == nil {
return BazelFile{
RelPath: "WORKSPACE.bazel",
AbsPath: filepath.Join(h.workspaceRoot, "WORKSPACE.bazel"),
Type: BazelFileTypeWorkspace,
}, nil
}
if _, err := h.fs.Stat("WORKSPACE"); err == nil {
return BazelFile{
RelPath: "WORKSPACE",
AbsPath: filepath.Join(h.workspaceRoot, "WORKSPACE"),
Type: BazelFileTypeWorkspace,
}, nil
}
return BazelFile{}, fmt.Errorf("failed to find Bazel WORKSPACE file")
}
// findBzlFiles returns the paths to all .bzl files in the Bazel workspace.
func (h *Helper) findBzlFiles() ([]BazelFile, error) {
var bzlFiles []BazelFile
err := afero.Walk(h.fs, ".", func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
if filepath.Ext(path) != ".bzl" {
return nil
}
bzlFiles = append(bzlFiles, BazelFile{
RelPath: path,
AbsPath: filepath.Join(h.workspaceRoot, path),
Type: BazelFileTypeBzl,
})
return nil
})
if err != nil {
return nil, err
}
return bzlFiles, nil
}
// LoadFile loads a Bazel file.
func (h *Helper) LoadFile(bf BazelFile) (*build.File, error) {
data, err := afero.ReadFile(h.fs, bf.RelPath)
if err != nil {
return nil, err
}
switch bf.Type {
case BazelFileTypeBzl:
return build.ParseBzl(bf.AbsPath, data)
case BazelFileTypeWorkspace:
return build.ParseWorkspace(bf.AbsPath, data)
}
return nil, fmt.Errorf("unknown Bazel file type: %d", bf.Type)
}
// WriteFile writes (updates) a Bazel file.
func (h *Helper) WriteFile(bf BazelFile, buildfile *build.File) error {
return afero.WriteFile(h.fs, bf.RelPath, build.Format(buildfile), 0o644)
}
// Diff returns the diff between the saved and the updated (in-memory) version of a Bazel file.
func (h *Helper) Diff(bf BazelFile, buildfile *build.File) (string, error) {
savedData, err := afero.ReadFile(h.fs, bf.RelPath)
if err != nil {
return "", err
}
updatedData := build.Format(buildfile)
edits := myers.ComputeEdits(span.URIFromPath(bf.RelPath), string(savedData), string(updatedData))
diff := fmt.Sprint(gotextdiff.ToUnified("a/"+bf.RelPath, "b/"+bf.RelPath, string(savedData), edits))
return diff, nil
}
// findWorkspaceRoot returns the path to the Bazel workspace root.
func findWorkspaceRoot(lookupEnv LookupEnv) (string, error) {
workspaceRoot, ok := lookupEnv("BUILD_WORKSPACE_DIRECTORY")
if !ok {
return "", fmt.Errorf("failed to find Bazel workspace root: not executed via \"bazel run\" and BUILD_WORKSPACE_DIRECTORY not set")
}
return workspaceRoot, nil
}
// BazelFile is a reference (path) to a Bazel file.
type BazelFile struct {
RelPath string
AbsPath string
Type BazelFileType
}
// BazelFileType is the type of a Bazel file.
type BazelFileType int
const (
BazelFileTypeBzl = iota // BazelFileTypeBzl is a .bzl file
BazelFileTypeWorkspace // BazelFileTypeWorkspace is a WORKSPACE or WORKSPACE.bazel file
)
// LookupEnv can be the real os.LookupEnv or a mock for testing.
type LookupEnv func(key string) (string, bool)
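A short usage sketch of the Helper (illustrative only; the real call sites are runCheck and runFix above, and New only succeeds under `bazel run` because it reads BUILD_WORKSPACE_DIRECTORY). It assumes imports of `fmt` and this package:
```go
// printDiffs loads every Bazel file in the workspace and prints the diff of any in-memory edits.
func printDiffs() error {
	helper, err := bazelfiles.New()
	if err != nil {
		return err
	}
	files, err := helper.FindFiles() // WORKSPACE(.bazel) plus all *.bzl files
	if err != nil {
		return err
	}
	for _, f := range files {
		buildfile, err := helper.LoadFile(f)
		if err != nil {
			return err
		}
		// ... modify buildfile here, e.g. through the rules package ...
		diff, err := helper.Diff(f, buildfile) // empty while nothing was modified
		if err != nil {
			return err
		}
		fmt.Print(diff)
	}
	return nil
}
```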

View File

@ -0,0 +1,259 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package bazelfiles
import (
"testing"
"github.com/bazelbuild/buildtools/build"
"github.com/bazelbuild/buildtools/edit"
"github.com/spf13/afero"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
)
func TestMain(m *testing.M) {
goleak.VerifyTestMain(m)
}
func TestFindFiles(t *testing.T) {
testCases := map[string]struct {
files []string
wantFiles []BazelFile
wantErr bool
}{
"no WORKSPACE file": {
files: []string{},
wantFiles: []BazelFile{},
wantErr: true,
},
"only WORKSPACE file": {
files: []string{"WORKSPACE"},
wantFiles: []BazelFile{
{
RelPath: "WORKSPACE",
AbsPath: "/WORKSPACE",
Type: BazelFileTypeWorkspace,
},
},
},
"only WORKSPACE.bazel file": {
files: []string{"WORKSPACE.bazel"},
wantFiles: []BazelFile{
{
RelPath: "WORKSPACE.bazel",
AbsPath: "/WORKSPACE.bazel",
Type: BazelFileTypeWorkspace,
},
},
},
"both WORKSPACE and WORKSPACE.bazel files": {
files: []string{"WORKSPACE", "WORKSPACE.bazel"},
wantFiles: []BazelFile{
{
RelPath: "WORKSPACE.bazel",
AbsPath: "/WORKSPACE.bazel",
Type: BazelFileTypeWorkspace,
},
},
},
"only .bzl file": {
files: []string{"foo.bzl"},
wantErr: true,
},
"all kinds": {
files: []string{"WORKSPACE", "WORKSPACE.bazel", "foo.bzl", "bar.bzl", "unused.txt", "folder/baz.bzl"},
wantFiles: []BazelFile{
{
RelPath: "WORKSPACE.bazel",
AbsPath: "/WORKSPACE.bazel",
Type: BazelFileTypeWorkspace,
},
{
RelPath: "foo.bzl",
AbsPath: "/foo.bzl",
Type: BazelFileTypeBzl,
},
{
RelPath: "bar.bzl",
AbsPath: "/bar.bzl",
Type: BazelFileTypeBzl,
},
{
RelPath: "folder/baz.bzl",
AbsPath: "/folder/baz.bzl",
Type: BazelFileTypeBzl,
},
},
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
fs := afero.NewMemMapFs()
for _, file := range tc.files {
_, err := fs.Create(file)
assert.NoError(err)
}
helper := Helper{
fs: fs,
workspaceRoot: "/",
}
gotFiles, err := helper.FindFiles()
if tc.wantErr {
assert.Error(err)
return
}
require.NoError(err)
assert.ElementsMatch(tc.wantFiles, gotFiles)
})
}
}
func TestLoadFile(t *testing.T) {
testCases := map[string]struct {
file BazelFile
contents string
wantErr bool
}{
"file does not exist": {
file: BazelFile{
RelPath: "foo.bzl",
AbsPath: "/foo.bzl",
Type: BazelFileTypeBzl,
},
wantErr: true,
},
"file has unknown type": {
file: BazelFile{
RelPath: "foo.txt",
AbsPath: "/foo.txt",
Type: BazelFileType(999),
},
contents: "foo",
wantErr: true,
},
"file is a bzl file": {
file: BazelFile{
RelPath: "foo.bzl",
AbsPath: "/foo.bzl",
Type: BazelFileTypeBzl,
},
contents: "load(\"bar.bzl\", \"bar\")",
},
"file is a workspace file": {
file: BazelFile{
RelPath: "WORKSPACE",
AbsPath: "/WORKSPACE",
Type: BazelFileTypeWorkspace,
},
contents: "workspace(name = \"foo\")",
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
fs := afero.NewMemMapFs()
if tc.contents != "" {
err := afero.WriteFile(fs, tc.file.RelPath, []byte(tc.contents), 0o644)
require.NoError(err)
}
helper := Helper{
fs: fs,
workspaceRoot: "/",
}
_, err := helper.LoadFile(tc.file)
if tc.wantErr {
assert.Error(err)
return
}
require.NoError(err)
})
}
}
func TestReadWriteFile(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
fs := afero.NewMemMapFs()
err := afero.WriteFile(fs, "a.bzl", []byte("load(\"bar.bzl\", \"bar\")\n"), 0o644)
require.NoError(err)
helper := Helper{
fs: fs,
workspaceRoot: "/",
}
bf, err := helper.LoadFile(BazelFile{
RelPath: "a.bzl",
AbsPath: "/a.bzl",
Type: BazelFileTypeBzl,
})
require.NoError(err)
err = helper.WriteFile(BazelFile{
RelPath: "b.bzl",
AbsPath: "/b.bzl",
Type: BazelFileTypeBzl,
}, bf)
require.NoError(err)
_, err = fs.Stat("b.bzl")
assert.NoError(err)
contents, err := afero.ReadFile(fs, "b.bzl")
assert.NoError(err)
assert.Equal("load(\"bar.bzl\", \"bar\")\n", string(contents))
}
func TestDiff(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
fs := afero.NewMemMapFs()
err := afero.WriteFile(fs, "WORKSPACE.bazel", []byte(""), 0o644)
require.NoError(err)
helper := Helper{
fs: fs,
workspaceRoot: "/",
}
fileRef := BazelFile{
RelPath: "WORKSPACE.bazel",
AbsPath: "/WORKSPACE.bazel",
Type: BazelFileTypeWorkspace,
}
bf, err := helper.LoadFile(fileRef)
require.NoError(err)
diff, err := helper.Diff(fileRef, bf)
require.NoError(err)
assert.Empty(diff)
bf.Stmt = edit.InsertAtEnd(
bf.Stmt,
&build.CallExpr{
X: &build.Ident{Name: "workspace"},
List: []build.Expr{
&build.AssignExpr{
LHS: &build.Ident{Name: "name"},
Op: "=",
RHS: &build.StringExpr{Value: "foo"},
},
},
},
)
diff, err = helper.Diff(fileRef, bf)
require.NoError(err)
assert.Equal("--- a/WORKSPACE.bazel\n+++ b/WORKSPACE.bazel\n@@ -1 +1 @@\n+workspace(name = \"foo\")\n", diff)
err = helper.WriteFile(fileRef, bf)
require.NoError(err)
contents, err := afero.ReadFile(fs, "WORKSPACE.bazel")
assert.NoError(err)
assert.Equal("workspace(name = \"foo\")\n", string(contents))
diff, err = helper.Diff(fileRef, bf)
require.NoError(err)
assert.Empty(diff)
}

View File

@ -0,0 +1,19 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")
go_library(
name = "issues",
srcs = ["issues.go"],
importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues",
visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
)
go_test(
name = "issues_test",
srcs = ["issues_test.go"],
embed = [":issues"],
deps = [
"@com_github_stretchr_testify//assert",
"@org_uber_go_goleak//:goleak",
],
)

View File

@ -0,0 +1,88 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// Package issues can store and report issues found during the bazel-deps-mirror process.
package issues
import (
"fmt"
"io"
"sort"
)
// Map is a map of issues arranged by path => rulename => issues.
type Map map[string]map[string][]error
// New creates a new Map.
func New() Map {
return make(map[string]map[string][]error)
}
// Set sets all issues for a file.
func (m Map) Set(file string, issues ByFile) {
m[file] = issues
}
// Report prints all issues to a writer in a human-readable format.
func (m Map) Report(w io.Writer) {
files := make([]string, 0, len(m))
for f := range m {
files = append(files, f)
}
sort.Strings(files)
for _, file := range files {
rules := make([]string, 0, len(m[file]))
for r := range m[file] {
rules = append(rules, r)
}
sort.Strings(rules)
fmt.Fprintf(w, "File %s (%d issues total):\n", file, m.IssuesPerFile(file))
for _, rule := range rules {
ruleIssues := m[file][rule]
if len(ruleIssues) == 0 {
continue
}
fmt.Fprintf(w, " Rule %s (%d issues total):\n", rule, m.IssuesPerRule(file, rule))
for _, issue := range ruleIssues {
fmt.Fprintf(w, " %s\n", issue)
}
}
}
}
// FileHasIssues returns true if the file has any issues.
func (m Map) FileHasIssues(file string) bool {
return m[file] != nil
}
// IssuesPerFile returns the number of issues for a file.
func (m Map) IssuesPerFile(file string) int {
sum := 0
for _, ruleIssues := range m[file] {
sum += len(ruleIssues)
}
return sum
}
// IssuesPerRule returns the number of issues for a rule.
func (m Map) IssuesPerRule(file string, rule string) int {
return len(m[file][rule])
}
// ByFile is a map of issues belonging to one file arranged by rulename => issues.
type ByFile map[string][]error
// NewByFile creates a new ByFile.
func NewByFile() ByFile {
return make(map[string][]error)
}
// Add adds one or more issues belonging to a rule.
func (m ByFile) Add(rule string, issues ...error) {
m[rule] = append(m[rule], issues...)
}
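A compact sketch of the intended flow, matching how check.go and fix.go above use this package (assumes imports of `errors`, `os`, and this package; the rule and file names are placeholders):
```go
// reportExample collects per-rule problems for one file, attaches them to the overall map, and reports at the end.
func reportExample() error {
	perFile := issues.NewByFile()
	perFile.Add("my_http_archive", errors.New("missing mirror URL")) // rule name -> problems

	all := issues.New()
	all.Set("WORKSPACE.bazel", perFile) // file path -> per-rule problems

	if len(all) > 0 {
		all.Report(os.Stdout) // e.g. "File WORKSPACE.bazel (1 issues total): ..."
		return errors.New("found issues in rules")
	}
	return nil
}
```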

View File

@ -0,0 +1,46 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package issues
import (
"bytes"
"errors"
"testing"
"github.com/stretchr/testify/assert"
"go.uber.org/goleak"
)
func TestMain(m *testing.M) {
goleak.VerifyTestMain(m)
}
func TestMap(t *testing.T) {
assert := assert.New(t)
m := New()
assert.Equal(0, len(m))
assert.False(m.FileHasIssues("file1"))
m.Set("file1", map[string][]error{
"rule1": {errors.New("r1_issue1"), errors.New("r1_issue2")},
"rule2": {errors.New("r2_issue1")},
})
assert.Equal(3, m.IssuesPerFile("file1"))
assert.True(m.FileHasIssues("file1"))
// let report write to a buffer
b := new(bytes.Buffer)
m.Report(b)
rep := b.String()
assert.Equal(rep, `File file1 (3 issues total):
Rule rule1 (2 issues total):
r1_issue1
r1_issue2
Rule rule2 (1 issues total):
r2_issue1
`)
}

View File

@ -0,0 +1,30 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")
go_library(
name = "mirror",
srcs = ["mirror.go"],
importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror",
visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
deps = [
"//internal/logger",
"@com_github_aws_aws_sdk_go_v2_config//:config",
"@com_github_aws_aws_sdk_go_v2_feature_s3_manager//:manager",
"@com_github_aws_aws_sdk_go_v2_service_s3//:s3",
"@com_github_aws_aws_sdk_go_v2_service_s3//types",
],
)
go_test(
name = "mirror_test",
srcs = ["mirror_test.go"],
embed = [":mirror"],
deps = [
"//internal/logger",
"@com_github_aws_aws_sdk_go_v2_feature_s3_manager//:manager",
"@com_github_aws_aws_sdk_go_v2_service_s3//:s3",
"@com_github_aws_aws_sdk_go_v2_service_s3//types",
"@com_github_stretchr_testify//assert",
"@org_uber_go_goleak//:goleak",
],
)

View File

@ -0,0 +1,270 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// Package mirror is used to upload and download Bazel dependencies to and from a mirror.
package mirror
import (
"context"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"
"net/http"
"net/url"
"path"
awsconfig "github.com/aws/aws-sdk-go-v2/config"
s3manager "github.com/aws/aws-sdk-go-v2/feature/s3/manager"
"github.com/aws/aws-sdk-go-v2/service/s3"
s3types "github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/edgelesssys/constellation/v2/internal/logger"
)
// Maintainer can upload and download files to and from a CAS mirror.
type Maintainer struct {
objectStorageClient objectStorageClient
uploadClient uploadClient
httpClient httpClient
// bucket is the name of the S3 bucket to use.
bucket string
// mirrorBaseURL is the base URL of the public CAS http endpoint.
mirrorBaseURL string
unauthenticated bool
dryRun bool
log *logger.Logger
}
// NewUnauthenticated creates a new Maintainer that does not require authentication and can only download files from a CAS mirror.
func NewUnauthenticated(mirrorBaseURL string, dryRun bool, log *logger.Logger) *Maintainer {
return &Maintainer{
httpClient: http.DefaultClient,
mirrorBaseURL: mirrorBaseURL,
unauthenticated: true,
dryRun: dryRun,
log: log,
}
}
// New creates a new Maintainer that can upload and download files to and from a CAS mirror.
func New(ctx context.Context, region, bucket, mirrorBaseURL string, dryRun bool, log *logger.Logger) (*Maintainer, error) {
cfg, err := awsconfig.LoadDefaultConfig(ctx, awsconfig.WithRegion(region))
if err != nil {
return nil, err
}
s3C := s3.NewFromConfig(cfg)
uploadC := s3manager.NewUploader(s3C)
return &Maintainer{
objectStorageClient: s3C,
uploadClient: uploadC,
bucket: bucket,
mirrorBaseURL: mirrorBaseURL,
httpClient: http.DefaultClient,
dryRun: dryRun,
log: log,
}, nil
}
// MirrorURL returns the public URL of a file in the CAS mirror.
func (m *Maintainer) MirrorURL(hash string) (string, error) {
if _, err := hex.DecodeString(hash); err != nil {
return "", fmt.Errorf("invalid hash %q: %w", hash, err)
}
key := path.Join(keyBase, hash)
pubURL, err := url.Parse(m.mirrorBaseURL)
if err != nil {
return "", err
}
pubURL.Path = path.Join(pubURL.Path, key)
return pubURL.String(), nil
}
// Mirror downloads a file from one of the existing (non-mirror) urls and uploads it to the CAS mirror.
// It also calculates the hash of the file during streaming and checks if it matches the expected hash.
func (m *Maintainer) Mirror(ctx context.Context, hash string, urls []string) error {
if m.unauthenticated {
return errors.New("cannot upload in unauthenticated mode")
}
for _, url := range urls {
m.log.Debugf("Mirroring file with hash %v from %q", hash, url)
body, err := m.downloadFromUpstream(ctx, url)
if err != nil {
m.log.Debugf("Failed to download file from %q: %v", url, err)
continue
}
defer body.Close()
streamedHash := sha256.New()
tee := io.TeeReader(body, streamedHash)
if err := m.put(ctx, hash, tee); err != nil {
m.log.Warnf("Failed to stream file from upstream %q to mirror: %v.. Trying next url.", url, err)
continue
}
actualHash := hex.EncodeToString(streamedHash.Sum(nil))
if actualHash != hash {
return fmt.Errorf("hash mismatch while streaming file to mirror: expected %v, got %v", hash, actualHash)
}
pubURL, err := m.MirrorURL(hash)
if err != nil {
return err
}
m.log.Debugf("File uploaded successfully to mirror from %q as %q", url, pubURL)
return nil
}
return fmt.Errorf("failed to download / reupload file with hash %v from any of the urls: %v", hash, urls)
}
// Check checks if a file is present and has the correct hash in the CAS mirror.
func (m *Maintainer) Check(ctx context.Context, expectedHash string) error {
m.log.Debugf("Checking consistency of object with hash %v", expectedHash)
if m.unauthenticated {
return m.checkUnauthenticated(ctx, expectedHash)
}
return m.checkAuthenticated(ctx, expectedHash)
}
// checkAuthenticated checks if a file is present and has the correct hash in the CAS mirror.
// It uses the authenticated CAS s3 endpoint to download the file metadata.
func (m *Maintainer) checkAuthenticated(ctx context.Context, expectedHash string) error {
key := path.Join(keyBase, expectedHash)
m.log.Debugf("Check: s3 getObjectAttributes {Bucket: %v, Key: %v}", m.bucket, key)
attributes, err := m.objectStorageClient.GetObjectAttributes(ctx, &s3.GetObjectAttributesInput{
Bucket: &m.bucket,
Key: &key,
ObjectAttributes: []s3types.ObjectAttributes{s3types.ObjectAttributesChecksum, s3types.ObjectAttributesObjectParts},
})
if err != nil {
return err
}
hasChecksum := attributes.Checksum != nil && attributes.Checksum.ChecksumSHA256 != nil && len(*attributes.Checksum.ChecksumSHA256) > 0
isSinglePart := attributes.ObjectParts == nil || attributes.ObjectParts.TotalPartsCount == 1
if !hasChecksum || !isSinglePart {
// checksums are not guaranteed to be present
// and if present, they are only meaningful for single part objects
// fallback if checksum cannot be verified from attributes
m.log.Debugf("S3 object attributes cannot be used to verify key %v. Falling back to download.", key)
return m.checkUnauthenticated(ctx, expectedHash)
}
actualHash, err := base64.StdEncoding.DecodeString(*attributes.Checksum.ChecksumSHA256)
if err != nil {
return err
}
return compareHashes(expectedHash, actualHash)
}
// checkUnauthenticated checks if a file is present and has the correct hash in the CAS mirror.
// It uses the public CAS http endpoint to download the file.
func (m *Maintainer) checkUnauthenticated(ctx context.Context, expectedHash string) error {
pubURL, err := m.MirrorURL(expectedHash)
if err != nil {
return err
}
m.log.Debugf("Check: http get {Url: %v}", pubURL)
req, err := http.NewRequestWithContext(ctx, http.MethodGet, pubURL, http.NoBody)
if err != nil {
return err
}
resp, err := m.httpClient.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return fmt.Errorf("unexpected status code %v", resp.StatusCode)
}
actualHash := sha256.New()
if _, err := io.Copy(actualHash, resp.Body); err != nil {
return err
}
return compareHashes(expectedHash, actualHash.Sum(nil))
}
// put uploads a file to the CAS mirror.
func (m *Maintainer) put(ctx context.Context, hash string, data io.Reader) error {
if m.unauthenticated {
return errors.New("cannot upload in unauthenticated mode")
}
key := path.Join(keyBase, hash)
if m.dryRun {
m.log.Debugf("DryRun: s3 put object {Bucket: %v, Key: %v}", m.bucket, key)
return nil
}
m.log.Debugf("Uploading object with hash %v to s3://%v/%v", hash, m.bucket, key)
_, err := m.uploadClient.Upload(ctx, &s3.PutObjectInput{
Bucket: &m.bucket,
Key: &key,
Body: data,
ChecksumAlgorithm: s3types.ChecksumAlgorithmSha256,
})
return err
}
// downloadFromUpstream downloads a file from one of the existing (non-mirror) urls.
func (m *Maintainer) downloadFromUpstream(ctx context.Context, url string) (body io.ReadCloser, retErr error) {
req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, http.NoBody)
if err != nil {
return nil, err
}
resp, err := m.httpClient.Do(req)
if err != nil {
return nil, err
}
defer func() {
if retErr != nil {
resp.Body.Close()
}
}()
if resp.StatusCode != http.StatusOK {
return nil, fmt.Errorf("unexpected status code %v", resp.StatusCode)
}
return resp.Body, nil
}
func compareHashes(expectedHash string, actualHash []byte) error {
if len(actualHash) != sha256.Size {
return fmt.Errorf("actual hash should to be %v bytes, got %v", sha256.Size, len(actualHash))
}
if len(expectedHash) != hex.EncodedLen(sha256.Size) {
return fmt.Errorf("expected hash should be %v bytes, got %v", hex.EncodedLen(sha256.Size), len(expectedHash))
}
actualHashStr := hex.EncodeToString(actualHash)
if expectedHash != actualHashStr {
return fmt.Errorf("expected hash %v, mirror returned %v", expectedHash, actualHashStr)
}
return nil
}
type objectStorageClient interface {
GetObjectAttributes(ctx context.Context, params *s3.GetObjectAttributesInput, optFns ...func(*s3.Options)) (*s3.GetObjectAttributesOutput, error)
}
type uploadClient interface {
Upload(ctx context.Context, input *s3.PutObjectInput, opts ...func(*s3manager.Uploader)) (*s3manager.UploadOutput, error)
}
type httpClient interface {
Get(url string) (*http.Response, error)
Do(req *http.Request) (*http.Response, error)
}
const (
// DryRun is a flag to enable dry run mode.
DryRun = true
// Run is a flag to perform actual operations.
Run = false
keyBase = "constellation/cas/sha256"
)
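As a usage sketch, the unauthenticated mode needs nothing but the public base URL; this is essentially what `check --mirror-unauthenticated` does per artifact (the hash below is the SHA-256 of an empty file, the same value used in this package's tests):
```go
// checkArtifact verifies that a single mirrored artifact still matches its pinned SHA-256 hash.
func checkArtifact(ctx context.Context, log *logger.Logger) error {
	m := mirror.NewUnauthenticated("https://cdn.confidential.cloud", mirror.Run, log)
	return m.Check(ctx, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855")
}
```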

View File

@ -0,0 +1,285 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package mirror
import (
"bytes"
"context"
"io"
"log"
"net/http"
"testing"
s3manager "github.com/aws/aws-sdk-go-v2/feature/s3/manager"
"github.com/aws/aws-sdk-go-v2/service/s3"
"github.com/aws/aws-sdk-go-v2/service/s3/types"
"github.com/edgelesssys/constellation/v2/internal/logger"
"github.com/stretchr/testify/assert"
"go.uber.org/goleak"
)
func TestMain(m *testing.M) {
goleak.VerifyTestMain(m)
}
func TestMirrorURL(t *testing.T) {
testCases := map[string]struct {
hash string
wantURL string
wantErr bool
}{
"empty hash": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
wantURL: "https://example.com/constellation/cas/sha256/e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
},
"other hash": {
hash: "0000000000000000000000000000000000000000000000000000000000000000",
wantURL: "https://example.com/constellation/cas/sha256/0000000000000000000000000000000000000000000000000000000000000000",
},
"invalid hash": {
hash: "\x00",
wantErr: true,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
m := Maintainer{
mirrorBaseURL: "https://example.com/",
}
url, err := m.MirrorURL(tc.hash)
if tc.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
assert.Equal(t, tc.wantURL, url)
})
}
}
func TestMirror(t *testing.T) {
testCases := map[string]struct {
unauthenticated bool
hash string
data []byte
upstreamURL string
statusCode int
failUpload bool
wantErr bool
}{
"cannot upload in unauthenticated mode": {
unauthenticated: true,
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
wantErr: true,
},
"http error": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusNotFound,
wantErr: true,
},
"hash mismatch": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte("evil"),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
wantErr: true,
},
"upload error": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
failUpload: true,
wantErr: true,
},
"success": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
data: []byte(""),
upstreamURL: "https://example.com/empty",
statusCode: http.StatusOK,
},
"success with different hash": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
data: []byte("foo"),
upstreamURL: "https://example.com/foo",
statusCode: http.StatusOK,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
m := Maintainer{
httpClient: &http.Client{
Transport: &stubUpstream{
statusCode: tc.statusCode,
body: tc.data,
},
},
uploadClient: &stubUploadClient{
uploadErr: func() error {
if tc.failUpload {
return assert.AnError
}
return nil
}(),
},
unauthenticated: tc.unauthenticated,
log: logger.NewTest(t),
}
err := m.Mirror(context.Background(), tc.hash, []string{tc.upstreamURL})
if tc.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
func TestCheck(t *testing.T) {
testCases := map[string]struct {
hash string
unauthenticatedResponse []byte
unauthenticatedStatusCode int
authenticatedResponse *s3.GetObjectAttributesOutput
authenticatedErr error
wantErr bool
}{
"unauthenticated mode, http error": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
unauthenticatedResponse: []byte("foo"), // ignored
unauthenticatedStatusCode: http.StatusNotFound,
wantErr: true,
},
"unauthenticated mode, hash mismatch": {
hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
unauthenticatedResponse: []byte("foo"),
unauthenticatedStatusCode: http.StatusOK,
wantErr: true,
},
"unauthenticated mode, success": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
unauthenticatedResponse: []byte("foo"),
unauthenticatedStatusCode: http.StatusOK,
},
"authenticated mode, get attributes fails": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
authenticatedErr: assert.AnError,
wantErr: true,
},
"authenticated mode, hash mismatch": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
authenticatedResponse: &s3.GetObjectAttributesOutput{
Checksum: &types.Checksum{
ChecksumSHA256: toPtr("tcH7Lvxta0Z0wv3MSM4BtDo7fAN2PAwzVd4Ame4PjHM="),
},
ObjectParts: &types.GetObjectAttributesParts{
TotalPartsCount: 1,
},
},
wantErr: true,
},
"authenticated mode, success": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
authenticatedResponse: &s3.GetObjectAttributesOutput{
Checksum: &types.Checksum{
ChecksumSHA256: toPtr("LCa0a2j/xo/5m0U8HTBBNBNCLXBkg7+g+YpeiGJm564="),
},
ObjectParts: &types.GetObjectAttributesParts{
TotalPartsCount: 1,
},
},
},
"authenticated mode, fallback to unauthenticated": {
hash: "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
authenticatedResponse: &s3.GetObjectAttributesOutput{
ObjectParts: &types.GetObjectAttributesParts{
TotalPartsCount: 2,
},
},
unauthenticatedResponse: []byte("foo"),
unauthenticatedStatusCode: http.StatusOK,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
m := Maintainer{
unauthenticated: (tc.authenticatedResponse == nil),
httpClient: &http.Client{
Transport: &stubUpstream{
statusCode: tc.unauthenticatedStatusCode,
body: tc.unauthenticatedResponse,
},
},
objectStorageClient: &stubObjectStorageClient{
response: tc.authenticatedResponse,
err: tc.authenticatedErr,
},
log: logger.NewTest(t),
}
err := m.Check(context.Background(), tc.hash)
if tc.wantErr {
assert.Error(t, err)
} else {
assert.NoError(t, err)
}
})
}
}
// stubUpstream implements http.RoundTripper and returns a canned response.
type stubUpstream struct {
statusCode int
body []byte
}
func (s *stubUpstream) RoundTrip(req *http.Request) (*http.Response, error) {
log.Printf("stubUpstream: %s %s -> %q\n", req.Method, req.URL, string(s.body))
return &http.Response{
StatusCode: s.statusCode,
Body: io.NopCloser(bytes.NewReader(s.body)),
}, nil
}
type stubUploadClient struct {
uploadErr error
uploadedData []byte
}
func (s *stubUploadClient) Upload(
_ context.Context, input *s3.PutObjectInput,
_ ...func(*s3manager.Uploader),
) (*s3manager.UploadOutput, error) {
var err error
s.uploadedData, err = io.ReadAll(input.Body)
if err != nil {
panic(err)
}
return nil, s.uploadErr
}
func toPtr[T any](v T) *T {
return &v
}
type stubObjectStorageClient struct {
response *s3.GetObjectAttributesOutput
err error
}
func (s *stubObjectStorageClient) GetObjectAttributes(
_ context.Context, _ *s3.GetObjectAttributesInput, _ ...func(*s3.Options),
) (*s3.GetObjectAttributesOutput, error) {
return s.response, s.err
}

View File

@ -0,0 +1,25 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("//bazel/go:go_test.bzl", "go_test")
go_library(
name = "rules",
srcs = ["rules.go"],
importpath = "github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules",
visibility = ["//hack/bazel-deps-mirror:__subpackages__"],
deps = [
"@com_github_bazelbuild_buildtools//build",
"@org_golang_x_exp//slices",
],
)
go_test(
name = "rules_test",
srcs = ["rules_test.go"],
embed = [":rules"],
deps = [
"@com_github_bazelbuild_buildtools//build",
"@com_github_stretchr_testify//assert",
"@com_github_stretchr_testify//require",
"@org_uber_go_goleak//:goleak",
],
)

View File

@ -0,0 +1,297 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
// Package rules is used to find and modify Bazel rules in WORKSPACE and .bzl files.
package rules
import (
"errors"
"fmt"
"sort"
"strings"
"github.com/bazelbuild/buildtools/build"
"golang.org/x/exp/slices"
)
// Rules is used to find and modify Bazel rules of a set of rule kinds in WORKSPACE and .bzl files.
// Filter is a list of rule kinds to consider.
// If filter is empty, all rules are considered.
func Rules(file *build.File, filter []string) (rules []*build.Rule) {
allRules := file.Rules("")
if len(filter) == 0 {
return allRules
}
ruleLoop:
for _, rule := range allRules {
for _, ruleKind := range filter {
if rule.Kind() == ruleKind {
rules = append(rules, rule)
continue ruleLoop
}
}
}
return
}
// ValidatePinned checks if the given rule is a pinned dependency rule.
// That is, if it has a name, either a url or urls attribute, and a sha256 attribute.
func ValidatePinned(rule *build.Rule) (validationErrs []error) {
if rule.Name() == "" {
validationErrs = append(validationErrs, errors.New("rule has no name"))
}
hasURL := rule.Attr("url") != nil
hasURLs := rule.Attr("urls") != nil
if !hasURL && !hasURLs {
validationErrs = append(validationErrs, errors.New("rule has no url or urls attribute"))
}
if hasURL && hasURLs {
validationErrs = append(validationErrs, errors.New("rule has both url and urls attribute"))
}
if hasURL {
url := rule.AttrString("url")
if url == "" {
validationErrs = append(validationErrs, errors.New("rule has empty url attribute"))
}
}
if hasURLs {
urls := rule.AttrStrings("urls")
if len(urls) == 0 {
validationErrs = append(validationErrs, errors.New("rule has empty urls list attribute"))
} else {
for _, url := range urls {
if url == "" {
validationErrs = append(validationErrs, errors.New("rule has empty url in urls attribute"))
}
}
}
}
if rule.Attr("sha256") == nil {
validationErrs = append(validationErrs, errors.New("rule has no sha256 attribute"))
} else {
sha256 := rule.AttrString("sha256")
if sha256 == "" {
validationErrs = append(validationErrs, errors.New("rule has empty sha256 attribute"))
}
}
return
}
// Check checks if a dependency rule is normalized and contains a mirror url.
// All errors reported by this function can be fixed by calling AddURLs and Normalize.
func Check(rule *build.Rule) (validationErrs []error) {
hasURL := rule.Attr("url") != nil
if hasURL {
validationErrs = append(validationErrs, errors.New("rule has url (singular) attribute"))
}
urls := rule.AttrStrings("urls")
sorted := make([]string, len(urls))
copy(sorted, urls)
sortURLs(sorted)
for i, url := range urls {
if url != sorted[i] {
validationErrs = append(validationErrs, errors.New("rule has unsorted urls attributes"))
break
}
}
if !HasMirrorURL(rule) {
validationErrs = append(validationErrs, errors.New("rule is not mirrored"))
}
if rule.Kind() == "http_archive" && rule.Attr("type") == nil {
validationErrs = append(validationErrs, errors.New("http_archive rule has no type attribute"))
}
if rule.Kind() == "rpm" && len(urls) != 1 {
validationErrs = append(validationErrs, errors.New("rpm rule has unstable urls that are not the edgeless mirror"))
}
return
}
// Normalize normalizes a rule and returns true if the rule was changed.
func Normalize(rule *build.Rule) (changed bool) {
changed = addTypeAttribute(rule)
urls := GetURLs(rule)
normalizedURLS := append([]string{}, urls...)
// rpm rules must have exactly one url (the edgeless mirror)
if mirrorU, err := mirrorURL(rule); rule.Kind() == "rpm" && err == nil {
normalizedURLS = []string{mirrorU}
}
sortURLs(normalizedURLS)
normalizedURLS = deduplicateURLs(normalizedURLS)
if slices.Equal(urls, normalizedURLS) && rule.Attr("url") == nil {
return
}
setURLs(rule, normalizedURLS)
changed = true
return
}
// AddURLs adds the given urls to a rule, sorting and de-duplicating the combined list.
func AddURLs(rule *build.Rule, urls []string) {
existingURLs := GetURLs(rule)
existingURLs = append(existingURLs, urls...)
sortURLs(existingURLs)
deduplicatedURLs := deduplicateURLs(existingURLs)
setURLs(rule, deduplicatedURLs)
}
// GetHash returns the sha256 hash of a rule.
func GetHash(rule *build.Rule) (string, error) {
hash := rule.AttrString("sha256")
if hash == "" {
return "", fmt.Errorf("rule %s has empty or missing sha256 attribute", rule.Name())
}
return hash, nil
}
// GetURLs returns the urls of a rule, combining the url and urls attributes.
func GetURLs(rule *build.Rule) []string {
urls := rule.AttrStrings("urls")
url := rule.AttrString("url")
if url != "" {
urls = append(urls, url)
}
return urls
}
// HasMirrorURL returns true if the rule has a url from the Edgeless mirror.
func HasMirrorURL(rule *build.Rule) bool {
_, err := mirrorURL(rule)
return err == nil
}
func deduplicateURLs(urls []string) (deduplicated []string) {
seen := make(map[string]bool)
for _, url := range urls {
if !seen[url] {
deduplicated = append(deduplicated, url)
seen[url] = true
}
}
return
}
// addTypeAttribute adds the type attribute to http_archive rules if it is missing.
// It returns true if the rule was changed.
// It returns false if the type attribute is already set or no archive type can be derived from the rule's urls.
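// For example, a rule whose url ends in ".tar.gz" gets type = "tar.gz".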
func addTypeAttribute(rule *build.Rule) bool {
// only http_archive rules have a type attribute
if rule.Kind() != "http_archive" {
return false
}
if rule.Attr("type") != nil {
return false
}
// iterate over all URLs and check if they have a known archive type
var archiveType string
urlLoop:
for _, url := range GetURLs(rule) {
switch {
case strings.HasSuffix(url, ".aar"):
archiveType = "aar"
break urlLoop
case strings.HasSuffix(url, ".ar"):
archiveType = "ar"
break urlLoop
case strings.HasSuffix(url, ".deb"):
archiveType = "deb"
break urlLoop
case strings.HasSuffix(url, ".jar"):
archiveType = "jar"
break urlLoop
case strings.HasSuffix(url, ".tar.bz2"):
archiveType = "tar.bz2"
break urlLoop
case strings.HasSuffix(url, ".tar.gz"):
archiveType = "tar.gz"
break urlLoop
case strings.HasSuffix(url, ".tar.xz"):
archiveType = "tar.xz"
break urlLoop
case strings.HasSuffix(url, ".tar.zst"):
archiveType = "tar.zst"
break urlLoop
case strings.HasSuffix(url, ".tar"):
archiveType = "tar"
break urlLoop
case strings.HasSuffix(url, ".tgz"):
archiveType = "tgz"
break urlLoop
case strings.HasSuffix(url, ".txz"):
archiveType = "txz"
break urlLoop
case strings.HasSuffix(url, ".tzst"):
archiveType = "tzst"
break urlLoop
case strings.HasSuffix(url, ".war"):
archiveType = "war"
break urlLoop
case strings.HasSuffix(url, ".zip"):
archiveType = "zip"
break urlLoop
}
}
if archiveType == "" {
return false
}
rule.SetAttr("type", &build.StringExpr{Value: archiveType})
return true
}
// mirrorURL returns the first mirror URL for a rule.
func mirrorURL(rule *build.Rule) (string, error) {
urls := GetURLs(rule)
for _, url := range urls {
if strings.HasPrefix(url, edgelessMirrorPrefix) {
return url, nil
}
}
return "", fmt.Errorf("rule %s has no mirror url", rule.Name())
}
func setURLs(rule *build.Rule, urls []string) {
// delete single url attribute if it exists
rule.DelAttr("url")
urlsAttr := []build.Expr{}
for _, url := range urls {
urlsAttr = append(urlsAttr, &build.StringExpr{Value: url})
}
rule.SetAttr("urls", &build.ListExpr{List: urlsAttr, ForceMultiLine: true})
}
func sortURLs(urls []string) {
// Bazel mirror should be first
// edgeless mirror should be second
// other urls should be last
// if there are multiple urls from the same mirror, they should be sorted alphabetically
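// For example, ["https://example.com/foo", "https://cdn.confidential.cloud/constellation/cas/sha256/abc", "https://mirror.bazel.build/foo"]
// sorts to ["https://mirror.bazel.build/foo", "https://cdn.confidential.cloud/constellation/cas/sha256/abc", "https://example.com/foo"].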
sort.Slice(urls, func(i, j int) bool {
rank := func(url string) int {
if strings.HasPrefix(url, bazelMirrorPrefix) {
return 0
}
if strings.HasPrefix(url, edgelessMirrorPrefix) {
return 1
}
return 2
}
if rank(urls[i]) != rank(urls[j]) {
return rank(urls[i]) < rank(urls[j])
}
return urls[i] < urls[j]
})
}
// SupportedRules is a list of all rule kinds that can be mirrored.
var SupportedRules = []string{
"http_archive",
"http_file",
"rpm",
}
const (
bazelMirrorPrefix = "https://mirror.bazel.build/"
edgelessMirrorPrefix = "https://cdn.confidential.cloud/constellation/cas/sha256/"
)
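For orientation, the helpers above compose as in the following sketch. It is an illustration, not code from this change: it assumes it runs inside this repository (the rules package is internal), and the mirror url it adds is just the example hash from the tests below.
// Sketch: parse a snippet, report issues, add a mirror url, and normalize.
// build.Parse and build.Format are the buildtools APIs already used above.
package main

import (
	"fmt"

	"github.com/bazelbuild/buildtools/build"
	"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules"
)

func main() {
	src := []byte(`
http_archive(
    name = "foo_archive",
    sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
    url = "https://example.com/foo.tar.gz",
)
`)
	bf, err := build.Parse("WORKSPACE.bazel", src)
	if err != nil {
		panic(err)
	}
	for _, rule := range rules.Rules(bf, rules.SupportedRules) {
		fmt.Printf("%s: %d issue(s) before fixing\n", rule.Name(), len(rules.Check(rule)))
		rules.AddURLs(rule, []string{
			"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
		})
		rules.Normalize(rule) // sorts/dedups urls and adds type = "tar.gz"
		fmt.Printf("%s: %d issue(s) after fixing\n", rule.Name(), len(rules.Check(rule)))
	}
	fmt.Print(string(build.Format(bf)))
}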

View File

@ -0,0 +1,450 @@
/*
Copyright (c) Edgeless Systems GmbH
SPDX-License-Identifier: AGPL-3.0-only
*/
package rules
import (
"testing"
"github.com/bazelbuild/buildtools/build"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/goleak"
)
func TestMain(m *testing.M) {
goleak.VerifyTestMain(m)
}
func TestRules(t *testing.T) {
assert := assert.New(t)
const bzlFileContents = `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
load("@bazeldnf//:deps.bzl", "rpm")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://example.com/foo.tar.gz"],
)
http_file(
name = "bar_file",
sha256 = "fcde2b2edba56bf408601fb721fe9b5c338d10ee429ea04fae5511b68fbf8fb9",
urls = ["https://example.com/bar"],
)
rpm(
name = "baz_rpm",
sha256 = "9e7ab438597fee20e16e8e441bed0ce966bd59e0fb993fa7c94be31fb1384d88",
urls = ["https://example.com/baz.rpm"],
)
git_repository(
name = "qux_git",
remote = "https://example.com/qux.git",
commit = "1234567890abcdef",
)
`
bf, err := build.Parse("foo.bzl", []byte(bzlFileContents))
if err != nil {
t.Fatal(err)
}
rules := Rules(bf, SupportedRules)
assert.Len(rules, 3)
expectedNames := []string{"foo_archive", "bar_file", "baz_rpm"}
for i, rule := range rules {
assert.Equal(expectedNames[i], rule.Name())
}
allRules := Rules(bf, nil)
assert.Len(allRules, 4)
expectedNames = []string{"foo_archive", "bar_file", "baz_rpm", "qux_git"}
for i, rule := range allRules {
assert.Equal(expectedNames[i], rule.Name())
}
}
func TestValidatePinned(t *testing.T) {
testCases := map[string]struct {
rule string
expectedIssueCount int
}{
"no issues, singular url": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
url = "https://example.com/foo.tar.gz",
)
`,
expectedIssueCount: 0,
},
"no issues, url list": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://example.com/foo.tar.gz"],
)
`,
expectedIssueCount: 0,
},
"no issues, url list with multiple urls": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://example.com/foo.tar.gz", "https://example.com/foo2.tar.gz"],
)
`,
expectedIssueCount: 0,
},
"missing name": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
url = "https://example.com/foo.tar.gz",
)
`,
expectedIssueCount: 1,
},
"missing sha256 attribute": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
url = "https://example.com/foo.tar.gz",
)
`,
expectedIssueCount: 1,
},
"missing url attribute": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
)
`,
expectedIssueCount: 1,
},
"url and urls attribute given": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
url = "https://example.com/foo.tar.gz",
urls = ["https://example.com/foo.tar.gz"],
)
`,
expectedIssueCount: 1,
},
"empty url attribute": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
url = "",
)
`,
expectedIssueCount: 1,
},
"empty urls attribute": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = [],
)
`,
expectedIssueCount: 1,
},
"empty url in urls attribute": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = [""],
)
`,
expectedIssueCount: 1,
},
"empty sha256 attribute": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "",
url = "https://example.com/foo.tar.gz",
)
`,
expectedIssueCount: 1,
},
"missing all attributes": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
)
`,
expectedIssueCount: 2,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
bf, err := build.Parse("foo.bzl", []byte(tc.rule))
if err != nil {
t.Fatal(err)
}
rules := Rules(bf, SupportedRules)
require.Len(rules, 1)
issues := ValidatePinned(rules[0])
if tc.expectedIssueCount == 0 {
assert.Nil(issues)
return
}
assert.Len(issues, tc.expectedIssueCount)
})
}
}
func TestCheckNormalize(t *testing.T) {
testCases := map[string]struct {
rule string
expectedIssueCount int
cannotFix bool
}{
"rule with single url": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
url = "https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
type = "tar.gz",
)
`,
expectedIssueCount: 1,
},
"rule with unsorted urls": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = [
"https://example.com/a/foo.tar.gz",
"https://example.com/b/foo.tar.gz",
"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
"https://mirror.bazel.build/example.com/a/foo.tar.gz",
],
type = "tar.gz",
)
`,
expectedIssueCount: 1,
},
"rule that is not mirrored": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://example.com/foo.tar.gz"],
type = "tar.gz",
)
`,
expectedIssueCount: 1,
cannotFix: true,
},
"http_archive with no type": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
"https://example.com/foo.tar.gz",
],
)
`,
expectedIssueCount: 1,
},
"rpm rule with urls that are not the mirror": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
rpm(
name = "foo_rpm",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = [
"https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
"https://example.com/foo.rpm",
],
)
`,
expectedIssueCount: 1,
},
"http_archive rule that is correct": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"],
type = "tar.gz",
)
`,
expectedIssueCount: 0,
},
"rpm rule that is correct": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
rpm(
name = "foo_rpm",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"],
)
`,
expectedIssueCount: 0,
},
"http_file rule that is correct": {
rule: `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file")
http_file(
name = "foo_file",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
urls = ["https://cdn.confidential.cloud/constellation/cas/sha256/2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae"],
)
`,
expectedIssueCount: 0,
},
}
for name, tc := range testCases {
t.Run(name, func(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
bf, err := build.Parse("foo.bzl", []byte(tc.rule))
if err != nil {
t.Fatal(err)
}
rules := Rules(bf, SupportedRules)
require.Len(rules, 1)
issues := Check(rules[0])
if tc.expectedIssueCount == 0 {
assert.Nil(issues)
return
}
assert.Len(issues, tc.expectedIssueCount)
changed := Normalize(rules[0])
if tc.expectedIssueCount > 0 && !tc.cannotFix {
assert.True(changed)
} else {
assert.False(changed)
}
if tc.cannotFix {
assert.NotNil(Check(rules[0]))
} else {
assert.Nil(Check(rules[0]))
}
})
}
}
func TestAddURLs(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
rule := `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
)
`
bf, err := build.Parse("foo.bzl", []byte(rule))
if err != nil {
t.Fatal(err)
}
rules := Rules(bf, SupportedRules)
require.Len(rules, 1)
AddURLs(rules[0], []string{"https://example.com/a", "https://example.com/b"})
assert.Equal([]string{"https://example.com/a", "https://example.com/b"}, GetURLs(rules[0]))
}
func TestGetHash(t *testing.T) {
assert := assert.New(t)
require := require.New(t)
rule := `
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
http_archive(
name = "foo_archive",
sha256 = "2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
)
http_archive(
name = "bar_archive",
)
`
bf, err := build.Parse("foo.bzl", []byte(rule))
if err != nil {
t.Fatal(err)
}
rules := Rules(bf, SupportedRules)
require.Len(rules, 2)
hash, err := GetHash(rules[0])
assert.NoError(err)
assert.Equal("2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae", hash)
_, err = GetHash(rules[1])
assert.Error(err)
}

View File

@ -37,11 +37,19 @@ replace (
) )
require ( require (
github.com/aws/aws-sdk-go-v2/config v1.18.19
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59
github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0
github.com/bazelbuild/buildtools v0.0.0-20230317132445-9c3c1fc0106e
github.com/edgelesssys/constellation/v2 v2.6.0 github.com/edgelesssys/constellation/v2 v2.6.0
github.com/go-git/go-git/v5 v5.5.2 github.com/go-git/go-git/v5 v5.5.2
github.com/hexops/gotextdiff v1.0.3
github.com/spf13/afero v1.9.5
github.com/spf13/cobra v1.6.1 github.com/spf13/cobra v1.6.1
github.com/stretchr/testify v1.8.2 github.com/stretchr/testify v1.8.2
go.uber.org/goleak v1.2.1
go.uber.org/zap v1.24.0 go.uber.org/zap v1.24.0
golang.org/x/exp v0.0.0-20220823124025-807a23277127
golang.org/x/mod v0.8.0 golang.org/x/mod v0.8.0
gopkg.in/square/go-jose.v2 v2.6.0 gopkg.in/square/go-jose.v2 v2.6.0
libvirt.org/go/libvirt v1.8010.0 libvirt.org/go/libvirt v1.8010.0
@ -80,10 +88,8 @@ require (
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d // indirect
github.com/aws/aws-sdk-go-v2 v1.17.7 // indirect github.com/aws/aws-sdk-go-v2 v1.17.7 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect
github.com/aws/aws-sdk-go-v2/config v1.18.19 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.13.18 // indirect github.com/aws/aws-sdk-go-v2/credentials v1.13.18 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1 // indirect github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.59 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 // indirect github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.31 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 // indirect github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.25 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 // indirect github.com/aws/aws-sdk-go-v2/internal/ini v1.3.32 // indirect
@ -94,7 +100,6 @@ require (
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 // indirect github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.26 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 // indirect github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.25 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 // indirect github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.14.0 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.31.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.12.6 // indirect github.com/aws/aws-sdk-go-v2/service/sso v1.12.6 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 // indirect github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.6 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.18.7 // indirect github.com/aws/aws-sdk-go-v2/service/sts v1.18.7 // indirect
@ -239,14 +244,13 @@ require (
github.com/sassoftware/relic v0.0.0-20210427151427-dfb082b79b74 // indirect github.com/sassoftware/relic v0.0.0-20210427151427-dfb082b79b74 // indirect
github.com/schollz/progressbar/v3 v3.13.1 // indirect github.com/schollz/progressbar/v3 v3.13.1 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.5.0 // indirect github.com/secure-systems-lab/go-securesystemslib v0.5.0 // indirect
github.com/sergi/go-diff v1.2.0 // indirect github.com/sergi/go-diff v1.3.1 // indirect
github.com/shopspring/decimal v1.3.1 // indirect github.com/shopspring/decimal v1.3.1 // indirect
github.com/siderolabs/talos/pkg/machinery v1.3.2 // indirect github.com/siderolabs/talos/pkg/machinery v1.3.2 // indirect
github.com/sigstore/rekor v1.0.1 // indirect github.com/sigstore/rekor v1.0.1 // indirect
github.com/sigstore/sigstore v1.6.0 // indirect github.com/sigstore/sigstore v1.6.0 // indirect
github.com/sirupsen/logrus v1.9.0 // indirect github.com/sirupsen/logrus v1.9.0 // indirect
github.com/skeema/knownhosts v1.1.0 // indirect github.com/skeema/knownhosts v1.1.0 // indirect
github.com/spf13/afero v1.9.5 // indirect
github.com/spf13/cast v1.5.0 // indirect github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect github.com/spf13/pflag v1.0.5 // indirect
github.com/tent/canonical-json-go v0.0.0-20130607151641-96e4ba3a7613 // indirect github.com/tent/canonical-json-go v0.0.0-20130607151641-96e4ba3a7613 // indirect
@ -265,7 +269,6 @@ require (
go.uber.org/atomic v1.10.0 // indirect go.uber.org/atomic v1.10.0 // indirect
go.uber.org/multierr v1.9.0 // indirect go.uber.org/multierr v1.9.0 // indirect
golang.org/x/crypto v0.6.0 // indirect golang.org/x/crypto v0.6.0 // indirect
golang.org/x/exp v0.0.0-20220823124025-807a23277127 // indirect
golang.org/x/net v0.8.0 // indirect golang.org/x/net v0.8.0 // indirect
golang.org/x/oauth2 v0.6.0 // indirect golang.org/x/oauth2 v0.6.0 // indirect
golang.org/x/sync v0.1.0 // indirect golang.org/x/sync v0.1.0 // indirect

View File

@ -257,6 +257,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.18.7/go.mod h1:JuTnSoeePXmMVe9G8Ncjj
github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8= github.com/aws/smithy-go v1.13.5 h1:hgz0X/DX0dGqTYpGALqXJoRKRj5oQ7150i5FdTePzO8=
github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA= github.com/aws/smithy-go v1.13.5/go.mod h1:Tg+OJXh4MB2R/uN61Ko2f6hTZwB/ZYGOtib8J3gBHzA=
github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I= github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod h1:q/89r3U2H7sSsE2t6Kca0lfwTK8JdoNGS/yzM/4iH5I=
github.com/bazelbuild/buildtools v0.0.0-20230317132445-9c3c1fc0106e h1:XmPu4mXICgdGnC5dXGjUGbwUD/kUmS0l5Aop3LaevBM=
github.com/bazelbuild/buildtools v0.0.0-20230317132445-9c3c1fc0106e/go.mod h1:689QdV3hBP7Vo9dJMmzhoYIyo/9iMhEmHkJcnaPRCbo=
github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A= github.com/beevik/etree v1.1.0/go.mod h1:r8Aw8JqVegEf0w2fDnATrX9VpkMcyFeM0FhwO62wh+A=
github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM= github.com/benbjohnson/clock v1.0.3/go.mod h1:bGMdMPoPVvcYyt1gHDf4J2KE153Yf9BuiUKYMaxlTDM=
github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A=
@ -809,6 +811,8 @@ github.com/hashicorp/terraform-exec v0.17.3 h1:MX14Kvnka/oWGmIkyuyvL6POx25ZmKrjl
github.com/hashicorp/terraform-exec v0.17.3/go.mod h1:+NELG0EqQekJzhvikkeQsOAZpsw0cv/03rbeQJqscAI= github.com/hashicorp/terraform-exec v0.17.3/go.mod h1:+NELG0EqQekJzhvikkeQsOAZpsw0cv/03rbeQJqscAI=
github.com/hashicorp/terraform-json v0.14.0 h1:sh9iZ1Y8IFJLx+xQiKHGud6/TSUCM0N8e17dKDpqV7s= github.com/hashicorp/terraform-json v0.14.0 h1:sh9iZ1Y8IFJLx+xQiKHGud6/TSUCM0N8e17dKDpqV7s=
github.com/hashicorp/terraform-json v0.14.0/go.mod h1:5A9HIWPkk4e5aeeXIBbkcOvaZbIYnAIkEyqP2pNSckM= github.com/hashicorp/terraform-json v0.14.0/go.mod h1:5A9HIWPkk4e5aeeXIBbkcOvaZbIYnAIkEyqP2pNSckM=
github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM=
github.com/hexops/gotextdiff v1.0.3/go.mod h1:pSWU5MAI3yDq+fZBTazCSJysOMbxWL1BSow5/V2vxeg=
github.com/honeycombio/beeline-go v1.10.0 h1:cUDe555oqvw8oD76BQJ8alk7FP0JZ/M/zXpNvOEDLDc= github.com/honeycombio/beeline-go v1.10.0 h1:cUDe555oqvw8oD76BQJ8alk7FP0JZ/M/zXpNvOEDLDc=
github.com/honeycombio/libhoney-go v1.16.0 h1:kPpqoz6vbOzgp7jC6SR7SkNj7rua7rgxvznI6M3KdHc= github.com/honeycombio/libhoney-go v1.16.0 h1:kPpqoz6vbOzgp7jC6SR7SkNj7rua7rgxvznI6M3KdHc=
github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c/go.mod h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs= github.com/howeyc/gopass v0.0.0-20190910152052-7cb4b85ec19c/go.mod h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs=
@ -1208,8 +1212,9 @@ github.com/secure-systems-lab/go-securesystemslib v0.5.0 h1:oTiNu0QnulMQgN/hLK12
github.com/secure-systems-lab/go-securesystemslib v0.5.0/go.mod h1:uoCqUC0Ap7jrBSEanxT+SdACYJTVplRXWLkGMuDjXqk= github.com/secure-systems-lab/go-securesystemslib v0.5.0/go.mod h1:uoCqUC0Ap7jrBSEanxT+SdACYJTVplRXWLkGMuDjXqk=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/sergi/go-diff v1.3.1 h1:xkr+Oxo4BOQKmkn/B9eMK0g5Kg/983T9DqqPHwYqD+8=
github.com/sergi/go-diff v1.3.1/go.mod h1:aMJSSKb2lpPvRNec0+w3fl7LP9IOFzdc9Pa4NFbPK1I=
github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
@ -1424,6 +1429,7 @@ go.opentelemetry.io/otel/sdk/export/metric v0.20.0/go.mod h1:h7RBNMsDJ5pmI1zExLi
go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4/0TjTXukfxjzSTpHE= go.opentelemetry.io/otel/sdk/metric v0.20.0/go.mod h1:knxiS8Xd4E/N+ZqKmUPf3gTTZ4/0TjTXukfxjzSTpHE=
go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw= go.opentelemetry.io/otel/trace v0.20.0/go.mod h1:6GjCW8zgDjwGHGa6GkyeB8+/5vjT16gUEi0Nf1iBdgw=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI= go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
go.starlark.net v0.0.0-20210223155950-e043a3d3c984/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0=
go.starlark.net v0.0.0-20220223235035-243c74974e97 h1:ghIB+2LQvihWROIGpcAVPq/ce5O2uMQersgxXiOeTS4= go.starlark.net v0.0.0-20220223235035-243c74974e97 h1:ghIB+2LQvihWROIGpcAVPq/ce5O2uMQersgxXiOeTS4=
go.starlark.net v0.0.0-20220223235035-243c74974e97/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0= go.starlark.net v0.0.0-20220223235035-243c74974e97/go.mod h1:t3mmBBPzAVvK0L0n1drDmrQsJ8FoIx4INCqVMTr/Zo0=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@ -1435,6 +1441,7 @@ go.uber.org/atomic v1.10.0 h1:9qC72Qh0+3MqyJbAn8YU5xVq1frD8bn3JtD2oXtafVQ=
go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A= go.uber.org/goleak v1.1.10/go.mod h1:8a7PlsEVH3e/a/GLqe5IIrQx6GzcnRmZEufDUTk4A7A=
go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A= go.uber.org/goleak v1.2.1 h1:NBol2c7O1ZokfZ0LEU9K6Whx/KnwvepVetCUhtKja4A=
go.uber.org/goleak v1.2.1/go.mod h1:qlT2yGI9QafXHhZZLxlSuNsMw3FFLxBr+tBRlmO1xH4=
go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.3.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=
go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4= go.uber.org/multierr v1.4.0/go.mod h1:VgVr7evmIr6uPjLBxg28wmKNXyqE9akIJ5XnfpiKl+4=

View File

@ -1,5 +1,5 @@
# syntax=docker/dockerfile:1.5-labs # syntax=docker/dockerfile:1.5-labs
FROM alpine:3.17.2@sha256:e2e16842c9b54d985bf1ef9242a313f36b856181f188de21313820e177002501 as builder FROM alpine:3.17.3@sha256:b6ca290b6b4cdcca5b3db3ffa338ee0285c11744b4a6abaa9627746ee3291d8d as builder
# #
# Install dependencies # Install dependencies

View File

@ -1,5 +1,5 @@
# syntax=docker/dockerfile:1.5-labs # syntax=docker/dockerfile:1.5-labs
FROM alpine:3.17.2@sha256:e2e16842c9b54d985bf1ef9242a313f36b856181f188de21313820e177002501 as builder FROM alpine:3.17.3@sha256:b6ca290b6b4cdcca5b3db3ffa338ee0285c11744b4a6abaa9627746ee3291d8d as builder
ADD --checksum=sha256:11968a8b706095a081ac30168849b351b0263a6df5c224119aa914d7e5afb0c1 \ ADD --checksum=sha256:11968a8b706095a081ac30168849b351b0263a6df5c224119aa914d7e5afb0c1 \
https://github.com/reproducible-containers/repro-get/releases/download/v0.3.0/repro-get-v0.3.0.linux-amd64 \ https://github.com/reproducible-containers/repro-get/releases/download/v0.3.0/repro-get-v0.3.0.linux-amd64 \

View File

@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "joinproto_proto", name = "joinproto_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/joinservice/joinproto", importpath = "github.com/edgelesssys/constellation/v2/joinservice/joinproto",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "join.pb.go",
go_proto_library = ":joinproto_go_proto",
visibility = ["//visibility:public"],
)

View File

@ -1,12 +1,16 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.1 // protoc-gen-go v1.29.1
// protoc v3.21.8 // protoc v4.22.1
// source: join.proto // source: joinservice/joinproto/join.proto
package joinproto package joinproto
import ( import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" reflect "reflect"
@ -25,18 +29,15 @@ type IssueJoinTicketRequest struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// disk_uuid is the UUID of a node's state disk. DiskUuid string `protobuf:"bytes,1,opt,name=disk_uuid,json=diskUuid,proto3" json:"disk_uuid,omitempty"`
DiskUuid string `protobuf:"bytes,1,opt,name=disk_uuid,json=diskUuid,proto3" json:"disk_uuid,omitempty"`
// certificate_request is a certificate request for the node's kubelet certificate.
CertificateRequest []byte `protobuf:"bytes,2,opt,name=certificate_request,json=certificateRequest,proto3" json:"certificate_request,omitempty"` CertificateRequest []byte `protobuf:"bytes,2,opt,name=certificate_request,json=certificateRequest,proto3" json:"certificate_request,omitempty"`
// is_control_plane indicates whether the node is a control-plane node. IsControlPlane bool `protobuf:"varint,3,opt,name=is_control_plane,json=isControlPlane,proto3" json:"is_control_plane,omitempty"`
IsControlPlane bool `protobuf:"varint,3,opt,name=is_control_plane,json=isControlPlane,proto3" json:"is_control_plane,omitempty"`
} }
func (x *IssueJoinTicketRequest) Reset() { func (x *IssueJoinTicketRequest) Reset() {
*x = IssueJoinTicketRequest{} *x = IssueJoinTicketRequest{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_join_proto_msgTypes[0] mi := &file_joinservice_joinproto_join_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -49,7 +50,7 @@ func (x *IssueJoinTicketRequest) String() string {
func (*IssueJoinTicketRequest) ProtoMessage() {} func (*IssueJoinTicketRequest) ProtoMessage() {}
func (x *IssueJoinTicketRequest) ProtoReflect() protoreflect.Message { func (x *IssueJoinTicketRequest) ProtoReflect() protoreflect.Message {
mi := &file_join_proto_msgTypes[0] mi := &file_joinservice_joinproto_join_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -62,7 +63,7 @@ func (x *IssueJoinTicketRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use IssueJoinTicketRequest.ProtoReflect.Descriptor instead. // Deprecated: Use IssueJoinTicketRequest.ProtoReflect.Descriptor instead.
func (*IssueJoinTicketRequest) Descriptor() ([]byte, []int) { func (*IssueJoinTicketRequest) Descriptor() ([]byte, []int) {
return file_join_proto_rawDescGZIP(), []int{0} return file_joinservice_joinproto_join_proto_rawDescGZIP(), []int{0}
} }
func (x *IssueJoinTicketRequest) GetDiskUuid() string { func (x *IssueJoinTicketRequest) GetDiskUuid() string {
@ -91,34 +92,22 @@ type IssueJoinTicketResponse struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// state_disk_key is the key used to encrypt the state disk. StateDiskKey []byte `protobuf:"bytes,1,opt,name=state_disk_key,json=stateDiskKey,proto3" json:"state_disk_key,omitempty"`
StateDiskKey []byte `protobuf:"bytes,1,opt,name=state_disk_key,json=stateDiskKey,proto3" json:"state_disk_key,omitempty"` MeasurementSalt []byte `protobuf:"bytes,2,opt,name=measurement_salt,json=measurementSalt,proto3" json:"measurement_salt,omitempty"`
// measurement_salt is a salt used to derive the node's ClusterID. MeasurementSecret []byte `protobuf:"bytes,3,opt,name=measurement_secret,json=measurementSecret,proto3" json:"measurement_secret,omitempty"`
// This value is persisted on the state disk. KubeletCert []byte `protobuf:"bytes,4,opt,name=kubelet_cert,json=kubeletCert,proto3" json:"kubelet_cert,omitempty"`
MeasurementSalt []byte `protobuf:"bytes,2,opt,name=measurement_salt,json=measurementSalt,proto3" json:"measurement_salt,omitempty"` ApiServerEndpoint string `protobuf:"bytes,5,opt,name=api_server_endpoint,json=apiServerEndpoint,proto3" json:"api_server_endpoint,omitempty"`
// measurement_secret is a secret used to derive the node's ClusterID. Token string `protobuf:"bytes,6,opt,name=token,proto3" json:"token,omitempty"`
// This value is NOT persisted on the state disk. DiscoveryTokenCaCertHash string `protobuf:"bytes,7,opt,name=discovery_token_ca_cert_hash,json=discoveryTokenCaCertHash,proto3" json:"discovery_token_ca_cert_hash,omitempty"`
MeasurementSecret []byte `protobuf:"bytes,3,opt,name=measurement_secret,json=measurementSecret,proto3" json:"measurement_secret,omitempty"` ControlPlaneFiles []*ControlPlaneCertOrKey `protobuf:"bytes,8,rep,name=control_plane_files,json=controlPlaneFiles,proto3" json:"control_plane_files,omitempty"`
// kubelet_cert is the certificate to be used by the kubelet. KubernetesVersion string `protobuf:"bytes,9,opt,name=kubernetes_version,json=kubernetesVersion,proto3" json:"kubernetes_version,omitempty"`
KubeletCert []byte `protobuf:"bytes,4,opt,name=kubelet_cert,json=kubeletCert,proto3" json:"kubelet_cert,omitempty"` KubernetesComponents []*KubernetesComponent `protobuf:"bytes,10,rep,name=kubernetes_components,json=kubernetesComponents,proto3" json:"kubernetes_components,omitempty"`
// api_server_endpoint is the endpoint of Constellation's API server.
ApiServerEndpoint string `protobuf:"bytes,5,opt,name=api_server_endpoint,json=apiServerEndpoint,proto3" json:"api_server_endpoint,omitempty"`
// token is the Kubernetes Join Token to be used by the node to join the cluster.
Token string `protobuf:"bytes,6,opt,name=token,proto3" json:"token,omitempty"`
// discovery_token_ca_cert_hash is a hash of the root certificate authority presented by the Kubernetes control-plane.
DiscoveryTokenCaCertHash string `protobuf:"bytes,7,opt,name=discovery_token_ca_cert_hash,json=discoveryTokenCaCertHash,proto3" json:"discovery_token_ca_cert_hash,omitempty"`
// control_plane_files is a list of control-plane certificates and keys.
ControlPlaneFiles []*ControlPlaneCertOrKey `protobuf:"bytes,8,rep,name=control_plane_files,json=controlPlaneFiles,proto3" json:"control_plane_files,omitempty"`
// kubernetes_version is the Kubernetes version to install on the node.
KubernetesVersion string `protobuf:"bytes,9,opt,name=kubernetes_version,json=kubernetesVersion,proto3" json:"kubernetes_version,omitempty"`
// kubernetes_components is a list of components to install on the node.
KubernetesComponents []*KubernetesComponent `protobuf:"bytes,10,rep,name=kubernetes_components,json=kubernetesComponents,proto3" json:"kubernetes_components,omitempty"`
} }
func (x *IssueJoinTicketResponse) Reset() { func (x *IssueJoinTicketResponse) Reset() {
*x = IssueJoinTicketResponse{} *x = IssueJoinTicketResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_join_proto_msgTypes[1] mi := &file_joinservice_joinproto_join_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -131,7 +120,7 @@ func (x *IssueJoinTicketResponse) String() string {
func (*IssueJoinTicketResponse) ProtoMessage() {} func (*IssueJoinTicketResponse) ProtoMessage() {}
func (x *IssueJoinTicketResponse) ProtoReflect() protoreflect.Message { func (x *IssueJoinTicketResponse) ProtoReflect() protoreflect.Message {
mi := &file_join_proto_msgTypes[1] mi := &file_joinservice_joinproto_join_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -144,7 +133,7 @@ func (x *IssueJoinTicketResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use IssueJoinTicketResponse.ProtoReflect.Descriptor instead. // Deprecated: Use IssueJoinTicketResponse.ProtoReflect.Descriptor instead.
func (*IssueJoinTicketResponse) Descriptor() ([]byte, []int) { func (*IssueJoinTicketResponse) Descriptor() ([]byte, []int) {
return file_join_proto_rawDescGZIP(), []int{1} return file_joinservice_joinproto_join_proto_rawDescGZIP(), []int{1}
} }
func (x *IssueJoinTicketResponse) GetStateDiskKey() []byte { func (x *IssueJoinTicketResponse) GetStateDiskKey() []byte {
@ -222,16 +211,14 @@ type ControlPlaneCertOrKey struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// name of the certificate or key.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"` Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// data of the certificate or key.
Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"` Data []byte `protobuf:"bytes,2,opt,name=data,proto3" json:"data,omitempty"`
} }
func (x *ControlPlaneCertOrKey) Reset() { func (x *ControlPlaneCertOrKey) Reset() {
*x = ControlPlaneCertOrKey{} *x = ControlPlaneCertOrKey{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_join_proto_msgTypes[2] mi := &file_joinservice_joinproto_join_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -244,7 +231,7 @@ func (x *ControlPlaneCertOrKey) String() string {
func (*ControlPlaneCertOrKey) ProtoMessage() {} func (*ControlPlaneCertOrKey) ProtoMessage() {}
func (x *ControlPlaneCertOrKey) ProtoReflect() protoreflect.Message { func (x *ControlPlaneCertOrKey) ProtoReflect() protoreflect.Message {
mi := &file_join_proto_msgTypes[2] mi := &file_joinservice_joinproto_join_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -257,7 +244,7 @@ func (x *ControlPlaneCertOrKey) ProtoReflect() protoreflect.Message {
// Deprecated: Use ControlPlaneCertOrKey.ProtoReflect.Descriptor instead. // Deprecated: Use ControlPlaneCertOrKey.ProtoReflect.Descriptor instead.
func (*ControlPlaneCertOrKey) Descriptor() ([]byte, []int) { func (*ControlPlaneCertOrKey) Descriptor() ([]byte, []int) {
return file_join_proto_rawDescGZIP(), []int{2} return file_joinservice_joinproto_join_proto_rawDescGZIP(), []int{2}
} }
func (x *ControlPlaneCertOrKey) GetName() string { func (x *ControlPlaneCertOrKey) GetName() string {
@ -279,14 +266,13 @@ type IssueRejoinTicketRequest struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// disk_uuid is the UUID of a node's state disk.
DiskUuid string `protobuf:"bytes,1,opt,name=disk_uuid,json=diskUuid,proto3" json:"disk_uuid,omitempty"` DiskUuid string `protobuf:"bytes,1,opt,name=disk_uuid,json=diskUuid,proto3" json:"disk_uuid,omitempty"`
} }
func (x *IssueRejoinTicketRequest) Reset() { func (x *IssueRejoinTicketRequest) Reset() {
*x = IssueRejoinTicketRequest{} *x = IssueRejoinTicketRequest{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_join_proto_msgTypes[3] mi := &file_joinservice_joinproto_join_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -299,7 +285,7 @@ func (x *IssueRejoinTicketRequest) String() string {
func (*IssueRejoinTicketRequest) ProtoMessage() {} func (*IssueRejoinTicketRequest) ProtoMessage() {}
func (x *IssueRejoinTicketRequest) ProtoReflect() protoreflect.Message { func (x *IssueRejoinTicketRequest) ProtoReflect() protoreflect.Message {
mi := &file_join_proto_msgTypes[3] mi := &file_joinservice_joinproto_join_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -312,7 +298,7 @@ func (x *IssueRejoinTicketRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use IssueRejoinTicketRequest.ProtoReflect.Descriptor instead. // Deprecated: Use IssueRejoinTicketRequest.ProtoReflect.Descriptor instead.
func (*IssueRejoinTicketRequest) Descriptor() ([]byte, []int) { func (*IssueRejoinTicketRequest) Descriptor() ([]byte, []int) {
return file_join_proto_rawDescGZIP(), []int{3} return file_joinservice_joinproto_join_proto_rawDescGZIP(), []int{3}
} }
func (x *IssueRejoinTicketRequest) GetDiskUuid() string { func (x *IssueRejoinTicketRequest) GetDiskUuid() string {
@ -327,17 +313,14 @@ type IssueRejoinTicketResponse struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// state_disk_key is the key to decrypt the state disk. StateDiskKey []byte `protobuf:"bytes,1,opt,name=state_disk_key,json=stateDiskKey,proto3" json:"state_disk_key,omitempty"`
StateDiskKey []byte `protobuf:"bytes,1,opt,name=state_disk_key,json=stateDiskKey,proto3" json:"state_disk_key,omitempty"`
// measurement_secret is a secret used to derive the node's ClusterID.
// This value is NOT persisted on the state disk.
MeasurementSecret []byte `protobuf:"bytes,2,opt,name=measurement_secret,json=measurementSecret,proto3" json:"measurement_secret,omitempty"` MeasurementSecret []byte `protobuf:"bytes,2,opt,name=measurement_secret,json=measurementSecret,proto3" json:"measurement_secret,omitempty"`
} }
func (x *IssueRejoinTicketResponse) Reset() { func (x *IssueRejoinTicketResponse) Reset() {
*x = IssueRejoinTicketResponse{} *x = IssueRejoinTicketResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_join_proto_msgTypes[4] mi := &file_joinservice_joinproto_join_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -350,7 +333,7 @@ func (x *IssueRejoinTicketResponse) String() string {
func (*IssueRejoinTicketResponse) ProtoMessage() {} func (*IssueRejoinTicketResponse) ProtoMessage() {}
func (x *IssueRejoinTicketResponse) ProtoReflect() protoreflect.Message { func (x *IssueRejoinTicketResponse) ProtoReflect() protoreflect.Message {
mi := &file_join_proto_msgTypes[4] mi := &file_joinservice_joinproto_join_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -363,7 +346,7 @@ func (x *IssueRejoinTicketResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use IssueRejoinTicketResponse.ProtoReflect.Descriptor instead. // Deprecated: Use IssueRejoinTicketResponse.ProtoReflect.Descriptor instead.
func (*IssueRejoinTicketResponse) Descriptor() ([]byte, []int) { func (*IssueRejoinTicketResponse) Descriptor() ([]byte, []int) {
return file_join_proto_rawDescGZIP(), []int{4} return file_joinservice_joinproto_join_proto_rawDescGZIP(), []int{4}
} }
func (x *IssueRejoinTicketResponse) GetStateDiskKey() []byte { func (x *IssueRejoinTicketResponse) GetStateDiskKey() []byte {
@ -380,26 +363,21 @@ func (x *IssueRejoinTicketResponse) GetMeasurementSecret() []byte {
return nil return nil
} }
// Discuss if we want to import the init proto instead of duplicating it
type KubernetesComponent struct { type KubernetesComponent struct {
state protoimpl.MessageState state protoimpl.MessageState
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// url to download the component from. Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"`
Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` Hash string `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"`
// hash of the component.
Hash string `protobuf:"bytes,2,opt,name=hash,proto3" json:"hash,omitempty"`
// install_path is the path to install the component to.
InstallPath string `protobuf:"bytes,3,opt,name=install_path,json=installPath,proto3" json:"install_path,omitempty"` InstallPath string `protobuf:"bytes,3,opt,name=install_path,json=installPath,proto3" json:"install_path,omitempty"`
// extract indicates whether the component is an archive and needs to be extracted. Extract bool `protobuf:"varint,4,opt,name=extract,proto3" json:"extract,omitempty"`
Extract bool `protobuf:"varint,4,opt,name=extract,proto3" json:"extract,omitempty"`
} }
func (x *KubernetesComponent) Reset() { func (x *KubernetesComponent) Reset() {
*x = KubernetesComponent{} *x = KubernetesComponent{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_join_proto_msgTypes[5] mi := &file_joinservice_joinproto_join_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -412,7 +390,7 @@ func (x *KubernetesComponent) String() string {
func (*KubernetesComponent) ProtoMessage() {} func (*KubernetesComponent) ProtoMessage() {}
func (x *KubernetesComponent) ProtoReflect() protoreflect.Message { func (x *KubernetesComponent) ProtoReflect() protoreflect.Message {
mi := &file_join_proto_msgTypes[5] mi := &file_joinservice_joinproto_join_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -425,7 +403,7 @@ func (x *KubernetesComponent) ProtoReflect() protoreflect.Message {
// Deprecated: Use KubernetesComponent.ProtoReflect.Descriptor instead. // Deprecated: Use KubernetesComponent.ProtoReflect.Descriptor instead.
func (*KubernetesComponent) Descriptor() ([]byte, []int) { func (*KubernetesComponent) Descriptor() ([]byte, []int) {
return file_join_proto_rawDescGZIP(), []int{5} return file_joinservice_joinproto_join_proto_rawDescGZIP(), []int{5}
} }
func (x *KubernetesComponent) GetUrl() string { func (x *KubernetesComponent) GetUrl() string {
@ -456,107 +434,109 @@ func (x *KubernetesComponent) GetExtract() bool {
return false return false
} }
var File_join_proto protoreflect.FileDescriptor var File_joinservice_joinproto_join_proto protoreflect.FileDescriptor
var file_join_proto_rawDesc = []byte{ var file_joinservice_joinproto_join_proto_rawDesc = []byte{
0x0a, 0x0a, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x04, 0x6a, 0x6f, 0x0a, 0x20, 0x6a, 0x6f, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x6a, 0x6f,
0x69, 0x6e, 0x22, 0x90, 0x01, 0x0a, 0x16, 0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x69, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x70, 0x72, 0x6f,
0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x74, 0x6f, 0x12, 0x04, 0x6a, 0x6f, 0x69, 0x6e, 0x22, 0x90, 0x01, 0x0a, 0x16, 0x49, 0x73, 0x73,
0x09, 0x64, 0x69, 0x73, 0x6b, 0x5f, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75,
0x52, 0x08, 0x64, 0x69, 0x73, 0x6b, 0x55, 0x75, 0x69, 0x64, 0x12, 0x2f, 0x0a, 0x13, 0x63, 0x65, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x69, 0x73, 0x6b, 0x5f, 0x75, 0x75, 0x69, 0x64,
0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x5f, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x69, 0x73, 0x6b, 0x55, 0x75, 0x69, 0x64,
0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x12, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x12, 0x2f, 0x0a, 0x13, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x5f,
0x63, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x69, 0x72, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x12, 0x63,
0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x18, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x69, 0x73, 0x43, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x74, 0x12, 0x28, 0x0a, 0x10, 0x69, 0x73, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f,
0x50, 0x6c, 0x61, 0x6e, 0x65, 0x22, 0x92, 0x04, 0x0a, 0x17, 0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x0e, 0x69, 0x73, 0x43,
0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x50, 0x6c, 0x61, 0x6e, 0x65, 0x22, 0x92, 0x04, 0x0a, 0x17,
0x65, 0x12, 0x24, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x69, 0x73, 0x6b, 0x5f, 0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52,
0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x65, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x65,
0x44, 0x69, 0x73, 0x6b, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x10, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x5f, 0x64, 0x69, 0x73, 0x6b, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52,
0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x61, 0x6c, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x65, 0x44, 0x69, 0x73, 0x6b, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a,
0x0c, 0x52, 0x0f, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x61, 0x10, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x61, 0x6c,
0x6c, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0f, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65,
0x74, 0x5f, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x61, 0x6c, 0x74, 0x12, 0x2d, 0x0a, 0x12, 0x6d, 0x65, 0x61, 0x73,
0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x65, 0x63, 0x72, 0x65, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x03,
0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6b, 0x75, 0x62, 0x65, 0x6c, 0x65, 0x74, 0x5f, 0x63, 0x65, 0x72, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e,
0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x6b, 0x75, 0x62, 0x65, 0x6c, 0x65, 0x74, 0x74, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x12, 0x21, 0x0a, 0x0c, 0x6b, 0x75, 0x62, 0x65, 0x6c,
0x43, 0x65, 0x72, 0x74, 0x12, 0x2e, 0x0a, 0x13, 0x61, 0x70, 0x69, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x74, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x6b,
0x65, 0x72, 0x5f, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x75, 0x62, 0x65, 0x6c, 0x65, 0x74, 0x43, 0x65, 0x72, 0x74, 0x12, 0x2e, 0x0a, 0x13, 0x61, 0x70,
0x09, 0x52, 0x11, 0x61, 0x70, 0x69, 0x53, 0x65, 0x72, 0x76, 0x65, 0x72, 0x45, 0x6e, 0x64, 0x70, 0x69, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x65, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e,
0x6f, 0x69, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x06, 0x20, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x61, 0x70, 0x69, 0x53, 0x65, 0x72, 0x76,
0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x3e, 0x0a, 0x1c, 0x64, 0x69, 0x65, 0x72, 0x45, 0x6e, 0x64, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x74, 0x6f,
0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x79, 0x5f, 0x74, 0x6f, 0x6b, 0x65, 0x6e, 0x5f, 0x63, 0x61, 0x6b, 0x65, 0x6e, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x74, 0x6f, 0x6b, 0x65, 0x6e,
0x5f, 0x63, 0x65, 0x72, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x12, 0x3e, 0x0a, 0x1c, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x79, 0x5f, 0x74, 0x6f,
0x52, 0x18, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72, 0x79, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x6b, 0x65, 0x6e, 0x5f, 0x63, 0x61, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68,
0x43, 0x61, 0x43, 0x65, 0x72, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x4f, 0x0a, 0x13, 0x63, 0x6f, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x52, 0x18, 0x64, 0x69, 0x73, 0x63, 0x6f, 0x76, 0x65, 0x72,
0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x79, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x43, 0x61, 0x43, 0x65, 0x72, 0x74, 0x48, 0x61, 0x73, 0x68,
0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x63, 0x12, 0x4f, 0x0a, 0x13, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61, 0x6e,
0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x5f, 0x63, 0x65, 0x72, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1f, 0x2e,
0x74, 0x5f, 0x6f, 0x72, 0x5f, 0x6b, 0x65, 0x79, 0x52, 0x11, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61,
0x6c, 0x50, 0x6c, 0x61, 0x6e, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x6b, 0x6e, 0x65, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x5f, 0x6f, 0x72, 0x5f, 0x6b, 0x65, 0x79, 0x52, 0x11,
0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x50, 0x6c, 0x61, 0x6e, 0x65, 0x46, 0x69, 0x6c, 0x65,
0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x73, 0x12, 0x2d, 0x0a, 0x12, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f,
0x74, 0x65, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x4e, 0x0a, 0x15, 0x6b, 0x75, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x09, 0x20, 0x01, 0x28, 0x09, 0x52, 0x11, 0x6b,
0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e,
0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x19, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x12, 0x4e, 0x0a, 0x15, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x63,
0x2e, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32,
0x6e, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x19, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65,
0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73, 0x22, 0x43, 0x0a, 0x19, 0x63, 0x6f, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x52, 0x14, 0x6b, 0x75, 0x62, 0x65,
0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61, 0x6e, 0x65, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x73,
0x5f, 0x6f, 0x72, 0x5f, 0x6b, 0x65, 0x79, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x22, 0x43, 0x0a, 0x19, 0x63, 0x6f, 0x6e, 0x74, 0x72, 0x6f, 0x6c, 0x5f, 0x70, 0x6c, 0x61, 0x6e,
0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x65, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x5f, 0x6f, 0x72, 0x5f, 0x6b, 0x65, 0x79, 0x12, 0x12, 0x0a,
0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d,
0x37, 0x0a, 0x18, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x65, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52,
0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x37, 0x0a, 0x18, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65,
0x69, 0x73, 0x6b, 0x5f, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08,
0x64, 0x69, 0x73, 0x6b, 0x55, 0x75, 0x69, 0x64, 0x22, 0x70, 0x0a, 0x19, 0x49, 0x73, 0x73, 0x75,
0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x73,
0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x73, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x64,
0x69, 0x73, 0x6b, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x73,
0x74, 0x61, 0x74, 0x65, 0x44, 0x69, 0x73, 0x6b, 0x4b, 0x65, 0x79, 0x12, 0x2d, 0x0a, 0x12, 0x6d,
0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x5f, 0x73, 0x65, 0x63, 0x72, 0x65,
0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65,
0x6d, 0x65, 0x6e, 0x74, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74, 0x22, 0x78, 0x0a, 0x13, 0x4b, 0x75,
0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e,
0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03,
0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28,
0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a, 0x0c, 0x69, 0x6e, 0x73, 0x74, 0x61,
0x6c, 0x6c, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x69,
0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68, 0x12, 0x18, 0x0a, 0x07, 0x65, 0x78,
0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x65, 0x78, 0x74,
0x72, 0x61, 0x63, 0x74, 0x32, 0xab, 0x01, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x4e, 0x0a, 0x0f,
0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x12,
0x1c, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e,
0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e,
0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x69,
0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x11,
0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65,
0x74, 0x12, 0x1e, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65,
0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73,
0x74, 0x1a, 0x1f, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x64, 0x69, 0x73, 0x6b, 0x5f, 0x75, 0x75, 0x69, 0x64, 0x18, 0x01,
0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x64, 0x69, 0x73, 0x6b, 0x55, 0x75, 0x69, 0x64, 0x22, 0x70,
0x73, 0x65, 0x42, 0x3f, 0x5a, 0x3d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x0a, 0x19, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63,
0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x24, 0x0a, 0x0e, 0x73,
0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x6a, 0x6f, 0x74, 0x61, 0x74, 0x65, 0x5f, 0x64, 0x69, 0x73, 0x6b, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20,
0x69, 0x6e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x6a, 0x6f, 0x69, 0x6e, 0x70, 0x72, 0x01, 0x28, 0x0c, 0x52, 0x0c, 0x73, 0x74, 0x61, 0x74, 0x65, 0x44, 0x69, 0x73, 0x6b, 0x4b, 0x65,
0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, 0x79, 0x12, 0x2d, 0x0a, 0x12, 0x6d, 0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74,
0x5f, 0x73, 0x65, 0x63, 0x72, 0x65, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x6d,
0x65, 0x61, 0x73, 0x75, 0x72, 0x65, 0x6d, 0x65, 0x6e, 0x74, 0x53, 0x65, 0x63, 0x72, 0x65, 0x74,
0x22, 0x78, 0x0a, 0x13, 0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x43, 0x6f,
0x6d, 0x70, 0x6f, 0x6e, 0x65, 0x6e, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01,
0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x61, 0x73,
0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x61, 0x73, 0x68, 0x12, 0x21, 0x0a,
0x0c, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x5f, 0x70, 0x61, 0x74, 0x68, 0x18, 0x03, 0x20,
0x01, 0x28, 0x09, 0x52, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x6c, 0x6c, 0x50, 0x61, 0x74, 0x68,
0x12, 0x18, 0x0a, 0x07, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28,
0x08, 0x52, 0x07, 0x65, 0x78, 0x74, 0x72, 0x61, 0x63, 0x74, 0x32, 0xab, 0x01, 0x0a, 0x03, 0x41,
0x50, 0x49, 0x12, 0x4e, 0x0a, 0x0f, 0x49, 0x73, 0x73, 0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x54,
0x69, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x1c, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49, 0x73, 0x73,
0x75, 0x65, 0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x71, 0x75,
0x65, 0x73, 0x74, 0x1a, 0x1d, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49, 0x73, 0x73, 0x75, 0x65,
0x4a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x73, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x49, 0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69,
0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74, 0x12, 0x1e, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49,
0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74,
0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, 0x6a, 0x6f, 0x69, 0x6e, 0x2e, 0x49,
0x73, 0x73, 0x75, 0x65, 0x52, 0x65, 0x6a, 0x6f, 0x69, 0x6e, 0x54, 0x69, 0x63, 0x6b, 0x65, 0x74,
0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x3f, 0x5a, 0x3d, 0x67, 0x69, 0x74, 0x68,
0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73,
0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e,
0x2f, 0x76, 0x32, 0x2f, 0x6a, 0x6f, 0x69, 0x6e, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f,
0x6a, 0x6f, 0x69, 0x6e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x33,
} }
var ( var (
file_join_proto_rawDescOnce sync.Once file_joinservice_joinproto_join_proto_rawDescOnce sync.Once
file_join_proto_rawDescData = file_join_proto_rawDesc file_joinservice_joinproto_join_proto_rawDescData = file_joinservice_joinproto_join_proto_rawDesc
) )
func file_join_proto_rawDescGZIP() []byte { func file_joinservice_joinproto_join_proto_rawDescGZIP() []byte {
file_join_proto_rawDescOnce.Do(func() { file_joinservice_joinproto_join_proto_rawDescOnce.Do(func() {
file_join_proto_rawDescData = protoimpl.X.CompressGZIP(file_join_proto_rawDescData) file_joinservice_joinproto_join_proto_rawDescData = protoimpl.X.CompressGZIP(file_joinservice_joinproto_join_proto_rawDescData)
}) })
return file_join_proto_rawDescData return file_joinservice_joinproto_join_proto_rawDescData
} }
var file_join_proto_msgTypes = make([]protoimpl.MessageInfo, 6) var file_joinservice_joinproto_join_proto_msgTypes = make([]protoimpl.MessageInfo, 6)
var file_join_proto_goTypes = []interface{}{ var file_joinservice_joinproto_join_proto_goTypes = []interface{}{
(*IssueJoinTicketRequest)(nil), // 0: join.IssueJoinTicketRequest (*IssueJoinTicketRequest)(nil), // 0: join.IssueJoinTicketRequest
(*IssueJoinTicketResponse)(nil), // 1: join.IssueJoinTicketResponse (*IssueJoinTicketResponse)(nil), // 1: join.IssueJoinTicketResponse
(*ControlPlaneCertOrKey)(nil), // 2: join.control_plane_cert_or_key (*ControlPlaneCertOrKey)(nil), // 2: join.control_plane_cert_or_key
@ -564,7 +544,7 @@ var file_join_proto_goTypes = []interface{}{
(*IssueRejoinTicketResponse)(nil), // 4: join.IssueRejoinTicketResponse (*IssueRejoinTicketResponse)(nil), // 4: join.IssueRejoinTicketResponse
(*KubernetesComponent)(nil), // 5: join.KubernetesComponent (*KubernetesComponent)(nil), // 5: join.KubernetesComponent
} }
var file_join_proto_depIdxs = []int32{ var file_joinservice_joinproto_join_proto_depIdxs = []int32{
2, // 0: join.IssueJoinTicketResponse.control_plane_files:type_name -> join.control_plane_cert_or_key 2, // 0: join.IssueJoinTicketResponse.control_plane_files:type_name -> join.control_plane_cert_or_key
5, // 1: join.IssueJoinTicketResponse.kubernetes_components:type_name -> join.KubernetesComponent 5, // 1: join.IssueJoinTicketResponse.kubernetes_components:type_name -> join.KubernetesComponent
0, // 2: join.API.IssueJoinTicket:input_type -> join.IssueJoinTicketRequest 0, // 2: join.API.IssueJoinTicket:input_type -> join.IssueJoinTicketRequest
@ -578,13 +558,13 @@ var file_join_proto_depIdxs = []int32{
0, // [0:2] is the sub-list for field type_name 0, // [0:2] is the sub-list for field type_name
} }
func init() { file_join_proto_init() } func init() { file_joinservice_joinproto_join_proto_init() }
func file_join_proto_init() { func file_joinservice_joinproto_join_proto_init() {
if File_join_proto != nil { if File_joinservice_joinproto_join_proto != nil {
return return
} }
if !protoimpl.UnsafeEnabled { if !protoimpl.UnsafeEnabled {
file_join_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { file_joinservice_joinproto_join_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*IssueJoinTicketRequest); i { switch v := v.(*IssueJoinTicketRequest); i {
case 0: case 0:
return &v.state return &v.state
@ -596,7 +576,7 @@ func file_join_proto_init() {
return nil return nil
} }
} }
file_join_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { file_joinservice_joinproto_join_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*IssueJoinTicketResponse); i { switch v := v.(*IssueJoinTicketResponse); i {
case 0: case 0:
return &v.state return &v.state
@ -608,7 +588,7 @@ func file_join_proto_init() {
return nil return nil
} }
} }
file_join_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { file_joinservice_joinproto_join_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ControlPlaneCertOrKey); i { switch v := v.(*ControlPlaneCertOrKey); i {
case 0: case 0:
return &v.state return &v.state
@ -620,7 +600,7 @@ func file_join_proto_init() {
return nil return nil
} }
} }
file_join_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { file_joinservice_joinproto_join_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*IssueRejoinTicketRequest); i { switch v := v.(*IssueRejoinTicketRequest); i {
case 0: case 0:
return &v.state return &v.state
@ -632,7 +612,7 @@ func file_join_proto_init() {
return nil return nil
} }
} }
file_join_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { file_joinservice_joinproto_join_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*IssueRejoinTicketResponse); i { switch v := v.(*IssueRejoinTicketResponse); i {
case 0: case 0:
return &v.state return &v.state
@ -644,7 +624,7 @@ func file_join_proto_init() {
return nil return nil
} }
} }
file_join_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { file_joinservice_joinproto_join_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*KubernetesComponent); i { switch v := v.(*KubernetesComponent); i {
case 0: case 0:
return &v.state return &v.state
@ -661,18 +641,134 @@ func file_join_proto_init() {
out := protoimpl.TypeBuilder{ out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_join_proto_rawDesc, RawDescriptor: file_joinservice_joinproto_join_proto_rawDesc,
NumEnums: 0, NumEnums: 0,
NumMessages: 6, NumMessages: 6,
NumExtensions: 0, NumExtensions: 0,
NumServices: 1, NumServices: 1,
}, },
GoTypes: file_join_proto_goTypes, GoTypes: file_joinservice_joinproto_join_proto_goTypes,
DependencyIndexes: file_join_proto_depIdxs, DependencyIndexes: file_joinservice_joinproto_join_proto_depIdxs,
MessageInfos: file_join_proto_msgTypes, MessageInfos: file_joinservice_joinproto_join_proto_msgTypes,
}.Build() }.Build()
File_join_proto = out.File File_joinservice_joinproto_join_proto = out.File
file_join_proto_rawDesc = nil file_joinservice_joinproto_join_proto_rawDesc = nil
file_join_proto_goTypes = nil file_joinservice_joinproto_join_proto_goTypes = nil
file_join_proto_depIdxs = nil file_joinservice_joinproto_join_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type APIClient interface {
IssueJoinTicket(ctx context.Context, in *IssueJoinTicketRequest, opts ...grpc.CallOption) (*IssueJoinTicketResponse, error)
IssueRejoinTicket(ctx context.Context, in *IssueRejoinTicketRequest, opts ...grpc.CallOption) (*IssueRejoinTicketResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) IssueJoinTicket(ctx context.Context, in *IssueJoinTicketRequest, opts ...grpc.CallOption) (*IssueJoinTicketResponse, error) {
out := new(IssueJoinTicketResponse)
err := c.cc.Invoke(ctx, "/join.API/IssueJoinTicket", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *aPIClient) IssueRejoinTicket(ctx context.Context, in *IssueRejoinTicketRequest, opts ...grpc.CallOption) (*IssueRejoinTicketResponse, error) {
out := new(IssueRejoinTicketResponse)
err := c.cc.Invoke(ctx, "/join.API/IssueRejoinTicket", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
type APIServer interface {
IssueJoinTicket(context.Context, *IssueJoinTicketRequest) (*IssueJoinTicketResponse, error)
IssueRejoinTicket(context.Context, *IssueRejoinTicketRequest) (*IssueRejoinTicketResponse, error)
}
// UnimplementedAPIServer can be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (*UnimplementedAPIServer) IssueJoinTicket(context.Context, *IssueJoinTicketRequest) (*IssueJoinTicketResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method IssueJoinTicket not implemented")
}
func (*UnimplementedAPIServer) IssueRejoinTicket(context.Context, *IssueRejoinTicketRequest) (*IssueRejoinTicketResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method IssueRejoinTicket not implemented")
}
func RegisterAPIServer(s *grpc.Server, srv APIServer) {
s.RegisterService(&_API_serviceDesc, srv)
}
func _API_IssueJoinTicket_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IssueJoinTicketRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).IssueJoinTicket(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/join.API/IssueJoinTicket",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).IssueJoinTicket(ctx, req.(*IssueJoinTicketRequest))
}
return interceptor(ctx, in, info, handler)
}
func _API_IssueRejoinTicket_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IssueRejoinTicketRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).IssueRejoinTicket(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/join.API/IssueRejoinTicket",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).IssueRejoinTicket(ctx, req.(*IssueRejoinTicketRequest))
}
return interceptor(ctx, in, info, handler)
}
var _API_serviceDesc = grpc.ServiceDesc{
ServiceName: "join.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "IssueJoinTicket",
Handler: _API_IssueJoinTicket_Handler,
},
{
MethodName: "IssueRejoinTicket",
Handler: _API_IssueRejoinTicket_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "joinservice/joinproto/join.proto",
} }
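For orientation, here is a minimal sketch of how the regenerated join-service bindings above are typically wired into a gRPC server. The listen address, the `joinServer` type, and its stub logic are illustrative assumptions, not part of this change:

```go
package main

import (
	"context"
	"log"
	"net"

	"google.golang.org/grpc"

	"github.com/edgelesssys/constellation/v2/joinservice/joinproto"
)

// joinServer is a hypothetical implementation; embedding UnimplementedAPIServer
// keeps it forward compatible when new RPCs are added to the service.
type joinServer struct {
	joinproto.UnimplementedAPIServer
}

// IssueJoinTicket is a stub that only demonstrates the generated request/response types.
func (s *joinServer) IssueJoinTicket(ctx context.Context, req *joinproto.IssueJoinTicketRequest) (*joinproto.IssueJoinTicketResponse, error) {
	log.Printf("issuing join ticket for disk %s", req.GetDiskUuid())
	return &joinproto.IssueJoinTicketResponse{}, nil
}

func main() {
	lis, err := net.Listen("tcp", ":9090") // port is an assumption
	if err != nil {
		log.Fatal(err)
	}
	srv := grpc.NewServer()
	joinproto.RegisterAPIServer(srv, &joinServer{})
	log.Fatal(srv.Serve(lis))
}
```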


@ -1,145 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: join.proto
package joinproto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type APIClient interface {
// IssueJoinTicket issues a join ticket for a new node.
IssueJoinTicket(ctx context.Context, in *IssueJoinTicketRequest, opts ...grpc.CallOption) (*IssueJoinTicketResponse, error)
// IssueRejoinTicket issues a join ticket for a node that has previously joined the cluster.
IssueRejoinTicket(ctx context.Context, in *IssueRejoinTicketRequest, opts ...grpc.CallOption) (*IssueRejoinTicketResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) IssueJoinTicket(ctx context.Context, in *IssueJoinTicketRequest, opts ...grpc.CallOption) (*IssueJoinTicketResponse, error) {
out := new(IssueJoinTicketResponse)
err := c.cc.Invoke(ctx, "/join.API/IssueJoinTicket", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *aPIClient) IssueRejoinTicket(ctx context.Context, in *IssueRejoinTicketRequest, opts ...grpc.CallOption) (*IssueRejoinTicketResponse, error) {
out := new(IssueRejoinTicketResponse)
err := c.cc.Invoke(ctx, "/join.API/IssueRejoinTicket", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
// All implementations must embed UnimplementedAPIServer
// for forward compatibility
type APIServer interface {
// IssueJoinTicket issues a join ticket for a new node.
IssueJoinTicket(context.Context, *IssueJoinTicketRequest) (*IssueJoinTicketResponse, error)
// IssueRejoinTicket issues a join ticket for a node that has previously joined the cluster.
IssueRejoinTicket(context.Context, *IssueRejoinTicketRequest) (*IssueRejoinTicketResponse, error)
mustEmbedUnimplementedAPIServer()
}
// UnimplementedAPIServer must be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (UnimplementedAPIServer) IssueJoinTicket(context.Context, *IssueJoinTicketRequest) (*IssueJoinTicketResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method IssueJoinTicket not implemented")
}
func (UnimplementedAPIServer) IssueRejoinTicket(context.Context, *IssueRejoinTicketRequest) (*IssueRejoinTicketResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method IssueRejoinTicket not implemented")
}
func (UnimplementedAPIServer) mustEmbedUnimplementedAPIServer() {}
// UnsafeAPIServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to APIServer will
// result in compilation errors.
type UnsafeAPIServer interface {
mustEmbedUnimplementedAPIServer()
}
func RegisterAPIServer(s grpc.ServiceRegistrar, srv APIServer) {
s.RegisterService(&API_ServiceDesc, srv)
}
func _API_IssueJoinTicket_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IssueJoinTicketRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).IssueJoinTicket(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/join.API/IssueJoinTicket",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).IssueJoinTicket(ctx, req.(*IssueJoinTicketRequest))
}
return interceptor(ctx, in, info, handler)
}
func _API_IssueRejoinTicket_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IssueRejoinTicketRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).IssueRejoinTicket(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/join.API/IssueRejoinTicket",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).IssueRejoinTicket(ctx, req.(*IssueRejoinTicketRequest))
}
return interceptor(ctx, in, info, handler)
}
// API_ServiceDesc is the grpc.ServiceDesc for API service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var API_ServiceDesc = grpc.ServiceDesc{
ServiceName: "join.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "IssueJoinTicket",
Handler: _API_IssueJoinTicket_Handler,
},
{
MethodName: "IssueRejoinTicket",
Handler: _API_IssueRejoinTicket_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "join.proto",
}


@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "keyserviceproto_proto", name = "keyserviceproto_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/keyservice/keyserviceproto", importpath = "github.com/edgelesssys/constellation/v2/keyservice/keyserviceproto",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "keyservice.pb.go",
go_proto_library = ":keyserviceproto_go_proto",
visibility = ["//visibility:public"],
)
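Presumably — the exact semantics of `write_go_proto_srcs` live in `//bazel/proto:rules.bzl` and are an assumption here — this target writes the `keyservice.pb.go` produced by the `go_proto_library` back into the source tree. A hypothetical invocation, assuming the rule is runnable:

```bash
# Hypothetical invocation; assumes write_go_proto_srcs produces a runnable target.
bazel run //keyservice/keyserviceproto:write_generated_protos
```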


@ -1,12 +1,16 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.1 // protoc-gen-go v1.29.1
// protoc v3.21.8 // protoc v4.22.1
// source: keyservice.proto // source: keyservice/keyserviceproto/keyservice.proto
package keyserviceproto package keyserviceproto
import ( import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" reflect "reflect"
@ -32,7 +36,7 @@ type GetDataKeyRequest struct {
func (x *GetDataKeyRequest) Reset() { func (x *GetDataKeyRequest) Reset() {
*x = GetDataKeyRequest{} *x = GetDataKeyRequest{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_keyservice_proto_msgTypes[0] mi := &file_keyservice_keyserviceproto_keyservice_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -45,7 +49,7 @@ func (x *GetDataKeyRequest) String() string {
func (*GetDataKeyRequest) ProtoMessage() {} func (*GetDataKeyRequest) ProtoMessage() {}
func (x *GetDataKeyRequest) ProtoReflect() protoreflect.Message { func (x *GetDataKeyRequest) ProtoReflect() protoreflect.Message {
mi := &file_keyservice_proto_msgTypes[0] mi := &file_keyservice_keyserviceproto_keyservice_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -58,7 +62,7 @@ func (x *GetDataKeyRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use GetDataKeyRequest.ProtoReflect.Descriptor instead. // Deprecated: Use GetDataKeyRequest.ProtoReflect.Descriptor instead.
func (*GetDataKeyRequest) Descriptor() ([]byte, []int) { func (*GetDataKeyRequest) Descriptor() ([]byte, []int) {
return file_keyservice_proto_rawDescGZIP(), []int{0} return file_keyservice_keyserviceproto_keyservice_proto_rawDescGZIP(), []int{0}
} }
func (x *GetDataKeyRequest) GetDataKeyId() string { func (x *GetDataKeyRequest) GetDataKeyId() string {
@ -86,7 +90,7 @@ type GetDataKeyResponse struct {
func (x *GetDataKeyResponse) Reset() { func (x *GetDataKeyResponse) Reset() {
*x = GetDataKeyResponse{} *x = GetDataKeyResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_keyservice_proto_msgTypes[1] mi := &file_keyservice_keyserviceproto_keyservice_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -99,7 +103,7 @@ func (x *GetDataKeyResponse) String() string {
func (*GetDataKeyResponse) ProtoMessage() {} func (*GetDataKeyResponse) ProtoMessage() {}
func (x *GetDataKeyResponse) ProtoReflect() protoreflect.Message { func (x *GetDataKeyResponse) ProtoReflect() protoreflect.Message {
mi := &file_keyservice_proto_msgTypes[1] mi := &file_keyservice_keyserviceproto_keyservice_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -112,7 +116,7 @@ func (x *GetDataKeyResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use GetDataKeyResponse.ProtoReflect.Descriptor instead. // Deprecated: Use GetDataKeyResponse.ProtoReflect.Descriptor instead.
func (*GetDataKeyResponse) Descriptor() ([]byte, []int) { func (*GetDataKeyResponse) Descriptor() ([]byte, []int) {
return file_keyservice_proto_rawDescGZIP(), []int{1} return file_keyservice_keyserviceproto_keyservice_proto_rawDescGZIP(), []int{1}
} }
func (x *GetDataKeyResponse) GetDataKey() []byte { func (x *GetDataKeyResponse) GetDataKey() []byte {
@ -122,48 +126,50 @@ func (x *GetDataKeyResponse) GetDataKey() []byte {
return nil return nil
} }
var File_keyservice_proto protoreflect.FileDescriptor var File_keyservice_keyserviceproto_keyservice_proto protoreflect.FileDescriptor
var file_keyservice_proto_rawDesc = []byte{ var file_keyservice_keyserviceproto_keyservice_proto_rawDesc = []byte{
0x0a, 0x10, 0x6b, 0x65, 0x79, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x0a, 0x2b, 0x6b, 0x65, 0x79, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x6b, 0x65, 0x79,
0x74, 0x6f, 0x12, 0x03, 0x6b, 0x6d, 0x73, 0x22, 0x4b, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x44, 0x61, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x6b, 0x65, 0x79,
0x74, 0x61, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x03, 0x6b,
0x64, 0x61, 0x74, 0x61, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x6d, 0x73, 0x22, 0x4b, 0x0a, 0x11, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79,
0x09, 0x52, 0x09, 0x64, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1e, 0x0a, 0x0b, 0x64, 0x61, 0x74, 0x61, 0x5f,
0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x64, 0x61,
0x6e, 0x67, 0x74, 0x68, 0x22, 0x2f, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x4b, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74,
0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x64, 0x61, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x22,
0x74, 0x61, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x64, 0x61, 0x2f, 0x0a, 0x12, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x73,
0x74, 0x61, 0x4b, 0x65, 0x79, 0x32, 0x44, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x3d, 0x0a, 0x0a, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x64, 0x61, 0x74, 0x61, 0x5f, 0x6b, 0x65,
0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x12, 0x16, 0x2e, 0x6b, 0x6d, 0x73, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x64, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79,
0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x32, 0x44, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x3d, 0x0a, 0x0a, 0x47, 0x65, 0x74, 0x44, 0x61,
0x73, 0x74, 0x1a, 0x17, 0x2e, 0x6b, 0x6d, 0x73, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x12, 0x16, 0x2e, 0x6b, 0x6d, 0x73, 0x2e, 0x47, 0x65, 0x74, 0x44,
0x4b, 0x65, 0x79, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x44, 0x5a, 0x42, 0x67, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x17, 0x2e,
0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x6b, 0x6d, 0x73, 0x2e, 0x47, 0x65, 0x74, 0x44, 0x61, 0x74, 0x61, 0x4b, 0x65, 0x79, 0x52, 0x65,
0x73, 0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x44, 0x5a, 0x42, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62,
0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x6b, 0x65, 0x79, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73,
0x65, 0x2f, 0x6b, 0x65, 0x79, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x70, 0x72, 0x6f, 0x74, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76,
0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, 0x32, 0x2f, 0x6b, 0x65, 0x79, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x2f, 0x6b, 0x65, 0x79,
0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x33,
} }
var ( var (
file_keyservice_proto_rawDescOnce sync.Once file_keyservice_keyserviceproto_keyservice_proto_rawDescOnce sync.Once
file_keyservice_proto_rawDescData = file_keyservice_proto_rawDesc file_keyservice_keyserviceproto_keyservice_proto_rawDescData = file_keyservice_keyserviceproto_keyservice_proto_rawDesc
) )
func file_keyservice_proto_rawDescGZIP() []byte { func file_keyservice_keyserviceproto_keyservice_proto_rawDescGZIP() []byte {
file_keyservice_proto_rawDescOnce.Do(func() { file_keyservice_keyserviceproto_keyservice_proto_rawDescOnce.Do(func() {
file_keyservice_proto_rawDescData = protoimpl.X.CompressGZIP(file_keyservice_proto_rawDescData) file_keyservice_keyserviceproto_keyservice_proto_rawDescData = protoimpl.X.CompressGZIP(file_keyservice_keyserviceproto_keyservice_proto_rawDescData)
}) })
return file_keyservice_proto_rawDescData return file_keyservice_keyserviceproto_keyservice_proto_rawDescData
} }
var file_keyservice_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_keyservice_keyserviceproto_keyservice_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_keyservice_proto_goTypes = []interface{}{ var file_keyservice_keyserviceproto_keyservice_proto_goTypes = []interface{}{
(*GetDataKeyRequest)(nil), // 0: kms.GetDataKeyRequest (*GetDataKeyRequest)(nil), // 0: kms.GetDataKeyRequest
(*GetDataKeyResponse)(nil), // 1: kms.GetDataKeyResponse (*GetDataKeyResponse)(nil), // 1: kms.GetDataKeyResponse
} }
var file_keyservice_proto_depIdxs = []int32{ var file_keyservice_keyserviceproto_keyservice_proto_depIdxs = []int32{
0, // 0: kms.API.GetDataKey:input_type -> kms.GetDataKeyRequest 0, // 0: kms.API.GetDataKey:input_type -> kms.GetDataKeyRequest
1, // 1: kms.API.GetDataKey:output_type -> kms.GetDataKeyResponse 1, // 1: kms.API.GetDataKey:output_type -> kms.GetDataKeyResponse
1, // [1:2] is the sub-list for method output_type 1, // [1:2] is the sub-list for method output_type
@ -173,13 +179,13 @@ var file_keyservice_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for field type_name 0, // [0:0] is the sub-list for field type_name
} }
func init() { file_keyservice_proto_init() } func init() { file_keyservice_keyserviceproto_keyservice_proto_init() }
func file_keyservice_proto_init() { func file_keyservice_keyserviceproto_keyservice_proto_init() {
if File_keyservice_proto != nil { if File_keyservice_keyserviceproto_keyservice_proto != nil {
return return
} }
if !protoimpl.UnsafeEnabled { if !protoimpl.UnsafeEnabled {
file_keyservice_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { file_keyservice_keyserviceproto_keyservice_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetDataKeyRequest); i { switch v := v.(*GetDataKeyRequest); i {
case 0: case 0:
return &v.state return &v.state
@ -191,7 +197,7 @@ func file_keyservice_proto_init() {
return nil return nil
} }
} }
file_keyservice_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { file_keyservice_keyserviceproto_keyservice_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetDataKeyResponse); i { switch v := v.(*GetDataKeyResponse); i {
case 0: case 0:
return &v.state return &v.state
@ -208,18 +214,98 @@ func file_keyservice_proto_init() {
out := protoimpl.TypeBuilder{ out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_keyservice_proto_rawDesc, RawDescriptor: file_keyservice_keyserviceproto_keyservice_proto_rawDesc,
NumEnums: 0, NumEnums: 0,
NumMessages: 2, NumMessages: 2,
NumExtensions: 0, NumExtensions: 0,
NumServices: 1, NumServices: 1,
}, },
GoTypes: file_keyservice_proto_goTypes, GoTypes: file_keyservice_keyserviceproto_keyservice_proto_goTypes,
DependencyIndexes: file_keyservice_proto_depIdxs, DependencyIndexes: file_keyservice_keyserviceproto_keyservice_proto_depIdxs,
MessageInfos: file_keyservice_proto_msgTypes, MessageInfos: file_keyservice_keyserviceproto_keyservice_proto_msgTypes,
}.Build() }.Build()
File_keyservice_proto = out.File File_keyservice_keyserviceproto_keyservice_proto = out.File
file_keyservice_proto_rawDesc = nil file_keyservice_keyserviceproto_keyservice_proto_rawDesc = nil
file_keyservice_proto_goTypes = nil file_keyservice_keyserviceproto_keyservice_proto_goTypes = nil
file_keyservice_proto_depIdxs = nil file_keyservice_keyserviceproto_keyservice_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type APIClient interface {
GetDataKey(ctx context.Context, in *GetDataKeyRequest, opts ...grpc.CallOption) (*GetDataKeyResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) GetDataKey(ctx context.Context, in *GetDataKeyRequest, opts ...grpc.CallOption) (*GetDataKeyResponse, error) {
out := new(GetDataKeyResponse)
err := c.cc.Invoke(ctx, "/kms.API/GetDataKey", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
type APIServer interface {
GetDataKey(context.Context, *GetDataKeyRequest) (*GetDataKeyResponse, error)
}
// UnimplementedAPIServer can be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (*UnimplementedAPIServer) GetDataKey(context.Context, *GetDataKeyRequest) (*GetDataKeyResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetDataKey not implemented")
}
func RegisterAPIServer(s *grpc.Server, srv APIServer) {
s.RegisterService(&_API_serviceDesc, srv)
}
func _API_GetDataKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetDataKeyRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).GetDataKey(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/kms.API/GetDataKey",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).GetDataKey(ctx, req.(*GetDataKeyRequest))
}
return interceptor(ctx, in, info, handler)
}
var _API_serviceDesc = grpc.ServiceDesc{
ServiceName: "kms.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetDataKey",
Handler: _API_GetDataKey_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "keyservice/keyserviceproto/keyservice.proto",
} }
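On the consumer side, a minimal client sketch against the regenerated keyservice bindings could look like this. The endpoint, the insecure transport credentials, and the key ID are assumptions for illustration; a real deployment would use proper transport credentials:

```go
package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	"github.com/edgelesssys/constellation/v2/keyservice/keyserviceproto"
)

func main() {
	// Endpoint and plaintext transport are placeholders for illustration only.
	conn, err := grpc.Dial("keyservice:9000", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := keyserviceproto.NewAPIClient(conn)
	resp, err := client.GetDataKey(context.Background(), &keyserviceproto.GetDataKeyRequest{
		DataKeyId: "example-key", // hypothetical key ID
		Length:    32,            // request a 256-bit data key
	})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("received %d-byte data key", len(resp.GetDataKey()))
}
```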


@ -1,105 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: keyservice.proto
package keyserviceproto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type APIClient interface {
GetDataKey(ctx context.Context, in *GetDataKeyRequest, opts ...grpc.CallOption) (*GetDataKeyResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) GetDataKey(ctx context.Context, in *GetDataKeyRequest, opts ...grpc.CallOption) (*GetDataKeyResponse, error) {
out := new(GetDataKeyResponse)
err := c.cc.Invoke(ctx, "/kms.API/GetDataKey", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
// All implementations must embed UnimplementedAPIServer
// for forward compatibility
type APIServer interface {
GetDataKey(context.Context, *GetDataKeyRequest) (*GetDataKeyResponse, error)
mustEmbedUnimplementedAPIServer()
}
// UnimplementedAPIServer must be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (UnimplementedAPIServer) GetDataKey(context.Context, *GetDataKeyRequest) (*GetDataKeyResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetDataKey not implemented")
}
func (UnimplementedAPIServer) mustEmbedUnimplementedAPIServer() {}
// UnsafeAPIServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to APIServer will
// result in compilation errors.
type UnsafeAPIServer interface {
mustEmbedUnimplementedAPIServer()
}
func RegisterAPIServer(s grpc.ServiceRegistrar, srv APIServer) {
s.RegisterService(&API_ServiceDesc, srv)
}
func _API_GetDataKey_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetDataKeyRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).GetDataKey(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/kms.API/GetDataKey",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).GetDataKey(ctx, req.(*GetDataKeyRequest))
}
return interceptor(ctx, in, info, handler)
}
// API_ServiceDesc is the grpc.ServiceDesc for API service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var API_ServiceDesc = grpc.ServiceDesc{
ServiceName: "kms.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetDataKey",
Handler: _API_GetDataKey_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "keyservice.proto",
}


@ -1,69 +0,0 @@
FROM ubuntu:22.04@sha256:7a57c69fe1e9d5b97c5fe649849e79f2cfc3bf11d10bbd5218b4eb61716aebe6 as build
ARG GO_VER=1.20.2
ARG GEN_GO_VER=1.28.1
ARG GEN_GO_GRPC_VER=1.2.0
ARG PB_VER=21.8
ENV DEBIAN_FRONTEND="noninteractive"
RUN apt-get update && apt-get install -y wget tar unzip
# Install Go
RUN wget -q https://go.dev/dl/go${GO_VER}.linux-amd64.tar.gz && \
tar -C /usr/local -xzf go${GO_VER}.linux-amd64.tar.gz && rm go${GO_VER}.linux-amd64.tar.gz
ENV PATH ${PATH}:/usr/local/go/bin:/root/go/bin
RUN wget -q https://github.com/protocolbuffers/protobuf/releases/download/v${PB_VER}/protoc-${PB_VER}-linux-x86_64.zip && \
unzip protoc-${PB_VER}-linux-x86_64.zip -d /root/.local && \
cp /root/.local/bin/protoc /usr/local/bin/protoc
ENV PATH="$PATH:/root/.local/bin"
RUN go install google.golang.org/protobuf/cmd/protoc-gen-go@v${GEN_GO_VER} && \
go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v${GEN_GO_GRPC_VER}
# Generate code for every existing proto file
## disk-mapper recover api
WORKDIR /disk-mapper
COPY disk-mapper/recoverproto/*.proto /disk-mapper
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
## debugd service
WORKDIR /service
COPY debugd/service/*.proto /service
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
## key management
WORKDIR /keyservice
COPY keyservice/keyserviceproto/*.proto /keyservice
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
## join service
WORKDIR /joinservice
COPY joinservice/joinproto/*.proto /joinservice
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
## verify
WORKDIR /verify
COPY verify/verifyproto/*.proto /verify
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
## init
WORKDIR /init
COPY bootstrapper/initproto/*.proto /init
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
## upgrade agent
WORKDIR /upgrade-agent
COPY upgrade-agent/upgradeproto/*.proto /upgrade-agent
RUN protoc --go_out=. --go_opt=paths=source_relative --go-grpc_out=. --go-grpc_opt=paths=source_relative *.proto
FROM scratch as export
COPY --from=build /disk-mapper/*.go disk-mapper/recoverproto/
COPY --from=build /service/*.go debugd/service/
COPY --from=build /keyservice/*.go keyservice/keyserviceproto/
COPY --from=build /joinservice/*.go joinservice/joinproto/
COPY --from=build /verify/*.go verify/verifyproto/
COPY --from=build /init/*.go bootstrapper/initproto/
COPY --from=build /upgrade-agent/*.go upgrade-agent/upgradeproto/


@ -1,11 +0,0 @@
## Proto generation
To generate Go source files from the proto definitions, we use Docker.
The following command generates the Go source files inside a Docker container and writes the output to the relevant directories.
Run it every time you change or add `.proto` files.
Add the generated `.go` files, and any changes to the `.proto` files, to your branch before creating a PR.
```bash
DOCKER_BUILDKIT=1 docker build -o .. -f Dockerfile.gen-proto ..
```
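The same generation can be reproduced without Docker if `protoc`, `protoc-gen-go`, and `protoc-gen-go-grpc` are installed locally at the versions pinned in the deleted Dockerfile above. A sketch for a single package, run from the repository root (the local toolchain setup is an assumption, not a documented workflow):

```bash
# Sketch: regenerate one proto package locally instead of via Docker.
# Assumes protoc 21.x, protoc-gen-go v1.28.x, and protoc-gen-go-grpc v1.2.x
# are on PATH, matching the versions pinned in the Dockerfile above.
protoc --go_out=. --go_opt=paths=source_relative \
       --go-grpc_out=. --go-grpc_opt=paths=source_relative \
       verify/verifyproto/verify.proto
```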

File diff suppressed because it is too large


@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "upgradeproto_proto", name = "upgradeproto_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/upgrade-agent/upgradeproto", importpath = "github.com/edgelesssys/constellation/v2/upgrade-agent/upgradeproto",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "upgrade.pb.go",
go_proto_library = ":upgradeproto_go_proto",
visibility = ["//visibility:public"],
)
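The new `write_go_proto_srcs` target presumably copies the `upgrade.pb.go` file produced by the `:upgradeproto_go_proto` rule back into the source tree, so the checked-in generated code stays in sync with the Bazel build. Assuming the rule defines a runnable target (not verified here), refreshing the file would look roughly like this:

```bash
# Sketch: write the Bazel-generated protobuf sources back into the tree.
# Assumes write_go_proto_srcs produces a runnable target.
bazel run //upgrade-agent/upgradeproto:write_generated_protos
```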


@ -1,12 +1,16 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.1 // protoc-gen-go v1.29.1
// protoc v3.21.8 // protoc v4.22.1
// source: upgrade.proto // source: upgrade-agent/upgradeproto/upgrade.proto
package upgradeproto package upgradeproto
import ( import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" reflect "reflect"
@ -33,7 +37,7 @@ type ExecuteUpdateRequest struct {
func (x *ExecuteUpdateRequest) Reset() { func (x *ExecuteUpdateRequest) Reset() {
*x = ExecuteUpdateRequest{} *x = ExecuteUpdateRequest{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_upgrade_proto_msgTypes[0] mi := &file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -46,7 +50,7 @@ func (x *ExecuteUpdateRequest) String() string {
func (*ExecuteUpdateRequest) ProtoMessage() {} func (*ExecuteUpdateRequest) ProtoMessage() {}
func (x *ExecuteUpdateRequest) ProtoReflect() protoreflect.Message { func (x *ExecuteUpdateRequest) ProtoReflect() protoreflect.Message {
mi := &file_upgrade_proto_msgTypes[0] mi := &file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -59,7 +63,7 @@ func (x *ExecuteUpdateRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use ExecuteUpdateRequest.ProtoReflect.Descriptor instead. // Deprecated: Use ExecuteUpdateRequest.ProtoReflect.Descriptor instead.
func (*ExecuteUpdateRequest) Descriptor() ([]byte, []int) { func (*ExecuteUpdateRequest) Descriptor() ([]byte, []int) {
return file_upgrade_proto_rawDescGZIP(), []int{0} return file_upgrade_agent_upgradeproto_upgrade_proto_rawDescGZIP(), []int{0}
} }
func (x *ExecuteUpdateRequest) GetKubeadmUrl() string { func (x *ExecuteUpdateRequest) GetKubeadmUrl() string {
@ -92,7 +96,7 @@ type ExecuteUpdateResponse struct {
func (x *ExecuteUpdateResponse) Reset() { func (x *ExecuteUpdateResponse) Reset() {
*x = ExecuteUpdateResponse{} *x = ExecuteUpdateResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_upgrade_proto_msgTypes[1] mi := &file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -105,7 +109,7 @@ func (x *ExecuteUpdateResponse) String() string {
func (*ExecuteUpdateResponse) ProtoMessage() {} func (*ExecuteUpdateResponse) ProtoMessage() {}
func (x *ExecuteUpdateResponse) ProtoReflect() protoreflect.Message { func (x *ExecuteUpdateResponse) ProtoReflect() protoreflect.Message {
mi := &file_upgrade_proto_msgTypes[1] mi := &file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -118,56 +122,57 @@ func (x *ExecuteUpdateResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use ExecuteUpdateResponse.ProtoReflect.Descriptor instead. // Deprecated: Use ExecuteUpdateResponse.ProtoReflect.Descriptor instead.
func (*ExecuteUpdateResponse) Descriptor() ([]byte, []int) { func (*ExecuteUpdateResponse) Descriptor() ([]byte, []int) {
return file_upgrade_proto_rawDescGZIP(), []int{1} return file_upgrade_agent_upgradeproto_upgrade_proto_rawDescGZIP(), []int{1}
} }
var File_upgrade_proto protoreflect.FileDescriptor var File_upgrade_agent_upgradeproto_upgrade_proto protoreflect.FileDescriptor
var file_upgrade_proto_rawDesc = []byte{ var file_upgrade_agent_upgradeproto_upgrade_proto_rawDesc = []byte{
0x0a, 0x0d, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0a, 0x28, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2d, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2f,
0x07, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x22, 0x96, 0x01, 0x0a, 0x14, 0x45, 0x78, 0x65, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x75, 0x70, 0x67,
0x63, 0x75, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x07, 0x75, 0x70, 0x67, 0x72,
0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x5f, 0x75, 0x72, 0x6c, 0x61, 0x64, 0x65, 0x22, 0x96, 0x01, 0x0a, 0x14, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1f, 0x0a, 0x0b,
0x72, 0x6c, 0x12, 0x21, 0x0a, 0x0c, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x5f, 0x68, 0x61, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28,
0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x09, 0x52, 0x0a, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x55, 0x72, 0x6c, 0x12, 0x21, 0x0a,
0x6d, 0x48, 0x61, 0x73, 0x68, 0x12, 0x3a, 0x0a, 0x19, 0x77, 0x61, 0x6e, 0x74, 0x65, 0x64, 0x5f, 0x0c, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20,
0x6b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x6b, 0x75, 0x62, 0x65, 0x61, 0x64, 0x6d, 0x48, 0x61, 0x73, 0x68,
0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x61, 0x6e, 0x74, 0x65, 0x64, 0x12, 0x3a, 0x0a, 0x19, 0x77, 0x61, 0x6e, 0x74, 0x65, 0x64, 0x5f, 0x6b, 0x75, 0x62, 0x65, 0x72,
0x4b, 0x75, 0x62, 0x65, 0x72, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20,
0x6e, 0x22, 0x17, 0x0a, 0x15, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x01, 0x28, 0x09, 0x52, 0x17, 0x77, 0x61, 0x6e, 0x74, 0x65, 0x64, 0x4b, 0x75, 0x62, 0x65, 0x72,
0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x58, 0x0a, 0x06, 0x55, 0x70, 0x6e, 0x65, 0x74, 0x65, 0x73, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x17, 0x0a, 0x15,
0x64, 0x61, 0x74, 0x65, 0x12, 0x4e, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73,
0x70, 0x64, 0x61, 0x74, 0x65, 0x12, 0x1d, 0x2e, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x32, 0x58, 0x0a, 0x06, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x12,
0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x4e, 0x0a, 0x0d, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65,
0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x45, 0x12, 0x1d, 0x2e, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75,
0x78, 0x65, 0x63, 0x75, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x74, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a,
0x6f, 0x6e, 0x73, 0x65, 0x42, 0x44, 0x5a, 0x42, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x1e, 0x2e, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2e, 0x45, 0x78, 0x65, 0x63, 0x75, 0x74,
0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x65, 0x55, 0x70, 0x64, 0x61, 0x74, 0x65, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42,
0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x44, 0x5a, 0x42, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64,
0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65, 0x2d, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2f, 0x75, 0x70, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65,
0x67, 0x72, 0x61, 0x64, 0x65, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x75, 0x70, 0x67, 0x72, 0x61,
0x6f, 0x33, 0x64, 0x65, 0x2d, 0x61, 0x67, 0x65, 0x6e, 0x74, 0x2f, 0x75, 0x70, 0x67, 0x72, 0x61, 0x64, 0x65,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
} }
var ( var (
file_upgrade_proto_rawDescOnce sync.Once file_upgrade_agent_upgradeproto_upgrade_proto_rawDescOnce sync.Once
file_upgrade_proto_rawDescData = file_upgrade_proto_rawDesc file_upgrade_agent_upgradeproto_upgrade_proto_rawDescData = file_upgrade_agent_upgradeproto_upgrade_proto_rawDesc
) )
func file_upgrade_proto_rawDescGZIP() []byte { func file_upgrade_agent_upgradeproto_upgrade_proto_rawDescGZIP() []byte {
file_upgrade_proto_rawDescOnce.Do(func() { file_upgrade_agent_upgradeproto_upgrade_proto_rawDescOnce.Do(func() {
file_upgrade_proto_rawDescData = protoimpl.X.CompressGZIP(file_upgrade_proto_rawDescData) file_upgrade_agent_upgradeproto_upgrade_proto_rawDescData = protoimpl.X.CompressGZIP(file_upgrade_agent_upgradeproto_upgrade_proto_rawDescData)
}) })
return file_upgrade_proto_rawDescData return file_upgrade_agent_upgradeproto_upgrade_proto_rawDescData
} }
var file_upgrade_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_upgrade_proto_goTypes = []interface{}{ var file_upgrade_agent_upgradeproto_upgrade_proto_goTypes = []interface{}{
(*ExecuteUpdateRequest)(nil), // 0: upgrade.ExecuteUpdateRequest (*ExecuteUpdateRequest)(nil), // 0: upgrade.ExecuteUpdateRequest
(*ExecuteUpdateResponse)(nil), // 1: upgrade.ExecuteUpdateResponse (*ExecuteUpdateResponse)(nil), // 1: upgrade.ExecuteUpdateResponse
} }
var file_upgrade_proto_depIdxs = []int32{ var file_upgrade_agent_upgradeproto_upgrade_proto_depIdxs = []int32{
0, // 0: upgrade.Update.ExecuteUpdate:input_type -> upgrade.ExecuteUpdateRequest 0, // 0: upgrade.Update.ExecuteUpdate:input_type -> upgrade.ExecuteUpdateRequest
1, // 1: upgrade.Update.ExecuteUpdate:output_type -> upgrade.ExecuteUpdateResponse 1, // 1: upgrade.Update.ExecuteUpdate:output_type -> upgrade.ExecuteUpdateResponse
1, // [1:2] is the sub-list for method output_type 1, // [1:2] is the sub-list for method output_type
@ -177,13 +182,13 @@ var file_upgrade_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for field type_name 0, // [0:0] is the sub-list for field type_name
} }
func init() { file_upgrade_proto_init() } func init() { file_upgrade_agent_upgradeproto_upgrade_proto_init() }
func file_upgrade_proto_init() { func file_upgrade_agent_upgradeproto_upgrade_proto_init() {
if File_upgrade_proto != nil { if File_upgrade_agent_upgradeproto_upgrade_proto != nil {
return return
} }
if !protoimpl.UnsafeEnabled { if !protoimpl.UnsafeEnabled {
file_upgrade_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExecuteUpdateRequest); i { switch v := v.(*ExecuteUpdateRequest); i {
case 0: case 0:
return &v.state return &v.state
@ -195,7 +200,7 @@ func file_upgrade_proto_init() {
return nil return nil
} }
} }
file_upgrade_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*ExecuteUpdateResponse); i { switch v := v.(*ExecuteUpdateResponse); i {
case 0: case 0:
return &v.state return &v.state
@ -212,18 +217,98 @@ func file_upgrade_proto_init() {
out := protoimpl.TypeBuilder{ out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_upgrade_proto_rawDesc, RawDescriptor: file_upgrade_agent_upgradeproto_upgrade_proto_rawDesc,
NumEnums: 0, NumEnums: 0,
NumMessages: 2, NumMessages: 2,
NumExtensions: 0, NumExtensions: 0,
NumServices: 1, NumServices: 1,
}, },
GoTypes: file_upgrade_proto_goTypes, GoTypes: file_upgrade_agent_upgradeproto_upgrade_proto_goTypes,
DependencyIndexes: file_upgrade_proto_depIdxs, DependencyIndexes: file_upgrade_agent_upgradeproto_upgrade_proto_depIdxs,
MessageInfos: file_upgrade_proto_msgTypes, MessageInfos: file_upgrade_agent_upgradeproto_upgrade_proto_msgTypes,
}.Build() }.Build()
File_upgrade_proto = out.File File_upgrade_agent_upgradeproto_upgrade_proto = out.File
file_upgrade_proto_rawDesc = nil file_upgrade_agent_upgradeproto_upgrade_proto_rawDesc = nil
file_upgrade_proto_goTypes = nil file_upgrade_agent_upgradeproto_upgrade_proto_goTypes = nil
file_upgrade_proto_depIdxs = nil file_upgrade_agent_upgradeproto_upgrade_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// UpdateClient is the client API for Update service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type UpdateClient interface {
ExecuteUpdate(ctx context.Context, in *ExecuteUpdateRequest, opts ...grpc.CallOption) (*ExecuteUpdateResponse, error)
}
type updateClient struct {
cc grpc.ClientConnInterface
}
func NewUpdateClient(cc grpc.ClientConnInterface) UpdateClient {
return &updateClient{cc}
}
func (c *updateClient) ExecuteUpdate(ctx context.Context, in *ExecuteUpdateRequest, opts ...grpc.CallOption) (*ExecuteUpdateResponse, error) {
out := new(ExecuteUpdateResponse)
err := c.cc.Invoke(ctx, "/upgrade.Update/ExecuteUpdate", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// UpdateServer is the server API for Update service.
type UpdateServer interface {
ExecuteUpdate(context.Context, *ExecuteUpdateRequest) (*ExecuteUpdateResponse, error)
}
// UnimplementedUpdateServer can be embedded to have forward compatible implementations.
type UnimplementedUpdateServer struct {
}
func (*UnimplementedUpdateServer) ExecuteUpdate(context.Context, *ExecuteUpdateRequest) (*ExecuteUpdateResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ExecuteUpdate not implemented")
}
func RegisterUpdateServer(s *grpc.Server, srv UpdateServer) {
s.RegisterService(&_Update_serviceDesc, srv)
}
func _Update_ExecuteUpdate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ExecuteUpdateRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(UpdateServer).ExecuteUpdate(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/upgrade.Update/ExecuteUpdate",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(UpdateServer).ExecuteUpdate(ctx, req.(*ExecuteUpdateRequest))
}
return interceptor(ctx, in, info, handler)
}
var _Update_serviceDesc = grpc.ServiceDesc{
ServiceName: "upgrade.Update",
HandlerType: (*UpdateServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "ExecuteUpdate",
Handler: _Update_ExecuteUpdate_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "upgrade-agent/upgradeproto/upgrade.proto",
} }
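For reference, a caller of the regenerated Update service would use the client types above roughly as follows. This is a sketch: the endpoint, transport credentials, and request values are placeholders, and the connection setup Constellation actually uses is not part of this diff.

```go
// Sketch only: calling the generated Update client. Endpoint, credentials,
// and field values are placeholders; the real transport is not shown here.
package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	"github.com/edgelesssys/constellation/v2/upgrade-agent/upgradeproto"
)

func main() {
	conn, err := grpc.Dial("localhost:9090", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := upgradeproto.NewUpdateClient(conn)
	_, err = client.ExecuteUpdate(context.Background(), &upgradeproto.ExecuteUpdateRequest{
		KubeadmUrl:              "https://example.com/kubeadm", // placeholder
		KubeadmHash:             "sha256:...",                  // placeholder
		WantedKubernetesVersion: "v1.26.0",                     // placeholder
	})
	if err != nil {
		log.Fatal(err)
	}
}
```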


@ -1,105 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: upgrade.proto
package upgradeproto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// UpdateClient is the client API for Update service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type UpdateClient interface {
ExecuteUpdate(ctx context.Context, in *ExecuteUpdateRequest, opts ...grpc.CallOption) (*ExecuteUpdateResponse, error)
}
type updateClient struct {
cc grpc.ClientConnInterface
}
func NewUpdateClient(cc grpc.ClientConnInterface) UpdateClient {
return &updateClient{cc}
}
func (c *updateClient) ExecuteUpdate(ctx context.Context, in *ExecuteUpdateRequest, opts ...grpc.CallOption) (*ExecuteUpdateResponse, error) {
out := new(ExecuteUpdateResponse)
err := c.cc.Invoke(ctx, "/upgrade.Update/ExecuteUpdate", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// UpdateServer is the server API for Update service.
// All implementations must embed UnimplementedUpdateServer
// for forward compatibility
type UpdateServer interface {
ExecuteUpdate(context.Context, *ExecuteUpdateRequest) (*ExecuteUpdateResponse, error)
mustEmbedUnimplementedUpdateServer()
}
// UnimplementedUpdateServer must be embedded to have forward compatible implementations.
type UnimplementedUpdateServer struct {
}
func (UnimplementedUpdateServer) ExecuteUpdate(context.Context, *ExecuteUpdateRequest) (*ExecuteUpdateResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ExecuteUpdate not implemented")
}
func (UnimplementedUpdateServer) mustEmbedUnimplementedUpdateServer() {}
// UnsafeUpdateServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to UpdateServer will
// result in compilation errors.
type UnsafeUpdateServer interface {
mustEmbedUnimplementedUpdateServer()
}
func RegisterUpdateServer(s grpc.ServiceRegistrar, srv UpdateServer) {
s.RegisterService(&Update_ServiceDesc, srv)
}
func _Update_ExecuteUpdate_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ExecuteUpdateRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(UpdateServer).ExecuteUpdate(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/upgrade.Update/ExecuteUpdate",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(UpdateServer).ExecuteUpdate(ctx, req.(*ExecuteUpdateRequest))
}
return interceptor(ctx, in, info, handler)
}
// Update_ServiceDesc is the grpc.ServiceDesc for Update service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var Update_ServiceDesc = grpc.ServiceDesc{
ServiceName: "upgrade.Update",
HandlerType: (*UpdateServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "ExecuteUpdate",
Handler: _Update_ExecuteUpdate_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "upgrade.proto",
}
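The deleted file above was generated by protoc-gen-go-grpc v1.2.0, whose UpdateServer interface requires embedding UnimplementedUpdateServer for forward compatibility (in the merged file above, embedding is optional). An implementation against that interface would have looked roughly like this sketch; the package name is hypothetical.

```go
// Sketch of an UpdateServer implementation for the removed protoc-gen-go-grpc
// variant; the embedded UnimplementedUpdateServer satisfies the
// mustEmbedUnimplementedUpdateServer requirement.
package server

import (
	"context"

	"github.com/edgelesssys/constellation/v2/upgrade-agent/upgradeproto"
)

type updateServer struct {
	upgradeproto.UnimplementedUpdateServer
}

func (s *updateServer) ExecuteUpdate(ctx context.Context, req *upgradeproto.ExecuteUpdateRequest) (*upgradeproto.ExecuteUpdateResponse, error) {
	// Fetch kubeadm from req.KubeadmUrl, verify it against req.KubeadmHash,
	// and upgrade the node to req.WantedKubernetesVersion (omitted here).
	return &upgradeproto.ExecuteUpdateResponse{}, nil
}
```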


@ -1,6 +1,7 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library") load("@io_bazel_rules_go//go:def.bzl", "go_library")
load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library") load("@io_bazel_rules_go//proto:def.bzl", "go_proto_library")
load("@rules_proto//proto:defs.bzl", "proto_library") load("@rules_proto//proto:defs.bzl", "proto_library")
load("//bazel/proto:rules.bzl", "write_go_proto_srcs")
proto_library( proto_library(
name = "verifyproto_proto", name = "verifyproto_proto",
@ -22,3 +23,10 @@ go_library(
importpath = "github.com/edgelesssys/constellation/v2/verify/verifyproto", importpath = "github.com/edgelesssys/constellation/v2/verify/verifyproto",
visibility = ["//visibility:public"], visibility = ["//visibility:public"],
) )
write_go_proto_srcs(
name = "write_generated_protos",
src = "verify.pb.go",
go_proto_library = ":verifyproto_go_proto",
visibility = ["//visibility:public"],
)


@ -1,12 +1,16 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.1 // protoc-gen-go v1.29.1
// protoc v3.21.8 // protoc v4.22.1
// source: verify.proto // source: verify/verifyproto/verify.proto
package verifyproto package verifyproto
import ( import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl" protoimpl "google.golang.org/protobuf/runtime/protoimpl"
reflect "reflect" reflect "reflect"
@ -25,15 +29,13 @@ type GetAttestationRequest struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// bytes user_data = 1; removed
// nonce is a random nonce to prevent replay attacks.
Nonce []byte `protobuf:"bytes,2,opt,name=nonce,proto3" json:"nonce,omitempty"` Nonce []byte `protobuf:"bytes,2,opt,name=nonce,proto3" json:"nonce,omitempty"`
} }
func (x *GetAttestationRequest) Reset() { func (x *GetAttestationRequest) Reset() {
*x = GetAttestationRequest{} *x = GetAttestationRequest{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_verify_proto_msgTypes[0] mi := &file_verify_verifyproto_verify_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -46,7 +48,7 @@ func (x *GetAttestationRequest) String() string {
func (*GetAttestationRequest) ProtoMessage() {} func (*GetAttestationRequest) ProtoMessage() {}
func (x *GetAttestationRequest) ProtoReflect() protoreflect.Message { func (x *GetAttestationRequest) ProtoReflect() protoreflect.Message {
mi := &file_verify_proto_msgTypes[0] mi := &file_verify_verifyproto_verify_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -59,7 +61,7 @@ func (x *GetAttestationRequest) ProtoReflect() protoreflect.Message {
// Deprecated: Use GetAttestationRequest.ProtoReflect.Descriptor instead. // Deprecated: Use GetAttestationRequest.ProtoReflect.Descriptor instead.
func (*GetAttestationRequest) Descriptor() ([]byte, []int) { func (*GetAttestationRequest) Descriptor() ([]byte, []int) {
return file_verify_proto_rawDescGZIP(), []int{0} return file_verify_verifyproto_verify_proto_rawDescGZIP(), []int{0}
} }
func (x *GetAttestationRequest) GetNonce() []byte { func (x *GetAttestationRequest) GetNonce() []byte {
@ -74,14 +76,13 @@ type GetAttestationResponse struct {
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields unknownFields protoimpl.UnknownFields
// attestation is the attestation for the given user data and nonce.
Attestation []byte `protobuf:"bytes,1,opt,name=attestation,proto3" json:"attestation,omitempty"` Attestation []byte `protobuf:"bytes,1,opt,name=attestation,proto3" json:"attestation,omitempty"`
} }
func (x *GetAttestationResponse) Reset() { func (x *GetAttestationResponse) Reset() {
*x = GetAttestationResponse{} *x = GetAttestationResponse{}
if protoimpl.UnsafeEnabled { if protoimpl.UnsafeEnabled {
mi := &file_verify_proto_msgTypes[1] mi := &file_verify_verifyproto_verify_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi) ms.StoreMessageInfo(mi)
} }
@ -94,7 +95,7 @@ func (x *GetAttestationResponse) String() string {
func (*GetAttestationResponse) ProtoMessage() {} func (*GetAttestationResponse) ProtoMessage() {}
func (x *GetAttestationResponse) ProtoReflect() protoreflect.Message { func (x *GetAttestationResponse) ProtoReflect() protoreflect.Message {
mi := &file_verify_proto_msgTypes[1] mi := &file_verify_verifyproto_verify_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil { if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil { if ms.LoadMessageInfo() == nil {
@ -107,7 +108,7 @@ func (x *GetAttestationResponse) ProtoReflect() protoreflect.Message {
// Deprecated: Use GetAttestationResponse.ProtoReflect.Descriptor instead. // Deprecated: Use GetAttestationResponse.ProtoReflect.Descriptor instead.
func (*GetAttestationResponse) Descriptor() ([]byte, []int) { func (*GetAttestationResponse) Descriptor() ([]byte, []int) {
return file_verify_proto_rawDescGZIP(), []int{1} return file_verify_verifyproto_verify_proto_rawDescGZIP(), []int{1}
} }
func (x *GetAttestationResponse) GetAttestation() []byte { func (x *GetAttestationResponse) GetAttestation() []byte {
@ -117,47 +118,49 @@ func (x *GetAttestationResponse) GetAttestation() []byte {
return nil return nil
} }
var File_verify_proto protoreflect.FileDescriptor var File_verify_verifyproto_verify_proto protoreflect.FileDescriptor
var file_verify_proto_rawDesc = []byte{ var file_verify_verifyproto_verify_proto_rawDesc = []byte{
0x0a, 0x0c, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x06, 0x0a, 0x1f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x70,
0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x22, 0x2d, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74,
0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x6f, 0x12, 0x06, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x22, 0x2d, 0x0a, 0x15, 0x47, 0x65, 0x74,
0x14, 0x0a, 0x05, 0x6e, 0x6f, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65,
0x6e, 0x6f, 0x6e, 0x63, 0x65, 0x22, 0x3a, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x6e, 0x6f, 0x6e, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x0c, 0x52, 0x05, 0x6e, 0x6f, 0x6e, 0x63, 0x65, 0x22, 0x3a, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x41,
0x20, 0x0a, 0x0b, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x73, 0x65, 0x12, 0x20, 0x0a, 0x0b, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f,
0x6e, 0x32, 0x56, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x4f, 0x0a, 0x0e, 0x47, 0x65, 0x74, 0x41, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x61, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61,
0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x2e, 0x76, 0x65, 0x72, 0x74, 0x69, 0x6f, 0x6e, 0x32, 0x56, 0x0a, 0x03, 0x41, 0x50, 0x49, 0x12, 0x4f, 0x0a, 0x0e, 0x47,
0x69, 0x66, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x65, 0x74, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1d, 0x2e,
0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74,
0x66, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61, 0x74, 0x69, 0x6f, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x76,
0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x3c, 0x5a, 0x3a, 0x67, 0x69, 0x74, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2e, 0x47, 0x65, 0x74, 0x41, 0x74, 0x74, 0x65, 0x73, 0x74, 0x61,
0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c, 0x65, 0x73, 0x73, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x42, 0x3c, 0x5a, 0x3a,
0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x65, 0x64, 0x67, 0x65, 0x6c,
0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2f, 0x76, 0x65, 0x72, 0x69, 0x65, 0x73, 0x73, 0x73, 0x79, 0x73, 0x2f, 0x63, 0x6f, 0x6e, 0x73, 0x74, 0x65, 0x6c, 0x6c, 0x61,
0x66, 0x79, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x32, 0x2f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x79, 0x2f, 0x76,
0x65, 0x72, 0x69, 0x66, 0x79, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74,
0x6f, 0x33,
} }
var ( var (
file_verify_proto_rawDescOnce sync.Once file_verify_verifyproto_verify_proto_rawDescOnce sync.Once
file_verify_proto_rawDescData = file_verify_proto_rawDesc file_verify_verifyproto_verify_proto_rawDescData = file_verify_verifyproto_verify_proto_rawDesc
) )
func file_verify_proto_rawDescGZIP() []byte { func file_verify_verifyproto_verify_proto_rawDescGZIP() []byte {
file_verify_proto_rawDescOnce.Do(func() { file_verify_verifyproto_verify_proto_rawDescOnce.Do(func() {
file_verify_proto_rawDescData = protoimpl.X.CompressGZIP(file_verify_proto_rawDescData) file_verify_verifyproto_verify_proto_rawDescData = protoimpl.X.CompressGZIP(file_verify_verifyproto_verify_proto_rawDescData)
}) })
return file_verify_proto_rawDescData return file_verify_verifyproto_verify_proto_rawDescData
} }
var file_verify_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_verify_verifyproto_verify_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_verify_proto_goTypes = []interface{}{ var file_verify_verifyproto_verify_proto_goTypes = []interface{}{
(*GetAttestationRequest)(nil), // 0: verify.GetAttestationRequest (*GetAttestationRequest)(nil), // 0: verify.GetAttestationRequest
(*GetAttestationResponse)(nil), // 1: verify.GetAttestationResponse (*GetAttestationResponse)(nil), // 1: verify.GetAttestationResponse
} }
var file_verify_proto_depIdxs = []int32{ var file_verify_verifyproto_verify_proto_depIdxs = []int32{
0, // 0: verify.API.GetAttestation:input_type -> verify.GetAttestationRequest 0, // 0: verify.API.GetAttestation:input_type -> verify.GetAttestationRequest
1, // 1: verify.API.GetAttestation:output_type -> verify.GetAttestationResponse 1, // 1: verify.API.GetAttestation:output_type -> verify.GetAttestationResponse
1, // [1:2] is the sub-list for method output_type 1, // [1:2] is the sub-list for method output_type
@ -167,13 +170,13 @@ var file_verify_proto_depIdxs = []int32{
0, // [0:0] is the sub-list for field type_name 0, // [0:0] is the sub-list for field type_name
} }
func init() { file_verify_proto_init() } func init() { file_verify_verifyproto_verify_proto_init() }
func file_verify_proto_init() { func file_verify_verifyproto_verify_proto_init() {
if File_verify_proto != nil { if File_verify_verifyproto_verify_proto != nil {
return return
} }
if !protoimpl.UnsafeEnabled { if !protoimpl.UnsafeEnabled {
file_verify_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} { file_verify_verifyproto_verify_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetAttestationRequest); i { switch v := v.(*GetAttestationRequest); i {
case 0: case 0:
return &v.state return &v.state
@ -185,7 +188,7 @@ func file_verify_proto_init() {
return nil return nil
} }
} }
file_verify_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { file_verify_verifyproto_verify_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*GetAttestationResponse); i { switch v := v.(*GetAttestationResponse); i {
case 0: case 0:
return &v.state return &v.state
@ -202,18 +205,98 @@ func file_verify_proto_init() {
out := protoimpl.TypeBuilder{ out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{ File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(), GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_verify_proto_rawDesc, RawDescriptor: file_verify_verifyproto_verify_proto_rawDesc,
NumEnums: 0, NumEnums: 0,
NumMessages: 2, NumMessages: 2,
NumExtensions: 0, NumExtensions: 0,
NumServices: 1, NumServices: 1,
}, },
GoTypes: file_verify_proto_goTypes, GoTypes: file_verify_verifyproto_verify_proto_goTypes,
DependencyIndexes: file_verify_proto_depIdxs, DependencyIndexes: file_verify_verifyproto_verify_proto_depIdxs,
MessageInfos: file_verify_proto_msgTypes, MessageInfos: file_verify_verifyproto_verify_proto_msgTypes,
}.Build() }.Build()
File_verify_proto = out.File File_verify_verifyproto_verify_proto = out.File
file_verify_proto_rawDesc = nil file_verify_verifyproto_verify_proto_rawDesc = nil
file_verify_proto_goTypes = nil file_verify_verifyproto_verify_proto_goTypes = nil
file_verify_proto_depIdxs = nil file_verify_verifyproto_verify_proto_depIdxs = nil
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConnInterface
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion6
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type APIClient interface {
GetAttestation(ctx context.Context, in *GetAttestationRequest, opts ...grpc.CallOption) (*GetAttestationResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) GetAttestation(ctx context.Context, in *GetAttestationRequest, opts ...grpc.CallOption) (*GetAttestationResponse, error) {
out := new(GetAttestationResponse)
err := c.cc.Invoke(ctx, "/verify.API/GetAttestation", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
type APIServer interface {
GetAttestation(context.Context, *GetAttestationRequest) (*GetAttestationResponse, error)
}
// UnimplementedAPIServer can be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (*UnimplementedAPIServer) GetAttestation(context.Context, *GetAttestationRequest) (*GetAttestationResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetAttestation not implemented")
}
func RegisterAPIServer(s *grpc.Server, srv APIServer) {
s.RegisterService(&_API_serviceDesc, srv)
}
func _API_GetAttestation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetAttestationRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).GetAttestation(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/verify.API/GetAttestation",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).GetAttestation(ctx, req.(*GetAttestationRequest))
}
return interceptor(ctx, in, info, handler)
}
var _API_serviceDesc = grpc.ServiceDesc{
ServiceName: "verify.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetAttestation",
Handler: _API_GetAttestation_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "verify/verifyproto/verify.proto",
} }
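The verify API regenerated above is called the same way: the client supplies a random nonce (the removed comment in the old generated code describes it as protection against replay attacks) and receives an attestation blob. A sketch with placeholder endpoint and transport security, which are not Constellation's actual connection setup:

```go
// Sketch only: requesting an attestation over the generated verify API.
// Endpoint and transport security are placeholders.
package main

import (
	"context"
	"crypto/rand"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	"github.com/edgelesssys/constellation/v2/verify/verifyproto"
)

func main() {
	conn, err := grpc.Dial("localhost:9090", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	nonce := make([]byte, 32)
	if _, err := rand.Read(nonce); err != nil {
		log.Fatal(err)
	}

	client := verifyproto.NewAPIClient(conn)
	resp, err := client.GetAttestation(context.Background(), &verifyproto.GetAttestationRequest{Nonce: nonce})
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("got attestation document of %d bytes", len(resp.GetAttestation()))
}
```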


@ -1,107 +0,0 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
// - protoc-gen-go-grpc v1.2.0
// - protoc v3.21.8
// source: verify.proto
package verifyproto
import (
context "context"
grpc "google.golang.org/grpc"
codes "google.golang.org/grpc/codes"
status "google.golang.org/grpc/status"
)
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
// Requires gRPC-Go v1.32.0 or later.
const _ = grpc.SupportPackageIsVersion7
// APIClient is the client API for API service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type APIClient interface {
// GetAttestation returns an attestation for the given user data and nonce.
GetAttestation(ctx context.Context, in *GetAttestationRequest, opts ...grpc.CallOption) (*GetAttestationResponse, error)
}
type aPIClient struct {
cc grpc.ClientConnInterface
}
func NewAPIClient(cc grpc.ClientConnInterface) APIClient {
return &aPIClient{cc}
}
func (c *aPIClient) GetAttestation(ctx context.Context, in *GetAttestationRequest, opts ...grpc.CallOption) (*GetAttestationResponse, error) {
out := new(GetAttestationResponse)
err := c.cc.Invoke(ctx, "/verify.API/GetAttestation", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// APIServer is the server API for API service.
// All implementations must embed UnimplementedAPIServer
// for forward compatibility
type APIServer interface {
// GetAttestation returns an attestation for the given user data and nonce.
GetAttestation(context.Context, *GetAttestationRequest) (*GetAttestationResponse, error)
mustEmbedUnimplementedAPIServer()
}
// UnimplementedAPIServer must be embedded to have forward compatible implementations.
type UnimplementedAPIServer struct {
}
func (UnimplementedAPIServer) GetAttestation(context.Context, *GetAttestationRequest) (*GetAttestationResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method GetAttestation not implemented")
}
func (UnimplementedAPIServer) mustEmbedUnimplementedAPIServer() {}
// UnsafeAPIServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to APIServer will
// result in compilation errors.
type UnsafeAPIServer interface {
mustEmbedUnimplementedAPIServer()
}
func RegisterAPIServer(s grpc.ServiceRegistrar, srv APIServer) {
s.RegisterService(&API_ServiceDesc, srv)
}
func _API_GetAttestation_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(GetAttestationRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(APIServer).GetAttestation(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/verify.API/GetAttestation",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(APIServer).GetAttestation(ctx, req.(*GetAttestationRequest))
}
return interceptor(ctx, in, info, handler)
}
// API_ServiceDesc is the grpc.ServiceDesc for API service.
// It's only intended for direct use with grpc.RegisterService,
// and not to be introspected or modified (even as a copy)
var API_ServiceDesc = grpc.ServiceDesc{
ServiceName: "verify.API",
HandlerType: (*APIServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "GetAttestation",
Handler: _API_GetAttestation_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "verify.proto",
}