/*
Copyright (c) Edgeless Systems GmbH

SPDX-License-Identifier: AGPL-3.0-only
*/

package main

import (
	"context"
	"errors"
	"fmt"
	"log/slog"

	"github.com/bazelbuild/buildtools/build"
	"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/bazelfiles"
	"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/issues"
	"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/mirror"
	"github.com/edgelesssys/constellation/v2/hack/bazel-deps-mirror/internal/rules"
	"github.com/edgelesssys/constellation/v2/internal/logger"
	"github.com/spf13/cobra"
)

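// newFixCmd returns the fix command, which uploads missing artifacts to the mirror
// (unless run unauthenticated or as a dry run) and reformats the dependency rules.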
func newFixCmd() *cobra.Command {
	cmd := &cobra.Command{
		Use:   "fix",
		Short: "fix all Bazel dependency rules by uploading artifacts to the mirror (if needed) and formatting the rules.",
		RunE:  runFix,
	}

	cmd.Flags().Bool("unauthenticated", false, "Doesn't require authentication to the mirror but cannot upload files.")
	cmd.Flags().Bool("dry-run", false, "Don't actually change files or upload anything.")

	return cmd
}

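// runFix finds all Bazel files in the workspace, fixes the supported dependency rules in
// each of them, and reports any issues that could not be fixed automatically.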
func runFix(cmd *cobra.Command, _ []string) error {
	flags, err := parseFixFlags(cmd)
	if err != nil {
		return err
	}
	log := logger.NewTextLogger(flags.logLevel)
	log.Debug(fmt.Sprintf("Parsed flags: %+v", flags))

	fileHelper, err := bazelfiles.New()
	if err != nil {
		return err
	}

	log.Debug("Searching for Bazel files in the current WORKSPACE and all subdirectories...")
	bazelFiles, err := fileHelper.FindFiles()
	if err != nil {
		return err
	}

	var mirrorUpload mirrorUploader
	switch {
	case flags.unauthenticated:
		log.Warn("Fixing rules without authentication for AWS S3. If artifacts are not yet mirrored, this will fail.")
		mirrorUpload = mirror.NewUnauthenticated(flags.mirrorBaseURL, flags.dryRun, log)
	default:
		log.Debug("Fixing rules with authentication for AWS S3.")
		mirrorUpload, err = mirror.New(cmd.Context(), flags.region, flags.bucket, flags.mirrorBaseURL, flags.dryRun, log)
		if err != nil {
			return err
		}
	}

	issues := issues.New()
	for _, bazelFile := range bazelFiles {
		fileIssues, err := fixBazelFile(cmd.Context(), fileHelper, mirrorUpload, bazelFile, flags.dryRun, log)
		if err != nil {
			return err
		}
		if len(fileIssues) > 0 {
			issues.Set(bazelFile.AbsPath, fileIssues)
		}
	}
	if len(issues) > 0 {
		log.Warn(fmt.Sprintf("Found %d unfixable issues in rules", len(issues)))
		issues.Report(cmd.OutOrStdout())
		return errors.New("found issues in rules")
	}

	log.Info("No unfixable issues found")
	return nil
}

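// fixBazelFile fixes all supported rules in a single Bazel file and returns the issues
// that could not be fixed. A file with unfixable issues is not saved.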
func fixBazelFile(ctx context.Context, fileHelper *bazelfiles.Helper, mirrorUpload mirrorUploader, bazelFile bazelfiles.BazelFile, dryRun bool, log *slog.Logger) (iss issues.ByFile, err error) {
	iss = issues.NewByFile()
	var changed bool // true if any rule in this file was changed
	log.Info(fmt.Sprintf("Checking file: %s", bazelFile.RelPath))
	buildfile, err := fileHelper.LoadFile(bazelFile)
	if err != nil {
		return iss, err
	}
	found := rules.Rules(buildfile, rules.SupportedRules)
	if len(found) == 0 {
		log.Debug(fmt.Sprintf("No rules found in file: %s", bazelFile.RelPath))
		return iss, nil
	}
	log.Debug(fmt.Sprintf("Found %d rules in file: %s", len(found), bazelFile.RelPath))
	for _, rule := range found {
		changedRule, ruleIssues := fixRule(ctx, mirrorUpload, rule, log)
		if len(ruleIssues) > 0 {
			iss.Add(rule.Name(), ruleIssues...)
		}
		changed = changed || changedRule
	}

	if len(iss) > 0 {
		log.Warn(fmt.Sprintf("File %s has issues. Not saving!", bazelFile.RelPath))
		return iss, nil
	}
	if !changed {
		log.Debug(fmt.Sprintf("No changes to file: %s", bazelFile.RelPath))
		return iss, nil
	}
	if dryRun {
		diff, err := fileHelper.Diff(bazelFile, buildfile)
		if err != nil {
			return iss, err
		}
		log.Info(fmt.Sprintf("Dry run: would save updated file %s with diff:\n%s", bazelFile.RelPath, diff))
		return iss, nil
	}
	log.Info(fmt.Sprintf("Saving updated file: %s", bazelFile.RelPath))
	if err := fileHelper.WriteFile(bazelFile, buildfile); err != nil {
		return iss, err
	}

	return iss, nil
}

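// learnHashForRule asks the mirror uploader to learn the artifact's hash from the rule's
// upstream URLs and records the learned hash on the rule.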
func learnHashForRule(ctx context.Context, mirrorUpload mirrorUploader, rule *build.Rule, log *slog.Logger) error {
	upstreamURLs, err := rules.UpstreamURLs(rule)
	if err != nil {
		return err
	}
	learnedHash, learnErr := mirrorUpload.Learn(ctx, upstreamURLs)
	if learnErr != nil {
		return learnErr
	}
	rules.SetHash(rule, learnedHash)
	log.Debug(fmt.Sprintf("Learned hash for rule %s: %s", rule.Name(), learnedHash))
	return nil
}

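// fixRule ensures that a single rule has a valid hash, that the referenced artifact is
// mirrored, and that the rule references the mirror URL. It returns whether the rule was
// changed and any issues that prevent fixing it.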
func fixRule(ctx context.Context, mirrorUpload mirrorUploader, rule *build.Rule, log *slog.Logger) (changed bool, iss []error) {
	log.Debug(fmt.Sprintf("Fixing rule: %s", rule.Name()))

	// try to learn the hash
	if hash, err := rules.GetHash(rule); err != nil || hash == "" {
		if err := learnHashForRule(ctx, mirrorUpload, rule, log); err != nil {
			// don't try to fix the rule if the hash is missing and we can't learn it
			iss = append(iss,
				errors.New("hash attribute is missing and can't learn it from upstream. "+
					"Unable to check if the artifact is already mirrored or upload it"))
			return false, iss
		}
		changed = true
	}

	// check if the rule is a valid pinned dependency rule (has all required attributes)
	issue := rules.ValidatePinned(rule)
	if issue != nil {
		// don't try to fix the rule if it's invalid
		iss = append(iss, issue...)
		return false, iss
	}

	// check if the referenced CAS object exists in the mirror and is consistent
	expectedHash, expectedHashErr := rules.GetHash(rule)
	if expectedHashErr != nil {
		// don't try to fix the rule if the hash is missing
		iss = append(iss, expectedHashErr)
		return false, iss
	}

	if rules.HasMirrorURL(rule) {
		changed = rules.Normalize(rule) || changed
		return changed, iss
	}

	if checkErr := mirrorUpload.Check(ctx, expectedHash); checkErr != nil {
		log.Info(fmt.Sprintf("Artifact %s with hash %s is not yet mirrored. Uploading...", rule.Name(), expectedHash))
		if uploadErr := mirrorUpload.Mirror(ctx, expectedHash, rules.GetURLs(rule)); uploadErr != nil {
			// don't try to fix the rule if the upload failed
			iss = append(iss, uploadErr)
			return changed, iss
		}
	} else {
		log.Info(fmt.Sprintf("Artifact %s with hash %s was already uploaded before. Adding to rule...", rule.Name(), expectedHash))
	}

	// now the artifact is mirrored (if it wasn't already) and we can fix the rule
	mirrorURL, err := mirrorUpload.MirrorURL(expectedHash)
	if err != nil {
		iss = append(iss, err)
		return changed, iss
	}
	rules.AddURLs(rule, []string{mirrorURL})

	// normalize the rule
	rules.Normalize(rule)
	return true, iss
}

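// fixFlags holds the parsed command-line flags of the fix command.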
type fixFlags struct {
	unauthenticated bool
	dryRun          bool
	region          string
	bucket          string
	mirrorBaseURL   string
	logLevel        slog.Level
}

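// parseFixFlags parses the command-line flags of the fix command.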
func parseFixFlags(cmd *cobra.Command) (fixFlags, error) {
	unauthenticated, err := cmd.Flags().GetBool("unauthenticated")
	if err != nil {
		return fixFlags{}, err
	}
	dryRun, err := cmd.Flags().GetBool("dry-run")
	if err != nil {
		return fixFlags{}, err
	}
	verbose, err := cmd.Flags().GetBool("verbose")
	if err != nil {
		return fixFlags{}, err
	}
	logLevel := slog.LevelInfo
	if verbose {
		logLevel = slog.LevelDebug
	}
	region, err := cmd.Flags().GetString("region")
	if err != nil {
		return fixFlags{}, err
	}
	bucket, err := cmd.Flags().GetString("bucket")
	if err != nil {
		return fixFlags{}, err
	}
	mirrorBaseURL, err := cmd.Flags().GetString("mirror-base-url")
	if err != nil {
		return fixFlags{}, err
	}

	return fixFlags{
		unauthenticated: unauthenticated,
		dryRun:          dryRun,
		region:          region,
		bucket:          bucket,
		mirrorBaseURL:   mirrorBaseURL,
		logLevel:        logLevel,
	}, nil
}

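// mirrorUploader learns artifact hashes, checks whether artifacts are already mirrored,
// uploads them to the mirror, and resolves their mirror URLs.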
type mirrorUploader interface {
	Learn(ctx context.Context, urls []string) (string, error)
	Check(ctx context.Context, expectedHash string) error
	Mirror(ctx context.Context, hash string, urls []string) error
	MirrorURL(hash string) (string, error)
}