ci: separate logs and metrics indices (#2544)

* separate logs and metrics indices

* tidy
Moritz Sanft, 2023-10-31 12:09:27 +01:00, committed by GitHub
parent 0c03076181
commit 9a282df846
4 changed files with 36 additions and 35 deletions

View File

@@ -93,7 +93,6 @@ func NewStartTrigger(ctx context.Context, wg *sync.WaitGroup, provider cloudprov
     pipelineConf := logstashConfInput{
         Port:        5044,
         Host:        openSearchHost,
-        IndexPrefix: "systemd-logs",
         InfoMap:     infoMapM,
         Credentials: creds,
     }
@@ -272,7 +271,6 @@ func startPod(ctx context.Context, logger *logger.Logger) error {
 type logstashConfInput struct {
     Port        int
     Host        string
-    IndexPrefix string
     InfoMap     map[string]string
     Credentials credentials
 }

View File

@@ -55,12 +55,28 @@ filter {
 }
 output {
-  opensearch {
-    hosts => "{{ .Host }}"
-    index => "{{ .IndexPrefix }}-%{+YYYY.MM.dd}"
-    user => "{{ .Credentials.Username }}"
-    password => "{{ .Credentials.Password }}"
-    ssl => true
-    ssl_certificate_verification => true
+  if ([@metadata][beat] == "filebeat") {
+    # Logs, which are output by filebeat, go to the logs-index.
+    opensearch {
+      hosts => "{{ .Host }}"
+      # YYYY doesn't handle rolling over the year, so we use xxxx instead.
+      # See https://github.com/logstash-plugins/logstash-output-elasticsearch/issues/541#issuecomment-270923437.
+      index => "logs-%{+xxxx.ww}"
+      user => "{{ .Credentials.Username }}"
+      password => "{{ .Credentials.Password }}"
+      ssl => true
+      ssl_certificate_verification => true
+    }
+  } else {
+    opensearch {
+      hosts => "{{ .Host }}"
+      # YYYY doesn't handle rolling over the year, so we use xxxx instead.
+      # See https://github.com/logstash-plugins/logstash-output-elasticsearch/issues/541#issuecomment-270923437.
+      index => "metrics-%{+xxxx.ww}"
+      user => "{{ .Credentials.Username }}"
+      password => "{{ .Credentials.Password }}"
+      ssl => true
+      ssl_certificate_verification => true
+    }
   }
 }
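For illustration of the new index names: the `%{+xxxx.ww}` sprintf pattern is Joda-based, with `xxxx` being the ISO week-year and `ww` the week of that week-year, so indices roll over weekly and the week that straddles New Year keeps a single consistent name. A minimal Go sketch of the names this produces; the helper below is hypothetical and only mirrors the pattern, it is not part of the change.

package main

import (
    "fmt"
    "time"
)

// weeklyIndex mirrors "<prefix>-%{+xxxx.ww}": ISO week-year plus a two-digit
// week number. Hypothetical helper for illustration only.
func weeklyIndex(prefix string, t time.Time) string {
    year, week := t.UTC().ISOWeek()
    return fmt.Sprintf("%s-%d.%02d", prefix, year, week)
}

func main() {
    fmt.Println(weeklyIndex("logs", time.Date(2023, time.October, 31, 12, 0, 0, 0, time.UTC)))
    // logs-2023.44
    fmt.Println(weeklyIndex("metrics", time.Date(2023, time.January, 1, 0, 0, 0, 0, time.UTC)))
    // metrics-2022.52: around New Year the week-year, not the calendar year,
    // names the index, which is why the template avoids YYYY.
}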

View File

@@ -28,7 +28,6 @@ func newTemplateCmd() *cobra.Command {
     must(templateCmd.MarkFlagRequired("username"))
     templateCmd.Flags().String("password", "", "OpenSearch password (required)")
     must(templateCmd.MarkFlagRequired("password"))
-    templateCmd.Flags().String("index-prefix", "systemd-logs", "Prefix for logging index (e.g. systemd-logs)")
     templateCmd.Flags().Int("port", 5045, "Logstash port")
     templateCmd.Flags().StringToString("fields", nil, "Additional fields for the Logstash pipeline")
@@ -49,7 +48,6 @@ func runTemplate(cmd *cobra.Command, _ []string) error {
         flags.extraFields,
         flags.username,
         flags.password,
-        flags.indexPrefix,
         flags.port,
     )
     if err := logstashPreparer.Prepare(flags.dir); err != nil {
@@ -89,11 +87,6 @@ func parseTemplateFlags(cmd *cobra.Command) (templateFlags, error) {
         return templateFlags{}, fmt.Errorf("parse password string: %w", err)
     }
-    indexPrefix, err := cmd.Flags().GetString("index-prefix")
-    if err != nil {
-        return templateFlags{}, fmt.Errorf("parse index-prefix string: %w", err)
-    }
     extraFields, err := cmd.Flags().GetStringToString("fields")
     if err != nil {
         return templateFlags{}, fmt.Errorf("parse fields map: %w", err)
@@ -108,7 +101,6 @@ func parseTemplateFlags(cmd *cobra.Command) (templateFlags, error) {
         dir:         dir,
         username:    username,
         password:    password,
-        indexPrefix: indexPrefix,
         extraFields: extraFields,
         port:        port,
     }, nil
@@ -118,7 +110,6 @@ type templateFlags struct {
     dir         string
     username    string
     password    string
-    indexPrefix string
     extraFields fields.Fields
     port        int
 }

View File

@@ -29,33 +29,30 @@ const (
 // LogstashPreparer prepares the Logstash Helm chart.
 type LogstashPreparer struct {
-    fh          file.Handler
-    fields      map[string]string
-    indexPrefix string
-    username    string
-    password    string
-    port        int
+    fh       file.Handler
+    fields   map[string]string
+    username string
+    password string
+    port     int

     templatePreparer
 }

 // NewLogstashPreparer returns a new LogstashPreparer.
-func NewLogstashPreparer(fields map[string]string, username, password, indexPrefix string, port int) *LogstashPreparer {
+func NewLogstashPreparer(fields map[string]string, username, password string, port int) *LogstashPreparer {
     return &LogstashPreparer{
-        username:    username,
-        password:    password,
-        indexPrefix: indexPrefix,
-        fields:      fields,
-        fh:          file.NewHandler(afero.NewOsFs()),
-        port:        port,
+        username: username,
+        password: password,
+        fields:   fields,
+        fh:       file.NewHandler(afero.NewOsFs()),
+        port:     port,
     }
 }

 // Prepare prepares the Logstash Helm chart by templating the required files and placing them in the specified directory.
 func (p *LogstashPreparer) Prepare(dir string) error {
     templatedPipelineConf, err := p.template(logstashAssets, "templates/pipeline.conf", pipelineConfTemplate{
         InfoMap:     p.fields,
         Host:        openSearchHost,
-        IndexPrefix: p.indexPrefix,
         Credentials: Credentials{
             Username: p.username,
             Password: p.password,
@@ -134,7 +131,6 @@ type LogstashHelmValues struct {
 type pipelineConfTemplate struct {
     InfoMap     map[string]string
     Host        string
-    IndexPrefix string
     Credentials Credentials
     Port        int
 }
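As a usage sketch of the changed constructor (not code from the repository; the import path, credentials, and field values below are placeholders), a caller no longer passes an index prefix, since the templated pipeline itself routes filebeat events to the logs index and other beats to the metrics index:

package main

import (
    "log"

    // Placeholder import path for the package that defines LogstashPreparer.
    logstash "example.com/hack/logcollector/internal"
)

func main() {
    // Extra fields for the Logstash pipeline; values here are examples only.
    fields := map[string]string{"cluster-id": "example"}

    // The index-prefix parameter is gone from the signature; 5045 matches the
    // default of the template command's --port flag.
    preparer := logstash.NewLogstashPreparer(fields, "admin", "example-password", 5045)

    // Template the pipeline and Helm values into the target directory.
    if err := preparer.Prepare("./chart"); err != nil {
        log.Fatalf("preparing Logstash chart: %v", err)
    }
}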