input {
  beats {
    host => "0.0.0.0"
    port => {{ .Port }}
  }
}

filter {
  mutate {
    # Remove some fields that are not needed.
    remove_field => [
      "[agent]",
      "[journald]",
      "[syslog]",
      "[systemd][invocation_id]",
      "[event][original]",
      "[log][offset]",
      "[log][syslog]"
    ]

    # Tag with the provided metadata.
    add_field => {
      {{ range $key, $value := .InfoMap }}
      "[metadata][{{ $key }}]" => "{{ $value }}"
      {{ end }}
    }
  }

  # Parse structured logs of the following systemd units.
  if [systemd][unit] in ["bootstrapper.service", "constellation-bootstrapper.service"] {
    # skip_on_invalid_json below does not skip the whole filter, so let's use a cheap workaround here.
    # See:
    # https://discuss.elastic.co/t/skip-on-invalid-json-skipping-all-filters/215195
    # https://discuss.elastic.co/t/looking-for-a-way-to-detect-json/102263
    if [message] =~ "\A\{.+\}\z" {
      json {
        source => "message"
        target => "logs"
        skip_on_invalid_json => true
      }
      mutate {
        replace => { "message" => "%{[logs][msg]}" }
        remove_field => [
          "[logs][msg]",
          "[logs][ts]"
        ]
      }
      # Replace dots in field names so they are not interpreted as nested fields downstream.
      de_dot {
        fields => ["[logs][peer.address]"]
      }
    }
  }
}

output {
  if ([@metadata][beat] == "filebeat") {
    # Logs shipped by filebeat go to the logs index.
    opensearch {
      hosts => "{{ .Host }}"
      # YYYY doesn't handle rolling over the year, so we use xxxx instead.
      # See https://github.com/logstash-plugins/logstash-output-elasticsearch/issues/541#issuecomment-270923437.
      index => "logs-%{+xxxx.ww}"
      user => "{{ .Credentials.Username }}"
      password => "{{ .Credentials.Password }}"
      ssl => true
      ssl_certificate_verification => true
    }
  } else {
    # All other beat output goes to the metrics index.
    opensearch {
      hosts => "{{ .Host }}"
      # YYYY doesn't handle rolling over the year, so we use xxxx instead.
      # See https://github.com/logstash-plugins/logstash-output-elasticsearch/issues/541#issuecomment-270923437.
      index => "metrics-%{+xxxx.ww}"
      user => "{{ .Credentials.Username }}"
      password => "{{ .Credentials.Password }}"
      ssl => true
      ssl_certificate_verification => true
    }
  }
}