# clients/install-log-server/tools/local-logstash.conf
input {
  # Tail local install-log files; state is deliberately not persisted so every
  # run re-parses the files from scratch.
  file {
    # NOTE(review): path is empty — fill in the log file path/glob before use.
    path => ""
    # Read each discovered file from its beginning, not just new lines.
    # (Quoted string — barewords are legal but unconventional in Logstash config.)
    start_position => "beginning"
    # NOTE(review): 0 is presumably intended to disable the age cutoff — confirm
    # for this Logstash version; newer file inputs treat ignore_older as a
    # timespan in seconds, where 0 would ignore every file.
    ignore_older => 0
    # /dev/null sincedb => no read position survives between runs.
    sincedb_path => "/dev/null"
    # Multiline join: any line that does NOT start with an ISO8601 timestamp is
    # a continuation of the previous event (stack traces, wrapped messages).
    codec => multiline {
      pattern => "^%{TIMESTAMP_ISO8601}"
      negate => true
      what => "previous"
    }
  }
}
filter {
  # "Grok is currently the best way in logstash to parse log data into something structured and queryable."
  # https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html
  #
  # Derive system_name / node / filename from the file's path (the "path"
  # field is set by the file input).
  grok {
    match => { "path" => "%{GREEDYDATA:system_name}(?:/(?<node>(.+?(?=_[0-9]{8}\.[0-9]{6}))))(.+?\b)/%{GREEDYDATA}/%{USERNAME:filename}" }
  }
  # Per-logfile message formats: pick the grok pattern that matches the
  # component that produced the file.
  if [filename] =~ "(\s*)nfv\-vim(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE}(\s*)%{NOTSPACE:program}(\s*)%{NOTSPACE:level}(\s*)%{GREEDYDATA:message}" }
    }
  } else if [filename] =~ "(\s*)libvirtd(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{POSINT:pid}: %{LOGLEVEL:level} : %{NOTSPACE:program} %{GREEDYDATA:message}" }
    }
  } else if [filename] =~ "(\s*)horizon(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} \[(?<level>([a-zA-Z0-9.]*))\](\s*)%{NOTSPACE:program}: %{GREEDYDATA:message}" }
    }
  } else if [filename] =~ "(\s*)openstack_test(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE}(\s*)%{NOTSPACE:program} %{GREEDYDATA:message}" }
    }
  } else if [filename] =~ "(\s*)platform(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE}(\s*)%{NOTSPACE:program}(\s*)%{NOTSPACE:level}(\s*)%{GREEDYDATA:message}" }
    }
  } else if [filename] =~ "(\s*)mtcAgent\_api(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} \[(?<pid>([0-9]*))\](\s*) %{NOTSPACE}(\s*)%{NOTSPACE:program}(\s*)%{GREEDYDATA:message}" }
    }
  } else if [filename] =~ "(\s*)mtcAgent\_event(.*)" {
    grok {
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} \[(?<pid>([0-9]*))\](\s*)%{GREEDYDATA:message}" }
    }
  } else {
    grok {
      match => {
        "message" => [
          # The default break_on_match is used so first successful match by grok will result in the filter being finished.
          # Use Titanium Cloud term node instead of host.
          "%{TIMESTAMP_ISO8601:timestamp} (\[[a-zA-Z0-9.]*\])(\s*)%{NOTSPACE}(\s*)%{NOTSPACE}(\s*)%{NOTSPACE}(\s*)%{NOTSPACE:program} %{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:timestamp} %{POSINT:pid} %{LOGLEVEL:level} %{DATA:program} %{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE} (?<program>(.+?(?=\()))(\(.+?\))(\[)(?<pid>(.*?))(\]\:) %{NOTSPACE:level} %{GREEDYDATA:message}",
          "%{TIMESTAMP_ISO8601:timestamp} %{NOTSPACE}(\s*)%{NOTSPACE:program} %{GREEDYDATA:message}"
        ]
      }
    }
  }
  # https://www.elastic.co/guide/en/logstash/current/plugins-filters-date.html
  date {
    # Set @timestamp from the grok'd timestamp and remove the field.
    # Fixed: "yyyy/mm/dd/..." used lowercase mm (minute-of-hour) in the month
    # position — Joda-Time month is MM. Also removed a duplicate
    # "yyyy-MM-dd HH:mm:ss.SSS" entry from the list.
    match => [ "timestamp", "ISO8601", "yyyy-MM-dd HH:mm:ss.SSS", "yyyy/MM/dd/HH/mm/ss.SSS", "MMM d HH:mm:ss.SSS", "MMM dd HH:mm:ss.SSS", "MMM dd HH:mm:ss,SSS", "yyyy-MM-dd HH:mm:ss,SSS", "yy-MM-dd HH:mm:ss.SSS", "yyyy-MM-dd HH:mm:ss" ]
    target => "@timestamp"
    # timezone is a string setting on the date filter, not an array.
    timezone => "UTC"
    remove_field => [ "timestamp" ]
  }
  # Rename and remove unwanted syslog fields
  # https://www.elastic.co/guide/en/logstash/current/plugins-filters-mutate.html
  mutate {
    # Hash form of rename (the array form is the legacy syntax).
    rename => { "host" => "system_address" }
  }
  # Events no pattern above could parse are dropped entirely — nothing
  # unparseable reaches the outputs.
  if "_grokparsefailure" in [tags] {
    drop { }
  }
}
output {
# Index parsed events into the local Elasticsearch instance (default port).
elasticsearch { hosts => ["127.0.0.1:9200"] }
# Also echo every event to stdout for interactive debugging.
stdout {}
}