# The logstash configuration below takes Titanium Cloud syslog input and outputs the custom Titanium Cloud log data to elasticsearch. # Extending the openstack log format, Titanium Cloud syslog messages required the use of grok to parse log data into something structured and queryable. # - Inconsistent formating of log level, pid and program # - custom Titanium Cloud syslog fields and naming input { # Do not use syslog input plugin (or type) #TCP_PARAMS #UDP_PARAMS } filter { # "Grok is currently the best way in logstash to parse log data into something structured and queryable." # https://www.elastic.co/guide/en/logstash/current/plugins-filters-grok.html # The input plugins above convert the grok { match => { "message" => [ # The default break_on_match is used so first successful match by grok will result in the filter being finished. # Use Titanium Cloud term node instead of host. "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}\: %{TIMESTAMP_ISO8601:syslog_trash} %{POSINT:pid} %{LOGLEVEL:level} %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program} %{TIMESTAMP_ISO8601:syslog_trash} %{POSINT:pid} %{LOGLEVEL:level} %{DATA:program} %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}(?:\[%{POSINT:pid}\])?: %{LOGLEVEL:level}?: %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program} \[%{POSINT:pid}\] %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}: %{LOGLEVEL:level} %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}: %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}\[%{POSINT:pid}\]?: %{LOGLEVEL:level} %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}\[%{POSINT:pid}\]?: %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program}(?:\[%{POSINT:pid}\])?: %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{NOTSPACE:system_name} %{NOTSPACE:filename} %{SYSLOGHOST:node} %{DATA:program} %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{TIMESTAMP_ISO8601:syslog_timestamp} %{POSINT:pid} %{LOGLEVEL:level} %{DATA:program} %{GREEDYDATA:message}", "<%{POSINT:syslog_pri}>%{DATESTAMP:syslog_timestamp} %{POSINT:pid} %{LOGLEVEL:level} %{DATA:program} %{GREEDYDATA:message}" ] } overwrite => [ "message" ] remove_field => [ "syslog_trash" ] add_field => { "host_timestamp" => "%{syslog_timestamp}" } } # https://www.elastic.co/guide/en/logstash/current/plugins-filters-syslog_pri.html syslog_pri { facility_labels => ["kernel", "user-level", "mail", "daemon", "security/authorization", "syslogd", "line printer", "network news", "UUCP", "clock", "security/authorization", "FTP", "NTP", "log audit", "log alert", "clock", "postgres", "platform", "openstack", "sm", "local4", "mtce", "sysinv", "horizon"] severity_labels => [ "emergency", "alert", "crit", "error", "warn", "notice", "info", "debug" ] # syslog_pri has served its purpose, and syslog_facility_code isn't useful remove_field => [ "syslog_pri", "syslog_facility_code", "syslog_severity_code", "severity_label", "syslog_severity" ] } # https://www.elastic.co/guide/en/logstash/current/plugins-filters-date.html date { # set timestamp from the grok'd syslog_timestamp and remove the field match => [ "syslog_timestamp", "MMM d HH:mm:ss.SSS", "MMM dd HH:mm:ss.SSS" , "yyyy-MM-dd HH:mm:ss.SSS"] remove_field => [ "syslog_timestamp" ] timezone => [ "UTC" ] } # Rename and remove unwanted syslog fields # https://www.elastic.co/guide/en/logstash/current/plugins-filters-mutate.html mutate { rename => [ "host", "system_address" ] # https://www.elastic.co/guide/en/logstash/current/plugins-filters-mutate.html#plugins-filters-mutate-remove_field remove_field => [ "type", "syslog_facility" ] } } output { if "_grokparsefailure" in [tags] { file { path => "/var/log/logstash/wrs-grokparsefailure-%{+YYYY-MM-dd}" } } else { elasticsearch { hosts => ["127.0.0.1:9200"] } } }