Nagios Logstash conf file
input {
file {
# Wildcards work here :)
path => [ "/var/log/messages" ]
start_position => "beginning"
type => "nagios-alert"
}
}
#input {
#tcp {
#host => "10.10.10.1"
#port => 3333
#type => "nagios-alert"
#}
#}
filter {
if [type] == "nagios-alert" {
if [message] =~ /nagios3 nagios:/ {
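# Each block below handles one type of Nagios log entry. Illustrative example of
# a SERVICE DOWNTIME ALERT line (host/service values are hypothetical):
# Apr 15 10:01:02 nagios3 nagios: SERVICE DOWNTIME ALERT: web01;HTTP;STARTED; Service has entered a period of scheduled downtime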
if [message] =~ /SERVICE DOWNTIME ALERT/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service_command};%{DATA:nagios_service_stat};%{GREEDYDATA:nagios_message}" }
}
}
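# Illustrative SERVICE ALERT line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: SERVICE ALERT: web01;HTTP;CRITICAL;HARD;3;Connection refused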
if [message] =~ /SERVICE ALERT/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_serivce_up_down};%{DATA:nagios_state};%{DATA:nagios_serivce_stat_up_down};%{DATA:nagios_serivce_retry_up_down};%{GREEDYDATA:nagios_message}" }
}
}
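# Illustrative HOST DOWNTIME ALERT line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: HOST DOWNTIME ALERT: web01;STARTED; Host has entered a period of scheduled downtime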
if [message] =~ /HOST DOWNTIME ALERT/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_host_stat};%{GREEDYDATA:nagios_message}" }
}
}
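# Illustrative HOST ALERT line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: HOST ALERT: web01;DOWN;HARD;3;PING CRITICAL - Packet loss = 100%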
if [message] =~ /HOST ALERT/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_host_up_down};%{DATA:nagios_host_stat_up_down};%{DATA:nagios_host_retry_up_down};%{GREEDYDATA:nagios_message}" }
}
}
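# Illustrative CURRENT SERVICE STATE line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: CURRENT SERVICE STATE: web01;HTTP;OK;HARD;1;HTTP OK: 200 response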
if [message] =~ /CURRENT SERVICE STATE/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_service_command};%{DATA:nagios_serivce_up_down};%{DATA:nagios_serivce_stat_up_down};%{DATA:nagios_serivce_retry_up_down};%{GREEDYDATA:nagios_message}" }
}
}
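# Illustrative CURRENT HOST STATE line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: CURRENT HOST STATE: web01;UP;HARD;1;PING OK - Packet loss = 0%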
if [message] =~ /CURRENT HOST STATE/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_hostname};%{DATA:nagios_host_up_down};%{DATA:nagios_host_stat_up_down};%{DATA:nagios_host_retry_up_down};%{GREEDYDATA:nagios_message}" }
}
}
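# Illustrative HOST NOTIFICATION line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: HOST NOTIFICATION: admin;web01;DOWN;notify-host-by-email;PING CRITICAL - Packet loss = 100%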
if [message] =~ /HOST NOTIFICATION/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message}" }
}
}
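# Illustrative SERVICE NOTIFICATION line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: SERVICE NOTIFICATION: admin;web01;HTTP;CRITICAL;notify-service-by-email;Connection refused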
if [message] =~ /SERVICE NOTIFICATION/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_notifyname};%{DATA:nagios_hostname};%{DATA:nagios_service_command};%{DATA:nagios_state};%{DATA:nagios_contact};%{GREEDYDATA:nagios_message}" }
}
}
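# Illustrative EXTERNAL COMMAND line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: EXTERNAL COMMAND: PROCESS_SERVICE_CHECK_RESULT;web01;HTTP;0;HTTP OK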
if [message] =~ /EXTERNAL COMMAND/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{DATA:nagios_message};%{GREEDYDATA:nagios_hostname}" }
}
}
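# Illustrative Nagios warning line (hypothetical values):
# Apr 15 10:01:02 nagios3 nagios: Warning: Return code of 127 for check of service 'HTTP' on host 'web01' was out of bounds.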
if [message] =~ /Warning/ {
grok {
match => { "message" => "%{MONTH:MONTH}\s+%{MONTHDAY:MLogstash nagios conf(YML) fileONTHDAY} %{TIME:TIME} %{HOSTNAME:HOSTNAME} %{HOSTNAME:SHORTHOSTNAME}: %{DATA:nagios_type}: %{GREEDYDATA:nagios_message}" }
}
}
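# If one of the grok patterns above matched (i.e. MONTH was captured), rebuild
# @timestamp from the captured syslog date fields.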
if ("" in [MONTH]) {
mutate {
# Replace any comma in TIME with a period, then build a full date string
# (the year is hard-coded because the syslog prefix carries no year)
gsub => [ "TIME", ",", "." ]
add_field => { "mytimestamp" => "%{MONTH} %{MONTHDAY} 2016 %{TIME}" }
}
date {
match => [ "mytimestamp", "MMM dd YYYY HH:mm:ss", "MMM d YYYY HH:mm:ss" ]
#timezone => "UTC"
target => "@timestamp"
}
mutate {
remove_field => [ "mytimestamp", "%{MONTH} %{MONTHDAY} %{YEAR} %{TIME}" ]
}
}
}
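# Meter the nagios-alert events and flush a tagged metric event every 60 seconds;
# the "metric" tag routes it to the file output below.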
metrics {
meter => "events"
add_tag => "metric"
flush_interval => 60
}
}
}
output {
## Debug
#stdout { codec => rubydebug }
if [type] == "nagios-alert" {
elasticsearch {
hosts => [ "10.10.3.11:9204", "10.10.3.12:9204", "10.10.3.11:9205", "10.10.3.12:9205" ]
#hosts => [ "10.10.3.11:9200" ]
timeout => 30
index => "nagios-syslog-%{+YYYY.MM.dd}"
##flush_size => 2000
#flush_size => 5
}
} else if "metric" in [tags] {
file {
codec => line {
format => "rate: %{[events][rate_1m]}"
#format => "rate: %{[events][rate_5m]}"
}
path => "/var/tmp/logstash-%{+YYYY-MM-dd}.log"
}
} else {
elasticsearch{
hosts => [ "10.10.3.11:9204", "10.10.3.12:9204", "10.10.3.11:9205", "10.10.3.12:9205" ]
#hosts => [ "10.10.3.36:9200" ]
timeout => 30
#flush_size => 2000
}
}
# For testing only
#if [type] == "nagios-alert" {
#null{}
#}
}
Is there any way we can put this on Splunk ITSI?
Hi, and welcome to my blog. Just making sure I understand your question: would you like to know whether Logstash + grok can output data to Splunk ITSI? I have no personal experience with Splunk ITSI; however, from a Logstash/grok perspective you can do pretty much whatever you like (or need). Logstash works with input and output routes, and in between it re-processes/changes/replaces the data the way you instruct it to. Now, to your question: the output channel can be in JSON format (as the example shows for Elasticsearch). I am not sure what format…
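As a rough illustration of that JSON-output idea, here is a minimal sketch using the logstash-output-http plugin, assuming a Splunk HTTP Event Collector endpoint is available; the URL and token are placeholders, and the payload shape would likely need adapting for ITSI:

output {
# Hypothetical example only, not part of the configuration above.
http {
url => "https://splunk.example.com:8088/services/collector/event"
http_method => "post"
format => "json"
headers => { "Authorization" => "Splunk <your-HEC-token>" }
}
}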