(No Ratings Yet)
Loading...
Click here for the full ELK installation and configuration guide.
Create elastic user and group
# Create the elastic group, then the elastic user: home under /export/home,
# primary group elastic, create the home dir (-m), bash shell, GECOS comment.
groupadd elastic
useradd -d /export/home/elastic -g elastic -m -s /bin/bash -c "Elastic Search" elastic
Create elastic user home directory
# Ensure the home directory exists and is owned by the elastic user.
# -p makes this idempotent: useradd -m above may already have created it,
# and a plain mkdir would then fail with "File exists".
mkdir -p /export/home/elastic
chown elastic:elastic /export/home/elastic
Download logstash gz file and create directory
# Unpack the Logstash 2.2.2 tarball into /opt and rename to a version-free
# path. Commands are chained with && so a failed cd or extract stops the
# sequence instead of mv-ing the wrong thing.
cd /opt && \
tar xf /var/tmp/logstash-2.2.2.tar.gz && \
mv logstash-2.2.2 logstash
Modify the file in two places, adding "solaris" to the host_os checks so Solaris is recognized:
# NOTE(review): truncated excerpt of logstash's JRuby console bootstrap —
# the snippet is cut off mid-statement (unclosed require/begin), so only the
# intent is visible: "solaris" has been appended to both host_os regexes so
# Solaris takes the same code path as the other UNIX platforms. Confirm
# against the full file in the logstash 2.2.2 tree before applying.
if RbConfig::CONFIG['host_os'].downcase =~ /darwin|openbsd|freebsd|netbsd|linux|solaris/ require 'java' result = begin if RbConfig::CONFIG['host_os'].downcase =~ /darwin|openbsd|freebsd|netbsd|solaris/ require File.join(File.dirname(__FILE__), 'bsd_console'
Create the other logstash directories
# Create the runtime directory layout: current/ holding logs, data and tmp.
# -p creates parents as needed and is safe to re-run.
mkdir -p /opt/logstash/current
mkdir -p /opt/logstash/current/logs /opt/logstash/current/data /opt/logstash/current/tmp
Create logstash start-up script
cat /opt/logstash/bin/start_logstash.sh
#!/bin/bash
# start_logstash.sh — launch the logstash agent in the background,
# logging stdout/stderr under /opt/logstash/current/logs.
cd /opt/logstash/current/logs

# Raise the per-process open-file limit; logstash holds many file handles.
ulimit -n 65000

name="logstash"
LS_LOG_DIR=/opt/logstash/current/logs
#LS_CONF_DIR=/opt/logstash/current/conf
LS_LOG_FILE="${LS_LOG_DIR}/$name.log"
LS_CONF=/opt/logstash/current/$name.yml

# On send side
LS_OPTS="-w 4"
# On receive side
# LS_OPTS="-w 12"

program=/opt/logstash/bin/logstash
#args="agent -f ${LS_CONF_DIR} -l ${LS_LOG_FILE} ${LS_OPTS}"
args="agent -f ${LS_CONF} -l ${LS_LOG_FILE} ${LS_OPTS}"

#nohup $program -w $1 -f $2 > "${LS_LOG_DIR}/$name.stdout" 2> "${LS_LOG_DIR}/$name.err" &
exec "$program" $args > "${LS_LOG_DIR}/$name.stdout" 2> "${LS_LOG_DIR}/$name.err" &

# Run this once, outside the script, to make it executable:
chmod 750 /opt/logstash/bin/start_logstash.sh
Set proper owner
# Recursively hand the whole logstash installation to the elastic user/group
# so the agent can write its logs, data and tmp directories.
chown -R elastic:elastic /opt/logstash
Create logstash configuration file
cat /opt/logstash/current/logstash.yml
# Pipeline: tail application sql.log files from all *wapp* zones, parse each
# entry with grok, set @timestamp from the log's own timestamp, emit event-rate
# metrics, and ship everything to the elasticsearch cluster.
input {
  file {
    # Wildcards work, here :)
    path => [ "/zones/*wapp*/root/devtech101logs/application/sql.log" ]
    type => "sql-log"
  }
}
#input {
#  file {
#    path => [ "/var/log/*.log", "/var/log/messages", "/var/log/syslog" ]
#    type => "syslog-ng"
#  }
#}
#input {
#  tcp {
#    host => "10.10.3.20"
#    port => 3333
#    type => "sql-log"
#  }
#}
filter {
  if [type] == "sql-log" {
    # Drop blank lines. NOTE: the original page showed /^s*$/ — the backslash
    # of \s was lost when the config was pasted onto the web page; without it
    # the pattern never matches an empty message.
    if [message] =~ /^\s*$/ {
      drop { }
    }
    # Parse "DD Mon YYYY HH:mm:ss - LEVEL - host :: session :: BHSql :: duration :: SQL".
    # (?m) lets the SQL statement span multiple lines.
    grok {
      match => { "message" => "(?m)%{MONTHDAY:MONTHDAY}%{SPACE}%{MONTH:MONTH}%{SPACE}%{YEAR:YEAR}%{SPACE}%{TIME:TIME}%{SPACE}-%{SPACE}%{LOGLEVEL:LOGLEVEL}%{SPACE}-%{SPACE}%{HOSTNAME:HOSTNAME}%{SPACE}::%{SPACE}%{DATA:SESSION_ID}%{SPACE}::%{SPACE}BHSql%{SPACE}::%{SPACE}%{DATA:DURATION}%{SPACE}::%{SPACE}%{GREEDYDATA:SQL_STATEMENT}" }
      add_field => { "mytimestamp" => "%{MONTHDAY} %{MONTH} %{YEAR} %{TIME}" }
    }
    # Use the log's own timestamp (either '.' or ',' millisecond separator)
    # as the event @timestamp.
    date {
      match => [ "mytimestamp", "dd MMM YYYY HH:mm:ss.SSS", "dd MMM YYYY HH:mm:ss,SSS" ]
      #timezone => "UTC"
      target => "@timestamp"
    }
    # The scratch field is no longer needed once @timestamp is set.
    mutate {
      remove_field => [ "mytimestamp", "%{MONTHDAY} %{MONTH} %{YEAR} %{TIME}" ]
    }
  }
  # Emit a 1m/5m/15m event-rate meter every 60s, tagged "metric".
  metrics {
    meter => "events"
    add_tag => "metric"
    flush_interval => 60
  }
}
output {
  #debug
  #stdout { codec => rubydebug }
  #if "metric" in [tags] {
  #  file {
  #    codec => line {
  #      format => "rate: %{[events][rate_1m]}"
  #      ##format => "rate: %{[events][rate_5m]}"
  #    }
  #    path => "/var/tmp/logstash-%{+YYYY-MM-dd}.log"
  #  }
  #}
  if [type] == "sql-log" {
    # SQL events go to a dated per-type index on the ES cluster.
    elasticsearch {
      hosts => [ "10.10.3.25:9200", "10.10.3.26:9200", "10.10.3.27:9200" ]
      timeout => 30
      index => "web-%{type}-%{+YYYY.MM.dd}"
      ##flush_size => 2000
      #flush_size => 5
    }
  } else if "metric" in [tags] {
    # Rate metrics are appended to a local dated file instead of ES.
    file {
      codec => line {
        format => "rate: %{[events][rate_1m]}"
        ##format => "rate: %{[events][rate_5m]}"
      }
      path => "/var/tmp/logstash-%{+YYYY-MM-dd}.log"
    }
  } else {
    # Anything else goes to the default logstash index on the same cluster.
    elasticsearch {
      hosts => [ "10.10.3.25:9200", "10.10.3.26:9200", "10.10.3.27:9200" ]
      timeout => 30
      ##flush_size => 2000
    }
  }
  # For testing only
  #if [type] == "sql-log" {
  #  null{}
  #}
}
Create a SMF service and import the service
# Import the SMF manifest so Solaris manages logstash as a supervised service.
svccfg import /var/tmp/logstash.xml
/var/tmp/logstash.xml example
# NOTE(review): the XML manifest body this cat displayed appears to have been
# stripped when the page was scraped — only the command survived. Recover the
# manifest from the original article before importing it with svccfg.
cat /var/tmp/logstash.xml
Elasticsearch
1
1
vote
Article Rating