ELK單機環境測試

主機環境爲:

centos 6.8 X64

各軟件及版本:

kibana-4.5.3-linux-x64.tar.gz
elasticsearch-2.3.4.rpm
logstash-2.3.2.tar.gz

filebeat-1.2.3-x86_64.rpm
jdk-8u121-linux-x64.rpm

下載地址:

http://pan.baidu.com/s/1pLGzoYR

 

須要的主機及安裝到的軟件:

192.168.40.83   iptables2
kibana-4.5.3-linux-x64.tar.gz
elasticsearch-2.3.4.rpm
logstash-2.3.2.tar.gz
jdk-8u121-linux-x64.rpm

192.168.40.103  test2
filebeat-1.2.3-x86_64.rpm
192.168.40.101  test1
filebeat-1.2.3-x86_64.rpm

elk日誌收集架構

image

這個是官網的

https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-overview.html

filebeat收集到的日誌能夠發送到Elasticsearch/logstash/Kafka/redis中,本實驗中收集的日誌發送到logstash,logstash經過filter段處理後發送到Elasticsearch,用戶經過訪問kibana進行數據展示,這裏不進行filebeat、logstash、Elasticsearch、kibana原理講解

image

iptables2
1.準備好jdk
yum localinstall -y jdk-8u121-linux-x64.rpm
[root@iptables2 ~]# cat /etc/profile.d/java.sh
export JAVA_HOME=/usr/java/latest
export PATH=/usr/java/latest/bin:${PATH}

2.安裝Elasticsearch
[root@iptables2 ~]# yum localinstall -y elasticsearch-2.3.4.rpm
安裝插件
使用ES自帶的命令plugin 
# head
/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head
# kopf
/usr/share/elasticsearch/bin/plugin install lmenezes/elasticsearch-kopf
# bigdesk
/usr/share/elasticsearch/bin/plugin install hlstudio/bigdesk

那如何訪問安裝好的插件呢?
http://ES_server_ip:port/_plugin/plugin_name
Example:
http://192.168.40.83:9200/_plugin/head/
http://192.168.40.83:9200/_plugin/kopf/
編輯配置文件
[root@iptables2 ~]# vim /etc/elasticsearch/elasticsearch.yml
# ---------------------------------- Network -----------------------------------
#
# Set the bind address to a specific IP (IPv4 or IPv6):
#
network.host: 0.0.0.0
#
# Set a custom port for HTTP:
#
http.port: 9200

啓動Elasticsearch
[root@iptables2 ~]# service elasticsearch start

安裝logstash
tar xf logstash-2.3.2.tar.gz
wget http://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz
mkdir logstash-2.3.2/conf
gunzip GeoLiteCity.dat.gz
cat logstash-2.3.2/conf/ver11.conf
input {
    beats {
        port => 5044
        type => "syslog"
    }
}

filter {
    if [type] == "filebeat" {
        grok {
            match => [ "message", "%{SYSLOGLINE}" ]
            overwrite => [ "message" ]
        }
    }
    date {
        match => [ "timestamp", "MMM dd HH:mm:ss", "MMM  d HH:mm:ss" ]
    }
    if [type] == "nginxacclog" {
        grok {
            match => {
                "message" => "%{IP:client} - (?:%{USERNAME:remote_user}|-) \[%{HTTPDATE:timestamp}\] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" \"%{NUMBER:request_time:float}\" %{INT:status} %{NUMBER:bytes} \"(?:%{URI:referer}|-)\" \"(?:%{GREEDYDATA:user_agent}|-)\" (?:%{IP:x_forword_for}|-)"
            }
        }
        date {
            match => [ "timestamp","dd/MMM/YYYY:HH:mm:ss Z" ]
        }
        urldecode {
            all_fields => true
        }
    }
    if [type] == "test1log" {
        grok {
            patterns_dir => "/root/logstash-2.3.2/patterns/"
            match => {
                "message" => "%{IP:client} - - \[%{ELKTIMES:log_timestamp} \] \"%{WORD:method} %{URIPATHPARAM:request} HTTP/%{NUMBER:http_version}\" %{INT:status} %{NUMBER:bytes} \"(?:%{URI:referer}|-)\" \"(?:%{GREEDYDATA:user_agent}|-)\""
            }
        }
        date {
            match => [ "log_timestamp","dd/MMM/YYYY:HH:mm:ss" ]
        }
        geoip {
            source => "client"
            target => "geoip"
            database => "/root/GeoLiteCity.dat"
            add_field => ["[geoip][coordinates]","%{[geoip][longitude]}"]
            add_field => ["[geoip][coordinates]","%{[geoip][latitude]}"]
        }
        mutate {
            convert => ["[geoip][coordinates]","float", "bytes","integer", "bytes.raw","integer"]
        }
        urldecode {
            all_fields => true
        }
    }
    if [type] == "loginmsg" {
        grok {
            match => {"message" => "%{SYSLOGPAMSESSION}"}
            match => {"message" => "%{SECURELOG}"}
            match => {"message" => "%{SYSLOGBASE2}"}
        }
        geoip {
            source => "IP"
            fields => ["city_name"]
            database => "/root/GeoLiteCity.dat"
        }
        if ([status] == "Accepted") {
            mutate {
                add_tag => ["Success"]
            }
        }
        else if ([status] == "Failed") {
            mutate {
                add_tag => ["Failed"]
            }
        }
    }
}

output {
    stdout {
        codec => rubydebug
    }
    elasticsearch {
        hosts => "192.168.40.83:9200"
    }
}
mkdir logstash-2.3.2/patterns
cat logstash-2.3.2/patterns/linux-syslog
SECURELOG %{WORD:program}\[%{DATA:pid}\]: %{WORD:status} password for ?(invaliduser)? %{WORD:USER} from %{DATA:IP} port
ELKTIMES %{MONTHDAY}/%{MONTH}/%{YEAR}:%{TIME}
啓動logstash
./logstash-2.3.2/bin/logstash -f logstash-2.3.2/conf/ver11.conf

安裝kibana
tar xf /usr/local/src/kibana-4.5.3-linux-x64.tar.gz -C /usr/local
cd /usr/local/
ln -s kibana-4.5.3-linux-x64 kibana
cp kibana/config/kibana.yml kibana/config/kibana.yml.bak_$(date +%F_%H:%M)
配置kibana.yml
server.port: 5601
server.host: "0.0.0.0"
其它內容不動
啓動kibana
./kibana/bin/kibana
訪問
http://192.168.40.83:5601/便可

test1
安裝filebeat
yum localinstall -y filebeat-1.2.3-x86_64.rpm
cp /etc/filebeat/filebeat.yml{,$(date +%F_%H:%M)}
# cat /etc/filebeat/filebeat.yml
##################################################### filebeat #######################################################
filebeat:
  prospectors:
    -
      paths:
        - /var/log/messages
      input_type: log
      document_type: messages

    -
      paths:
        - /var/log/secure
      input_type: syslog
      document_type: loginmsg

    -
      paths:
        - /var/log/nginx_access.log 
      input_type: log
      document_type: nginxacclog

    -
      paths:
        - /usr/local/tomcat/logs/catalina.out
      input_type: catalina
      document_type: catalinalog
      multiline:
          pattern: '^[[:space:]]'
          negate: true
          match: after

  registry_file: /var/lib/filebeat/registry

##################################################### output #######################################################
output:
  logstash:
    hosts: ["192.168.40.83:5044"]

##################################################### Logging #######################################################
logging:
  files:
    rotateeverybytes: 10485760 # = 10MB
啓動filebeat
service filebeat start

test2
安裝filebeat
yum localinstall -y filebeat-1.2.3-x86_64.rpm
cp /etc/filebeat/filebeat.yml{,$(date +%F_%H:%M)}
# cat /etc/filebeat/filebeat.yml
##################################################### filebeat #######################################################
filebeat:
  prospectors:
    -
      paths:
        - /var/log/messages
      input_type: log
      document_type: messages

    -
      paths:
        - /var/log/secure
      input_type: syslog
      document_type: loginmsg

    -
      paths:
        - /var/log/nginx_access.log 
      input_type: log
      document_type: nginxacclog

    -
      paths:
        - /usr/local/tomcat/logs/catalina.out
      input_type: catalina
      document_type: catalinalog
      multiline:
          pattern: '^[[:space:]]'
          negate: true
          match: after

  registry_file: /var/lib/filebeat/registry

##################################################### output #######################################################
output:
  logstash:
    hosts: ["192.168.40.83:5044"]

##################################################### Logging #######################################################
logging:
  files:
    rotateeverybytes: 10485760 # = 10MB
啓動filebeat
service filebeat start

 

查看Elasticsearch是否接收到了數據:

http://192.168.40.83:9200/_search?pretty

image

查看Elasticsearch中全部索引列表

http://192.168.40.83:9200/_aliases

image

訪問kibana

http://192.168.40.83:5601/

image

參考連接:

logstash快速入門
http://www.2cto.com/os/201411/352015.html
使用 curl 命令發送請求來查看 ES 是否接收到了數據:
# curl 'http://localhost:9200/_search?pretty'
開源實時日誌分析ELK平臺部署
http://www.tuicool.com/articles/QFvARfr

ELK系列一:ELK安裝配置及nginx日誌分析
http://www.myhack58.com/Article/sort099/sort0102/2016/81990_3.htm
ELK系列二:kibana操做及nginx日誌分析圖表建立
http://www.myhack58.com/Article/sort099/sort0102/2016/81991.htm

ELK+Filebeat+Kafka+ZooKeeper 構建海量日誌分析平臺
http://tchuairen.blog.51cto.com/3848118/1861167

相關文章
相關標籤/搜索