Log Collection - ELK

Install and configure the following:

1. Install Java
    wget http://download.oracle.com/otn-pub/java/jdk/8u45-b14/jdk-8u45-linux-x64.tar.gz
    mkdir /usr/local/java
    tar -zxf jdk-8u45-linux-x64.tar.gz -C /usr/local/java/
    vim /etc/profile
    -----------
    export JAVA_HOME=/usr/local/java/jdk1.8.0_45
    export PATH=$PATH:$JAVA_HOME/bin
    export CLASSPATH=.:$JAVA_HOME/lib/tools.jar:$JAVA_HOME/lib/dt.jar:$CLASSPATH
    -----------
    source /etc/profile    # reload the environment variables

    Run: java -version
    Expected output:
    java version "1.8.0_45"

   Java(TM) SE Runtime Environment (build 1.8.0_45-b14)

   Java HotSpot(TM) 64-Bit Server VM (build 25.45-b02, mixed mode)

2. Configure logstash-agent (application server nodes)
    The Kafka output plugin is used here, so download the package that bundles all plugins and install that directly.
    wget https://download.elastic.co/logstash/logstash/logstash-all-plugins-2.3.1.tar.gz
    tar -zxvf logstash-all-plugins-2.3.1.tar.gz -C /usr/local/
    cd /usr/local/
    ln -s logstash-2.3.1/ logstash
    cd /usr/local/logstash

    For logstash.conf, see the configuration in section 9 below.
    Upload the modified logstash configuration file.

    Start logstash-agent:
    ./bin/logstash agent -f conf/logstash.conf -l logs/logstash.log &

3. Install Kafka
    A shared Kafka service is used, so no separate installation is needed here. If the topic does not exist yet, it can be created as shown in the sketch below.
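
    A minimal sketch for creating the topic used later in the configs, assuming Kafka's standard
    command-line tools are available on one of the cluster nodes and using the zookeeper addresses
    from section 10 (partition and replication counts are illustrative, not from the original setup):

    bin/kafka-topics.sh --create \
        --zookeeper 192.168.1.4:2181,192.168.1.5:2181,192.168.1.6:2181 \
        --topic tomcat-log --partitions 3 --replication-factor 2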

4. Install logstash-index, elasticsearch, and kibana
    wget https://download.elastic.co/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/2.3.2/elasticsearch-2.3.2.tar.gz
    wget https://download.elastic.co/logstash/logstash/logstash-all-plugins-2.3.1.tar.gz
    wget https://download.elastic.co/kibana/kibana/kibana-4.5.0-linux-x64.tar.gz

    tar -zxvf elasticsearch-2.3.2.tar.gz -C /usr/local/
    tar -zxvf logstash-all-plugins-2.3.1.tar.gz -C /usr/local/
    tar -zxvf kibana-4.5.0-linux-x64.tar.gz -C /usr/local/
    cd /usr/local/
    ln -s logstash-2.3.1/ logstash
    ln -s elasticsearch-2.3.2 elasticsearch
    ln -s kibana-4.5.0-linux-x64 kibana

5. Configure elasticsearch
    cd elasticsearch
    vim config/elasticsearch.yml
    Set the cluster name:  cluster.name: elastic-logstash
    Set the node name:     node.name: "node-192.168.1.202"

    Adjust the ES heap size:

    vim bin/elasticsearch.in.sh

    ----------

    if [ "x$ES_MIN_MEM" = "x" ]; then
    ES_MIN_MEM=3g
    fi
    if [ "x$ES_MAX_MEM" = "x" ]; then
    ES_MAX_MEM=3g
    fi

    ----------

    Install the elasticsearch-kopf, elasticsearch-head, and marvel plugins for easier monitoring:
    cd /usr/local/elasticsearch/bin/
    ./plugin install lmenezes/elasticsearch-kopf
    ./plugin install mobz/elasticsearch-head

    ./plugin install license

    ./plugin install marvel-agent

    cd /usr/local/kibana/

    ./bin/kibana plugin --install elasticsearch/marvel/latest

    Start elasticsearch:
    cd /usr/local/elasticsearch
    ./bin/elasticsearch -d

    Visit the following URLs to verify:
    http://xxx.xxx.xxx.xxx:9200/_plugin/kopf/
    http://xxx.xxx.xxx.xxx:9200/_plugin/head/
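
    The cluster state can also be checked from the command line with the standard cluster health API,
    for example:

    curl 'http://xxx.xxx.xxx.xxx:9200/_cluster/health?pretty'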

6. Configure logstash
    cd /usr/local/logstash
    Upload the modified logstash configuration file (see section 10).
    Start the logstash index instance:
    ./bin/logstash agent -f conf/logstash.conf -l logs/logstash.log &

7. Configure kibana
    vim /usr/local/kibana/config/kibana.yml
    Set the ES endpoint address:
    elasticsearch_url: http://xxx.xxx.xxx.xxx:9200
    cd /usr/local/kibana/bin/
    Start kibana:
    nohup ./kibana &

    View data in Kibana:
    http://xxx.xxx.xxx.xxx:5601

    View the state of the elasticsearch cluster:

    http://xxx.xxx.xxx.xxx:5601/app/marvel

8. Install nginx
    1. Proxy the following through nginx:
        http://xxx.xxx.xxx.xxx:5601
        http://xxx.xxx.xxx.xxx:9200/_plugin
    2. Add user authentication in front of kibana and the _plugin endpoints
    3. Block direct external access to ports 5601 and 9200 (see the sketch below)
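
    A minimal configuration sketch, assuming nginx with the stock basic-auth module, an htpasswd
    file already created (for example with "htpasswd -c /etc/nginx/htpasswd admin"), and Kibana /
    Elasticsearch reachable from the nginx host; the server_name is a hypothetical placeholder:

    server {
        listen 80;
        server_name logs.example.com;

        # basic authentication for everything served through this proxy
        auth_basic           "Restricted";
        auth_basic_user_file /etc/nginx/htpasswd;

        # Kibana (also serves /app/marvel)
        location / {
            proxy_pass http://xxx.xxx.xxx.xxx:5601;
            proxy_set_header Host $host;
        }

        # Elasticsearch site plugins (kopf, head)
        location /_plugin/ {
            proxy_pass http://xxx.xxx.xxx.xxx:9200;
            proxy_set_header Host $host;
        }
    }

    Direct access to ports 5601 and 9200 can then be blocked with firewall rules (for example
    iptables) or by binding Kibana and Elasticsearch to the local interface only.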

9. logstash.conf for logstash-agent (application server nodes)
    # Configure one input{} per application; a single logstash.conf may contain multiple input{} blocks
    input {
        file {
            codec => multiline {
                pattern => "^%{TIME}|%{SYSLOGTIMESTAMP_EXT}|Hibernate:|%{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY}|%{MONTHDAY}[./-]%{MONTH}[./-]%{YEAR}"
                patterns_dir => ["/usr/local/logstash/local_patterns"]
                negate => true
                what => "previous"
            }
            discover_interval => 10
            # Application type; a prefix such as tomcat_xxx makes it easier to filter by type later
            type => "tomcat_app"
            # Paths of the log files; change to the actual directories
            path => ["/home/eqs/server/tomcat/app/logs/catalina.out","/home/eqs/server/tomcat/app/logs/*.log"]
            # File that stores the current read position (sincedb); use a separate file per application
            sincedb_path => "/home/eqs/logstash-1.5.4/sincedb-tomcat_app"
            sincedb_write_interval => 1
            start_position => "beginning"
            stat_interval => 1
        }
    }
    output {
        kafka {
            # Kafka cluster addresses and ports; change to the actual values
            bootstrap_servers => "192.168.249.64:9092,192.168.249.65:9092,192.168.249.66:9092"
            # logstash 1.5.4 requires the broker_list parameter instead
            #broker_list => "192.168.249.64:9092,192.168.249.65:9092,192.168.249.66:9092"
            # topic_id under which the logs are published
            topic_id => "tomcat-log"
        }
    }
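
    Before starting either logstash instance, the configuration file can be syntax-checked first;
    logstash 2.x accepts a --configtest flag for this (a quick sketch, using the same paths as above):

    ./bin/logstash agent -f conf/logstash.conf --configtest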

10. logstash.conf for logstash-index (central node)
    input {
        kafka {
            # Zookeeper addresses and ports of the Kafka cluster; change to the actual IPs and ports
            zk_connect => "192.168.1.4:2181,192.168.1.5:2181,192.168.1.6:2181"
            group_id => "logstash"
            # Must be the same topic_id as configured on logstash-agent
            topic_id => "tomcat-log"
            codec => json
            reset_beginning => false
            consumer_threads => 5
            decorate_events => true
        }
    }

    filter {
        grok {

            # Copy the contents of local_patterns from section 11 below and upload the file to /usr/local/logstash/ on the server
            patterns_dir => ["/usr/local/logstash/local_patterns"]
            match => [
                #2016-04-26 18:44:13.885 [localhost-startStop-1] INFO c.k.modules.base.log.LogFileWriter - loginError LogFileWriter init......
                "message", "%{DATESTAMP_EXT:time} \[%{NOTSPACE:thread}\] %{LOGLEVEL:level} %{PROG:class} - %{LOGMSG:log_msg}",
                #2016-04-28 09:28:35.744 [catalina-exec-55] ERROR o.h.e.jdbc.spi.SqlExceptionHelper - java.util.concurrent.CancellationException: Cancelled
                "message", "%{DATESTAMP_EXT:time} \[%{NOTSPACE:thread}\] %{LOGLEVEL:level} %{PROG:class} - %{LOGMSG:log_msg}",
                #2016-04-26 18:44:12.695 INFO net.spy.memcached.auth.AuthThread: Authenticated to /202.173.10.95:11210
                "message", "%{DATESTAMP_EXT:time} %{LOGLEVEL:level} %{PROG:class}: %{LOGMSG:log_msg}",
                #Apr 27, 2016 1:35:43 PM org.apache.catalina.core.StandardWrapperValve invoke
                "message", "%{SYSLOGTIMESTAMP_EXT:time} %{LOGMSG:msg}",
                #18:44:09,389 |-INFO in ch.qos.logback.classic.joran.action.ConfigurationAction - debug attribute not set
                "message", "%{TIME:time} \|-%{LOGLEVEL:level} in %{PROG_EXT:class} - %{LOGMSG:log_msg}"
            ]
        }
        # if [level] =~ "ERROR" {
        grok {
            patterns_dir => ["/usr/local/logstash/local_patterns"]
            match => ["log_msg", "%{EXCEPTIONTYPE:exception_type}"]
        }
        mutate {
            remove_field => [ "log_msg"]
        }
        # }
    }

    output {
        elasticsearch {
            # Change to the actual elasticsearch IP and port
            hosts => ["192.168.1.8:9200"]
            codec => "json"
        }
    }
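
    Once both the agent and the index node are running, a quick way to confirm that events are
    arriving in Elasticsearch is to query the default logstash-* indices created by the
    elasticsearch output (a sketch; the host is the one configured above):

    curl 'http://192.168.1.8:9200/logstash-*/_search?size=1&pretty'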

11. local_patterns file (used on both the application server nodes and the central node)

    DATESTAMP_EXT %{YEAR}[./-]%{MONTHNUM}[./-]%{MONTHDAY} %{TIME}
    LOGMSG [\s\S]*
    EXCEPTIONTYPE [a-zA-Z.]*Exception
    AMPM [AP]M
    SYSLOGTIMESTAMP_EXT %{MONTH} %{MONTHDAY}, %{YEAR} %{TIME} %{AMPM}
    PROG_EXT (?:[\w._/%-\[\]]+)
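
    To verify that these patterns actually match your log lines before rolling them out, a
    throwaway logstash config with stdin/stdout can be used. A minimal sketch (the file name
    test-grok.conf is hypothetical; it reuses the patterns_dir and one match expression from
    section 10):

    # test-grok.conf
    input { stdin {} }
    filter {
        grok {
            patterns_dir => ["/usr/local/logstash/local_patterns"]
            match => ["message", "%{DATESTAMP_EXT:time} \[%{NOTSPACE:thread}\] %{LOGLEVEL:level} %{PROG:class} - %{LOGMSG:log_msg}"]
        }
    }
    output { stdout { codec => rubydebug } }

    Run it with ./bin/logstash agent -f test-grok.conf and paste one of the sample lines from the
    comments in section 10; the parsed fields should appear in the rubydebug output.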
