**通俗來說,ELK由Elasticsearch、Logstash和Kibana三部分組件組成;
Elasticsearch是個開源分佈式搜索引擎,它的特色有:分佈式,零配置,自動發現,索引自動分片,索引副本機制,restful風格接口,多數據源,自動搜索負載等。
Logstash是一個徹底開源的工具,它能夠對你的日誌進行收集、分析,並將其存儲供之後使用。
kibana 是一個開源和免費的工具,它能夠爲 Logstash 和 ElasticSearch 提供的日誌分析友好的 Web 界面,能夠幫助您彙總、分析和搜索重要數據日誌。**
Logstash: logstash server端用來蒐集日誌;
Elasticsearch: 存儲各種日誌;
Kibana: web化接口用做查尋和可視化日誌;
Logstash Forwarder: logstash client端用來經過lumberjack 網絡協議發送日誌到logstash server;
在須要收集日誌的全部服務上部署logstash,做爲logstash
agent用於監控並過濾收集日誌,將過濾後的內容發送到Redis,而後logstash
indexer將日誌收集在一塊兒交給全文搜索服務ElasticSearch,能夠用ElasticSearch進行自定義搜索經過Kibana 來結合自定義搜索進行頁面展示。
[root@elk ~]# yum -y install java [root@elk ~]# java -version java version "1.7.0_141" OpenJDK Runtime Environment (rhel-2.6.10.1.el6_9-x86_64 u141-b02) OpenJDK 64-Bit Server VM (build 24.141-b02, mixed mode)
[root@elk ~]# rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
[root@elk ~]# vim /etc/yum.repos.d/elasticsearch.repo [elasticsearch-2.x] name=Elasticsearch repository for 2.x packages baseurl=http://packages.elastic.co/elasticsearch/2.x/centos gpgcheck=1 gpgkey=http://packages.elastic.co/GPG-KEY-elasticsearch enabled=1
[root@elk ~]# yum -y install elasticsearch
[root@elk ~]# yum -y install java [root@elk ~]# java -version java version "1.7.0_141" OpenJDK Runtime Environment (rhel-2.6.10.1.el6_9-x86_64 u141-b02) OpenJDK 64-Bit Server VM (build 24.141-b02, mixed mode)
[root@elk ~]# rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
[root@elk ~]# vim /etc/yum.repos.d/logstash.repo [logstash-2.3] name=Logstash repository for 2.3.x packages baseurl=https://packages.elastic.co/logstash/2.3/centos gpgcheck=1 gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch enabled=1
[root@elk ~]# yum -y install logstash
[root@elk ~]# rpm --import https://packages.elastic.co/GPG-KEY-elasticsearch
[root@elk ~]# vim /etc/yum.repos.d/kibana.repo [kibana-4.5] name=kibana repository for 4.5.x packages baseurl=http://packages.elastic.co/kibana/4.5/centos gpgcheck=1 gpgkey=https://packages.elastic.co/GPG-KEY-elasticsearch enabled=1
[root@elk ~]# yum -y install kibana
[root@elk ~]# grep '^[a-zA-Z]' /etc/elasticsearch/elasticsearch.yml node.name: elk path.data: /data/es-data path.logs: /var/log/elasticsearch bootstrap.memory_lock: true network.host: 10.0.0.201 http.port: 9200
[root@elk ~]# mkdir -p /data/es-data
[root@elk ~]# chown -R elasticsearch:elasticsearch /data/es-data
[root@elk ~]# /etc/init.d/elasticsearch start [root@elk ~]# netstat -lntup | grep 9200 tcp 0 0 ::ffff:10.0.0.201:9200 :::* LISTEN 46675/java
[root@elk ~]# curl 10.0.0.201:9200 { "name" : "elk", "cluster_name" : "elasticsearch", "cluster_uuid" : "EK9OKreaRguU91XGDny6DA", "version" : { "number" : "2.4.5", "build_hash" : "c849dd13904f53e63e88efc33b2ceeda0b6a1276", "build_timestamp" : "2017-04-24T16:18:17Z", "build_snapshot" : false, "lucene_version" : "5.5.4" }, "tagline" : "You Know, for Search" }
[root@elk ~]# /usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head
[root@elk ~]# /opt/logstash/bin/logstash -e 'input { stdin{} } output { stdout{ codec =>rubydebug }}' hello world Settings: Default pipeline workers: 1 Pipeline main started { "message" => "hello world", "@version" => "1", "@timestamp" => "2017-07-03T08:21:16.035Z", "host" => "elk" }
[root@elk ~]# /opt/logstash/bin/logstash -e 'input { stdin{} } output { file { path =>"/tmp/log-%{+YYYY.MM.dd}messages.log"}}' Settings: Default pipeline workers: 1 Pipeline main started hello world
[root@elk ~]# tailf /tmp/log-2017.07.03messages.log {"message":"hello world","@version":"1","@timestamp":"2017-07-03T08:28:44.672Z","host":"elk"}
[root@elk ~]# /opt/logstash/bin/logstash -e 'input { stdin{} } output { elasticsearch { hosts => ["10.0.0.201"] index => "mytest-%{+YYYY.MM.dd}" }}'
[root@elk ~]# cd /data/es-data/ [root@elk es-data]# ls elasticsearch [root@elk es-data]# tree . └── elasticsearch └── nodes └── 0 ├── indices │ └── mytest-2017.07.03
[root@elk ~]# egrep "^[a-zA-Z]" /opt/kibana/config/kibana.yml server.port: 5601 server.host: "0.0.0.0" elasticsearch.url: "http://10.0.0.201:9200"
[root@elk ~]# /etc/init.d/kibana start kibana started [root@elk ~]# netstat -lntup| grep 5601 tcp 0 0 0.0.0.0:5601 0.0.0.0:* LISTEN 47353/node
[root@elk ~]# cat /etc/logstash/conf.d/system-log.conf input{ file { path => ["/var/log/messages","/var/log/secure"] type => "system-log" start_position => "beginning" } } filter{ } output{ elasticsearch { hosts => ["10.0.0.201:9200"] index => "system-log-%{+YYYY.MM}" } }
[root@elk ~]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/system-log.conf -t Configuration OK
[root@elk ~]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/system-log.conf
◆ 使用編譯安裝Nginx
[root@elk logs]# cat /application/nginx/conf/nginx.conf worker_processes 1; events { worker_connections 1024; } http { include mime.types; default_type application/octet-stream; sendfile on; keepalive_timeout 65; log_format main '$remote_addr - $remote_user [$time_local] "$request" ' '$status $body_bytes_sent "$http_referer" ' '"$http_user_agent" "$http_x_forwarded_for"'; log_format access_log_json '{"@timestamp":"$time_iso8601",' '"host":"$server_addr",' '"clientip":"$remote_addr",' '"size":"$body_bytes_sent",' '"reponsetime":"$request_time",' '"url":"$uri",' '"domain":"$host",' '"http_host":"$host",' '"xff":"$http_x_forwarded_for",' '"referer":"$http_referer",' '"status":"$status"}'; access_log /application/nginx/logs/access_log_json.log access_log_json; server { listen 80; server_name localhost; location / { root html; index index.html index.htm; } } }
[root@elk logs]# cat /application/nginx/html/index.html test
[root@elk logs]# /application/nginx/sbin/nginx [root@elk logs]# curl 10.0.0.201 test
[root@elk logs]# tailf /application/nginx/logs/access_log_json.log {"@timestamp":"2017-07-04T12:25:25+08:00","host":"10.0.0.201","clientip":"10.0.0.201","size":"5","reponsetime":"0.000","url":"/index.html","domain":"10.0.0.201","http_host":"10.0.0.201","xff":"-","referer":"-","status":"200"}
[root@elk logs]# vim /etc/logstash/conf.d/nginx.conf input{ file { path => ["/application/nginx/logs/access_log_json.log"] type => "nginx-access-log" start_position => "beginning" codec => "json" } } filter{ } output{ elasticsearch { hosts => ["10.0.0.201:9200"] index => "nginx-access-log-%{+YYYY.MM}" } }
[root@elk conf.d]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/nginx.conf -t Configuration OK [root@elk conf.d]# /opt/logstash/bin/logstash -f /etc/logstash/conf.d/nginx.conf Settings: Default pipeline workers: 1 Pipeline main started ......
◆ 已驗證