Parsing MySQL logs with Logstash

This config (Logstash 2.3.2, fed over a beats input, e.g. by Filebeat) parses three kinds of logs: the MySQL slow log, the MySQL error log, and dmesg. Sample input for each comes first, then the config itself.

#mysql-slow.log

# Time: 160623 11:59:21
# User@Host: root[root] @ localhost []  Id:    16
# Schema:   Last_errno: 0  Killed: 0
# Query_time: 7.685155  Lock_time: 0.000082  Rows_sent: 0  Rows_examined: 1000000  Rows_affected: 1000000
# Bytes_sent: 14
SET timestamp=1466654361;
delete from  sbtest.sbtest;
# Time: 160623 12:04:27
# User@Host: root[root] @ localhost []  Id:    22
# Schema:   Last_errno: 0  Killed: 0
# Query_time: 2.471750  Lock_time: 0.000136  Rows_sent: 1  Rows_examined: 9590000  Rows_affected: 0
# Bytes_sent: 69
SET timestamp=1466654667;
select count(*) from sbtest.sbtest;
# Time: 160623 12:08:03
# User@Host: root[root] @ localhost []  Id:    23
# Schema:   Last_errno: 0  Killed: 0
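
Each entry spans several lines, so the config below first merges the lines of one entry into a single event and then groks the fields out. As a rough sketch (trimmed; field order and float formatting approximate), the first entry above lands on the rubydebug stdout like this:

{
       "@timestamp" => "2016-06-23T03:59:21.000Z",
             "type" => "mysql-slow",
             "user" => "root",
         "userhost" => "localhost",
               "id" => 16,
       "query_time" => 7.685155,
        "lock_time" => 0.000082,
        "rows_sent" => 0,
    "rows_examined" => 1000000,
    "rows_affected" => 1000000,
       "bytes_sent" => 14,
           "action" => "delete",
            "query" => "delete from  sbtest.sbtest;"
}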

#mysql-error.log

InnoDB: 127 rollback segment(s) active.
150830 20:27:48  InnoDB: Waiting for the background threads to start
150831  9:27:49 Percona XtraDB (http://www.percona.com) 1.1.8-rel24.0 started; log sequence number 1836571746540
150831  9:27:49 [ERROR] Missing system table mysql.proxies_priv; please run mysql_upgrade to create it
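
In the grok for this log, both the timestamp and the [LEVEL] tag are optional, so pure InnoDB lines like the first one land entirely in errmsg. The [ERROR] line above parses roughly into:

    "time"     => "150831  9:27:49",
    "loglevel" => "ERROR",
    "errmsg"   => "Missing system table mysql.proxies_priv; please run mysql_upgrade to create it"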

#dmesg

PCI: pci_cache_line_size set to 64 bytes
NetLabel: Initializing
NetLabel:  domain hash size = 128
NetLabel:  protocols = UNLABELED CIPSOv4
NetLabel:  unlabeled traffic allowed by default

#logstash version: 2.3.2

input {
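    # Filebeat (or another Beats shipper) sends all three logs here. Each
    # prospector should set document_type to mysql-slow, mysql-error or dmesg
    # so the [type] conditionals in the filter block match.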
    beats {
        port => 5140
        host => "127.0.0.1"
    }
}

filter {
    if [type] == "mysql-slow" {
        multiline {
            pattern => "^# User@Host:"
            negate => true
            what => "previous"
        }
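        # Beats already populates 'host' with the shipper's hostname; give it
        # a clearer name before adding fields parsed from the entry itself.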
        mutate {
            rename => { "host" => "hostname" }
        }
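        # One (?m) multiline pattern pulls out the user/host header, the
        # statistics lines, the SET timestamp and the query; the query's first
        # word is captured separately as 'action' (select/delete/...).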
        grok {
            match => { "message" => "(?m)^#\s+User@Host:\s+%{USER:user}\[[^\]]+\]\s+@\s+(?:(?<userhost>\S*) )?\[(?:%{IP:clientip})?\]\s+Id:\s+%{NUMBER:id:int}\n#\s+Schema:\s+(?:%{WORD:schema}?)\s+Last_errno:\s+%{NUMBER:last_errno}\s+Killed:\s+%{NUMBER:killed}\n#\s+Query_time:\s+%{NUMBER:query_time:float}\s+Lock_time:\s+%{NUMBER:lock_time:float}\s+Rows_sent:\s+%{NUMBER:rows_sent:int}\s+Rows_examined:\s+%{NUMBER:rows_examined:int}\s+Rows_affected:\s+%{NUMBER:rows_affected:int}\n# Bytes_sent:\s+%{NUMBER:bytes_sent:int}\nSET\s+timestamp=%{NUMBER:timestamp};\n(?<query>(?<action>\w+)\s+.*)\n#\s+Time:.*$" }
            remove_field => [ "message", "beat", "input_type", "offset", "time", "source" ]
        }
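        # Drop anything the pattern could not parse rather than indexing it
        # half-parsed.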
        if "_grokparsefailure" in [tags] {
            drop {}
        }
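        # The SET timestamp value is epoch seconds; make it the event time.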
        date {
            match => [ "timestamp", "UNIX" ]
            remove_field => [ "timestamp" ]
        }
    }
    if [type] == "dmesg" {
        grok {
            match => { "message" => "(?<dmesg>.*)" }
            remove_field => [ "message", "beat", "input_type", "offset", "source" ]
        }
    }
    if [type] == "mysql-error" {
        grok {
            match => { "message" => "(?:(?<time>\d{6}\s+\d+:\d+:\d+)\s+(?:\[(?<loglevel>\w+)\]\s+|)|)(?<errmsg>.*)" }
            remove_field => [ "message", "beat", "input_type", "offset", "source" ]
        }
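        # mysqld pads single-digit hours with an extra space ('150831  9:27:49'),
        # hence the second date pattern.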
        date {
            match => [ "time", "yyMMdd HH:mm:ss", "yyMMdd  H:mm:ss" ]
        }
    }
}

output {
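    # rubydebug on stdout while testing; swap in the outputs below once the
    # patterns look right.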
    stdout {
        codec => rubydebug
    }
#    elasticsearch {
#        hosts => ["127.0.0.1:9200"]
#    }
#    if [type] == "mysql-error" and [loglevel] == "ERROR" {
#        email {
#            from => "logstash_alert@company.local"
#            subject => "mysql error"
#            to => "yangg@163.com"
#            via => "smtp"
#            body => "%{errmsg}"
#        }
#    }
}
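
To try it out: a syntax check, then a foreground run with a single filter worker (the multiline filter is not safe with more than one). The config path here is an assumption; adjust it to your layout.

bin/logstash --configtest -f /etc/logstash/conf.d/mysql.conf
bin/logstash -f /etc/logstash/conf.d/mysql.conf -w 1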