elasticsearch - 7.5.1
logstash - 7.5.1
kibana - 7.5.1
logback.properties
# Application name
appName=data-center
# Log level
logLevel=debug
# Logstash server IP and port
logstash=localhost:5044
logstash.host=127.0.0.1
logstash.port=9600
logback.xml
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="1 seconds">
    <property scope="context" resource="config/logback.properties"/>
    <contextName>${appName}</contextName>

    <!-- Console output -->
    <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n</pattern>
        </encoder>
    </appender>

    <!-- File output -->
    <appender name="file" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>d:/logs/${appName}-all.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <FileNamePattern>d:/logs/%d{yyyy-MM,aux}/${appName}-all.%d.%i.gz</FileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>1024MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n</pattern>
        </encoder>
    </appender>

    <!-- Error file output -->
    <appender name="errorFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${user.home}/logs/${appName}-error.log</file>
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>ACCEPT</onMatch>
            <onMismatch>DENY</onMismatch>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <FileNamePattern>d:/logs/error/${appName}-error.%d{yyyy-MM-dd}.%i.log</FileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>1024MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n</pattern>
        </encoder>
    </appender>

    <!-- SQL file output -->
    <appender name="sql" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${user.home}/logs/${appName}-sql.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <FileNamePattern>d:/logs/%d{yyyy-MM,aux}-sql/${appName}-sql.%d.%i.gz</FileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <maxFileSize>1024MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss}[%t]%-5p %c{36}.%M\(%L\) %m%n</pattern>
        </encoder>
    </appender>

    <!-- Send logs to logstash over TCP -->
    <appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
        <destination>${logstash}</destination>
        <!-- An encoder is required; several implementations are available -->
        <encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
            <customFields>{"appname":"${appName}"}</customFields>
            <!-- Trim noisy stack-trace frames -->
            <throwableConverter class="net.logstash.logback.stacktrace.ShortenedThrowableConverter">
                <!--<exclude>sun\.reflect\..*\.invoke.*</exclude>-->
                <exclude>o\.s\.w\.s\.h\.*</exclude>
                <exclude>o\.s\.b\.f\.s\.*</exclude>
                <rootCauseFirst>true</rootCauseFirst>
                <inlineHash>true</inlineHash>
            </throwableConverter>
        </encoder>
        <connectionStrategy>
            <roundRobin>
                <connectionTTL>5 minutes</connectionTTL>
            </roundRobin>
        </connectionStrategy>
    </appender>

    <!-- Asynchronous appender configuration -->
    <appender name="ASYNCConsole" class="ch.qos.logback.classic.AsyncAppender">
        <includeCallerData>true</includeCallerData>
        <appender-ref ref="console"/>
    </appender>
    <appender name="ASYNCFile" class="ch.qos.logback.classic.AsyncAppender">
        <includeCallerData>true</includeCallerData>
        <appender-ref ref="file"/>
    </appender>
    <appender name="ASYNCErrorFile" class="ch.qos.logback.classic.AsyncAppender">
        <includeCallerData>true</includeCallerData>
        <appender-ref ref="errorFile"/>
    </appender>
    <appender name="ASYNCSql" class="ch.qos.logback.classic.AsyncAppender">
        <includeCallerData>true</includeCallerData>
        <appender-ref ref="sql"/>
    </appender>
    <appender name="ASYNCLogstash" class="ch.qos.logback.classic.AsyncAppender">
        <includeCallerData>true</includeCallerData>
        <appender-ref ref="logstash"/>
    </appender>

    <logger name="org.apache" level="${logLevel}"/>
    <logger name="org.springframework" level="${logLevel}"/>
    <logger name="org.springframework.web" level="${logLevel}"/>
    <logger name="dao" level="${logLevel}" additivity="false">
        <appender-ref ref="ASYNCSql"/>
        <appender-ref ref="ASYNCConsole"/>
    </logger>

    <root level="${logLevel}">
        <appender-ref ref="ASYNCConsole"/>
        <appender-ref ref="ASYNCFile"/>
        <appender-ref ref="ASYNCErrorFile"/>
        <appender-ref ref="ASYNCLogstash"/>
    </root>
</configuration>
Dependencies
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>5.3</version>
</dependency>
<dependency>
    <groupId>net.logstash.log4j</groupId>
    <artifactId>jsonevent-layout</artifactId>
    <version>1.6</version>
</dependency>
<dependency>
    <groupId>ch.qos.logback</groupId>
    <artifactId>logback-classic</artifactId>
    <version>1.1.8</version>
    <exclusions>
        <exclusion>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
        </exclusion>
    </exclusions>
</dependency>
<dependency>
    <groupId>ch.qos.logback</groupId>
    <artifactId>logback-core</artifactId>
    <version>1.1.8</version>
</dependency>
Add the logstash configuration file
input {
  tcp {
    host => "127.0.0.1"
    port => 5044
  }
}

output {
  stdout {
    codec => rubydebug
  }
  elasticsearch {
    action => "index"
    hosts  => ["127.0.0.1:9200"]
    index  => "data-center"
  }
}
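Once Logstash is running with this pipeline, log events should start appearing in the data-center index. A quick way to check from Java (a sketch using only the JDK 11+ HTTP client; the host, port and index name are taken from the elasticsearch output above):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class IndexCheck {
    public static void main(String[] args) throws Exception {
        // Query the index written by the logstash output and print the raw JSON response
        HttpClient client = HttpClient.newHttpClient();
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://127.0.0.1:9200/data-center/_search?size=5"))
                .GET()
                .build();
        HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body());
    }
}

If the search returns hits, the whole chain (logback → logstash → elasticsearch) is working, and the same data can be explored in Kibana against the data-center index.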