1、maven工程引入druid包
1.在pom.xml中增加:
<!-- Alibaba Druid connection pool (provides DruidDataSource, StatFilter, WallFilter) -->
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>druid</artifactId>
    <version>1.0.18</version>
</dependency>
2.在spring-mybatis.xml中增加druid數據庫配置及日誌配置:
<!-- Druid StatFilter: collects SQL execution statistics; statements running
     longer than slowSqlMillis (30 s) are logged as slow SQL. -->
<bean id="stat-filter" class="com.alibaba.druid.filter.stat.StatFilter">
    <property name="slowSqlMillis" value="30000" />
    <property name="logSlowSql" value="true" />
    <property name="mergeSql" value="true" />
    <property name="dbType" value="mysql" />
</bean>
<!-- Log4jFilter: writes the executable SQL statements through log4j
     (picked up by the druid.sql.Statement logger in log4j.xml). -->
<bean id="log-filter" class="com.alibaba.druid.filter.logging.Log4jFilter">
    <property name="statementExecutableSqlLogEnable" value="true" />
</bean>
<!-- WallFilter: Druid's SQL firewall, configured by wall-config below. -->
<bean id="wall-filter" class="com.alibaba.druid.wall.WallFilter">
    <property name="config" ref="wall-config" />
</bean>
<bean id="wall-config" class="com.alibaba.druid.wall.WallConfig">
    <!-- allow several statements in one SQL string -->
    <property name="multiStatementAllow" value="true" />
</bean>
<!-- Druid pooled data source; all tunables come from the druid.* /jdbc.* property files. -->
<bean name="dataSource" class="com.alibaba.druid.pool.DruidDataSource" init-method="init" destroy-method="close">
    <property name="url" value="${jdbc.url}" />
    <property name="username" value="${jdbc.username}" />
    <property name="password" value="${jdbc.password}" />
    <!-- Initial, minimum-idle and maximum pool sizes -->
    <property name="initialSize" value="${druid.initialSize}" />
    <property name="minIdle" value="${druid.minIdle}" />
    <property name="maxActive" value="${druid.maxActive}" />
    <!-- Maximum time to wait when borrowing a connection -->
    <property name="maxWait" value="${druid.maxWait}" />
    <!-- Interval between idle-connection eviction runs, in milliseconds -->
    <property name="timeBetweenEvictionRunsMillis" value="${druid.timeBetweenEvictionRunsMillis}" />
    <!-- Minimum time a connection may stay idle in the pool, in milliseconds -->
    <property name="minEvictableIdleTimeMillis" value="${druid.minEvictableIdleTimeMillis}" />
    <property name="validationQuery" value="${druid.validationQuery}" />
    <property name="testWhileIdle" value="${druid.testWhileIdle}" />
    <property name="testOnBorrow" value="${druid.testOnBorrow}" />
    <property name="testOnReturn" value="${druid.testOnReturn}" />
    <!-- PreparedStatement cache (PSCache) and its per-connection size.
         Set poolPreparedStatements to true for Oracle; it can be false for MySQL. -->
    <property name="poolPreparedStatements" value="${druid.poolPreparedStatements}" />
    <property name="maxPoolPreparedStatementPerConnectionSize"
        value="${druid.maxPoolPreparedStatementPerConnectionSize}" />
    <property name="removeAbandoned" value="${druid.removeAbandoned}" /> <!-- enable the removeAbandoned feature -->
    <property name="removeAbandonedTimeout" value="${druid.removeAbandonedTimeout}" /> <!-- 1800 seconds, i.e. 30 minutes -->
    <property name="logAbandoned" value="${druid.logAbandoned}" /> <!-- log an error when an abandoned connection is closed -->
    <!-- Filters for monitoring / statistics interception -->
    <property name="filters" value="${druid.filters}" />
    <property name="proxyFilters">
        <list>
            <ref bean="stat-filter" />
            <ref bean="wall-filter" />
            <ref bean="log-filter" />
        </list>
    </property>
    <!-- <property name="connectionProperties" value="${druid.connectionProperties}" /> -->
    <property name="useGlobalDataSourceStat" value="${druid.useGlobalDataSourceStat}" />
</bean>
三、在web.xml中將log4j配置註釋掉,並增加urlFilter配置
<!-- The explicit log4j bootstrap (context-param + Log4jConfigListener) is
     intentionally left commented out: log4j now initializes itself from
     log4j.xml found on the classpath. -->
<!--<context-param> <param-name>log4jConfigLocation</param-name> <param-value>classpath:log4j.xml</param-value> </context-param> <listener> <listener-class>org.springframework.web.util.Log4jConfigListener</listener-class> </listener>-->
<!-- UrlFilter logs every application/json request: method, URI, parameters
     and total processing time. Mapped to every URL. -->
<filter>
    <filter-name>urlFilter</filter-name>
    <filter-class>com.zdnst.core.filter.UrlFilter</filter-class>
</filter>
<filter-mapping>
    <filter-name>urlFilter</filter-name>
    <url-pattern>/*</url-pattern>
</filter-mapping>
四、在log4j.xml中增加配置
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE log4j:configuration PUBLIC "-//Apache//DTD Log4j 1.2//EN" "http://logging.apache.org/log4j/docs/api/org/apache/log4j/xml/log4j.dtd">
<log4j:configuration>
    <!-- Console appender. The StringMatchFilters below drop JDBC noise:
         any line containing "?", "Parameters" or "Types" is rejected. -->
    <appender class="org.apache.log4j.ConsoleAppender" name="RootConsoleAppender">
        <param name="Threshold" value="debug" />
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d{ABSOLUTE} %5p [%t] %40.40c:%4L - %m%n" />
        </layout>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="?" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="Parameters" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="Types" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
    </appender>
    <!-- Rolling file appender for Druid SQL logging (10 MB x 10 backups).
         SELECT/select statements and parameter/type lines are filtered out,
         so only write statements end up in jsf-sql.log. -->
    <appender class="org.apache.log4j.RollingFileAppender" name="SQLFileAppender">
        <param name="Threshold" value="debug" />
        <param name="File" value="${catalina.base}/logs/jsf-sql.log" />
        <param name="MaxFileSize" value="10MB" />
        <param name="MaxBackupIndex" value="10" />
        <layout class="org.apache.log4j.PatternLayout">
            <param name="ConversionPattern" value="%d{ABSOLUTE} %5p [%t] %40.40c:%4L - %m%n" />
        </layout>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="?" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="SELECT" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="select" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
        <!--<filter class="org.apache.log4j.varia.StringMatchFilter">-->
        <!--<param name="StringToMatch" value="clearParameters" />-->
        <!--<param name="AcceptOnMatch" value="false" />-->
        <!--</filter>-->
        <!---->
        <!--<filter class="org.apache.log4j.varia.StringMatchFilter">-->
        <!--<param name="StringToMatch" value="delete" />-->
        <!--<param name="AcceptOnMatch" value="false" />-->
        <!--</filter>-->
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="Parameters" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
        <filter class="org.apache.log4j.varia.StringMatchFilter">
            <param name="StringToMatch" value="Types" />
            <param name="AcceptOnMatch" value="false" />
        </filter>
    </appender>
    <!-- Druid executable-SQL logger: goes to the SQL file and the console;
         additivity=false keeps it out of the root appender twice. -->
    <logger name="druid.sql.Statement" additivity="false">
        <level value="debug"/>
        <appender-ref ref="SQLFileAppender"/>
        <appender-ref ref="RootConsoleAppender"/>
    </logger>
    <logger name="com.zdnst" >
        <level value="debug" />
    </logger>
    <logger name="org.apache.commons.beanutils.converters">
        <level value="info"/>
    </logger>
    <logger name="org.springframework.data">
        <level value="debug" />
    </logger>
    <logger name="org.springframework.web">
        <level value="info" />
    </logger>
    <!-- suppress Druid ResultSet logging -->
    <logger name="druid.sql.ResultSet">
        <level value="ERROR" />
    </logger>
    <!--<logger name="ns.beanutils.converters">-->
    <!--<level value="ERROR" />-->
    <!--</logger>-->
    <logger name="org.dozer">
        <level value="ERROR" />
    </logger>
    <logger name="org.apache.cxf">
        <level value="info" />
    </logger>
    <logger name="org.apache.kafka.clients.consumer.internals">
        <level value="info"/>
    </logger>
    <logger name="org.springframework.kafka.listener">
        <level value="info"/>
    </logger>
    <root>
        <level value="debug" />
        <appender-ref ref="RootConsoleAppender"/>
    </root>
</log4j:configuration>
五、增加UrlFilter類
package com.zdnst.core.filter; import com.github.sd4324530.fastweixin.util.JSONUtil; import jodd.io.StreamUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.json.Json; import javax.json.JsonObject; import javax.json.JsonReader; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequestWrapper; import javax.servlet.http.HttpServletResponse; import java.io.*; import java.util.HashMap; import java.util.Map; /** * Created by yi.lu on 2017/3/14. */ public class UrlFilter implements Filter { private static Logger logger = LoggerFactory.getLogger(UrlFilter.class); public void init(FilterConfig filterConfig) throws ServletException { } public void destroy() { } public void doFilter(ServletRequest servletRequest,ServletResponse servletResponse,FilterChain filterChain) throws IOException, ServletException { try { /** * 1,doFilter的第一個參數爲ServletRequest對象。此對象給過濾器提供了對進入的信息(包括 * 表單數據、cookie和HTTP請求頭)的徹底訪問。第二個參數爲ServletResponse,一般在簡單的過 * 濾器中忽略此參數。最後一個參數爲FilterChain,此參數用來調用servlet或JSP頁。 */ HttpServletRequest localRequest = (HttpServletRequest) servletRequest; HttpServletResponse localResponse = (HttpServletResponse) servletResponse; /** * 若是處理HTTP請求,而且須要訪問諸如getHeader或getCookies等在ServletRequest中 * 沒法獲得的方法,就要把此request對象構形成HttpServletRequest */ if(localRequest.getContentType()!=null&&localRequest.getContentType().indexOf("application/json") != -1){ String currentURL = localRequest.getRequestURI(); // 取得根目錄所對應的絕對路徑: HttpServletRequestWrapper2 newReq = new HttpServletRequestWrapper2(localRequest); Map<String, Object> value = convertParams(newReq); StringBuffer logSb=new StringBuffer("-------------------訪問請求"+ localRequest.getMethod()+":"); logSb.append(currentURL+"&"); if (value != null && value.size() > 0) { int i=0; for (String key : value.keySet()) { if(i++>0){ logSb.append("&"); } logSb.append(key + "=" + value.get(key)); } } long startTime = System.currentTimeMillis(); // 加入filter鏈繼續向下執行 JsonReader 
jsonReader = Json.createReader(new StringReader(JSONUtil.toJson(value))); JsonObject json = jsonReader.readObject(); ByteArrayOutputStream out = new ByteArrayOutputStream(); Json.createWriter(out).writeObject(json); byte[] body = out.toByteArray(); newReq.setBody(body); filterChain.doFilter(newReq,localResponse); long endTime = System.currentTimeMillis(); logger.info(logSb.toString()); logger.info("-------------------訪問總共用時:"+(endTime-startTime)+"毫秒"); }else{ filterChain.doFilter(servletRequest, servletResponse); } }catch (Exception e){ logger.error("過濾器運行錯誤"); filterChain.doFilter(servletRequest, servletResponse); } } /** * 攔截request中的全部參數(GET/POST) * @param request * @return */ private Map<String,Object> convertParams(HttpServletRequestWrapper2 request){ Map<String, String[]> params = request.getParameterMap(); Map<String, Object> param = new HashMap<String, Object>(); if(params.size() == 0){ StringBuffer json = new StringBuffer(); String line = new String(request.getBody()); Map<String,Object> result = JSONUtil.toMap(line); // if(null != result){ // for (Map.Entry<String, Object> entry : result.entrySet()) { // param.put(entry.getKey(),String.valueOf(entry.getValue())); // } // } param = result; }else{ for (Map.Entry<String, String[]> entry : params.entrySet()) { param.put(entry.getKey(), entry.getValue()[0]); } } return param; } private class HttpServletRequestWrapper2 extends HttpServletRequestWrapper { private byte[] body; public byte[] getBody() { return body; } public void setBody(byte[] body) { this.body = body; } /** * Constructs a request object wrapping the given request. 
* * @param request * @throws IllegalArgumentException if the request is null */ public HttpServletRequestWrapper2(HttpServletRequest request) throws IOException { super(request); body = StreamUtil.readBytes(request.getReader(), "UTF-8"); } @Override public BufferedReader getReader() throws IOException { return new BufferedReader(new InputStreamReader(getInputStream())); } @Override public ServletInputStream getInputStream() throws IOException { final ByteArrayInputStream bais = new ByteArrayInputStream(body); return new ServletInputStream() { @Override public int read() throws IOException { return bais.read(); } }; } } }