<?xml version="1.0" encoding="UTF-8"?>  
  
<!-- Levels from highest to lowest: OFF, FATAL, ERROR, WARN, INFO, DEBUG, TRACE, ALL -->
<!-- Output rule: an event is written when its level is at or above the current root (or logger) level -->
<!-- The filters configured below keep unwanted lower-level entries out of the higher-level files, so each file records only the levels it is meant to hold -->
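<!-- Illustrative sketch only, not wired into any appender below: the note above describes keeping a
     single level per file, which logback's LevelFilter can do by accepting exact matches and denying
     everything else (the appenders in this file use ThresholdFilter instead, which keeps the given
     level and above):

     <filter class="ch.qos.logback.classic.filter.LevelFilter">
         <level>INFO</level>
         <onMatch>ACCEPT</onMatch>
         <onMismatch>DENY</onMismatch>
     </filter>
-->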
  

<!-- Attribute notes:
     scan: when true, the configuration file is reloaded if it changes; default is true.
     scanPeriod: how often to check the configuration file for changes; if no time unit is given, milliseconds are assumed; only effective when scan is true; default is 1 minute.
     debug: when true, logback prints its internal status messages so its runtime state can be inspected; default is false. -->
<configuration scan="true" scanPeriod="60 seconds" debug="false">
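    <!-- log_level and trace_log_level below are Jinja2 placeholders (written with double braces in this
         .j2 template); they are substituted with concrete level names such as INFO or DEBUG when the
         template is rendered, presumably by the deployment tooling that installs this file. -->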

    <!-- Appender for DEBUG, INFO, WARN and ERROR level logs -->
    <appender name="DEBUG_LOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/debug-log.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/debug-log/debug-log.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>[%X{X-B3-TraceId:-},%X{X-B3-SpanId:-},%X{X-Span-Export:-}] %d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
        </encoder>
    </appender>
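    <!-- With the DEBUG_LOG pattern above, a rendered line looks roughly like the following (all values
         illustrative); the bracketed prefix holds the X-B3 trace id, span id and export flag taken from
         the MDC, typically populated by a distributed tracing library:
         [4bf92f3577b34da6,00f067aa0ba902b7,true] 2024-01-01 12:00:00.000 [main] INFO  com.example.SomeService:42 - sample message -->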

    <appender name="TRACE_LOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/trace.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/trace/trace.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS}|%X{X-B3-TraceId:-}|%X{X-B3-SpanId:-}|%X{X-Span-Export:-}|%thread|%msg%n</pattern>
        </encoder>
    </appender>

    <!-- Appender for WARN and ERROR level logs -->
    <appender name="WARN_LOG" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/warn-log.log</file>
        <!-- ThresholdFilter: drops TRACE, DEBUG and INFO events so only WARN and above are recorded -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>WARN</level>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/warn-log/warn-log.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
        </encoder>
    </appender>


    <!-- Appender for HTTP request timeout logs -->
    <appender name="REQUEST_TIMEOUT" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/request-timeout.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/request-timeout/request-timeout.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Appender for database timeout logs -->
    <appender name="DATABASE_TIMEOUT" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/database-timeout.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/database-timeout/database-timeout.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Appender for database request statistics logs -->
    <appender name="DATABASE_STAT" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/database-stat.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/database-stat/database-stat.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Appender for service request statistics logs -->
    <appender name="REQUEST_STAT" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>/Data/logs/resources/request-stat.log</file>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <!-- daily rollover -->
            <fileNamePattern>/Data/logs/resources/archived/request-stat/request-stat.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
                <!-- or whenever the file size reaches 100MB -->
                <maxFileSize>100MB</maxFileSize>
            </timeBasedFileNamingAndTriggeringPolicy>
            <!-- keep 30 days' worth of history -->
            <maxHistory>30</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger:%line - %msg%n</pattern>
        </encoder>
    </appender>

    <!-- Database access loggers -->
    <logger name="java.sql.PreparedStatement" level="{{log_level}}" />
    <logger name="java.sql.Connection" level="{{log_level}}" />
    <logger name="java.sql.Statement" level="{{log_level}}" />
    <logger name="com.ibatis" level="{{log_level}}" />
    <logger name="com.ibatis.common.jdbc.SimpleDataSource" level="{{log_level}}" />
    <logger name="com.ibatis.common.jdbc.ScriptRunner" level="{{log_level}}"/>
    <logger name="com.ibatis.sqlmap.engine.impl.SqlMapClientDelegate" level="{{log_level}}" />
    <logger name="org.springframework" level="{{log_level}}"/>

    <!-- Root logger level -->
    <root level="{{log_level}}">
        <!-- File output -->
        <appender-ref ref="DEBUG_LOG" />
        <appender-ref ref="WARN_LOG" />
    </root>
	
    <!-- HTTP request timeout logger -->
    <logger name="com.yoho.core.common.monitor.ThreadProfile" additivity="true">
        <level value="INFO"/>
        <appender-ref ref="REQUEST_TIMEOUT"/>
    </logger>

    <!-- Database timeout logger -->
    <logger name="com.yoho.core.dal.datasource.intercepor.DaoInterceptor" additivity="true">
        <level value="INFO"/>
        <appender-ref ref="DATABASE_TIMEOUT"/>
    </logger>

    <!-- Database operation statistics logger -->
    <logger name="com.yoho.core.dal.datasource.DatabaseOperationProfile" additivity="true">
        <level value="{{log_level}}"/>
        <appender-ref ref="DATABASE_STAT"/>
    </logger>

    <!-- Service request statistics logger -->
    <logger name="com.yoho.core.common.monitor" additivity="true">
        <level value="{{log_level}}"/>
        <appender-ref ref="REQUEST_STAT"/>
    </logger>

    <appender name="TRACE_LOG_ASYNC" class="ch.qos.logback.classic.AsyncAppender">
        <discardingThreshold>0</discardingThreshold>
        <queueSize>3000</queueSize>
        <neverBlock>true</neverBlock>
        <includeCallerData>true</includeCallerData>
        <maxFlushTime>3000</maxFlushTime>
        <appender-ref ref="TRACE_LOG"/>
    </appender>

    <!-- Trace logger -->
    <logger name="TRACE" additivity="false">
        <level value="{{trace_log_level}}"/>
        <appender-ref ref="TRACE_LOG_ASYNC"/>
    </logger>
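    <!-- Note on the trace pipeline above: the AsyncAppender wraps TRACE_LOG in a 3000-entry queue.
         A discardingThreshold of 0 means TRACE/DEBUG/INFO events are never discarded early as the queue
         fills; neverBlock=true drops events rather than blocking the caller once the queue is completely
         full; includeCallerData=true captures caller location at extra cost; maxFlushTime is the maximum
         time in milliseconds allowed for draining the queue on shutdown. The appender is declared ahead
         of the TRACE logger that references it, since logback resolves appender references in file order. -->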


</configuration>