1、pom.xml引入logback-kafka-appender依赖
<!-- logback implementation (provides ch.qos.logback.core/classic appender classes used below) -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.3</version>
</dependency>
<!-- KafkaAppender: publishes log events to a Kafka topic (com.github.danielwegener.logback.kafka.KafkaAppender) -->
<dependency>
<groupId>com.github.danielwegener</groupId>
<artifactId>logback-kafka-appender</artifactId>
<version>0.2.0-RC2</version>
</dependency>
2、编辑logback-spring.xml
<!-- Dynamically configured log root path; falls back to "." when LOG_PATH is not set -->
<!-- logback's config file is loaded before Spring Boot's, so at startup logback cannot yet read values from application.yml -->
<property name="logs-path" value="${LOG_PATH:-.}"/>
<!-- Converter classes that colored log output depends on (registered as %clr and %wEx conversion words) -->
<conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
<conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
<!-- 彩色日志格式 -->
<!-- 日志输出格式:
%d、%date: 表示日期时间,
%5p、%5le、%5level: 日志级别从左显示5个字符宽度,
${PID:- }: pid
%15.20t、%15.20thread: 线程名字(如果宽度不足15,左侧补空白;如果宽度超过20,从左侧截断)
%c{50}、%lo{50}、%logger{50}: 表示logger名字最长50个字符,否则按照句点分割
%F、%file: 输出执行记录请求的java源文件名。尽量避免使用,除非执行速度不造成任何问题
%L、%line: 输出执行日志请求的行号。尽量避免使用,除非执行速度不造成任何问题
%M、%method: 输出执行日志请求的方法名。尽量避免使用,除非执行速度不造成任何问题
%m、%msg、%message: 日志消息
%n: 换行符
${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}: 异常输出的转换词,默认为 %wEx(即上面注册的 ExtendedWhitespaceThrowableProxyConverter,用于渲染异常堆栈)
-->
<!-- Colored console pattern kept commented out for reference; the plain pattern below is the active one -->
<!--<property name="CONSOLE_LOG_PATTERN"-->
<!-- value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />-->
<property name="CONSOLE_LOG_PATTERN"
value="%d %p ${PID:- } --- [%thread] %logger{10} : %m%n" />
<!-- Console log output -->
<!-- ConsoleAppender writes log events to the console -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${CONSOLE_LOG_PATTERN}</pattern>
</encoder>
</appender>
<!-- Kafka connection -->
<appender name="kafka" class="com.github.danielwegener.logback.kafka.KafkaAppender">
<!-- The encoder does two things: converts the log event into a byte array, and writes that byte array to the output stream -->
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{35} - %msg %n</pattern>
</encoder>
<topic>esbus-boss</topic>
<!-- NoKeyKeyingStrategy: messages are sent without a partition key -->
<keyingStrategy class="com.github.danielwegener.logback.kafka.keying.NoKeyKeyingStrategy" />
<deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy" />
<producerConfig>bootstrap.servers=localhost:9092</producerConfig>
<!-- acks=0: fire-and-forget, no broker acknowledgement — favors application throughput over delivery guarantees -->
<producerConfig>acks=0</producerConfig>
<!-- wait up to 1000ms and collect log messages before sending them as a batch -->
<producerConfig>linger.ms=1000</producerConfig>
<!-- even if the producer buffer runs full, do not block the application but start to drop messages -->
<producerConfig>max.block.ms=0</producerConfig>
<!-- this is the fallback appender if kafka is not available -->
<appender-ref ref="console" />
</appender>
<!-- Log file info output -->
<!-- RollingFileAppender logs to the active file and, once a condition is met, rolls it over into an archive file -->
<appender name="info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${logs-path}/info.log</file>
<rollingPolicy
class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- With multiple %d tokens, all but the primary one must be marked "aux";
     the directory token gets ",aux" so the daily %d drives the rollover (monthly folders, daily files) -->
<fileNamePattern>${logs-path}/%d{yyyy-MM,aux}/info.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<!-- Retention, expressed in rollover periods (days here) -->
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<!-- %file:%line are costly converters; kept as in the original config -->
<pattern>%date [%thread] %-5level [%logger{50}] %file:%line - %msg%n</pattern>
</encoder>
</appender>
<!-- Log file error output -->
<appender name="error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${logs-path}/error.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!-- With multiple %d tokens, all but the primary one must be marked "aux";
     the directory token gets ",aux" so the daily %d drives the rollover (monthly folders, daily files) -->
<fileNamePattern>${logs-path}/%d{yyyy-MM,aux}/error.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<!-- Retention, expressed in rollover periods (days here) -->
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%date [%thread] %-5level [%logger{50}] %file:%line - %msg%n</pattern>
</encoder>
<!-- Only ERROR (and above) events reach this file -->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
</appender>
<!--Level: OFF, FATAL, ERROR, WARN, INFO, DEBUG, ALL-->
<!-- root is itself a logger element — the root logger, ancestor of all loggers. Output starts at the child logger: if the child has its own appenders they are used directly, otherwise the event is passed up to root and root's appenders are used -->
<!-- NOTE(review): "console" is attached here directly AND as the kafka appender's fallback (see the appender-ref inside "kafka"); if kafka becomes unavailable, events may appear on the console twice -->
<root level="INFO">
<appender-ref ref="console" />
<appender-ref ref="kafka" />
<appender-ref ref="info" />
<appender-ref ref="error" />
</root>
3、编辑application.yml
# logging.path is deprecated; use logging.file.path instead
# Indentation restored: "file" must nest under "logging" and "path" under "file",
# otherwise the keys parse as three unrelated top-level entries and
# logging.file.path is never set.
logging:
  file:
    path: newlogs