Kafka Notes: Integrating Logback with Kafka

Add the dependency:

<dependency>
    <groupId>com.github.danielwegener</groupId>
    <artifactId>logback-kafka-appender</artifactId>
    <version>0.1.0</version><!-- version assumed here; the encoder configuration below follows the 0.1.x API -->
</dependency>

 

Configuration in logback.xml:

<!-- This is the kafkaAppender -->
<appender name="kafkaAppender" class="com.github.danielwegener.logback.kafka.KafkaAppender">
    <!-- This is the default encoder that encodes every log message to a UTF-8 string -->
    <encoder class="com.github.danielwegener.logback.kafka.encoding.LayoutKafkaMessageEncoder">
        <layout class="ch.qos.logback.classic.PatternLayout">
            <pattern>%msg</pattern>
        </layout>
    </encoder>
    <topic>bbs_opt_topic_test</topic><!-- target topic -->
    <keyingStrategy class="com.github.danielwegener.logback.kafka.keying.RoundRobinKeyingStrategy" />
    <deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy" />
    <!-- each <producerConfig> translates to a regular kafka-client config (format: key=value) -->
    <!-- producer configs are documented here: https://kafka.apache.org/documentation.html#newproducerconfigs -->
    <!-- bootstrap.servers is the only mandatory producerConfig -->
    <producerConfig>bootstrap.servers=127.0.0.1:9092</producerConfig><!-- Kafka broker address -->
    <producerConfig>acks=0</producerConfig><!-- fire-and-forget: do not wait for broker acknowledgment -->
    <producerConfig>linger.ms=5000</producerConfig>
    <producerConfig>batch.size=512000</producerConfig>
    <!-- block.on.buffer.full was removed from newer kafka-clients;
         max.block.ms=0 keeps the logging thread from blocking instead -->
    <producerConfig>max.block.ms=0</producerConfig>
    <!-- this is the fallback appender if kafka is not available -->
    <appender-ref ref="BIGDATA_FILE_USER_INFO"/>
</appender>
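
With the appender in place, any event routed to kafkaAppender is published to bbs_opt_topic_test, falling back to the file appender when Kafka is unreachable. Below is a minimal usage sketch through the ordinary SLF4J API; the class name and JSON payload are illustrative, and the logger name matches the <logger> rule configured further down:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class OptLogDemo {
    // the logger name matches the <logger> rule declared below, so events
    // reach both the Kafka appender and the backup file appender
    private static final Logger log =
            LoggerFactory.getLogger("com.fifedu.kyxl.statkc.web.OptController");

    public static void main(String[] args) {
        // WARN level is required: the logger level is warn, and the file
        // appender's LevelFilter only accepts WARN events.
        // %msg is the whole Kafka encoder pattern, so this exact string
        // becomes the Kafka record value (payload is hypothetical).
        log.warn("{\"userId\":\"u-1001\",\"action\":\"open_page\"}");
    }
}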


<!-- User-behavior backup log file for big-data ingestion -->
<appender name="BIGDATA_FILE_USER_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <!-- only WARN events are written; everything else is dropped -->
    <filter class="ch.qos.logback.classic.filter.LevelFilter">
        <level>WARN</level>
        <onMatch>ACCEPT</onMatch>
        <onMismatch>DENY</onMismatch>
    </filter>
    <!-- roll daily and whenever the active file exceeds 30MB. logback cannot combine
         TimeBasedRollingPolicy with a separate SizeBasedTriggeringPolicy, so
         SizeAndTimeBasedRollingPolicy (note the required %i index) is used instead -->
    <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
        <!-- output file name pattern; LOG_HOME must be defined as a logback <property> -->
        <fileNamePattern>${LOG_HOME}/userAction_%d{yyyy-MM-dd}.%i.log</fileNamePattern>
        <!-- days of log history to keep -->
        <maxHistory>300</maxHistory>
        <!-- maximum size of a single log file -->
        <maxFileSize>30MB</maxFileSize>
    </rollingPolicy>

    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
        <!-- pattern reference: %d date, %thread thread name, %-5level level padded to five
             characters, %msg the log message, %n newline, %X MDC values -->
        <pattern>%msg%n</pattern>
    </encoder>
</appender>

<!-- user-behavior collection; additivity="false" keeps these events out of ancestor appenders -->
<logger name="com.fifedu.kyxl.statkc.web.OptController" level="warn" additivity="false"><!-- only this class's logs are forwarded -->
    <appender-ref ref="BIGDATA_FILE_USER_INFO" />
    <appender-ref ref="kafkaAppender" />
</logger>
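
To confirm that log lines actually arrive in the topic, a throwaway consumer can be pointed at the same broker. This is a minimal sketch with the plain kafka-clients API; the group id is arbitrary and chosen only for this check:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class TopicCheck {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "127.0.0.1:9092"); // same broker as the appender
        props.put("group.id", "bbs-opt-check");           // arbitrary consumer group
        props.put("auto.offset.reset", "earliest");       // read from the beginning on first run
        // keys are null under RoundRobinKeyingStrategy; values are the UTF-8 %msg payload
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("bbs_opt_topic_test"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value()); // the %msg payload written by logback
                }
            }
        }
    }
}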

 
