SpringBoot集成kafka
直接从https://github.com/sealire/arch下载示例,将其导入到我的Idea开发工具中。

由于我已在本机安装好了zookeeper和kafka,具体可以参考《kafka在windows上的安装、运行》,所以对下载下来的kafka的一些配置进行简单修改。
application-home.yml
spring:
  profiles: home
  application:
    name: kafka
  kafka:
    # Kafka broker address(es); comma-separate for multiple brokers
    bootstrap-servers: localhost:9092
    consumer:
      # default consumer group id
      group-id: kafka2
      # earliest: with committed offsets, resume from them; without, consume from the beginning
      # latest:   with committed offsets, resume from them; without, consume only new records
      # none:     resume only if every partition has a committed offset; otherwise throw
      auto-offset-reset: earliest
      # key/value deserializers
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    producer:
      # key/value serializers
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      # producer batch size, in bytes
      batch-size: 65536
      # total producer buffer memory, in bytes
      buffer-memory: 524288
      # broker address for the producer
      bootstrap-servers: localhost:9092

app:
  topic:
    common: common
application-work.yml
spring:
  profiles: work
  application:
    name: kafka
  kafka:
    # Kafka broker address(es); comma-separate for multiple brokers
    bootstrap-servers: localhost:9092
    consumer:
      # default consumer group id
      group-id: kafka2
      # earliest: with committed offsets, resume from them; without, consume from the beginning
      # latest:   with committed offsets, resume from them; without, consume only new records
      # none:     resume only if every partition has a committed offset; otherwise throw
      auto-offset-reset: earliest
      # key/value deserializers
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    producer:
      # key/value serializers
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      # producer batch size, in bytes
      batch-size: 65536
      # total producer buffer memory, in bytes
      buffer-memory: 524288
      # broker address for the producer
      bootstrap-servers: localhost:9092

app:
  topic:
    common: common
kafka下的pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.leesia</groupId>
        <artifactId>arch</artifactId>
        <version>1.0-SNAPSHOT</version>
        <relativePath>../pom.xml</relativePath>
    </parent>

    <groupId>org.leesia</groupId>
    <artifactId>kafka</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
            <version>2.1.10.RELEASE</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka-test</artifactId>
            <version>2.1.10.RELEASE</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- Build a runnable jar whose manifest points at the app main class
                 and references dependencies under lib/. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-jar-plugin</artifactId>
                <version>3.1.0</version>
                <configuration>
                    <archive>
                        <manifest>
                            <addClasspath>true</addClasspath>
                            <classpathPrefix>lib/</classpathPrefix>
                            <mainClass>org.leesia.kafka.Application</mainClass>
                        </manifest>
                    </archive>
                </configuration>
            </plugin>
            <!-- Copy runtime dependencies next to the jar so the manifest classpath resolves. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-dependency-plugin</artifactId>
                <version>2.10</version>
                <executions>
                    <execution>
                        <id>copy-dependencies</id>
                        <phase>package</phase>
                        <goals>
                            <goal>copy-dependencies</goal>
                        </goals>
                        <configuration>
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <!-- Tests are skipped during packaging for this sample module. -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-surefire-plugin</artifactId>
                <version>2.21.0</version>
                <configuration>
                    <skip>true</skip>
                </configuration>
            </plugin>
        </plugins>
    </build>
</project>
外面的arch工程的pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.0.1.RELEASE</version>
    </parent>

    <groupId>org.leesia</groupId>
    <artifactId>arch</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>pom</packaging>

    <modules>
        <module>distributed-lock</module>
        <module>dynamic-proxy</module>
        <module>multi-datasource</module>
        <module>kafka</module>
    </modules>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
        <java.version>1.8</java.version>
        <spring.version>5.0.5.RELEASE</spring.version>
        <mysql.version>8.0.11</mysql.version>
    </properties>

    <!-- Dependencies shared by every child module. -->
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>1.3.0</version>
        </dependency>
    </dependencies>
</project>
发送消息的KafkaController.java
package org.leesia.kafka.controller; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestController; @RestController @RequestMapping(value = "kafka") public class KafkaController { private static final Logger logger = LoggerFactory.getLogger(KafkaController.class); @Autowired private KafkaTemplate<String, String> kafkaTemplate; @Value("${app.topic.common}") private String topic; @RequestMapping(value = "send", method = RequestMethod.GET) @ResponseBody public void send(String key, String data) { kafkaTemplate.send(topic, key, data); } }
消息监听器KafkaListener.java
package org.leesia.kafka.listener;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

/**
 * Consumes records from the topic configured under {@code app.topic.common}
 * and logs topic, key and value for each one.
 */
@Component
public class KafkaListener {

    private static final Logger logger = LoggerFactory.getLogger(KafkaListener.class);

    /**
     * Invoked by Spring Kafka for every record arriving on the topic.
     *
     * @param record the consumed Kafka record (topic/key/value are logged)
     */
    // Fully-qualified annotation name avoids a clash with this class's own name.
    @org.springframework.kafka.annotation.KafkaListener(topics = "${app.topic.common}")
    public void receive(ConsumerRecord<?, ?> record) {
        logger.info("{} - {} : {}", record.topic(), record.key(), record.value());
    }
}
Application.java
package org.leesia.kafka;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.context.annotation.ComponentScan;

/**
 * Spring Boot entry point for the kafka sample module.
 *
 * DataSource auto-configuration is excluded because this module has no
 * database; the exclusion now lives on @SpringBootApplication instead of a
 * separate (redundant) @EnableAutoConfiguration. The unused MapperScan
 * import and the unnecessary `throws Exception` were removed.
 */
@EnableCaching
// Scan the whole org.leesia root, matching the original behavior (wider than
// this class's own package).
@ComponentScan(basePackages = {"org.leesia"})
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class})
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}
打开cmd窗口,输入zkserver,运行Zookeeper。
进入kafka安装目录D:\study\bigData\kafka_2.12-2.1.0,按下shift+鼠标右键,选择"在此处打开命令窗口",打开命令行,在命令行中输入:.\bin\windows\kafka-server-start.bat .\config\server.properties回车,运行kafka。
在idea中运行Application启动程序,在浏览器中调用send接口,发送消息,观察消息监听器输出。
如在浏览器中访问:http://localhost:8080/kafka/send?key=kafka&data=bijian
在控制台中可以看到监听器的消息输出:
20190106_15:47:31.631 [o.a.k.c.u.AppInfoParser][http-nio-8080-exec-1] [66698] [ ][INFO] Kafka version : 1.0.2 20190106_15:47:31.631 [o.a.k.c.u.AppInfoParser][http-nio-8080-exec-1] [66698] [ ][INFO] Kafka commitId : 2a121f7b1d402825 20190106_15:47:31.770 [o.l.k.l.KafkaListener][org.springframework.kafka.KafkaListenerEndpointContainer#0-0-C-1] [66837] [ ][INFO] common - kafka : bijian
可以用D:\study\bigData\kafka_2.12-2.1.0\bin\windows>kafka-topics.bat --list --zookeeper localhost:2181查看topic,发现common的topic确实已建立。

posted on 2019-01-06 15:57 bijian1013 阅读(309) 评论(0) 收藏 举报
浙公网安备 33010602011771号