Java 向 Kafka 发数据(非实时)
依赖:
<!-- MySQL JDBC driver -->
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.28</version>
</dependency>
<!-- Kafka producer/consumer client -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.8.0</version>
</dependency>
<!-- Jackson for building the JSON payload -->
<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-databind</artifactId>
    <version>2.13.3</version> <!-- use the latest available version -->
</dependency>
配置加载:
import java.io.IOException; import java.io.InputStream; import java.util.Properties; public class ConfigLoader { public static Properties loadConfig(String configFile) throws IOException { Properties config = new Properties(); try (InputStream input = ConfigLoader.class.getClassLoader().getResourceAsStream(configFile)) { if (input == null) { throw new IOException("Unable to find config file: " + configFile); } config.load(input); } return config; } }
单例的 JSON 处理器:
import com.fasterxml.jackson.databind.ObjectMapper; public class JacksonUtils { private static final ObjectMapper objectMapper = new ObjectMapper(); private JacksonUtils() { // 私有构造函数,防止实例化 } public static ObjectMapper getObjectMapper() { return objectMapper; } }
核心代码:
import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.kafka.clients.producer.*; import org.apache.kafka.common.serialization.StringSerializer; import java.io.IOException; import java.sql.*; import java.util.Properties; import java.util.concurrent.ExecutionException; public class DataPipeline { private final String bootstrapServers; private final String topic; private final String jdbcUrl; private final String jdbcUser; private final String jdbcPassword; public DataPipeline(Properties config) { this.bootstrapServers = config.getProperty("kafka.bootstrap.servers"); this.topic = config.getProperty("kafka.topic"); this.jdbcUrl = config.getProperty("mysql.url"); this.jdbcUser = config.getProperty("mysql.user"); this.jdbcPassword = config.getProperty("mysql.password"); } public void run() { // 创建Kafka Producer Properties kafkaProps = new Properties(); kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()); // try-with-resource自动关闭资源 try (Producer<String, String> sender = new KafkaProducer<>(kafkaProps); Connection conn = DriverManager.getConnection(jdbcUrl, jdbcUser, jdbcPassword); Statement statement = conn.createStatement()) { // 执行查询 String query = "SELECT id, name, age FROM users"; ResultSet resultSet = statement.executeQuery(query); // 获取ObjectMapper实例 ObjectMapper objectMapper = JacksonUtils.getObjectMapper(); // 遍历结果集并发送到Kafka while (resultSet.next()) { // 创建JSON对象 ObjectNode json = objectMapper.createObjectNode(); ResultSetMetaData meta = resultSet.getMetaData(); // 添加表名 json.put("table", "users"); // 添加每一列的数据 for (int c = 1; c <= meta.getColumnCount(); c++) { json.put(meta.getColumnName(c), resultSet.getString(c)); } // 创建ProducerRecord ProducerRecord<String, String> record = new 
ProducerRecord<>(topic, json.get("id").asText(), json.toString()); // 发送消息 try { RecordMetadata metadata = sender.send(record).get(); System.out.printf("Message sent to topic %s partition %d with offset %d%n", metadata.topic(), metadata.partition(), metadata.offset()); } catch (InterruptedException | ExecutionException e) { e.printStackTrace(); } } } catch (SQLException e) { e.printStackTrace(); } } public static void main(String[] args) { try { Properties config = ConfigLoader.loadConfig("application.properties"); DataPipeline pipeline = new DataPipeline(config); pipeline.run(); } catch (IOException e) { e.printStackTrace(); } } }

浙公网安备 33010602011771号