A small example of consuming Kafka with Flink

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.CheckpointingMode;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Properties;

/**
 * Consume a Kafka topic with Flink and print each record.
 */
public class KafkaSource {

    private static final Logger logger = LoggerFactory.getLogger(KafkaSource.class);

    public static void main(String[] args) throws Exception {

        String KAFKA_BROKER = "master1:9092,master2:9092,master3:9092";

        String TRANSACTION_GROUP = "local";

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Use event time and take an exactly-once checkpoint every 1000 ms
        env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
        env.enableCheckpointing(1000);
        env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.EXACTLY_ONCE);

        // Connection and consumer-group settings passed to the Kafka consumer
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", KAFKA_BROKER);
        kafkaProps.setProperty("group.id", TRANSACTION_GROUP);

        // Read the "testEnvironment" topic as plain strings
        DataStreamSource<String> transaction = env.addSource(
                new FlinkKafkaConsumer<>("testEnvironment", new SimpleStringSchema(), kafkaProps));
        transaction.print();
        env.execute();
    }
}
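
The example enables event time but never assigns timestamps or watermarks, so any event-time windowing downstream of this source would never fire. Below is a minimal sketch of one way to add them, assuming Flink 1.11+ where FlinkKafkaConsumerBase#assignTimestampsAndWatermarks accepts a WatermarkStrategy; the class name KafkaSourceWithWatermarks, the 5-second out-of-orderness bound, and reusing the same broker/topic/group values are illustrative assumptions, not part of the original post.

import java.time.Duration;
import java.util.Properties;

import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

public class KafkaSourceWithWatermarks {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers", "master1:9092,master2:9092,master3:9092");
        kafkaProps.setProperty("group.id", "local");

        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("testEnvironment", new SimpleStringSchema(), kafkaProps);

        // Generate watermarks from the Kafka record timestamps,
        // tolerating up to 5 seconds of out-of-order data (illustrative bound).
        consumer.assignTimestampsAndWatermarks(
                WatermarkStrategy.<String>forBoundedOutOfOrderness(Duration.ofSeconds(5)));

        env.addSource(consumer).print();
        env.execute("KafkaSourceWithWatermarks");
    }
}

Attaching the watermark strategy to the consumer itself (rather than after addSource) lets the connector track watermarks per Kafka partition, which behaves better when partitions progress at different speeds.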
