Flink Java Learning - 3 - Kafka

 

    Kafka is the most commonly used source for Flink. My local test connecting to the company's Kafka cluster did not succeed, so I'm recording the code here first.

package com.shihuo.apitest_source;

import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011;

import java.util.Properties;

public class SourceTest2_Kafka {
    public static void main(String[] args) throws Exception{
        // Create the execution environment
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setParallelism(1);

        // Kafka connection properties (broker address is redacted here)
        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "------:9092");
        properties.setProperty("auto.offset.reset", "latest");

        DataStream<String> dataStream = env.addSource(new FlinkKafkaConsumer011<String>("------", new SimpleStringSchema(), properties));

        // Print the output
        dataStream.print();

        // Execute the job
        env.execute();
    }
}
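
Since the connection to the company cluster did not succeed, it can help to first verify broker reachability outside of Flink. Below is a minimal sketch (not from the original post) that uses the Kafka AdminClient from the kafka-clients dependency pulled in by the Flink Kafka connector; the broker address broker-host:9092 and the class name are placeholders that need to be replaced with the real values.

package com.shihuo.apitest_source;

import java.util.Properties;
import java.util.Set;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

public class KafkaConnectivityCheck {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Placeholder broker address; replace with the real cluster address
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "broker-host:9092");
        // Fail fast instead of hanging when the broker is unreachable
        props.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, "5000");

        try (AdminClient admin = AdminClient.create(props)) {
            // If this call throws or times out, the problem is the address,
            // network, or VPN rather than the Flink job itself
            Set<String> topics = admin.listTopics().names().get();
            System.out.println("Connected. Topics: " + topics);
        }
    }
}

If this program can list the topics, the same bootstrap.servers value should work in the Flink job above.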

posted @ 2021-01-17 23:41  活不明白