Writing unified stream/batch DataStream code in Flink 1.18

package com.xiaohu.wc;

import org.apache.flink.api.common.functions.FlatMapFunction;
import org.apache.flink.api.java.functions.KeySelector;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.LocalStreamEnvironment;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

/*
    Parallelism priority (highest wins):
        operator-level setParallelism() > env-level setting > submit command -p > config file default (parallelism.default: 1)
    See the parallelism sketch after the listing.
 */
public class WordCountStreamUnboundedDemo {
    public static void main(String[] args) throws Exception {
        // Local testing: pin the REST port to 8081 for the local web UI
        // (the UI additionally requires the flink-runtime-web dependency on the classpath)
        Configuration configuration = new Configuration();
        configuration.set(RestOptions.BIND_PORT, "8081");
        LocalStreamEnvironment env = StreamExecutionEnvironment.createLocalEnvironment(configuration);

        // When packaging the job for a cluster, use this instead:
        // StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Unbounded source: read lines from a socket on host "master", port 7777
        DataStreamSource<String> socketDS = env.socketTextStream("master", 7777);

        // Split each line into words, emit (word, 1L), key by the word, and sum the counts
        socketDS.flatMap(new FlatMapFunction<String, Tuple2<String, Long>>() {
            @Override
            public void flatMap(String s, Collector<Tuple2<String, Long>> collector) throws Exception {
                String[] words = s.split(" ");
                for (String word : words) {
                    Tuple2<String, Long> tuple2 = Tuple2.of(word, 1L);
                    collector.collect(tuple2);
                }
            }
        }).keyBy(new KeySelector<Tuple2<String, Long>, String>() {
            @Override
            public String getKey(Tuple2<String, Long> stringLongTuple2) throws Exception {
                return stringLongTuple2.f0;
            }
        }).sum(1).print();

        env.execute("DataStream API unbounded socket word count");
    }
}
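
To feed the socket source when testing, start a plain netcat listener on the source host first (nc -lk 7777), then run the job and type words into the netcat session.

The "unified stream/batch" part of the title means the same DataStream pipeline can also run with batch scheduling on a bounded input. Below is a minimal sketch (not part of the original listing): it assumes a bounded text file as the source, the path input/words.txt is hypothetical, and it uses setRuntimeExecutionMode(RuntimeExecutionMode.BATCH), the standard Flink API for choosing the runtime mode.

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class WordCountBatchModeDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Same DataStream API, batch runtime mode; STREAMING and AUTOMATIC are the alternatives.
        env.setRuntimeExecutionMode(RuntimeExecutionMode.BATCH);

        env.readTextFile("input/words.txt")   // bounded source (hypothetical path)
                .flatMap((String line, Collector<Tuple2<String, Long>> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1L));
                    }
                })
                // lambdas lose generic type info to erasure, so declare the output type explicitly
                .returns(Types.TUPLE(Types.STRING, Types.LONG))
                .keyBy(t -> t.f0)
                .sum(1)
                .print();

        env.execute("WordCount in BATCH runtime mode");
    }
}

The runtime mode can also be chosen at submit time without touching the code, e.g. bin/flink run -Dexecution.runtime-mode=BATCH ..., so the same jar serves both streaming and batch inputs.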
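
For the priority note in the header comment, here is a minimal sketch of the two levels that are set in code; the values 2 and 4 are arbitrary examples. The remaining two levels are flink run -p <n> at submit time and parallelism.default in the Flink configuration file.

import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.util.Collector;

public class ParallelismPriorityDemo {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Env-level setting: default parallelism for every operator in this job.
        env.setParallelism(2);

        env.socketTextStream("master", 7777)
                .flatMap((String line, Collector<Tuple2<String, Long>> out) -> {
                    for (String word : line.split(" ")) {
                        out.collect(Tuple2.of(word, 1L));
                    }
                })
                .returns(Types.TUPLE(Types.STRING, Types.LONG))
                // Operator-level setting: overrides the env-level value for this operator only.
                .setParallelism(4)
                .keyBy(t -> t.f0)
                .sum(1)
                .print();

        env.execute("Parallelism priority demo");
    }
}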