Flink: converting between DataStream and Table

https://nightlies.apache.org/flink/flink-docs-release-1.14/zh/docs/dev/table/data_stream_api/

import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.table.api.Table; import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; import org.apache.flink.types.Row; // create environments of both APIs StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env); // create a DataStream DataStream<String> dataStream = env.fromElements("Alice", "Bob", "John"); // interpret the insert-only DataStream as a Table Table inputTable = tableEnv.fromDataStream(dataStream); // register the Table object as a view and query it tableEnv.createTemporaryView("InputTable", inputTable); Table resultTable = tableEnv.sqlQuery("SELECT UPPER(f0) FROM InputTable"); // interpret the insert-only Table as a DataStream again DataStream<Row> resultStream = tableEnv.toDataStream(resultTable); // add a printing sink and execute in DataStream API resultStream.print(); env.execute(); // prints: // +I[Alice] // +I[Bob] // +I[John]




=================================================
Flink map transformation

DataStream<String> input = ...;

// Parse every incoming string into an integer, one element at a time.
// The anonymous MapFunction (rather than a lambda) keeps full generic type
// information available to Flink's type extraction.
DataStream<Integer> parsed = input.map(new MapFunction<String, Integer>() {
    @Override
    public Integer map(String s) {
        return Integer.parseInt(s);
    }
});
posted @ 2021-12-15 16:13  yjy888  阅读(32)  评论(0编辑  收藏  举报