import java.util.Properties;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
/**
 * Demo: DataStream sink — reads lines from a local text file and publishes
 * each line to the Kafka topic "FileToKafka".
 */
public class SinkDemoFileToKafka {

    /**
     * Reads a local text file line by line and writes every line, unchanged, to
     * the Kafka topic {@code FileToKafka}.
     *
     * @param args optional; {@code args[0]} overrides the input file path
     *             (defaults to the original demo path when absent)
     * @throws Exception if the Flink job fails to submit or execute
     */
    public static void main(String[] args) throws Exception {
        // 0. Execution environment — AUTOMATIC lets Flink pick batch or
        //    streaming mode based on whether the source is bounded.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);

        // 1. Source: input path is parameterized so the demo is not tied to
        //    one machine's filesystem; the original path remains the default.
        String inputPath = args.length > 0 ? args[0] : "C:\\Users\\K21\\Desktop\\temp\\1200.unl";
        DataStream<String> ds = env.readTextFile(inputPath);

        // 2. Transformation: none — lines are forwarded to the sink as-is.

        // 3. Sink: Kafka producer serializing each line as a plain string.
        Properties kafkaProps = new Properties();
        kafkaProps.setProperty("bootstrap.servers",
                "192.168.78.203:9092,192.168.78.204:9092,192.168.78.205:9092");
        FlinkKafkaProducer<String> kafkaSink =
                new FlinkKafkaProducer<>("FileToKafka", new SimpleStringSchema(), kafkaProps);
        // Four parallel sink instances write to the three-broker cluster.
        ds.addSink(kafkaSink).setParallelism(4);

        // 4. Execute with an explicit job name so it is identifiable in the
        //    Flink web UI / logs (previously the job was unnamed).
        env.execute("SinkDemoFileToKafka");
    }
}