flinkSinkES

import java.util

import it.bigdata.flink.study.SensorReding
import org.apache.flink.api.common.functions.RuntimeContext
import org.apache.flink.streaming.api.scala._
import org.apache.flink.streaming.connectors.elasticsearch.{ElasticsearchSinkFunction, RequestIndexer}
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
import org.apache.http.HttpHost
import org.elasticsearch.client.Requests

object EsSinkTest {
  def main(args: Array[String]): Unit = {
    // create the execution environment
    val env = StreamExecutionEnvironment.getExecutionEnvironment
    env.setParallelism(1)

    // read the source data from a text file
    val inputPath = "D:\\ideaDemo\\maven_flink\\src\\main\\resources\\sensor.txt"
    val inputStream = env.readTextFile(inputPath)
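    // sensor.txt is assumed to contain comma-separated lines such as
    //   sensor_1,1547718199,35.8   (id, timestamp, temperature)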

    // simple transformation: parse each line into a SensorReding
    val dataStream = inputStream.map(data => {
      val arr = data.split(",")
      SensorReding(arr(0), arr(1).toLong, arr(2).toDouble)
    })

    // define the HttpHosts of the Elasticsearch cluster
    val httpHosts = new util.ArrayList[HttpHost]()
    httpHosts.add(new HttpHost("127.0.0.0",9200))

    // custom ElasticsearchSinkFunction that writes SensorReding records to ES
    val myEsSinkFunc = new ElasticsearchSinkFunction[SensorReding] {
      override def process(t: SensorReding, runtimeContext: RuntimeContext, requestIndexer: RequestIndexer): Unit = {
        // wrap the fields in a Map to use as the document source
        val dataSource = new util.HashMap[String,String]()
        dataSource.put("id",t.id)
        dataSource.put("temperature",t.temperature.toString)
        dataSource.put("ts",t.timestamp.toString)

        // build an IndexRequest, which is sent to ES as an HTTP request
        val indexRequest = Requests.indexRequest()
          .index("sensor")
          .`type`("readingdata")
          .source(dataSource)

        // hand the request to the RequestIndexer for sending
        requestIndexer.add(indexRequest)
      }
    }

    dataStream.addSink(
      new ElasticsearchSink.Builder[SensorReding](httpHosts, myEsSinkFunc).build()
    )
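    // Note: the ES6 connector's Builder also exposes bulk-flush settings; for
    // local testing you could flush each record immediately instead of waiting
    // for a bulk threshold (a sketch, assuming setBulkFlushMaxActions is available):
    //   val esSinkBuilder = new ElasticsearchSink.Builder[SensorReding](httpHosts, myEsSinkFunc)
    //   esSinkBuilder.setBulkFlushMaxActions(1)
    //   dataStream.addSink(esSinkBuilder.build())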

    env.execute("es sink test")
  }
}
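
SensorReding is imported from the project itself (it.bigdata.flink.study) and not shown above; a minimal sketch of the case class, with the fields inferred from the parsing and the sink function, would be:

case class SensorReding(id: String, timestamp: Long, temperature: Double)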

 
