Spark Streaming example: OutDemo1

 

Writing Spark Streaming data to MySQL via JDBC

 

import java.util.Properties

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.{Seconds, StreamingContext}

object OutDemo1 {

  // JDBC connection properties for the MySQL sink
  val props = new Properties()
  props.setProperty("user", "root")
  props.setProperty("password", "123456")

  def main(args: Array[String]): Unit = {

    // 1. Build the streaming context with a 3-second batch interval
    val conf = new SparkConf().setMaster("local[*]").setAppName("WordCount1")
    val ssc = new StreamingContext(conf, Seconds(3))

    // updateStateByKey needs a checkpoint directory to persist state between batches
    ssc.checkpoint("ck1")

    // 2. Create a stream from a source: socket, RDD queue, custom receiver,
    //    or Kafka (the most important one; see the Kafka sketch below)
    val sourceStream = ssc.socketTextStream("hadoop103", 9999)

    // 3. Transform the stream
    sourceStream
      .flatMap(_.split(" "))
      .map((_, 1))
      // stateful word count: fold this batch's counts into the running total;
      // .reduceByKey(_ + _) would give per-batch counts instead, and
      // .reduceByKeyAndWindow(...) a windowed count (see the sketch below)
      .updateStateByKey((seq: Seq[Int], opt: Option[Int]) => Some(seq.sum + opt.getOrElse(0)))
      // Alternative sinks: .saveAsTextFiles("word", "log"), or a hand-rolled
      // JDBC write (connect to MySQL, write, close) as sketched below
      .foreachRDD(rdd => {
        // Convert the RDD to a DataFrame:
        // 1. get (or create) a SparkSession from the streaming context's conf
        val spark = SparkSession.builder()
          .config(rdd.sparkContext.getConf)
          .getOrCreate()
        import spark.implicits._

        // 2. convert
        val df = rdd.toDF("word", "count")

        // 3. write to MySQL; note that updateStateByKey state is cumulative,
        //    so "append" re-inserts the full running totals every batch
        df.write.mode("append").jdbc("jdbc:mysql://hadoop102:3306/rdd", "word1015", props)
      })

    // 4. Start the job and block until termination
    ssc.start()
    ssc.awaitTermination()
  }
}
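The hand-rolled JDBC path left as a comment skeleton in the original (connect to MySQL, write, close) can be filled in with plain java.sql. This is a minimal sketch, not the post's tested code: it assumes the mysql-connector-java driver is on the classpath, that word1015 has (word, count) columns, and it names the stateful DStream wordCounts for illustration. Opening the connection inside foreachPartition runs it on the executor and amortizes it over a whole partition instead of one connection per record.

import java.sql.DriverManager

// wordCounts stands for the DStream[(String, Int)] produced by updateStateByKey above
wordCounts.foreachRDD(rdd => {
  rdd.foreachPartition(iter => {
    // runs on the executor: one connection per partition, not per record
    val conn = DriverManager.getConnection("jdbc:mysql://hadoop102:3306/rdd", "root", "123456")
    val ps = conn.prepareStatement("insert into word1015(word, `count`) values(?, ?)")
    try {
      iter.foreach { case (word, count) =>
        ps.setString(1, word)
        ps.setInt(2, count)
        ps.executeUpdate() // for large partitions, addBatch/executeBatch would be cheaper
      }
    } finally {
      ps.close()
      conn.close() // always close, even when a write fails
    }
  })
})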

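The source comment flags Kafka as the most important input. A minimal sketch of swapping the socket source for a Kafka direct stream, assuming the spark-streaming-kafka-0-10 dependency is on the classpath; the broker address, group id, and topic name are placeholders, not from the original post.

import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.spark.streaming.kafka010.{ConsumerStrategies, KafkaUtils, LocationStrategies}

// placeholder broker, group id, and topic: adjust for your cluster
val kafkaParams = Map[String, Object](
  "bootstrap.servers" -> "hadoop102:9092",
  "key.deserializer" -> classOf[StringDeserializer],
  "value.deserializer" -> classOf[StringDeserializer],
  "group.id" -> "outdemo1",
  "auto.offset.reset" -> "latest"
)

val kafkaStream = KafkaUtils.createDirectStream[String, String](
  ssc,
  LocationStrategies.PreferConsistent,
  ConsumerStrategies.Subscribe[String, String](Seq("words"), kafkaParams)
)

// each element is a ConsumerRecord; its value is the same line of text
// the socket source would have delivered
val lines = kafkaStream.map(_.value())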
 
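The commented-out reduceByKeyAndWindow call points at the windowed alternative to global state. A minimal sketch counting words over the last 6 seconds, recomputed with every 3-second batch; both durations must be multiples of the batch interval, and this plain-window form needs no checkpoint directory.

// totals over a 6-second window, sliding with each 3-second batch
val windowedCounts = sourceStream
  .flatMap(_.split(" "))
  .map((_, 1))
  .reduceByKeyAndWindow(_ + _, Seconds(6), Seconds(3))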
