Spark Streaming: consuming Kafka in Receiver mode

In Receiver mode, Spark Streaming reads Kafka through the old ZooKeeper-based high-level consumer: a long-running receiver pulls messages, stores them as blocks at the configured StorageLevel, and offsets are committed to ZooKeeper rather than managed by Spark itself.

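To compile the listing below, the Receiver-based Kafka connector must be on the classpath. A minimal sbt sketch, assuming Spark 2.x with Scala 2.11 (the artifact name and version are assumptions, not from the original post; match them to your cluster):

// Hypothetical sbt dependencies for the Receiver-based (Kafka 0.8) connector.
// For Spark 1.x the artifact is "spark-streaming-kafka" instead of
// "spark-streaming-kafka-0-8"; the connector was removed in Spark 3.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-streaming" % "2.4.5" % "provided",
  "org.apache.spark" %% "spark-streaming-kafka-0-8" % "2.4.5"
)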
package com.shujia.spark.streaming

import kafka.serializer.StringDecoder
import org.apache.spark.SparkConf
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.dstream.ReceiverInputDStream
import org.apache.spark.streaming.kafka.KafkaUtils
import org.apache.spark.streaming.{Durations, StreamingContext}

object Demo6Receiver {
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf()
      .setAppName("streaming")
      .setMaster("local[4]") // the receiver occupies one core; the rest handle batch processing


    /**
      * Create the streaming context and set the batch interval,
      * i.e. how often a micro-batch is computed.
      */
    val ssc = new StreamingContext(conf, Durations.seconds(5))


    // topic name -> number of consumer threads (each partition is consumed in its own thread)
    val topics: Map[String, Int] = Map("test_topic2" -> 3)

    /**
      * Connect to Kafka in Receiver mode (the old high-level consumer API).
      */

    val kafkaParams: Map[String, String] = Map[String, String](
      "zookeeper.connect" -> "master:2181",  // ZooKeeper quorum used by the high-level consumer
      "group.id" -> "asdasdsad",             // consumer group id
      "auto.offset.reset" -> "smallest",     // start from the earliest offset when none is stored
      "auto.commit.enable" -> "true",        // old-consumer key; "enable.auto.commit" is the 0.9+ consumer's and is ignored here
      "auto.commit.interval.ms" -> "10000"   // commit offsets to ZooKeeper every 10 s
    )

    // One receiver is created; received blocks are stored with replication
    // (MEMORY_AND_DISK_2), so data survives a single executor failure.
    val kafkaDS: ReceiverInputDStream[(String, String)] = KafkaUtils.createStream[String, String, StringDecoder, StringDecoder](
      ssc, kafkaParams, topics, StorageLevel.MEMORY_AND_DISK_2)


    kafkaDS.print() // each element is a (key, message) pair


    ssc.start()            // start the receiver and the batch scheduler
    ssc.awaitTermination() // block until the context is stopped externally

  }

}
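Printing the raw (key, message) pairs is only a smoke test. A minimal processing sketch that could replace kafkaDS.print() in the listing above, assuming the messages are plain space-separated text (this word-count transformation is illustrative, not part of the original post):

    // Hypothetical follow-up: per-batch word count over the message values.
    val counts = kafkaDS
      .map(_._2)             // keep the message value, drop the Kafka key
      .flatMap(_.split(" ")) // split each message into words
      .map(word => (word, 1))
      .reduceByKey(_ + _)    // count words within each 5-second batch

    counts.print()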

 
