package com.shujia.spark.streaming

import org.apache.spark.SparkConf
import org.apache.spark.streaming.dstream.{DStream, ReceiverInputDStream}
import org.apache.spark.streaming.{Durations, StreamingContext}

object Demo9Window {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName("streaming")
      .setMaster("local[2]")

    // Batch interval: one micro-batch every 5 seconds
    val ssc = new StreamingContext(conf, Durations.seconds(5))

    // A checkpoint directory is required by the incremental window operator used below
    ssc.checkpoint("data/checkpoint")

    // Read lines from a socket source on host "master", port 8888
    val linesDS: ReceiverInputDStream[String] = ssc.socketTextStream("master", 8888)
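
    // For local testing, one common way to feed this socket source (an assumption,
    // not part of the original demo) is to run a netcat server on the "master" host:
    //   nc -lk 8888
    // and then type comma-separated words into that terminal.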

    // Split each line on commas, then map every word to a (word, 1) pair
    val wordsDS: DStream[String] = linesDS.flatMap(_.split(","))
    val kvDS: DStream[(String, Int)] = wordsDS.map((_, 1))
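
    // Example of the transformation above (illustrative only): an input line
    // "spark,flink,spark" becomes the pairs ("spark", 1), ("flink", 1), ("spark", 1).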

    /**
      * Count the words seen in the most recent 15 seconds, recomputing every 10 seconds.
      *
      * reduceByKeyAndWindow: a sliding-window, stateful operator.
      *
      * Typical use case: "hot item" (trending product) statistics.
      */
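
    // Illustration (an informal sketch, assuming the 5-second batch interval above):
    // each window covers the last 3 batches (15s) and is recomputed every 2 batches (10s).
    //
    //   batches:  [0-5) [5-10) [10-15) [15-20) [20-25) [25-30) ...
    //   window @ t=20s -> batches [5-10), [10-15), [15-20)
    //   window @ t=30s -> batches [15-20), [20-25), [25-30)
    //
    // The batch [15-20) is counted in both windows, while [5-10) and [10-15) have
    // expired from the second one.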

    /* Straightforward version: re-reduces every batch in the window on each slide.
    val countDS: DStream[(String, Int)] = kvDS.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y,
      Durations.seconds(15), // window length
      Durations.seconds(10)  // slide interval
    )*/

    // Optimized version: avoids recomputing the whole window on every slide.
    // A checkpoint directory must be set so previous results can be saved.
    val countDS: DStream[(String, Int)] = kvDS.reduceByKeyAndWindow(
      (x: Int, y: Int) => x + y,     // add counts of batches entering the window
      (x1: Int, y1: Int) => x1 - y1, // subtract counts of batches leaving the window
      Durations.seconds(15), // window length
      Durations.seconds(10)  // slide interval
    )
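
    // How the incremental update works (a worked example, not from the original code):
    // suppose "spark" appears 4 times in the previous window, 2 times in the batches
    // that just entered, and 3 times in the batches that just expired. The new count is
    //   4 + 2 - 3 = 3
    // i.e. previous result + reduce(new data) - invReduce(expired data), instead of
    // re-summing all 15 seconds of data.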

    // Keys whose count has dropped to 0 can be filtered out before printing
    countDS.filter(_._2 != 0).print()
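
    // The zero counts appear because, with the inverse-reduce variant, keys linger in
    // state after all of their batches have left the window. The Scala API also exposes
    // an overload of reduceByKeyAndWindow with a filterFunc parameter that can drop such
    // entries inside the operator itself (mentioned here as a pointer, not used above).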

    // Start the Spark Streaming job and block until it is terminated
    ssc.start()
    ssc.awaitTermination()
    ssc.stop()
  }
}