Chapter 5: Spark Core Programming - RDD Transformation Operators (Value Type) - The distinct Operator
1. Definition
/*
 * 1. Definition
 *    def distinct(): RDD[T]
 * 2. Function
 *    Removes duplicate elements from the RDD and returns the deduplicated RDD.
 */
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object distinctTest extends App {
  val sparkconf: SparkConf = new SparkConf().setMaster("local").setAppName("distinctTest")
  val sc: SparkContext = new SparkContext(sparkconf)

  // Source RDD with duplicates (8 and 2 each appear twice), split into 2 partitions
  val rdd = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 8, 2), 2)

  // distinct triggers a shuffle to deduplicate across all partitions
  private val rdd1: RDD[Int] = rdd.distinct()

  println(s"Number of partitions: ${rdd1.getNumPartitions}")
  println(rdd1.collect().mkString(","))
  sc.stop()
}
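distinct also has an overload, def distinct(numPartitions: Int): RDD[T], which sets the partition count of the result during the shuffle; this is handy because the deduplicated data is often much smaller than the input. A minimal sketch (the object name distinctNumPartitionsTest is illustrative, not from the original):

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object distinctNumPartitionsTest extends App {
  val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("distinctNumPartitionsTest"))
  val rdd = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 8, 2), 2)

  // Request 1 partition for the deduplicated result
  val rdd1: RDD[Int] = rdd.distinct(1)

  println(s"Number of partitions: ${rdd1.getNumPartitions}") // prints 1
  println(rdd1.collect().mkString(","))
  sc.stop()
}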
2. Question: how can we deduplicate the list without using the distinct operator?
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object distinctTestWithGroupby extends App {
  val sparkconf: SparkConf = new SparkConf().setMaster("local").setAppName("distinctTest")
  val sc: SparkContext = new SparkContext(sparkconf)

  val rdd = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 8, 2), 2)

  // Group each element by itself: identical elements collapse into a single key
  private val rdd1: RDD[(Int, Iterable[Int])] = rdd.groupBy(e => e)

  // Keep only the keys; these are the distinct elements
  private val rdd2: RDD[Int] = rdd1.map(_._1)

  println(s"Number of partitions: ${rdd1.getNumPartitions}")
  println(rdd2.collect().mkString(","))
  sc.stop()
}
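Note that groupBy ships every occurrence of an element through the shuffle only to discard the grouped values afterwards. Spark's own distinct avoids this by going through reduceByKey, which merges duplicates on the map side before the shuffle. A minimal sketch of that idea (the object name distinctTestWithReduceByKey is illustrative):

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object distinctTestWithReduceByKey extends App {
  val sc = new SparkContext(new SparkConf().setMaster("local").setAppName("distinctTestWithReduceByKey"))
  val rdd = sc.makeRDD(List(1, 2, 3, 4, 5, 6, 7, 8, 8, 2), 2)

  val rdd1: RDD[Int] = rdd
    .map(x => (x, null))      // wrap each element as a key with a dummy value
    .reduceByKey((x, _) => x) // duplicates of a key are merged, map-side first
    .map(_._1)                // unwrap back to the plain element

  println(rdd1.collect().mkString(","))
  sc.stop()
}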