Set operations: union and intersection
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object UnionRDD {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf().setAppName("My scala word count").setMaster("local")
    val sc = new SparkContext(conf)

    val rdd1: RDD[Int] = sc.makeRDD(1 to 5)   // 1, 2, 3, 4, 5
    val rdd2: RDD[Int] = sc.makeRDD(4 to 10)  // 4, 5, 6, ..., 10

    // union: all elements from both RDDs, duplicates kept, no shuffle
    val rdd3: RDD[Int] = rdd1.union(rdd2)
    // subtract: elements of rdd1 that do not appear in rdd2 (requires a shuffle)
    val rdd4: RDD[Int] = rdd1.subtract(rdd2)
    // intersection: elements present in both RDDs, deduplicated (requires a shuffle)
    val rdd5: RDD[Int] = rdd1.intersection(rdd2)

    rdd3.collect().foreach(println)
    rdd4.collect().foreach(println)
    rdd5.collect().foreach(println)

    sc.stop()
  }
}
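For reference, running this with the single-threaded local master should print roughly the following: the union prints 1 2 3 4 5 4 5 6 7 8 9 10 (union keeps duplicates), subtract prints 1 2 3, and intersection prints 4 5. The order of the subtract and intersection results can vary between runs, since both operations shuffle their data across partitions.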
posted on 2020-09-21 17:55 by happygril3