mapValues

 

 

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}


object KeyValueRDD {
  def main(args: Array[String]): Unit = {

    val conf: SparkConf = new SparkConf().setAppName("KeyValueRDD mapValues demo").setMaster("local")
    val sc = new SparkContext(conf)

    // mapValues: keep the key unchanged and apply the function to each value
    val rdd: RDD[(Int, String)] = sc.makeRDD(List((3,"a"),(2,"a"),(4,"c"),(6,"c"),(8,"c")),3)
    val mapValuesRDD: RDD[(Int, String)] = rdd.mapValues(_+" hello")
    mapValuesRDD.collect().foreach(println)

//    (3,a hello)
//    (2,a hello)
//    (4,c hello)
//    (6,c hello)
//    (8,c hello)


  }

}
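The same result could be produced with map by rebuilding the tuple, but mapValues has an extra advantage: it retains the parent RDD's partitioner, while map discards it (so a later shuffle, e.g. before a join, can be avoided). A minimal sketch of the difference, assuming it is appended inside main above and reuses sc and rdd:

    // Same result via map: the key has to be carried through by hand.
    val viaMap: RDD[(Int, String)] = rdd.map { case (k, v) => (k, v + " hello") }

    // mapValues keeps the parent's partitioner; map does not.
    val partitioned = rdd.partitionBy(new org.apache.spark.HashPartitioner(3))
    println(partitioned.mapValues(_ + " hello").partitioner)                  // Some(HashPartitioner...)
    println(partitioned.map { case (k, v) => (k, v + " hello") }.partitioner) // None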

