Spark transformation operator: map

import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by liupeng on 2017/6/15.
  */
object T_map {

  // on Windows, point hadoop.home.dir at a local Hadoop install so winutils.exe can be found
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("map_test").setMaster("local")
    val sc = new SparkContext(conf)

    val numbers = Array(1, 2, 3, 4, 5)
    val numberRDD = sc.parallelize(numbers)

    // map: apply a function to every element, producing a new RDD
    val resultRDD = numberRDD.map(a => a * 10)
    // foreach is an action; it triggers the computation and prints each element
    resultRDD.foreach(println)

    sc.stop()
  }
}
Run result:

10
20
30
40
50

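A minimal follow-up sketch (the object name T_map_collect is hypothetical): it repeats the same example, but instead of printing inside foreach it brings the mapped values back to the driver with collect, which makes the lazy-transformation / action distinction easier to see.

import org.apache.spark.{SparkConf, SparkContext}

object T_map_collect {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("map_collect_sketch").setMaster("local")
    val sc = new SparkContext(conf)

    // map is a lazy transformation: no job runs here yet
    val mapped = sc.parallelize(Array(1, 2, 3, 4, 5)).map(_ * 10)

    // collect is an action: it triggers the job and returns the results to the driver
    val collected: Array[Int] = mapped.collect()
    println(collected.mkString(", "))   // 10, 20, 30, 40, 50

    sc.stop()
  }
}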