package com.bjsxt.scala.spark.operator

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

object CollectOperator {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("CollectOperator")
      .setMaster("local")
    val sc = new SparkContext(conf)

    val numberArray = Array(1, 2, 3, 4, 5)
    // parallelize builds an RDD from a local collection, here split across 2 partitions
    val numbers = sc.parallelize(numberArray, 2)
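    // map is a lazy transformation: no job runs until an action (collect below) is invoked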
    val doubleNumbers = numbers.map(_ * 2)
    // collect is an action operator: it returns every element of the RDD to the driver as a local Array
    val doubleNumbersArray = doubleNumbers.collect()
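    // doubleNumbersArray is now a plain Scala Array held on the driver, so collect
    // should only be used when the result is small enough to fit in driver memory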
    for (num <- doubleNumbersArray) {
      println(num)
    }

    sc.stop()
  }
}