Spark actions operator: count
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by liupeng on 2017/6/16.
  */
object A_count {
  System.setProperty("hadoop.home.dir", "F:\\hadoop-2.6.5")

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("count_test").setMaster("local")
    val sc = new SparkContext(conf)
    // Prepare some sample data
    val nameList: List[Int] = List(1, 2, 3, 4, 5)
    val numbers = sc.parallelize(nameList)
    // count() returns the number of elements in the dataset
    val num = numbers.count()
    println(num)
    sc.stop()
  }
}
Output:
5
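
Because count() is an action, calling it triggers execution of any pending transformations on the RDD. As a further illustration, here is a minimal sketch (assuming the same local SparkContext sc created in the example above; the filter predicate is only for demonstration) that counts just the even elements:

    val evenCount = sc.parallelize(List(1, 2, 3, 4, 5))
      .filter(_ % 2 == 0) // transformation: lazily keeps only even numbers
      .count()            // action: runs the job and returns 2
    println(evenCount)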
