Spark--Actions operator--countByKey
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkConf, SparkContext}
/**
  * Created by liupeng on 2017/6/17.
  */
object A_countByKey {
  System.setProperty("hadoop.home.dir","F:\\hadoop-2.6.5")
  Logger.getLogger("org").setLevel(Level.ERROR)
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("countByKey_test").setMaster("local")
    val sc = new SparkContext(conf)
    //prepare some sample data
    val nameList = List(("A",1),("A",2),("B",1))
    val data = sc.parallelize(nameList)
    //countByKey is only available on RDDs of type (K, V); it returns a Map of (K, Long) pairs giving the count of each key
    val num = data.countByKey()
    for (x <- num) {
      println(x)
    }
  }
}
Run result:

(B,1)
(A,2)
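Note that countByKey collects the per-key counts into a Map on the driver, so it is only appropriate when the number of distinct keys is small. A minimal sketch of the same count done with mapValues and reduceByKey, which keeps the result distributed as an RDD until it is explicitly collected (reusing the data RDD from the example above):

    //roughly equivalent count kept as an RDD instead of a driver-side Map
    val counts = data
      .mapValues(_ => 1L)    //replace each value with a count of 1
      .reduceByKey(_ + _)    //sum the counts per key on the executors
    for (x <- counts.collect()) {
      println(x)             //prints (A,2) and (B,1); order may vary
    }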