Spark--wordcount(词频降序)

 

import org.apache.spark.{SparkConf, SparkContext}

object wc2 {
  /**
   * Word count over a text file, printed in descending frequency order.
   *
   * @param args optional; args(0) overrides the input file path
   *             (defaults to /root/The_Man_of_Property.txt).
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local").setAppName("wc2")
    val sc = new SparkContext(conf)

    try {
      // Allow the input path to be supplied on the command line; keep the
      // original hard-coded path as the backward-compatible default.
      val inputPath = if (args.nonEmpty) args(0) else "/root/The_Man_of_Property.txt"

      val counts = sc.textFile(inputPath)
        .flatMap(_.split(" "))
        .filter(_.nonEmpty)    // splitting on " " produces empty tokens for runs of spaces
        .map((_, 1))
        .reduceByKey(_ + _)

      // sortBy on the count replaces the swap / sortByKey(false) / swap-back dance.
      // collect() brings the (small) result to the driver so the println output is
      // actually ordered and printed locally — RDD.foreach runs on executors and
      // only appears to print because of local mode.
      counts.sortBy(_._2, ascending = false)
        .collect()
        .foreach(println)
    } finally {
      sc.stop()    // release Spark resources even if the job fails
    }
  }
}

 

posted @ 2019-12-03 15:22  Assange  阅读(293)  评论(0编辑  收藏  举报