Spark—WordCount

Local mode

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

object WordCount {
  def main(args: Array[String]): Unit = {
    // Run locally with 2 worker threads
    val conf = new SparkConf().setAppName("WordCount").setMaster("local[2]")
    val sc = new SparkContext(conf)
    // Read the input; without a URI scheme the path resolves against the
    // default filesystem (local FS unless the Hadoop config points to HDFS)
    val inputFile = sc.textFile("/user/wd")
    // Split each line on spaces, map each word to (word, 1), then sum the counts per word
    val wordCount = inputFile.flatMap(_.split(" ")).map(word => (word, 1)).reduceByKey((a, b) => a + b)
    // Print each (word, count) pair
    wordCount.foreach(println)
    sc.stop()
  }
}
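If you want to keep the results instead of only printing them, a variant like the following writes the counts to an output directory, sorted by frequency. This is only a sketch: the object name WordCountToFile and the output path /user/wd-output are assumptions, not part of the original post, and the output directory must not already exist when the job runs.

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

object WordCountToFile {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("WordCountToFile").setMaster("local[2]")
    val sc = new SparkContext(conf)
    val counts = sc.textFile("/user/wd")        // same input path as above
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)
      // sort by count, highest first
      .sortBy({ case (_, count) => count }, ascending = false)
    // assumed output directory; the job fails if it already exists
    counts.saveAsTextFile("/user/wd-output")
    sc.stop()
  }
}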

 
