// AccumulatorOperator — Spark accumulator example

package com.bjsxt.scala.spark.operator

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Notes:
 *   1. An accumulator must be defined on the Driver side.
 *   2. Accumulators can only be updated on the Executor side — they cannot be read there.
 *   3. An accumulator's value can only be read on the Driver side.
 */
object AccumulatorOperator {

  /**
   * Counts the lines of an input text file with a Spark accumulator and
   * prints the total on the driver.
   *
   * @param args optional; `args(0)` overrides the input path
   *             (defaults to the original hard-coded `"cs"`)
   */
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("AccumulatorOperator").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      // Allow the input path on the command line; keep "cs" as the default
      // so existing invocations behave exactly as before.
      val path = if (args.nonEmpty) args(0) else "cs"
      val rdd = sc.textFile(path)
      // The accumulator is created on the driver; executor tasks may only add
      // to it, and its value is only readable back on the driver.
      // NOTE(review): sc.accumulator is deprecated since Spark 2.0 — prefer
      // sc.longAccumulator("lineCount") if this project targets Spark 2.x+; verify.
      val count = sc.accumulator(0)
      rdd.foreach(_ => count.add(1))
      println(count.value)
    } finally {
      // Release the SparkContext even if reading or the job fails.
      sc.stop()
    }
  }
}

  

// posted @ 2018-06-18 14:08  uuhh  阅读(72)  评论(0)