/**
* Created by root on 9/7/15.
*/
import org.apache.spark.{SparkConf, SparkContext}
object RDDTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("RDDTest").setMaster("local")
    val sc = new SparkContext(conf)
    val lines = sc.textFile("/home/slh/data/rddtest.txt")
    // total length: sum the character count of every line
    val lineLengths = lines.map(s => s.length) // RDD of per-line lengths
    val totalLength = lineLengths.reduce((a, b) => a + b)
    println("total length: " + totalLength)
    // word count: split each line into words, pair each word with 1,
    // then sum the counts per word
    val word_count = lines.flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .reduceByKey((a, b) => a + b)
    //word_count.saveAsTextFile("/home/slh/data/rddresult0")
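    // e.g. an input line "spark rdd spark" would yield (spark,2) and (rdd,1)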
    // sum of all tokens as numbers: NOTE the original paired (1, word) with
    // word still a String, so reduceByKey concatenated the digits instead of
    // adding them, producing (1,3343566777879717727)
    val sum = lines.flatMap(line => line.split(" "))
      .map(word => (1, word.toInt)) // toInt assumes every token is an integer
      .reduceByKey((a, b) => a + b)
    //sum.saveAsTextFile("/home/slh/data/rddresult1")
    //println("sum: " + sum.collect().mkString(", "))
    // accumulator: workers add into it inside the action; the driver reads
    // the final value afterwards
    val accum = sc.accumulator(0, "My Accumulator")
    sc.parallelize(Array(1, 2, 3, 4)).foreach(x => accum += x)
    println("Accumulator of Array(1,2,3,4) : " + accum.value)
  }
}