PipelineTest

package com.bjsxt.scala.spark.test

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Demonstrates Spark's pipelining of narrow transformations.
 *
 * Because `filter` and `map` are both narrow dependencies, Spark fuses them
 * into a single stage: each element flows through filter then map before the
 * next element is read. The interleaved "filter: x" / "map: x" console output
 * (rather than all filters followed by all maps) makes this visible.
 */
object PipelineTest {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local")
    // App name now matches this object (was "CacheTest", a copy-paste leftover).
    conf.setAppName("PipelineTest")
    conf.set("spark.speculation", "true")

    val sc = new SparkContext(conf)

    val list = List("Angelabbay", "Dilireba", "wangfei", "hanhong")

    val rdd = sc.parallelize(list)

    // Narrow transformation 1: keeps every element; the println traces
    // when each element passes through this stage.
    val filterRDD = rdd.filter { x =>
      println(s"filter: $x")
      true
    }

    // Narrow transformation 2: identity map, again tracing execution order.
    val mapRDD = filterRDD.map { x =>
      println(s"map: $x")
      x
    }

    // Action that triggers the pipelined stage.
    mapRDD.count()

    sc.stop()
  }
}

  

posted @ 2018-06-23 16:49  uuhh  阅读(176)  评论(0)    收藏  举报