RandomSplit operator

package com.bjsxt.scala.spark.high.operator

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext

/**
 * Demonstrates the `randomSplit` transformation: splits an RDD into
 * multiple RDDs according to the supplied weights.
 */
object RandomSplitoperator {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setMaster("local")
      .setAppName("RandomSplitoperator")
    val sc = new SparkContext(conf)
    try {
      val rdd = sc.makeRDD(1 to 10)
      // Weights 0.1/0.2/0.3/0.4 sum to 1.0; randomSplit normalizes them
      // anyway, and returns one RDD per weight (here: 4).
      val splitRDD = rdd.randomSplit(Array(0.1, 0.2, 0.3, 0.4))
      println(s"splitRDD.size:${splitRDD.size}")
    } finally {
      // Always release the SparkContext, even if the job above fails.
      sc.stop()
    }
  }
}

  

posted @ 2018-06-18 14:03  uuhh  阅读(90)  评论(0)    收藏  举报