package com.bjsxt.scala.spark.operator
import org.apache.spark.{SparkConf, SparkContext}
object CommonMLlibOperator {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("CommonMLlibOperator").setMaster("local")
    val sc = new SparkContext(conf)

    val rdd1 = sc.makeRDD(Array(1, 2, 3, 4, 5))
    val rdd2 = sc.parallelize(List(6, 7, 8, 9))

    /**
     * cartesian pairs every element of rdd1 with every element of rdd2.
     */
    val result = rdd1.cartesian(rdd2)
    result.foreach(println)
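
    // Sketch (not in the original file): count() forces the lazy cartesian
    // product and should report 5 * 4 = 20 pairs here.
    println(result.count())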
    /**
     * takeSample is an action-type operator.
     * Without replacement the sample is capped at the RDD's size,
     * so only 5 elements come back even though 100 were requested.
     */
    val sampleArr = rdd1.takeSample(false, 100, 1)
    println(sampleArr.length)
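
    // Sketch (assumption, not in the original file): sampling with replacement
    // is not capped by the RDD's size, so this variant should print 100.
    println(rdd1.takeSample(true, 100, 1).length)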
    // top(3) with the reversed ordering defined below returns the 3 smallest elements.
    val orderedArr = rdd1.top(3)(new MyOrdering())
    for (elem <- orderedArr) {
      println(elem)
    }
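
    // Alternative sketch (not in the original file): Scala's built-in reversed
    // ordering yields the same 3 smallest elements without a custom Ordering class.
    rdd1.top(3)(Ordering.Int.reverse).foreach(println)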
    sc.stop()
  }
}
// Reversed Int ordering: larger values compare as "smaller", so RDD.top picks the minimum elements.
class MyOrdering extends Ordering[Int] {
  override def compare(param1: Int, param2: Int): Int = {
    // Compare in reverse; avoids the overflow risk of -(param1 - param2).
    param2.compare(param1)
  }
}