package com.shujia.spark.core
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
object Demo10Sort {
  def main(args: Array[String]): Unit = {
    val conf: SparkConf = new SparkConf()
      .setAppName("sort")
      .setMaster("local")

    // Spark context object
    val sc = new SparkContext(conf)
    val listRDD: RDD[Int] = sc.parallelize(List(1, 2, 123, 3, 4, 23, 5, 6, 7, 8))
    /**
     * sortBy: sort the RDD by the value returned from the given key function;
     * ascending by default. Pass ascending = false for descending order.
     */
    val sortRDD: RDD[Int] = listRDD.sortBy(i => i, ascending = false)
    sortRDD.foreach(println)
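
    // Illustrative sketch, not part of the original demo: sortBy takes an
    // arbitrary key function, so a tuple RDD can be sorted by any field.
    // The pairs below are assumed sample data, sorted by their Int value.
    val byValueRDD: RDD[(String, Int)] = sc
      .parallelize(List(("a", 3), ("b", 1), ("c", 2)))
      .sortBy(pair => pair._2)
    byValueRDD.collect().foreach(println) // (b,1) (c,2) (a,3)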
    val kvRDD: RDD[(String, Int)] = sc.parallelize(List(("java", 100), ("spark", 20), ("hadoop", 12321)))
    /**
     * sortByKey: sort a key-value RDD by its key; ascending by default.
     */
    val sortByKeyRDD: RDD[(String, Int)] = kvRDD.sortByKey()
    sortByKeyRDD.foreach(println)
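
    // Illustrative sketch, not part of the original demo: sortByKey also
    // accepts an ascending flag; collect() brings the sorted pairs back to
    // the driver, so the printed order is deterministic.
    val descByKeyRDD: RDD[(String, Int)] = kvRDD.sortByKey(ascending = false)
    descByKeyRDD.collect().foreach(println) // (spark,20) (java,100) (hadoop,12321)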
  }
}