2025/1/9
import org.apache.spark.{SparkConf, SparkContext}
// Initialize the SparkContext (local mode, single worker thread)
val conf = new SparkConf().setAppName("Spark Basics").setMaster("local")
val sc = new SparkContext(conf)
// Create an RDD from a local collection, then aggregate it with an action
val data = Array(1, 2, 3, 4, 5)
val distData = sc.parallelize(data) // distribute the array as an RDD
val sum = distData.reduce(_ + _)    // reduce is an action: it sums the elements
println(s"Sum: $sum")
sc.stop()
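
A follow-up sketch on the same theme (the app name and variable names here are illustrative, not from the original note): it chains the common transformations map and filter, which are lazy, and only runs when the collect() action is called.

import org.apache.spark.{SparkConf, SparkContext}

// Same local setup as above, with a placeholder app name
val conf2 = new SparkConf().setAppName("RDD Transformations").setMaster("local")
val sc2 = new SparkContext(conf2)

// Transformations are lazy; nothing executes until an action is invoked
val numbers = sc2.parallelize(1 to 10)
val squares = numbers.map(n => n * n)        // 1, 4, 9, ..., 100
val evenSquares = squares.filter(_ % 2 == 0) // 4, 16, 36, 64, 100

// collect() is an action: it triggers the computation and returns the results to the driver
println(s"Even squares: ${evenSquares.collect().mkString(", ")}")
sc2.stop()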