2025/1/9

import org.apache.spark.{SparkConf, SparkContext}

// Initialize the SparkContext (master "local" runs Spark on a single local thread)
val conf = new SparkConf().setAppName("Spark Basics").setMaster("local")
val sc = new SparkContext(conf)

// Create an RDD from a local collection and run an action on it
val data = Array(1, 2, 3, 4, 5)
val distData = sc.parallelize(data)   // distribute the collection as an RDD
val sum = distData.reduce(_ + _)      // action: sum all elements
println(s"Sum: $sum")
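// (Illustrative sketch, not part of the original snippet: transformations on the
// RDD are lazy and only execute when an action such as collect() or reduce() runs.
// The names `squares` and `evenSquares` below are assumptions for demonstration.)
val squares = distData.map(x => x * x)                 // transformation: square each element
val evenSquares = squares.filter(_ % 2 == 0)           // transformation: keep the even values
println(s"Even squares: ${evenSquares.collect().mkString(", ")}")  // action: prints 4, 16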

sc.stop()  // release resources held by the SparkContext
