# First PySpark program

# coding:utf8
from pyspark import SparkConf, SparkContext
import os


if __name__ == '__main__':
    # JAVA_HOME must point at the JDK install root (the directory that
    # contains the java bin/ directory), so Spark can launch the JVM.
    os.environ["JAVA_HOME"] = "/opt/jdk1.8.0_144"

    # Connect to the standalone cluster master and name this application.
    conf = SparkConf().setMaster("spark://node1:7077").setAppName("wordcountHelloWord")
    sc = SparkContext(conf=conf)

    try:
        # RDD of lines read from the HDFS text file.
        files = sc.textFile("hdfs://node1:9000/input/words")

        # Split each line on spaces and flatten into a single RDD of words.
        words = files.flatMap(lambda line: line.split(" "))

        # Classic word count: pair every word with 1, then sum per key.
        result = words.map(lambda x: (x, 1)).reduceByKey(lambda a, b: a + b)

        # collect() pulls the (word, count) pairs back to the driver.
        print(result.collect())
    finally:
        # Always release cluster resources, even if the job fails.
        sc.stop()