Daily Summary
Today's work was data cleaning. The first step was creating a SparkSession that connects to the YARN cluster and the Hive metastore:
from pyspark.sql import SparkSession

# Build a SparkSession that runs on YARN with Hive support enabled.
spark = (
    SparkSession.builder
    .appName("RemoteSparkConnection")
    .master("yarn")
    # Python interpreter for executors: the myspark conda environment
    .config("spark.pyspark.python", "/opt/apps/anaconda3/envs/myspark/bin/python")
    # Hive warehouse directory
    .config("spark.sql.warehouse.dir", "/hive/warehouse")
    # Hive metastore service address
    .config("hive.metastore.uris", "thrift://node01:9083")
    # Write Parquet in the legacy format so older Hive versions can read it
    .config("spark.sql.parquet.writeLegacyFormat", "true")
    .enableHiveSupport()
    .getOrCreate()
)
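
The session above only establishes the connection; the cleaning step itself is not shown. A minimal sketch of what it might look like, assuming hypothetical Hive tables raw_logs and clean_logs and placeholder columns user_id and event_time (none of these names are from the original):

from pyspark.sql import functions as F

# Read the (hypothetical) raw table from Hive
df = spark.table("raw_logs")

cleaned = (
    df.dropDuplicates()                # remove exact duplicate rows
      .na.drop(subset=["user_id"])     # drop rows missing the key column
      # normalize the timestamp column (placeholder column name)
      .withColumn("event_time", F.to_timestamp("event_time"))
)

# Save back to Hive as Parquet, using the legacy format set in the config above
cleaned.write.mode("overwrite").format("parquet").saveAsTable("clean_logs")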