from pyspark.sql import SparkSession
from pyspark.sql.functions import col
# Create a SparkSession
spark = SparkSession.builder \
    .appName("SparkMonitoringExample") \
    .master("local[*]") \
    .getOrCreate()
# Set the log level
spark.sparkContext.setLogLevel("INFO")
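# ("INFO" is fairly chatty; "WARN" or "ERROR" quiets the console if needed)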
# Sample data
data = [("Alice", 1), ("Bob", 2), ("Cathy", 3)]
df = spark.createDataFrame(data, ["Name", "Value"])
# Emit a log line through the driver JVM's log4j logger (appears at INFO level)
spark.sparkContext._jvm.org.apache.log4j.LogManager.getLogger("SparkMonitoringExample").info("DataFrame created")
# Data processing: keep rows with Value > 1
df_filtered = df.filter(col("Value") > 1)
df_filtered.show()
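# While the application is alive, the Spark web UI (http://localhost:4040 by
# default in local mode) shows the same job and stage details interactively.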
# Define a listener to monitor stage status. PySpark has no public listener API,
# so a common workaround is to implement the JVM's SparkListenerInterface
# through py4j's callback server.
class MyListener:
    def __init__(self):
        self.stages = []
    def onStageCompleted(self, stageCompleted):  # a JVM SparkListenerStageCompleted event
        self.stages.append(stageCompleted.stageInfo())
    def __getattr__(self, name):
        return lambda *args, **kwargs: None  # no-op for every other listener callback
    class Java:
        implements = ["org.apache.spark.scheduler.SparkListenerInterface"]
from pyspark.java_gateway import ensure_callback_server_started
ensure_callback_server_started(spark.sparkContext._gateway)  # required for py4j callbacks
listener = MyListener()
spark.sparkContext._jsc.sc().addSparkListener(listener)  # register on the JVM SparkContext
# Trigger an action so a job (and its stages) actually runs
df_filtered.groupBy("Name").count().collect()
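# The listener bus delivers events asynchronously; a short wait (a simple
# heuristic, not an official API) lets the callbacks arrive before we read them.
import time
time.sleep(1)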
# Print the status of each completed stage (StageInfo is a JVM object,
# so its fields are read as py4j method calls)
for stage in listener.stages:
    print(f"Stage ID: {stage.stageId()}, Status: {stage.getStatusString()}")
# Stop the SparkSession
spark.stop()