2.3.0
Copy hadoop/*.jar and $HADOOP_HOME/lib/*.jar into $SEATUNNEL_HOME/lib/, then restart the seatunnel-cluster.
Spark is required.
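A minimal shell sketch of that step, assuming the jars sit directly under $HADOOP_HOME and $HADOOP_HOME/lib (adjust the globs to your Hadoop layout) and that the cluster is launched with bin/seatunnel-cluster.sh; script names and flags may differ between releases:

```bash
# Copy the Hadoop client jars onto SeaTunnel's classpath
# ($HADOOP_HOME and $SEATUNNEL_HOME must already be exported).
cp "$HADOOP_HOME"/*.jar "$HADOOP_HOME"/lib/*.jar "$SEATUNNEL_HOME"/lib/

# Restart the cluster so the new jars are loaded: stop the running
# seatunnel-cluster process first, then start it again as a daemon.
"$SEATUNNEL_HOME"/bin/seatunnel-cluster.sh -d
```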
2.3.1
```
2023-03-27 20:15:48,038 INFO org.apache.seatunnel.engine.server.SeaTunnelServer - master node check interrupted
2023-03-27 20:15:48,110 WARN org.apache.seatunnel.engine.server.TaskExecutionService - [localhost]:5801 [seatunnel-133534] [5.1] The Imap acquisition failed due to the hazelcast node being offline or restarted, and will be retried next time
com.hazelcast.core.HazelcastInstanceNotActiveException: HazelcastInstance[[localhost]:5801] is not active!
```
2.1.2
# DolphinScheduler
```
23/03/27 21:21:57 ERROR Seatunnel: Fatal Error,
23/03/27 21:21:57 ERROR Seatunnel: Please submit bug report in https://github.com/apache/incubator-seatunnel/issues
23/03/27 21:21:57 ERROR Seatunnel: Reason:Incomplete HDFS URI, no host: hdfs:///spark2-history
23/03/27 21:21:57 ERROR Seatunnel: Exception StackTrace:java.io.IOException: Incomplete HDFS URI, no host: hdfs:///spark2-history
at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:169)
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3303)
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:124)
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3352)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3320)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:479)
at org.apache.spark.util.Utils$.getHadoopFileSystem(Utils.scala:1897)
at org.apache.spark.scheduler.EventLoggingListener.<init>(EventLoggingListener.scala:74)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:520)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2498)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:934)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:925)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:925)
at org.apache.seatunnel.spark.SparkEnvironment.prepare(SparkEnvironment.java:106)
at org.apache.seatunnel.spark.SparkEnvironment.prepare(SparkEnvironment.java:43)
at org.apache.seatunnel.core.base.config.EnvironmentFactory.getEnvironment(EnvironmentFactory.java:61)
at org.apache.seatunnel.core.base.config.ExecutionContext.<init>(ExecutionContext.java:49)
at org.apache.seatunnel.core.spark.command.SparkTaskExecuteCommand.execute(SparkTaskExecuteCommand.java:62)
at org.apache.seatunnel.core.base.Seatunnel.run(Seatunnel.java:39)
at org.apache.seatunnel.core.spark.SeatunnelSpark.main(SeatunnelSpark.java:32)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:900)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:192)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:217)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
23/03/27 21:21:57 ERROR Seatunnel:
===============================================================================
Exception in thread "main" java.io.IOException: Incomplete HDFS URI, no host: hdfs:///spark2-history
	... (same stack trace as above)
23/03/27 21:21:57 INFO ShutdownHookManager: Shutdown hook called
```
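This failure means the Spark event-log location was given as hdfs:///spark2-history, an HDFS URI with no NameNode host, so the HDFS client cannot resolve a target cluster. A minimal sketch of one possible fix, assuming the value lives in $SPARK_HOME/conf/spark-defaults.conf and that the NameNode answers at namenode:8020 (both assumptions; use your cluster's real address, or an HA nameservice URI):

```bash
# Rewrite the host-less URI used by spark.eventLog.dir / spark.history.fs.logDirectory
# so it names the NameNode explicitly; namenode:8020 is a placeholder host/port.
sed -i 's#hdfs:///spark2-history#hdfs://namenode:8020/spark2-history#g' \
    "$SPARK_HOME/conf/spark-defaults.conf"
```

Alternatively, keep hdfs:///spark2-history and make sure the cluster's Hadoop configuration (core-site.xml with fs.defaultFS) is on the job's classpath, since the host-less form only works when fs.defaultFS can supply the missing host; the jar/config copy step described for 2.3.0 above addresses the same gap.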