# DolphinScheduler 独立集群部署 (standalone deployment notes)
# 1、上传解压 (upload and extract the distribution)
# Extract the release tarball and rename the directory to the install name.
pkg="apache-dolphinscheduler-3.2.0-bin"
tar -xvf "${pkg}.tar.gz"
mv "${pkg}/" dolphinscheduler-3.2.0
# 2、配置 MySQL 作为默认的数据库 (configure MySQL as the metadata database)
# Step 1: copy the MySQL JDBC driver (mysql-connector-java-8.0.16.jar) into
# the libs directory of each server component:
# api-server/libs
# alert-server/libs
# master-server/libs
# standalone-server/libs/standalone-server/
# NOTE(review): worker-server/libs and tools/libs are not listed here; the
# upgrade-schema.sh step below runs out of tools/, which presumably also
# needs the driver in tools/libs — confirm against the official 3.2.0 docs.
# Step 2: initialize the database — open a mysql client and run the DDL
# below to create the metadata schema.
mysql -uroot -p123456
CREATE DATABASE dolphinscheduler DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci;
# Step 3: export the MySQL connection settings so the DolphinScheduler
# services select MySQL (instead of the bundled default) via the "mysql"
# Spring profile.
export DATABASE="${DATABASE:-mysql}"
export SPRING_PROFILES_ACTIVE="${DATABASE}"
export SPRING_DATASOURCE_URL="jdbc:mysql://cdh01:3306/dolphinscheduler?useUnicode=true&characterEncoding=UTF-8&useSSL=false"
export SPRING_DATASOURCE_USERNAME="root"
# SECURITY NOTE: plain-text credentials in a config file — restrict file
# permissions or move the secret to a vault/ENV injected at deploy time.
export SPRING_DATASOURCE_PASSWORD="shujia666888.."
# Step 4: run the schema tool to create/upgrade the DolphinScheduler tables.
# It reads the SPRING_DATASOURCE_* variables exported above; presumably it
# also needs the MySQL JDBC driver present under tools/libs — TODO confirm.
bash tools/bin/upgrade-schema.sh
# 3、启动 DolphinScheduler (start DolphinScheduler)
# Lifecycle commands for the standalone server; run exactly one as needed.
# Start the Standalone Server.
bash ./bin/dolphinscheduler-daemon.sh start standalone-server
# Stop the Standalone Server.
bash ./bin/dolphinscheduler-daemon.sh stop standalone-server
# Check the Standalone Server status.
bash ./bin/dolphinscheduler-daemon.sh status standalone-server
# Web UI: http://cdh01:12345/dolphinscheduler/ui
# Default login: admin / dolphinscheduler123
# 4、配置环境 (configure runtime environment variables for task execution)
# Runtime environment for task execution: JDK, Hadoop, Spark, Hive, Flink,
# plus the Python and DataX launcher paths used by the task plugins.
export JAVA_HOME="/usr/local/soft/jdk1.8.0_171"
export HADOOP_HOME="/usr/local/soft/hadoop-3.1.1"
export HADOOP_CONF_DIR="${HADOOP_HOME}/etc/hadoop"
export SPARK_HOME="/usr/local/soft/spark-3.1.3"
# PYTHON_LAUNCHER / DATAX_LAUNCHER point at executable *files*, not dirs.
export PYTHON_LAUNCHER="/usr/bin/python"
export HIVE_HOME="/usr/local/soft/hive-3.1.2"
export FLINK_HOME="/usr/local/soft/flink-1.15.3"
export DATAX_LAUNCHER="/usr/local/soft/datax/bin/datax.py"
# NOTE(review): the two launcher entries below are file paths, so as PATH
# search entries they are inert; kept verbatim to match the upstream
# dolphinscheduler_env.sh template and the original file's behavior.
export PATH="$PATH:$HADOOP_HOME/bin:$SPARK_HOME/bin:$PYTHON_LAUNCHER:$JAVA_HOME/bin:$HIVE_HOME/bin:$FLINK_HOME/bin:$DATAX_LAUNCHER"