package com.bjsxt.scala.spark.jdbc

import java.sql.DriverManager
import java.util.HashMap

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.SQLContext
object JDBCDataSource {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("JDBCDataSource").setMaster("local[1]")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)
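
    // Connection options for Spark's built-in JDBC data source.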
    val options = new HashMap[String, String]()
    options.put("url", "jdbc:mysql://hadoop1:3306/testdb")
    options.put("user", "spark")
    options.put("password", "spark2016")
    options.put("dbtable", "student_info")
    val studentInfosDF = sqlContext.read.format("jdbc").options(options).load()

    options.put("dbtable", "student_score")
    val studentScoresDF = sqlContext.read.format("jdbc").options(options).load()
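
    // Register both DataFrames as temporary tables so they can be joined in SQL.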
    studentInfosDF.registerTempTable("student_info")
    studentScoresDF.registerTempTable("student_score")
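
    // Join student info with scores and keep only students scoring above 80.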
    val sql =
      """SELECT student_info.name, student_info.age, student_score.score
        |FROM student_info
        |JOIN student_score ON (student_info.name = student_score.name)
        |WHERE student_score.score > 80""".stripMargin
    val studentsDF = sqlContext.sql(sql)

    // Save the result rows into the MySQL table good_student_info (one insert per row).
    studentsDF.foreach { row =>
      val insertSql =
        s"insert into good_student_info values('${row.getString(0)}', ${row.getInt(1)}, ${row.getInt(2)})"
      // println(insertSql)
      Class.forName("com.mysql.jdbc.Driver")
      val conn = DriverManager.getConnection("jdbc:mysql://hadoop1:3306/testdb", "spark", "spark2016")
      try {
        val stat = conn.createStatement()
        try {
          stat.executeUpdate(insertSql)
        } finally {
          stat.close()
        }
      } finally {
        conn.close()
      }
    }
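
    // A minimal alternative sketch (not used above): since Spark 1.4 the
    // DataFrame writer can append the whole result in a single call instead of
    // opening one JDBC connection per row. Assumes the good_student_info table
    // already exists with matching columns:
    //
    //   val props = new java.util.Properties()
    //   props.put("user", "spark")
    //   props.put("password", "spark2016")
    //   studentsDF.write.mode(org.apache.spark.sql.SaveMode.Append)
    //     .jdbc("jdbc:mysql://hadoop1:3306/testdb", "good_student_info", props)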

    sc.stop()
  }
}