1.1   Reading data from JDBC
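Both examples in this section assume the MySQL JDBC driver is on the classpath; without it the connection fails with a "No suitable driver" error. A minimal sbt dependency might look like the line below (the artifact coordinates are real, but the version number is an illustrative assumption; match it to your MySQL server):

// build.sbt -- the version number here is an assumption, not from the original
libraryDependencies += "mysql" % "mysql-connector-java" % "8.0.33"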
import java.util.Properties

import org.apache.spark.sql.SparkSession

object JDBCRead {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("JDBCRead")
      .getOrCreate()
    import spark.implicits._
    val url = "jdbc:mysql://localhost:3306/scrapy"/*"jdbc:mysql://mysql://hadoop102/rdd"*/
    val user = "root"
    val pw = "123456"
    // Method 1: configure everything through options on the generic reader
    /* val df = spark.read.option("url", url)
      .option("user", user)
      .option("password", pw)
      .option("dbtable", "user")
      .format("jdbc").load() */
    // Method 2: pass the connection details in a Properties object
    val props = new Properties()
    props.put("user", user)
    props.put("password", pw)
    // The URL is passed directly as the first argument of read.jdbc,
    // so it does not need to be repeated in the connection properties.
    val df = spark.read.jdbc(url, "userlo", props)
    df.show()
    spark.close()
  }
}
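For larger tables, the read can be parallelized with the partitioned overload of read.jdbc. The sketch below is only an illustration under assumptions not in the original example: it presumes the table has a numeric column id whose values fall roughly in [1, 10000], and the partition count is arbitrary.

import java.util.Properties

import org.apache.spark.sql.SparkSession

object JDBCReadPartitioned {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("JDBCReadPartitioned")
      .getOrCreate()

    val props = new Properties()
    props.put("user", "root")
    props.put("password", "123456")

    // Split the scan into 4 partitions over the numeric column "id"
    // (an assumed column); each partition issues its own range query.
    val df = spark.read.jdbc(
      "jdbc:mysql://localhost:3306/scrapy",
      "userlo",
      columnName = "id",
      lowerBound = 1L,
      upperBound = 10000L,
      numPartitions = 4,
      connectionProperties = props)
    df.show()
    spark.close()
  }
}

Note that rows with id outside the bounds are still read; the bounds only decide how the range is split across partitions.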
 
 
1.2   Writing data to JDBC
import java.util.Properties

import org.apache.spark.sql.SparkSession

object JDBCWrite {
  val url = "jdbc:mysql://localhost:3306/scrapy"
  val user = "root"
  val pw = "123456"
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("JDBCWrite")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._
    // Read a JSON file, then write the resulting DataFrame to JDBC
    val df = spark.read.json("F:/BaiduNetdiskDownload/15-spark/spark-coreData/users.json")
    // Method 1: configure the write through DataFrameWriter options
    /* df.write
        .format("jdbc")
        .option("url",url)
        .option("user",user)
        .option("password",pw)
        .option("dbtable","userlo")
        .mode("append")
        .save()*/
    // Method 2: pass the connection details in a Properties object
    val props = new Properties()
    props.put("user", user)
    props.put("password", pw)
    // Append to the table if it already exists; the default save mode,
    // ErrorIfExists, would fail on a second run.
    df.write.mode("append").jdbc(url, "userlo", props)
    spark.close()
    
  }
}
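Which save mode fits depends on whether the target table should be replaced or grown. Below is a minimal sketch of the four modes, reusing the userlo table from above; note that SaveMode.Overwrite drops and recreates the table unless the truncate option is set.

import java.util.Properties

import org.apache.spark.sql.{SaveMode, SparkSession}

object JDBCWriteModes {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("JDBCWriteModes")
      .getOrCreate()

    val props = new Properties()
    props.put("user", "root")
    props.put("password", "123456")

    val df = spark.read.json("F:/BaiduNetdiskDownload/15-spark/spark-coreData/users.json")

    // SaveMode.Overwrite replaces the table, SaveMode.Append adds rows,
    // SaveMode.ErrorIfExists (the default) fails if the table exists,
    // and SaveMode.Ignore silently skips the write.
    df.write
      .mode(SaveMode.Overwrite)
      .jdbc("jdbc:mysql://localhost:3306/scrapy", "userlo", props)

    spark.close()
  }
}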