package com.spark.imports
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}
object ImportMysql {

  /**
   * Loads the `tdealinfo` table from a MySQL database over JDBC into a Spark
   * DataFrame and prints it to stdout, then waits for a key press (so the
   * local Spark UI stays reachable) before shutting down.
   *
   * NOTE(review): connection details (host, user, password, table) are
   * hard-coded below — move them to configuration/args before reuse.
   */
  def main(args: Array[String]): Unit = {
    // getSimpleName is already a String; no interpolation needed.
    val conf = new SparkConf()
      .setAppName(this.getClass.getSimpleName)
      .setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val sqlContext = new HiveContext(sc)
      val frame: DataFrame = sqlContext.read.format("jdbc")
        .option("url", "jdbc:mysql://192.168.0.151:3306/spark")
        .option("user", "root")
        .option("password", "123456")
        .option("dbtable", "tdealinfo")
        .option("driver", "com.mysql.jdbc.Driver")
        .load()
      frame.show()
      // Block until a character is entered, keeping the app (and its UI) alive.
      readChar()
    } finally {
      // Release the SparkContext even if the JDBC load or show() throws.
      sc.stop()
    }
  }
}
// NOTE(review): "网友评论" ("user comments") — web-scrape residue, not Scala code; kept as a comment so the file compiles.