一、方法1:分別將兩張表中的數據加載爲DataFrame
/* * 方法1:分別將兩張表中的數據加載爲DataFrame * */ /*
Map<String,String> options = new HashMap<String,String>(); options.put("url","jdbc:mysql://localhost:3306/tset"); options.put("driver","com.mysql.jdbc.Driver"); options.put("user","root"); options.put("password","admin"); options.put("dbtable","information"); Dataset myinfromation = sqlContext.read().format("jdbc").options(options).load(); //若是須要多張表,則須要再put一遍 options.put("dbtable","score"); Dataset scores = sqlContext.read().format("jdbc").options(options).load();*/
二、方法2:分別將mysql中兩張表的數據加載爲DataFrame
//方法2:分別將mysql中兩張表的數據加載爲DataFrame DataFrameReader reader = sqlContext.read().format("jdbc"); reader.option("url","jdbc:mysql://127.0.0.1:3306/test?serverTimezone=GMT"); reader.option("driver","com.mysql.cj.jdbc.Driver"); reader.option("user","root"); reader.option("password","admin"); reader.option("dbtable","information"); Dataset myinformation = reader.load(); reader.option("dbtable","score"); Dataset scores = reader.load();
三、問題:
在程序運行過程中報錯
(1)
解決:
在idea中加入jar包
(2)運行報錯
The server time zone value 'Öйú±ê׼ʱ¼ä' is unrecognized or represents .....
解決:添加信息。
jdbc:mysql://127.0.0.1:3306/test?serverTimezone=GMT
四、成功運行
附:程序源碼:
package sparkSQl; import org.apache.spark.SparkConf; import org.apache.spark.api.java.JavaSparkContext; import org.apache.spark.sql.DataFrameReader; import org.apache.spark.sql.Dataset; import org.apache.spark.sql.SQLContext; import java.util.HashMap; import java.util.Map; public class mysqlToDataFrame { public static void main(String[] args) { //首先新建一個sparkconf定義參數 SparkConf conf = new SparkConf().setMaster("local").setAppName("JDBCDataSource"); //建立sparkContext,是通往spark集羣的惟一通道 JavaSparkContext sc = new JavaSparkContext(conf); //新建一個sparksql SQLContext sqlContext = new SQLContext(sc); //sparksql鏈接mysql /* * 方法1:分別將兩張表中的數據加載爲DataFrame * */ /*Map<String,String> options = new HashMap<String,String>(); options.put("url","jdbc:mysql://localhost:3306/tset"); options.put("driver","com.mysql.jdbc.Driver"); options.put("user","root"); options.put("password","admin"); options.put("dbtable","information"); Dataset myinfromation = sqlContext.read().format("jdbc").options(options).load(); //若是須要多張表,則須要再put一遍 options.put("dbtable","score"); Dataset scores = sqlContext.read().format("jdbc").options(options).load();*/ //方法2:分別將mysql中兩張表的數據加載爲DataFrame DataFrameReader reader = sqlContext.read().format("jdbc"); reader.option("url","jdbc:mysql://127.0.0.1:3306/test?serverTimezone=GMT"); reader.option("driver","com.mysql.cj.jdbc.Driver"); reader.option("user","root"); reader.option("password","admin"); reader.option("dbtable","information"); Dataset myinformation = reader.load(); reader.option("dbtable","score"); Dataset scores = reader.load(); //將兩個DataFrame轉換爲javapairrdd,執行join操做 myinformation.registerTempTable("info"); scores.registerTempTable("score"); //定義sql語句 String sql = "select info.name,age" +" from info join score" +" on(info.name=score.name)" +" where score.score>90"; Dataset sql2 = sqlContext.sql(sql); sql2.show(); } }