头歌 SparkSQL数据源 粘贴复制即可

第1关 SparkSQL加载和保存

package com.educoder.bigData.sparksql2;
import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
public class Test1 {

	/**
	 * Loads two JSON files, appends both to the "people" store (Parquet,
	 * Spark's default format), then reads the store back and prints it.
	 *
	 * @param args unused command-line arguments
	 * @throws AnalysisException if a load/save path cannot be resolved
	 */
	public static void main(String[] args) throws AnalysisException {
		SparkSession spark = SparkSession
				.builder()
				.appName("test1")
				.master("local")
				.getOrCreate();
		try {
			/********** Begin **********/
			// SaveMode.Append lets the second write add rows instead of
			// failing because the "people" directory already exists.
			spark.read().format("json").load("people.json")
					.write().mode(SaveMode.Append).save("people");
			spark.read().format("json").load("people1.json")
					.write().mode(SaveMode.Append).save("people");
			// No format given: read() defaults to Parquet, matching save().
			spark.read().load("people").show();
			/********** End **********/
		} finally {
			// Release the local Spark context and its resources.
			spark.stop();
		}
	}
}

第2关 Parquet文件介绍

package com.educoder.bigData.sparksql2;

import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.SparkSession;


public class Test2 {

	/**
	 * Writes each JSON file as Parquet into its own partition directory
	 * ("people/id=1", "people/id=2"), then reads the partitioned table back;
	 * Spark's partition discovery adds the "id" column automatically.
	 *
	 * @param args unused command-line arguments
	 * @throws AnalysisException if a load/save path cannot be resolved
	 */
	public static void main(String[] args) throws AnalysisException {
		SparkSession spark = SparkSession
				.builder()
				// Fixed copy-paste: this is exercise 2, not "test1".
				.appName("test2")
				.master("local")
				.getOrCreate();
		try {
			spark.read().format("json").load("people.json").write().parquet("people/id=1");
			spark.read().format("json").load("people1.json").write().parquet("people/id=2");
			// Default Parquet reader; partition column "id" is inferred
			// from the directory names.
			spark.read().load("people").show();
		} finally {
			// Release the local Spark context and its resources.
			spark.stop();
		}
	}
}

第3关 json文件介绍

package com.educoder.bigData.sparksql2;

import org.apache.spark.sql.AnalysisException;
import org.apache.spark.sql.SparkSession;


public class Test3 {

	/**
	 * Registers both JSON files as temporary SQL views and computes the
	 * average salary over the union of the two datasets.
	 *
	 * @param args unused command-line arguments
	 * @throws AnalysisException if a load path or SQL query cannot be resolved
	 */
	public static void main(String[] args) throws AnalysisException {
		SparkSession spark = SparkSession
				.builder()
				// Fixed copy-paste: this is exercise 3, not "test1".
				.appName("test3")
				.master("local")
				.getOrCreate();
		try {
			spark.read().format("json").load("people.json").createOrReplaceTempView("people");
			spark.read().format("json").load("people1.json").createOrReplaceTempView("people1");
			// "union all" keeps duplicate salaries so the average is taken
			// over every row from both files.
			spark.sql("select avg(salary) from ( select salary from people union all select salary from people1) a").show();
		} finally {
			// Release the local Spark context and its resources.
			spark.stop();
		}
	}
}

第4关 JDBC读取数据源

package com.educoder.bigData.sparksql2;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;


public class Test4 {

	/** JDBC connection settings shared by every read/write below. */
	private static final String JDBC_URL =
			"jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=utf-8";
	private static final String JDBC_TABLE = "people";
	private static final String JDBC_USER = "root";
	// NOTE(review): hard-coded plaintext password is acceptable for this
	// exercise only — real code must load credentials from configuration.
	private static final String JDBC_PASSWORD = "123123";

	/**
	 * Writes people.json into the MySQL table (replacing any prior contents),
	 * appends people1.json to the same table, then reads the table back over
	 * JDBC and prints it.
	 *
	 * @param spark active session used for all reads and writes
	 */
	public static void case4(SparkSession spark) {
		// First file replaces any existing table contents.
		writeJsonToTable(spark, "people.json", SaveMode.Overwrite);
		// Second file is added on top of the first.
		writeJsonToTable(spark, "people1.json", SaveMode.Append);

		Dataset<Row> people = spark.read()
				.format("jdbc")
				.option("url", JDBC_URL)
				.option("dbtable", JDBC_TABLE)
				.option("user", JDBC_USER)
				.option("password", JDBC_PASSWORD)
				.load();
		people.show();
	}

	/** Loads one JSON file and writes it to the shared MySQL table with the given mode. */
	private static void writeJsonToTable(SparkSession spark, String path, SaveMode mode) {
		Dataset<Row> data = spark.read().format("json").load(path);
		data.write()
				.format("jdbc")
				.option("url", JDBC_URL)
				.option("dbtable", JDBC_TABLE)
				.option("user", JDBC_USER)
				.option("password", JDBC_PASSWORD)
				.mode(mode)
				.save();
	}
}

  • 14
    点赞
  • 13
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值