package cn.spark.test01;
import java.util.Properties;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrameReader;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;
public class SparkSqlTest01 {

    /**
     * Demo of three JDBC interactions with a local MySQL instance via Spark SQL:
     * read with a Properties object, read with the fluent option API, and append
     * a filtered Dataset back into the source table.
     */
    public static void main(String[] args) {
        // Run locally; app name defaults to the class name for easy log identification.
        SparkConf sparkConf = new SparkConf()
                .setAppName(SparkSqlTest01.class.getName())
                .setMaster("local");

        // Pass the SparkConf directly to the builder instead of first creating a
        // redundant JavaSparkContext whose only purpose was to seed the context
        // (the original also never closed that context explicitly).
        SparkSession spark = SparkSession.builder()
                .config(sparkConf)
                .config("spark.some.config.option", "some-value")
                .getOrCreate();

        String url = "jdbc:mysql://localhost:3306/test";

        try {
            /** Read MySQL data, method 1: connection Properties object. */
            // NOTE(review): credentials are hard-coded; move to config/env for real use.
            Properties properties = new Properties();
            properties.setProperty("user", "root");
            properties.setProperty("password", "123456");
            Dataset<Row> data01 = spark.read()
                    .jdbc(url, "person", properties)
                    .select("name", "age")
                    .filter("name='meng'");
            data01.show();

            /** Read MySQL data, method 2: fluent option-based DataFrameReader. */
            // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
            // Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm which jar is on
            // the classpath before changing it.
            DataFrameReader reader = spark.read()
                    .format("jdbc")
                    .option("url", url)
                    .option("driver", "com.mysql.jdbc.Driver")
                    .option("user", "root")
                    .option("password", "123456")
                    .option("dbtable", "person");
            Dataset<Row> data02 = reader.load();
            data02.show();

            /** Append the filtered Dataset back into MySQL. */
            // NOTE(review): data01 carries only (name, age); appending into "person"
            // assumes the table accepts the narrower schema (defaults/nullable columns)
            // — verify against the table definition.
            data01.write().mode(SaveMode.Append).jdbc(url, "person", properties);
        } finally {
            // Always release the session (and its underlying SparkContext), even if a
            // read or write above throws — the original leaked it on failure.
            spark.close();
        }
    }
}