package spark
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession
/**
 * Example entry point: reads a MySQL table over JDBC into a DataFrame,
 * prints it, then counts the lines of a local text file via an RDD.
 */
object MainSpark {

  val LOG = Logger.getLogger(MainSpark.getClass.getName)

  def main(args: Array[String]): Unit = {
    // Local-mode session for this standalone example.
    val spark = SparkSession
      .builder()
      .appName("Spark SQL basic example")
      .master("local")
      //.config("spark.some.config.option", "some-value")
      .getOrCreate()

    try {
      // JDBC options for the `people` table.
      // NOTE(review): credentials are hard-coded for the example only —
      // externalize them (config/env) before real use.
      val conf = Map(
        "url"      -> "jdbc:mysql://127.0.0.1:3306/test",
        "driver"   -> "com.mysql.jdbc.Driver",
        "dbtable"  -> "people",
        "user"     -> "root",
        "password" -> "123456")

      // Load the table as a DataFrame and print its rows.
      val peopleDF = spark.read.format("jdbc").options(conf).load()
      peopleDF.show()

      // Count lines of a local text file (single partition).
      val file = "file:///C:\\github\\data\\spark.java"
      val fileRdd = spark.sparkContext.textFile(file, 1)
      val count = fileRdd.count()
      // Fixed interpolation: was s"#len is {$count}", which logged literal braces.
      LOG.info(s"#len is $count")
    } finally {
      // Always release the SparkContext, even if a job above fails.
      spark.stop()
    }
  }
}
pom.xml configuration (adding the spark-core and spark-sql dependencies):
<!-- NOTE(review): the original POM lost its XML tags when pasted; reconstructed below
     from the visible values. Property names marked TODO are inferred — confirm against
     the original project. -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>houyijun.github.io</groupId>
  <artifactId>spark</artifactId>
  <version>0.0.1-SNAPSHOT</version>

  <properties>
    <java.version>1.8</java.version> <!-- TODO confirm property name -->
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <junit.version>3.8.1</junit.version> <!-- TODO confirm property name -->
    <slf4j-api.version>1.6.4</slf4j-api.version>
    <maven-compiler-plugin.version>3.1</maven-compiler-plugin.version> <!-- TODO confirm property name -->
    <scala.version>2.11.8</scala.version>
    <spark.version>2.3.2</spark.version>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_2.11</artifactId>
      <version>2.2.0</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_2.11</artifactId>
      <version>2.2.0</version>
      <scope>provided</scope>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.27</version>
    </dependency>
    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>${scala.version}</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-api</artifactId>
      <version>${slf4j-api.version}</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>1.7.25</version>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.6.1</version>
        <configuration>
          <source>1.7</source>
          <target>1.7</target>
        </configuration>
      </plugin>
      <plugin>
        <groupId>net.alchim31.maven</groupId>
        <artifactId>scala-maven-plugin</artifactId>
        <version>3.2.2</version>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-jar-plugin</artifactId>
        <version>3.0.2</version>
        <configuration>
          <archive>
            <manifest>
              <addClasspath>true</addClasspath>
              <classpathPrefix>lib/</classpathPrefix>
              <mainClass>spark.example.Main</mainClass>
            </manifest>
          </archive>
        </configuration>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-dependency-plugin</artifactId>
        <version>3.0.0</version>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>copy-dependencies</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}/lib</outputDirectory>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>