// Spark: connecting to Hive from IDEA
package spark.day03
import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession}
/**
 * Demo: write a CSV file into a Hive table from a local Spark job, then read it back.
 *
 * Requires hive-site.xml, core-site.xml and hdfs-site.xml on the classpath
 * (resources directory) so the Hive-enabled session can reach the metastore.
 */
object _04TestHive {

  /** Fully-qualified Hive table written by [[write]] and read back by [[read]]. */
  private val TableName = "mydb2.country1"

  def main(args: Array[String]): Unit = {
    // Impersonate "root" so the local job has permission to write into the
    // HDFS-backed Hive warehouse directories.
    System.setProperty("HADOOP_USER_NAME", "root")
    write()
    read()
  }

  /** Builds a local, Hive-enabled SparkSession (all cores, app name "testload"). */
  private def newSession(): SparkSession =
    SparkSession.builder()
      .master("local[*]")
      .appName("testload")
      .enableHiveSupport()
      .getOrCreate()

  /**
   * Loads the country CSV from the local filesystem and saves it as a Hive table.
   * With [[SaveMode.Ignore]], the write is silently skipped if the table exists.
   */
  def write(): Unit = {
    val spark = newSession()
    try {
      // NOTE(review): hard-coded Windows path — adjust for the local machine.
      val df: DataFrame = spark.read.csv("file:///F:\\IdeaProjects\\spark_sql\\sql\\country.csv")
      df.write.mode(SaveMode.Ignore).saveAsTable(TableName)
    } finally {
      // Always release the session, even if the write fails.
      spark.stop()
    }
  }

  /** Reads the Hive table written by [[write]], shows it, and prints its row count. */
  def read(): Unit = {
    val spark = newSession()
    try {
      val df: DataFrame = spark.read.table(TableName)
      df.show()
      println(df.count())
    } finally {
      // Always release the session, even if the read fails.
      spark.stop()
    }
  }
}
// Note: copy hive-site.xml, core-site.xml and hdfs-site.xml into the resources directory.