package sparkUtil
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
object SparkHive {

  /** Demo entry point: reads a Hive table, creates a partitioned Hive table,
    * and overwrites one partition of it via Spark SQL.
    *
    * @param args unused command-line arguments
    */
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    sparkConf.setAppName("SparkHive")
    sparkConf.setMaster("local") // local mode for this demo; override via spark-submit in production

    // enableHiveSupport() is required: without it Spark uses an in-memory
    // catalog and `table(...)` / `create table ... stored as parquet`
    // will not go through the Hive metastore.
    val sparkSession = SparkSession
      .builder()
      .config(sparkConf)
      .enableHiveSupport()
      .getOrCreate()

    try {
      val birthDay  = 20190919
      val dbName    = "" // TODO: fill in the real database name
      val tableName = "" // TODO: fill in the real table name

      // read hive: load an existing table as a DataFrame.
      val readDF = sparkSession.table(s"$dbName.$tableName")

      // write to hive: create the target table if it does not exist yet.
      val createTableSql =
        """
          |create table if not exists person (
          |  id string,
          |  age int
          |)
          |partitioned by (birthDay int)
          |stored as parquet
        """.stripMargin
      sparkSession.sql(createTableSql)

      // BUG FIX: the partition value was hard-coded as 20190919 even though
      // the string used the s-interpolator; interpolate `birthDay` so the
      // val actually controls which partition is overwritten.
      val insertSql =
        s"""
          |insert overwrite table person partition(birthDay=$birthDay)
          |select
          |  id,age
          |from
          |  person_record
        """.stripMargin
      sparkSession.sql(insertSql)
    } finally {
      // Ensure the session is released even if a SQL statement throws.
      sparkSession.close()
    }
  }
}