项目场景:
在使用 Spark 编写 Hudi 程序时,运行程序将数据写入 Hive,但 Hudi 表无法同步(加载)到 Hive
报错(该异常通常由 Spark 内置的 Hive 客户端与 Hive Metastore 服务端的 Thrift 协议版本不一致导致):
org.apache.thrift.TApplicationException: Required field 'client_protocol' is unset!
问题描述
package com.qf.bigdata.util
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
/**
* 根据环境变量参数创建 SparkSession 的工具对象
* @author QF
* @date 2020/6/9 2:59 PM
* @version V1.0
*/
object SparkHelper {
def getSparkSession(env: String) = {
env match {
// 集群运行模式
case "prod" => {
val conf = new SparkConf()
.setAppName("Log2Hudi")
.set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
.set("spark.sql.hive.metastore.version","1.2.1")
.set("spark.sql.cbo.enabled",