org.apache.spark.sql.hive.thriftserver.server.SparkSQLOperationManager#newExecuteStatementOperation (single session)

Register the UDFs listed in the config file once, lazily, when the first statement arrives. Because the server is forced into single-session mode below, the registration is visible to every JDBC connection.

// Guards the one-time UDF registration on the shared context.
var udfNotInited = true


override def newExecuteStatementOperation(
    parentSession: HiveSession,
    statement: String,
    confOverlay: JMap[String, String],
    async: Boolean): ExecuteStatementOperation = synchronized {
  val sqlContext = sessionToContexts.get(parentSession.getSessionHandle)
  require(sqlContext != null, s"Session handle: ${parentSession.getSessionHandle} has not been" +
    s" initialized or has already been closed.")

  // Lazily register the UDFs from the config file; the flag ensures this runs only once.
  if (udfNotInited) {
    val configFilePath = sqlContext.sparkContext.conf.getOption("spark.app.confpath")
      .getOrElse("/sparklib/conf/udf.config")
    logInfo(s"newExecuteStatementOperation configFilePath: $configFilePath")
    UdfLoadUtils.udfRegister(configFilePath, sqlContext.sparkSession)
    udfNotInited = false
  }

  val conf = sqlContext.sessionState.conf
  val runInBackground = async && conf.getConf(HiveUtils.HIVE_THRIFT_SERVER_ASYNC)
  val operation = new SparkExecuteStatementOperation(parentSession, statement, confOverlay,
    runInBackground)(sqlContext, sessionToActivePool)
  handleToOperation.put(operation.getHandle, operation)
  logDebug(s"Created Operation for $statement with session=$parentSession, " +
    s"runInBackground=$runInBackground")
  operation
}
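
The post does not show UdfLoadUtils itself, so here is a minimal sketch of what udfRegister could look like. It assumes a hypothetical config format of one "functionName,fully.qualified.UdfClassName" entry per line, with the UDF classes already on the driver/executor classpath; the real implementation may differ.

import scala.io.Source
import org.apache.spark.sql.SparkSession

object UdfLoadUtils {
  // Assumed config format: one "functionName,fully.qualified.UdfClass" entry per line.
  def udfRegister(configFilePath: String, spark: SparkSession): Unit = {
    val source = Source.fromFile(configFilePath)
    try {
      source.getLines()
        .map(_.trim)
        .filter(line => line.nonEmpty && !line.startsWith("#"))
        .foreach { line =>
          val Array(name, className) = line.split(",").map(_.trim)
          // Register the Hive UDF on the shared session so every connection can call it.
          spark.sql(s"CREATE TEMPORARY FUNCTION $name AS '$className'")
        }
    } finally {
      source.close()
    }
  }
}

The custom spark.app.confpath property read above is not a built-in Spark setting; it can be supplied at startup, for example via --conf spark.app.confpath=/sparklib/conf/udf.config when launching start-thriftserver.sh.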


org.apache.spark.sql.hive.thriftserver.SparkSQLSessionManager#openSession


override def openSession(
    protocol: TProtocolVersion,
    username: String,
    passwd: String,
    ipAddress: String,
    sessionConf: java.util.Map[String, String],
    withImpersonation: Boolean,
    delegationToken: String): SessionHandle = {
  val sessionHandle =
    super.openSession(protocol, username, passwd, ipAddress, sessionConf, withImpersonation,
      delegationToken)
  val session = super.getSession(sessionHandle)
  HiveThriftServer2.listener.onSessionCreated(
    session.getIpAddress, sessionHandle.getSessionId.toString, session.getUsername)
  // Always reuse the shared SQLContext instead of forking a new session, so every
  // JDBC connection sees the same temporary views and UDFs.
  val ctx = sqlContext
  // Original multi-session logic:
  // val ctx = if (sqlContext.conf.hiveThriftServerSingleSession) {
  //   sqlContext
  // } else {
  //   sqlContext.newSession()
  // }
  // Per-session UDF registration was moved to
  // SparkSQLOperationManager#newExecuteStatementOperation:
  // if (!sqlContext.conf.hiveThriftServerSingleSession) {
  //   val configFilePath = ctx.sparkContext.conf.getOption("spark.app.confpath")
  //     .getOrElse("/sparklib/conf/udf.config")
  //   UdfLoadUtils.udfRegister(configFilePath, ctx.sparkSession)
  // }
  ctx.setConf("spark.sql.hive.version", HiveUtils.hiveExecutionVersion)
  if (sessionConf != null && sessionConf.containsKey("use:database")) {
    ctx.sql(s"use ${sessionConf.get("use:database")}")
  }
  sparkSqlOperationManager.sessionToContexts.put(sessionHandle, ctx)
  sessionHandle
}
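
Since every session handle now maps to the same SQLContext, state created over one JDBC connection is visible from another. A quick client-side check (a sketch only: the JDBC URL, credentials, view name and the hive-jdbc dependency are assumptions, not part of the post):

import java.sql.DriverManager

object SharedSessionCheck {
  def main(args: Array[String]): Unit = {
    Class.forName("org.apache.hive.jdbc.HiveDriver")
    val url = "jdbc:hive2://localhost:10000/default" // placeholder host/port

    // First connection creates a temporary view on the shared context.
    val conn1 = DriverManager.getConnection(url, "user", "")
    conn1.createStatement().execute("CREATE TEMPORARY VIEW shared_v AS SELECT 1 AS id")

    // Second connection gets a different session handle but the same SQLContext,
    // so the view (and any registered UDF) is still visible.
    val conn2 = DriverManager.getConnection(url, "user", "")
    val rs = conn2.createStatement().executeQuery("SELECT id FROM shared_v")
    while (rs.next()) println(rs.getInt(1)) // prints 1 when the context is shared

    conn1.close()
    conn2.close()
  }
}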