Code issue when upgrading from Spark 2.0 to Spark 2.3

While upgrading Spark from 2.0 to 2.3, jobs began failing with a "Table or view not found" error. After investigation, the cause turned out to be a new SparkContext() call in the code. The likely mechanism: in Spark 2.3, SparkSession.Builder.getOrCreate() no longer applies builder options (including the spark.sql.catalogImplementation=hive setting made by enableHiveSupport()) to a SparkContext that already exists, so the session falls back to the in-memory catalog and cannot resolve Hive tables.
The error is as follows:

I0828 18:05:25.272563   134 sched.cpp:743] Framework registered with 33c370c3-d41c-43aa-92b2-d43280ae9a32-1687-driver-20180828180451-0107
Exception in thread "main" org.apache.spark.sql.AnalysisException: Table or view not found: `yz_rpt`.`rpt_zhk_pro_order_shop`; line 9 pos 5;
'Distinct
+- 'Project ['s.self_shop_id, 's.user_code AS org_code#0, 'u.user_id, 'u.real_name AS user_name#1, 'u.org_id]
   +- 'Join LeftOuter, ('s.user_code = 'u.user_code)
      :- 'SubqueryAlias s
      :  +- 'UnresolvedRelation `yz_rpt`.`rpt_zhk_pro_order_shop`
      +- 'SubqueryAlias u
         +- 'UnresolvedRelation `yz_dw`.`dim_user`

    at org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:82)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:80)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:127)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$foreachUp$1.apply(TreeNode.scala:126)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:126)
    at org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:80)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:92)
    at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:105)
    at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:57)
    at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:55)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:47)
    at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:74)
    at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:641)
    at com.spark.datacenter.report.RptZHKProBDTradeRefundMO$.getData(RptZHKProBDTradeRefundMO.scala:33)
    at com.spark.datacenter.main.Tanqingyong$$anonfun$main$1.apply$mcVI$sp(Tanqingyong.scala:73)
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
    at com.spark.datacenter.main.Tanqingyong$.main(Tanqingyong.scala:59)
    at com.spark.datacenter.main.Tanqingyong.main(Tanqingyong.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:894)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
I0828 18:05:34.916119   164 sched.cpp:1987] Asked to stop the driver
I0828 18:05:34.916317   140 sched.cpp:1187] Stopping framework '33c370c3-d41c-43aa-92b2-d43280ae9a32-1687-driver-20180828180451-0107'

The code snippet in question:

package com.spark.datacenter.main

import com.spark.datacenter.export._
import com.spark.datacenter.fact.{FactYzcrmYzKoubeiSupplierIndicatorReport, factKBKDiscountActivityReport}
import com.spark.datacenter.report._
import com.spark.datacenter.tmp._
import com.spark.datacenter.utils._
import com.spark.datacenter.utils.{CommonUtils, DateUtils}
import org.apache.spark.sql.SparkSession
import org.apache.spark.{SparkConf, SparkContext}
import za.co.absa.spline.core.SparkLineageInitializer._


object MainDMFDataBriefing {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    // Culprit: a SparkContext is created here, before the SparkSession below,
    // so enableHiveSupport() no longer takes effect on Spark 2.3.
    val sc = new SparkContext(conf)

    val ssb = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()

    ssb.sql(
      """
        |select
        |distinct
        |s.self_shop_id,
        |s.user_code as org_code,
        |u.user_id,
        |u.real_name as user_name,
        |u.org_id
        |from yz_rpt.rpt_zhk_pro_order_shop s
        |left outer join yz_dw.dim_user u on s.user_code=u.user_code
      """.stripMargin).show()

  }

}
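
A quick way to confirm this failure mode is to check which catalog implementation the session actually received; spark.sql.catalogImplementation is the static conf that enableHiveSupport() sets to "hive". A diagnostic sketch (the "in-memory" fallback default is an assumption based on Spark's documented default):

// Prints "hive" when Hive support took effect; with the ordering above it
// prints "in-memory", and Hive tables such as yz_rpt.rpt_zhk_pro_order_shop
// are invisible to the analyzer.
println(ssb.sparkContext.getConf.get("spark.sql.catalogImplementation", "in-memory"))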

The fix

Remove the line val sc = new SparkContext(conf). If a SparkContext is still needed elsewhere, take it from the session (ssb.sparkContext) instead of constructing one, as shown below.
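
A corrected version of the snippet, for reference (a sketch: the SparkSession is built first so that enableHiveSupport() is honored, and the SparkContext is obtained from the session):

package com.spark.datacenter.main

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

object MainDMFDataBriefing {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()

    // Build the SparkSession first: enableHiveSupport() sets
    // spark.sql.catalogImplementation=hive before any SparkContext exists.
    val ssb = SparkSession.builder().config(conf).enableHiveSupport().getOrCreate()

    // If a SparkContext is needed, take it from the session rather than
    // constructing a new one.
    val sc = ssb.sparkContext

    ssb.sql(
      """
        |select
        |distinct
        |s.self_shop_id,
        |s.user_code as org_code,
        |u.user_id,
        |u.real_name as user_name,
        |u.org_id
        |from yz_rpt.rpt_zhk_pro_order_shop s
        |left outer join yz_dw.dim_user u on s.user_code=u.user_code
      """.stripMargin).show()
  }

}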
