spark在yarn上运行作业报错

spark在yarn上运行作业报错:
java.lang.RuntimeException: Error in configuring object
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:112)
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:78)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
at org.apache.spark.rdd.HadoopRDD.getInputFormat(HadoopRDD.scala:188)
at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:201)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:252)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:250)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:250)
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:252)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:250)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:250)
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:252)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:250)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:250)
at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:252)
at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:250)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.partitions(RDD.scala:250)
at org.apache.spark.ShuffleDependency.<init>(Dependency.scala:91)
at org.apache.spark.rdd.ShuffledRDD.getDependencies(ShuffledRDD.scala:91)
at org.apache.spark.rdd.RDD$$anonfun$dependencies$2.apply(RDD.scala:239)
at org.apache.spark.rdd.RDD$$anonfun$dependencies$2.apply(RDD.scala:237)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.rdd.RDD.dependencies(RDD.scala:237)
at org.apache.spark.scheduler.DAGScheduler.getShuffleDependencies(DAGScheduler.scala:424)
at org.apache.spark.scheduler.DAGScheduler.getOrCreateParentStages(DAGScheduler.scala:373)
at org.apache.spark.scheduler.DAGScheduler.createResultStage(DAGScheduler.scala:360)
at org.apache.spark.scheduler.DAGScheduler.handleJobSubmitted(DAGScheduler.scala:838)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1613)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1605)
at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1594)
at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:109)
... 37 more
Caused by: java.lang.IllegalArgumentException: Compression codec …（原文堆栈在此处截断）

  • 1
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值