Spark使用yarn提交任务时遇到Exception in thread "main" java.lang.AbstractMethodError
20/03/10 11:18:13 INFO Client: Setting up the launch environment for our AM container
20/03/10 11:18:13 INFO Client: Preparing resources for our AM container
Exception in thread "main" java.lang.AbstractMethodError
at org.apache.spark.internal.Logging$class.initializeLogIfNecessary(Logging.scala:99)
at org.apache.spark.deploy.yarn.security.HBaseCredentialProvider.initializeLogIfNecessary(HBaseCredentialProvider.scala:31)
at org.apache.spark.internal.Logging$class.log(Logging.scala:46)
at org.apache.spark.deploy.yarn.security.HBaseCredentialProvider.log(HBaseCredentialProvider.scala:31)
at org.apache.spark.internal.Logging$class.logDebug(Logging.scala:79)
at org.apache.spark.deploy.yarn.security.HBaseCredentialProvider.logDebug(HBaseCredentialProvider.scala:31)
at org.apache.spark.deploy.yarn.security.HBaseCredentialProvider.hbaseConf(HBaseCredentialProvider.scala:71)
at org.apache.spark.deploy.yarn.security.HBaseCredentialProvider.credentialsRequired(HBaseCredentialProvider.scala:59)
at org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager$$anonfun$obtainDelegationTokens$2.apply(YARNHadoopDelegationTokenManager.scala:59)
at org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager$$anonfun$obtainDelegationTokens$2.apply(YARNHadoopDelegationTokenManager.scala:58)
at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
at scala.collection.Iterator$class.foreach(Iterator.scala:893)
at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:206)
at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
at org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager.obtainDelegationTokens(YARNHadoopDelegationTokenManager.scala:58)
at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:388)
at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:882)
at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:169)
at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:57)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:164)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:500)
at org.apache.spark.streaming.StreamingContext$.createNewSparkContext(StreamingContext.scala:838)
at org.apache.spark.streaming.StreamingContext.<init>(StreamingContext.scala:85)
at org.apache.spark.streaming.api.java.JavaStreamingContext.<init>(JavaStreamingContext.scala:138)
at demo.SparkKafkaDemo.sparkToKafka(SparkKafkaDemo.java:38)
at demo.SparkKafkaDemo.main(SparkKafkaDemo.java:28)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:906)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:197)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:227)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:136)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
20/03/10 11:18:14 INFO DiskBlockManager: Shutdown hook called
20/03/10 11:18:14 INFO ShutdownHookManager: Shutdown hook called
20/03/10 11:18:14 INFO ShutdownHookManager: Deleting directory /tmp/spark-d4b91543-dc5b-48ec-8d17-de7154fca816
20/03/10 11:18:14 INFO ShutdownHookManager: Deleting directory /tmp/spark-f68676c5-cecc-4339-baff-4abd27de74e6
20/03/10 11:18:14 INFO ShutdownHookManager: Deleting directory /tmp/spark-d4b91543-dc5b-48ec-8d17-de7154fca816/userFiles-2adc1a8d-53d4-4b12-b396-948b0057646a
百度了一下,说是版本不一致导致的。通过检查发现,开发环境的Spark版本与服务器集群的版本是一致的,都是2.3.0,但仍然报“Exception in thread "main" java.lang.AbstractMethodError”。
最终再次检查错误信息,发现“org.apache.spark.deploy.yarn.security.HBaseCredentialProvider.initializeLogIfNecessary(HBaseCredentialProvider.scala:31)”,怀疑是spark-yarn的版本有问题。再次检查开发环境,发现spark-yarn依赖的版本是2.2.0,与集群环境(2.3.0)不一致。将其修改为2.3.0后重新打包提交,任务启动和停止均恢复正常。