Spark Streaming 版本不一致引发的 NoSuchMethodError 问题

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1499)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1487)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1486)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1486)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:814)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:814)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1714)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1669)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1658)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
Caused by: java.lang.NoSuchMethodError: org.apache.spark.rpc.RpcEndpointRef.askWithRetry(Ljava/lang/Object;Lscala/reflect/ClassTag;)Ljava/lang/Object;
	at org.apache.spark.streaming.receiver.ReceiverSupervisorImpl.onReceiverStart(ReceiverSupervisorImpl.scala:185)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:146)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:131)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:607)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:597)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
19/11/21 15:24:36 INFO ReceiverTracker: Restarting Receiver 0
19/11/21 15:24:36 INFO ReceiverTracker: Receiver 0 started
19/11/21 15:24:36 INFO DAGScheduler: Got job 382 (start at StreamWorldCount.scala:27) with 1 output partitions
19/11/21 15:24:36 INFO DAGScheduler: Final stage: ResultStage 382 (start at StreamWorldCount.scala:27)
19/11/21 15:24:36 INFO DAGScheduler: Parents of final stage: List()
19/11/21 15:24:36 INFO DAGScheduler: Missing parents: List()
19/11/21 15:24:36 INFO DAGScheduler: Submitting ResultStage 382 (Receiver 0 ParallelCollectionRDD[382] at start at StreamWorldCount.scala:27), which has no missing parents
19/11/21 15:24:36 INFO MemoryStore: Block broadcast_382 stored as values in memory (estimated size 59.1 KB, free 891.0 MB)
19/11/21 15:24:36 INFO MemoryStore: Block broadcast_382_piece0 stored as bytes in memory (estimated size 20.0 KB, free 891.0 MB)
19/11/21 15:24:36 INFO BlockManagerInfo: Added broadcast_382_piece0 in memory on 192.168.95.1:58703 (size: 20.0 KB, free: 891.9 MB)
19/11/21 15:24:36 INFO SparkContext: Created broadcast 382 from broadcast at DAGScheduler.scala:1006
19/11/21 15:24:36 INFO DAGScheduler: Submitting 1 missing tasks from ResultStage 382 (Receiver 0 ParallelCollectionRDD[382] at start at StreamWorldCount.scala:27) (first 15 tasks are for partitions Vector(0))
19/11/21 15:24:36 INFO TaskSchedulerImpl: Adding task set 382.0 with 1 tasks
19/11/21 15:24:36 INFO TaskSetManager: Starting task 0.0 in stage 382.0 (TID 382, localhost, executor driver, partition 0, PROCESS_LOCAL, 5410 bytes)
19/11/21 15:24:36 INFO Executor: Running task 0.0 in stage 382.0 (TID 382)
19/11/21 15:24:36 INFO RecurringTimer: Started timer for BlockGenerator at time 1574321077000
19/11/21 15:24:36 INFO BlockGenerator: Started BlockGenerator
19/11/21 15:24:36 ERROR Executor: Exception in task 0.0 in stage 382.0 (TID 382)
java.lang.NoSuchMethodError: org.apache.spark.rpc.RpcEndpointRef.askWithRetry(Ljava/lang/Object;Lscala/reflect/ClassTag;)Ljava/lang/Object;
	at org.apache.spark.streaming.receiver.ReceiverSupervisorImpl.onReceiverStart(ReceiverSupervisorImpl.scala:185)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:146)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:131)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:607)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:597)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
19/11/21 15:24:36 INFO BlockGenerator: Started block pushing thread
19/11/21 15:24:36 WARN TaskSetManager: Lost task 0.0 in stage 382.0 (TID 382, localhost, executor driver): java.lang.NoSuchMethodError: org.apache.spark.rpc.RpcEndpointRef.askWithRetry(Ljava/lang/Object;Lscala/reflect/ClassTag;)Ljava/lang/Object;
	at org.apache.spark.streaming.receiver.ReceiverSupervisorImpl.onReceiverStart(ReceiverSupervisorImpl.scala:185)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:146)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:131)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:607)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:597)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

19/11/21 15:24:36 ERROR TaskSetManager: Task 0 in stage 382.0 failed 1 times; aborting job
19/11/21 15:24:36 INFO TaskSchedulerImpl: Removed TaskSet 382.0, whose tasks have all completed, from pool 
19/11/21 15:24:36 INFO TaskSchedulerImpl: Cancelling stage 382
19/11/21 15:24:36 INFO DAGScheduler: ResultStage 382 (start at StreamWorldCount.scala:27) failed in 0.008 s due to Job aborted due to stage failure: Task 0 in stage 382.0 failed 1 times, most recent failure: Lost task 0.0 in stage 382.0 (TID 382, localhost, executor driver): java.lang.NoSuchMethodError: org.apache.spark.rpc.RpcEndpointRef.askWithRetry(Ljava/lang/Object;Lscala/reflect/ClassTag;)Ljava/lang/Object;
	at org.apache.spark.streaming.receiver.ReceiverSupervisorImpl.onReceiverStart(ReceiverSupervisorImpl.scala:185)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.startReceiver(ReceiverSupervisor.scala:146)
	at org.apache.spark.streaming.receiver.ReceiverSupervisor.start(ReceiverSupervisor.scala:131)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:607)
	at org.apache.spark.streaming.scheduler.ReceiverTracker$ReceiverTrackerEndpoint$$anonfun$9.apply(ReceiverTracker.scala:597)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.SparkContext$$anonfun$34.apply(SparkContext.scala:2173)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:108)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:335)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

当出现这个错误的时候要去检查 Spark 各组件的版本！`java.lang.NoSuchMethodError: org.apache.spark.rpc.RpcEndpointRef.askWithRetry` 说明运行时加载的 spark-core 里已经没有 `askWithRetry` 这个方法（新版本的 Spark 已将其移除），而 spark-streaming 依赖的还是旧版本的 API——也就是 spark-core 和 spark-streaming 的版本不一致。

在用 Spark 的时候一定要把版本统一：pom.xml / build.sbt 中的 spark-core、spark-streaming 等所有 Spark 依赖应使用同一个版本号（建议提取为统一的版本变量），并且与集群上实际运行的 Spark 版本保持一致。

  • 1
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值