1、环境
<spark.version>2.3.4</spark.version>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-yarn_2.11</artifactId>
<version>${spark.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>${spark.version}</version>
</dependency>
2、报错信息
2021-01-06 09:26:54.087 ERROR 5340 --- [ main] org.apache.spark.SparkContext : Error initializing SparkContext.
java.lang.RuntimeException: java.lang.NoSuchFieldException: DEFAULT_TINY_CACHE_SIZE
at org.apache.spark.network.util.NettyUtils.getPrivateStaticField(NettyUtils.java:131) ~[spark-network-common_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.network.util.NettyUtils.createPooledByteBufAllocator(NettyUtils.java:118) ~[spark-network-common_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.network.server.TransportServer.init(TransportServer.java:94) ~[spark-network-common_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.network.server.TransportServer.<init>(TransportServer.java:73) ~[spark-network-common_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.network.TransportContext.createServer(TransportContext.java:114) ~[spark-network-common_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.rpc.netty.NettyRpcEnv.startServer(NettyRpcEnv.scala:119) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.rpc.netty.NettyRpcEnvFactory$$anonfun$4.apply(NettyRpcEnv.scala:465) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.rpc.netty.NettyRpcEnvFactory$$anonfun$4.apply(NettyRpcEnv.scala:464) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:2275) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160) ~[scala-library-2.11.8.jar:na]
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:2267) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.rpc.netty.NettyRpcEnvFactory.create(NettyRpcEnv.scala:469) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:57) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:249) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:175) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:256) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.SparkContext.<init>(SparkContext.scala:423) ~[spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2493) [spark-core_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:934) [spark-sql_2.11-2.3.4.jar:2.3.4]
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:925) [spark-sql_2.11-2.3.4.jar:2.3.4]
at scala.Option.getOrElse(Option.scala:121) [scala-library-2.11.8.jar:na]
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:925) [spark-sql_2.11-2.3.4.jar:2.3.4]
at Servers.TextSpark.GetEsDataSpark(TextSpark.java:21) [classes/:na]
at com.example.spark.demo.DemoApplication.main(DemoApplication.java:13) [classes/:na]
Caused by: java.lang.NoSuchFieldException: DEFAULT_TINY_CACHE_SIZE
at java.lang.Class.getDeclaredField(Class.java:2070) ~[na:1.8.0_181]
at org.apache.spark.network.util.NettyUtils.getPrivateStaticField(NettyUtils.java:127) ~[spark-network-common_2.11-2.3.4.jar:2.3.4]
... 23 common frames omitted
2021-01-06 09:26:54.093 INFO 5340Exception in thread "main" --- [ main] org.apache.spark.SparkContext : Successfully stopped SparkContext
java.lang.RuntimeException: java.lang.NoSuchFieldException: DEFAULT_TINY_CACHE_SIZE
at org.apache.spark.network.util.NettyUtils.getPrivateStaticField(NettyUtils.java:131)
at org.apache.spark.network.util.NettyUtils.createPooledByteBufAllocator(NettyUtils.java:118)
at org.apache.spark.network.server.TransportServer.init(TransportServer.java:94)
at org.apache.spark.network.server.TransportServer.<init>(TransportServer.java:73)
at org.apache.spark.network.TransportContext.createServer(TransportContext.java:114)
at org.apache.spark.rpc.netty.NettyRpcEnv.startServer(NettyRpcEnv.scala:119)
at org.apache.spark.rpc.netty.NettyRpcEnvFactory$$anonfun$4.apply(NettyRpcEnv.scala:465)
at org.apache.spark.rpc.netty.NettyRpcEnvFactory$$anonfun$4.apply(NettyRpcEnv.scala:464)
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:2275)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:160)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:2267)
at org.apache.spark.rpc.netty.NettyRpcEnvFactory.create(NettyRpcEnv.scala:469)
at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:57)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:249)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:175)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:256)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:423)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2493)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:934)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:925)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:925)
at Servers.TextSpark.GetEsDataSpark(TextSpark.java:21)
at com.example.spark.demo.DemoApplication.main(DemoApplication.java:13)
Caused by: java.lang.NoSuchFieldException: DEFAULT_TINY_CACHE_SIZE
at java.lang.Class.getDeclaredField(Class.java:2070)
at org.apache.spark.network.util.NettyUtils.getPrivateStaticField(NettyUtils.java:127)
... 23 more
3、解决办法
原因:项目(如 Spring Boot 的依赖管理)引入了较新版本的 netty-all,新版本的 PooledByteBufAllocator 中已经没有 DEFAULT_TINY_CACHE_SIZE 这个字段,而 Spark 2.3.4 通过反射读取该字段,因此抛出 NoSuchFieldException。
解决:在 https://mvnrepository.com/ 仓库里查看 spark-core_2.11 2.3.4 的依赖列表,找到它所依赖的 netty-all 版本(2.3.4 对应 4.1.17.Final),然后在自己的 pom.xml 中将 netty-all 显式固定为该版本,覆盖掉被传递引入的新版本,例如:
<dependency>
<groupId>io.netty</groupId>
<artifactId>netty-all</artifactId>
<version>4.1.17.Final</version>
</dependency>