Spark foreachPartition: "Could not initialize class"

 

Referencing a driver-side parameter inside `foreachPartition` is a classic way to hit `java.lang.NoClassDefFoundError: Could not initialize class ...`: the closure is serialized and shipped to the executors, and any enclosing object it captures must be deserialized or re-initialized there, where driver-only state is unavailable. The fix is to broadcast the parameter, or to redefine it as a plain local `val`:

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().appName("foreachPartitionDemo").getOrCreate()
import spark.implicits._

val drive_param = "xxx"
val dfs = Seq("xx").toDF("user")

dfs.repartition(100).foreachPartition { par =>
  // foreachPartition runs this closure on the executors, so anything it
  // captures must be serialized from the driver. If drive_param were a field
  // of a driver-side object, referencing it here would drag (and try to
  // re-initialize) that whole object on the executor.
  // Fix: broadcast the parameter, or redefine it as a plain local val.
  val executor_param = drive_param
  par.foreach(row => println(s"$executor_param -> ${row.getString(0)}"))
}
```
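The comment above also mentions broadcasting. A minimal sketch of that variant, assuming the same `spark`, `dfs`, and `drive_param` as in the snippet above (`broadcastParam` is an illustrative name):

```scala
// Broadcast ships the value to each executor once and caches it there,
// instead of serializing it into every task closure.
val broadcastParam = spark.sparkContext.broadcast(drive_param)

dfs.repartition(100).foreachPartition { par =>
  val executor_param = broadcastParam.value // read the executor-local copy
  par.foreach(row => println(s"$executor_param -> ${row.getString(0)}"))
}
```

Broadcasting is the better choice when the parameter is large or reused by many tasks (a lookup map, a model), since the value is shipped to each executor only once.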
A related startup failure from the same kind of setup: `spark-shell` fails to create the SparkContext because of an inline comment in the Spark config.

Environment: Ubuntu 16.04, Hadoop 2.7.3, Scala 2.11.8, Spark 2.1.0. Hadoop and Scala were already installed; after configuring Spark, running `spark-shell` threw the following error:

```
18/05/22 15:43:30 ERROR spark.SparkContext: Error initializing SparkContext.
java.lang.IllegalArgumentException: For input string: "true #是否记录Spark事件,用于应用程序在完成后重构webUI"
  at scala.collection.immutable.StringLike$class.parseBoolean(StringLike.scala:290)
  at scala.collection.immutable.StringLike$class.toBoolean(StringLike.scala:260)
  at scala.collection.immutable.StringOps.toBoolean(StringOps.scala:29)
  at org.apache.spark.SparkConf$$anonfun$getBoolean$2.apply(SparkConf.scala:407)
  at org.apache.spark.SparkConf$$anonfun$getBoolean$2.apply(SparkConf.scala:407)
  at scala.Option.map(Option.scala:146)
  at org.apache.spark.SparkConf.getBoolean(SparkConf.scala:407)
  at org.apache.spark.SparkContext.isEventLogEnabled(SparkContext.scala:238)
  at org.apache.spark.SparkContext.<init>(SparkContext.scala:407)
  at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2313)
  at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:868)
  at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:860)
  at scala.Option.getOrElse(Option.scala:121)
  at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:860)
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:95)
  at $line3.$read$$iw$$iw.<init>(<console>:15)
  at $line3.$read$$iw.<init>(<console>:42)
  at $line3.$read.<init>(<console>:44)
  at $line3.$read$.<init>(<console>:48)
  at $line3.$read$.<clinit>(<console>)
  at $line3.$eval$.$print$lzycompute(<console>:7)
  at $line3.$eval$.$print(<console>:6)
  at $line3.$eval.$print(<console>)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
  at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
  at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
  at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
  at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
  at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
  at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
  at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
  at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
  at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
  at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
  at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:38)
  at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
  at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
  at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214)
  at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:37)
  at org.apache.spark.repl.SparkILoop.loadFiles(SparkILoop.scala:105)
  at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:920)
  at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
  at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
  at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
  at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
  at org.apache.spark.repl.Main$.doMain(Main.scala:68)
  at org.apache.spark.repl.Main$.main(Main.scala:51)
  at org.apache.spark.repl.Main.main(Main.scala)
  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  at java.lang.reflect.Method.invoke(Method.java:498)
  at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:738)
  at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
  at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
  at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
  at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
java.lang.IllegalArgumentException: For input string: "true #是否记录Spark事件,用于应用程序在完成后重构webUI"
  at scala.collection.immutable.StringLike$class.parseBoolean(StringLike.scala:290)
  ...
  at org.apache.spark.repl.Main$.createSparkSession(Main.scala:95)
  ... 47 elided
<console>:14: error: not found: value spark
       import spark.implicits._
              ^
<console>:14: error: not found: value spark
       import spark.sql
```

The cause is visible in the message itself: the value of `spark.eventLog.enabled` was read as the whole string `true #是否记录Spark事件,用于应用程序在完成后重构webUI` ("whether to log Spark events, used to rebuild the web UI after the application finishes"). `spark-defaults.conf` treats everything after the property name as the value, so a trailing inline comment makes `toBoolean` fail. The later `not found: value spark` errors are just fallout from the SparkSession never being created. The fix is to move the comment onto its own line, as sketched below.
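A minimal sketch of the corrected `conf/spark-defaults.conf` entry, with the comment on a line of its own (English translation added in parentheses for reference):

```
# 是否记录Spark事件,用于应用程序在完成后重构webUI
# (whether to log Spark events, used to rebuild the web UI after the application finishes)
spark.eventLog.enabled true
```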