cmd中输入spark-shell
Microsoft Windows [Version 10.0.17134.885]
© 2018 Microsoft Corporation. All rights reserved.
C:\WINDOWS\system32>spark-shell
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
19/08/01 17:03:01 ERROR SparkContext: Error initializing SparkContext.
org.apache.spark.SparkException: Invalid Spark URL: spark://HeartbeatReceiver@windows10.microdone.cn127.0.0.1:52455
at org.apache.spark.rpc.RpcEndpointAddress$.apply(RpcEndpointAddress.scala:66)
at org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:134)
at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:101)
at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:109)
at org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:32)
at org.apache.spark.executor.Executor.<init>(Executor.scala:184)
at org.apache.spark.scheduler.local.LocalEndpoint.<init>(LocalSchedulerBackend.scala:59)
at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:127)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:183)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:43)
at $line3.$read.<init>(<console>:?)  [NOTE: console line number lost in original paste]
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)
at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:109)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:109)
at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply$mcV$sp(SparkILoop.scala:211)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:189)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:199)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:267)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:247)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSettings$1(SparkILoop.scala:235)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkILoop.scala:247)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:282)
at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
at org.apache.spark.repl.Main$.doMain(Main.scala:78)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/08/01 17:03:01 ERROR Utils: Uncaught exception in thread main
java.lang.NullPointerException
at org.apache.spark.scheduler.local.LocalSchedulerBackend.org$apache$spark$scheduler$local$LocalSchedulerBackend$$stop(LocalSchedulerBackend.scala:162)
at org.apache.spark.scheduler.local.LocalSchedulerBackend.stop(LocalSchedulerBackend.scala:138)
at org.apache.spark.scheduler.TaskSchedulerImpl.stop(TaskSchedulerImpl.scala:653)
at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:2042)
at org.apache.spark.SparkContext$$anonfun$stop$6.apply$mcV$sp(SparkContext.scala:1949)
at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1340)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1948)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:585)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:43)
at $line3.$read.<init>(<console>:?)  [NOTE: console line number lost in original paste]
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)
at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:109)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:109)
at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply$mcV$sp(SparkILoop.scala:211)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:189)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:199)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:267)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:247)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSettings$1(SparkILoop.scala:235)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkILoop.scala:247)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:282)
at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
at org.apache.spark.repl.Main$.doMain(Main.scala:78)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:167)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:924)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/08/01 17:03:01 WARN MetricsSystem: Stopping a MetricsSystem that is not running
19/08/01 17:03:01 ERROR Main: Failed to initialize Spark session.
org.apache.spark.SparkException: Invalid Spark URL: spark://HeartbeatReceiver@windows10.microdone.cn127.0.0.1:52455
at org.apache.spark.rpc.RpcEndpointAddress$.apply(RpcEndpointAddress.scala:66)
at org.apache.spark.rpc.netty.NettyRpcEnv.asyncSetupEndpointRefByURI(NettyRpcEnv.scala:134)
at org.apache.spark.rpc.RpcEnv.setupEndpointRefByURI(RpcEnv.scala:101)
at org.apache.spark.rpc.RpcEnv.setupEndpointRef(RpcEnv.scala:109)
at org.apache.spark.util.RpcUtils$.makeDriverRef(RpcUtils.scala:32)
at org.apache.spark.executor.Executor.<init>(Executor.scala:184)
at org.apache.spark.scheduler.local.LocalEndpoint.<init>(LocalSchedulerBackend.scala:59)
at org.apache.spark.scheduler.local.LocalSchedulerBackend.start(LocalSchedulerBackend.scala:127)
at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:183)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:501)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at
l
i
n
e
3.
line3.
line3.read
i
w
iw
iwiw.(:15)
at
l
i
n
e
3.
line3.
line3.readKaTeX parse error: Can't use function '$' in math mode at position 36: …43) at $̲line3.$read.<in…anonfun$loadAndRunReq
1.
a
p
p
l
y
(
I
M
a
i
n
.
s
c
a
l
a
:
645
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
M
a
i
n
1.apply(IMain.scala:645) at scala.tools.nsc.interpreter.IMain
1.apply(IMain.scala:645)atscala.tools.nsc.interpreter.IMainWrappedRequestKaTeX parse error: Can't use function '$' in math mode at position 8: anonfun$̲loadAndRunReq$1…anonfun$quietRun
1.
a
p
p
l
y
(
I
M
a
i
n
.
s
c
a
l
a
:
231
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
M
a
i
n
1.apply(IMain.scala:231) at scala.tools.nsc.interpreter.IMain
1.apply(IMain.scala:231)atscala.tools.nsc.interpreter.IMain
a
n
o
n
f
u
n
anonfun
anonfunquietRun
1.
a
p
p
l
y
(
I
M
a
i
n
.
s
c
a
l
a
:
231
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
M
a
i
n
.
b
e
Q
u
i
e
t
D
u
r
i
n
g
(
I
M
a
i
n
.
s
c
a
l
a
:
221
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
M
a
i
n
.
q
u
i
e
t
R
u
n
(
I
M
a
i
n
.
s
c
a
l
a
:
231
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(IMain.scala:231) at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221) at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231) at org.apache.spark.repl.SparkILoop
1.apply(IMain.scala:231)atscala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)atscala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfuninitializeSpark
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunapply
m
c
V
mcV
mcVsp
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
109
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:109) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:109)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfuninitializeSpark
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunapply
m
c
V
mcV
mcVsp
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
109
)
a
t
s
c
a
l
a
.
c
o
l
l
e
c
t
i
o
n
.
i
m
m
u
t
a
b
l
e
.
L
i
s
t
.
f
o
r
e
a
c
h
(
L
i
s
t
.
s
c
a
l
a
:
392
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:109) at scala.collection.immutable.List.foreach(List.scala:392) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:109)atscala.collection.immutable.List.foreach(List.scala:392)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfuninitializeSpark
1.
a
p
p
l
y
1.apply
1.applymcV
s
p
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
109
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
sp(SparkILoop.scala:109) at org.apache.spark.repl.SparkILoop
sp(SparkILoop.scala:109)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfuninitializeSpark
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
109
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:109) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:109)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfuninitializeSpark
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
109
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
L
o
o
p
.
s
a
v
i
n
g
R
e
p
l
a
y
S
t
a
c
k
(
I
L
o
o
p
.
s
c
a
l
a
:
91
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
.
i
n
i
t
i
a
l
i
z
e
S
p
a
r
k
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
108
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:109) at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91) at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:109)atscala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)atorg.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunorg
a
p
a
c
h
e
apache
apachespark
r
e
p
l
repl
replSparkILoop
a
n
o
n
f
u
n
anonfun
anonfunloopPostInit$1
1.
a
p
p
l
y
1.apply
1.applymcV
s
p
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
211
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
sp(SparkILoop.scala:211) at org.apache.spark.repl.SparkILoop
sp(SparkILoop.scala:211)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunorg
a
p
a
c
h
e
apache
apachespark
r
e
p
l
repl
replSparkILoop
a
n
o
n
f
u
n
anonfun
anonfunloopPostInit$1
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
199
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:199) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:199)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunorg
a
p
a
c
h
e
apache
apachespark
r
e
p
l
repl
replSparkILoop
a
n
o
n
f
u
n
anonfun
anonfunloopPostInit$1
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
199
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
L
o
o
p
1.apply(SparkILoop.scala:199) at scala.tools.nsc.interpreter.ILoop
1.apply(SparkILoop.scala:199)atscala.tools.nsc.interpreter.ILoop
a
n
o
n
f
u
n
anonfun
anonfunmumly
1.
a
p
p
l
y
(
I
L
o
o
p
.
s
c
a
l
a
:
189
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
M
a
i
n
.
b
e
Q
u
i
e
t
D
u
r
i
n
g
(
I
M
a
i
n
.
s
c
a
l
a
:
221
)
a
t
s
c
a
l
a
.
t
o
o
l
s
.
n
s
c
.
i
n
t
e
r
p
r
e
t
e
r
.
I
L
o
o
p
.
m
u
m
l
y
(
I
L
o
o
p
.
s
c
a
l
a
:
186
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(ILoop.scala:189) at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221) at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186) at org.apache.spark.repl.SparkILoop
1.apply(ILoop.scala:189)atscala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)atscala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1.
o
r
g
1.org
1.orgapache
s
p
a
r
k
spark
sparkrepl
S
p
a
r
k
I
L
o
o
p
SparkILoop
SparkILoop
a
n
o
n
f
u
n
anonfun
anonfun$loopPostInit
1
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
199
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1(SparkILoop.scala:199) at org.apache.spark.repl.SparkILoop
1(SparkILoop.scala:199)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunstartup$1
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
267
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:267) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:267)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1
1
1
a
n
o
n
f
u
n
anonfun
anonfunstartup$1
1.
a
p
p
l
y
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
247
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1.apply(SparkILoop.scala:247) at org.apache.spark.repl.SparkILoop
1.apply(SparkILoop.scala:247)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess$1.withSuppressedSettings
1
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
235
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1(SparkILoop.scala:235) at org.apache.spark.repl.SparkILoop
1(SparkILoop.scala:235)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess$1.startup
1
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
247
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
1(SparkILoop.scala:247) at org.apache.spark.repl.SparkILoop
1(SparkILoop.scala:247)atorg.apache.spark.repl.SparkILoop
a
n
o
n
f
u
n
anonfun
anonfunprocess
1.
a
p
p
l
y
1.apply
1.applymcZ
s
p
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
282
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
.
r
u
n
C
l
o
s
u
r
e
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
159
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
S
p
a
r
k
I
L
o
o
p
.
p
r
o
c
e
s
s
(
S
p
a
r
k
I
L
o
o
p
.
s
c
a
l
a
:
182
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
M
a
i
n
sp(SparkILoop.scala:282) at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159) at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182) at org.apache.spark.repl.Main
sp(SparkILoop.scala:282)atorg.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)atorg.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)atorg.apache.spark.repl.Main.doMain(Main.scala:78)
at org.apache.spark.repl.Main
.
m
a
i
n
(
M
a
i
n
.
s
c
a
l
a
:
58
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
r
e
p
l
.
M
a
i
n
.
m
a
i
n
(
M
a
i
n
.
s
c
a
l
a
)
a
t
s
u
n
.
r
e
f
l
e
c
t
.
N
a
t
i
v
e
M
e
t
h
o
d
A
c
c
e
s
s
o
r
I
m
p
l
.
i
n
v
o
k
e
0
(
N
a
t
i
v
e
M
e
t
h
o
d
)
a
t
s
u
n
.
r
e
f
l
e
c
t
.
N
a
t
i
v
e
M
e
t
h
o
d
A
c
c
e
s
s
o
r
I
m
p
l
.
i
n
v
o
k
e
(
N
a
t
i
v
e
M
e
t
h
o
d
A
c
c
e
s
s
o
r
I
m
p
l
.
j
a
v
a
:
62
)
a
t
s
u
n
.
r
e
f
l
e
c
t
.
D
e
l
e
g
a
t
i
n
g
M
e
t
h
o
d
A
c
c
e
s
s
o
r
I
m
p
l
.
i
n
v
o
k
e
(
D
e
l
e
g
a
t
i
n
g
M
e
t
h
o
d
A
c
c
e
s
s
o
r
I
m
p
l
.
j
a
v
a
:
43
)
a
t
j
a
v
a
.
l
a
n
g
.
r
e
f
l
e
c
t
.
M
e
t
h
o
d
.
i
n
v
o
k
e
(
M
e
t
h
o
d
.
j
a
v
a
:
498
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
d
e
p
l
o
y
.
J
a
v
a
M
a
i
n
A
p
p
l
i
c
a
t
i
o
n
.
s
t
a
r
t
(
S
p
a
r
k
A
p
p
l
i
c
a
t
i
o
n
.
s
c
a
l
a
:
52
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
d
e
p
l
o
y
.
S
p
a
r
k
S
u
b
m
i
t
.
o
r
g
.main(Main.scala:58) at org.apache.spark.repl.Main.main(Main.scala) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52) at org.apache.spark.deploy.SparkSubmit.org
.main(Main.scala:58)atorg.apache.spark.repl.Main.main(Main.scala)atsun.reflect.NativeMethodAccessorImpl.invoke0(NativeMethod)atsun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)atsun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)atjava.lang.reflect.Method.invoke(Method.java:498)atorg.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)atorg.apache.spark.deploy.SparkSubmit.orgapache
s
p
a
r
k
spark
sparkdeploy
S
p
a
r
k
S
u
b
m
i
t
SparkSubmit
SparkSubmit$runMain(SparkSubmit.scala:849)
at org.apache.spark.deploy.SparkSubmit.doRunMain
1
(
S
p
a
r
k
S
u
b
m
i
t
.
s
c
a
l
a
:
167
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
d
e
p
l
o
y
.
S
p
a
r
k
S
u
b
m
i
t
.
s
u
b
m
i
t
(
S
p
a
r
k
S
u
b
m
i
t
.
s
c
a
l
a
:
195
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
d
e
p
l
o
y
.
S
p
a
r
k
S
u
b
m
i
t
.
d
o
S
u
b
m
i
t
(
S
p
a
r
k
S
u
b
m
i
t
.
s
c
a
l
a
:
86
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
d
e
p
l
o
y
.
S
p
a
r
k
S
u
b
m
i
t
1(SparkSubmit.scala:167) at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195) at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86) at org.apache.spark.deploy.SparkSubmit
1(SparkSubmit.scala:167)atorg.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:195)atorg.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)atorg.apache.spark.deploy.SparkSubmit$anon
2.
d
o
S
u
b
m
i
t
(
S
p
a
r
k
S
u
b
m
i
t
.
s
c
a
l
a
:
924
)
a
t
o
r
g
.
a
p
a
c
h
e
.
s
p
a
r
k
.
d
e
p
l
o
y
.
S
p
a
r
k
S
u
b
m
i
t
2.doSubmit(SparkSubmit.scala:924) at org.apache.spark.deploy.SparkSubmit
2.doSubmit(SparkSubmit.scala:924)atorg.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala:933)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)