spark-zk service won't start

Case 1

Scenario


[root@linux01 logs]# start-all.sh 
starting org.apache.spark.deploy.master.Master, logging to /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.master.Master-1-linux01.out
failed to launch: nice -n 0 /opt/servers/spark-2.4.5/bin/spark-class org.apache.spark.deploy.master.Master --host linux01 --port 7077 --webui-port 8080
  Spark Command: /opt/servers/jdk1.8/bin/java -cp /opt/servers/spark-2.4.5/conf/:/opt/servers/spark-2.4.5/jars/*:/opt/servers/hadoop-2.9.2/etc/hadoop/:/opt/servers/hadoop-2.9.2/share/hadoop/common/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/common/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/*:/opt/servers/hadoop-2.9.2/contrib/capacity-scheduler/*.jar -Dspark.deploy.recoveryMode=ZOOKEEPER - Dspark.deploy.zookeeper.url=linux01,linux02,linux03 -Dspark.deploy.zookeeper.dir=/spark -Xmx1g org.apache.spark.deploy.master.Master --host linux01 --port 7077 --webui-port 8080
  ========================================
  Unrecognized option: -
  Error: Could not create the Java Virtual Machine.
  Error: A fatal exception has occurred. Program will exit.
full log in /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.master.Master-1-linux01.out
linux03: starting org.apache.spark.deploy.worker.Worker, logging to /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux03.out
linux01: starting org.apache.spark.deploy.worker.Worker, logging to /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out
linux02: starting org.apache.spark.deploy.worker.Worker, logging to /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux02.out
linux01: failed to launch: nice -n 0 /opt/servers/spark-2.4.5/bin/spark-class org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
linux03: failed to launch: nice -n 0 /opt/servers/spark-2.4.5/bin/spark-class org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
linux02: failed to launch: nice -n 0 /opt/servers/spark-2.4.5/bin/spark-class org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
linux01:   Spark Command: /opt/servers/jdk1.8/bin/java -cp /opt/servers/spark-2.4.5/conf/:/opt/servers/spark-2.4.5/jars/*:/opt/servers/hadoop-2.9.2/etc/hadoop/:/opt/servers/hadoop-2.9.2/share/hadoop/common/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/common/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/*:/opt/servers/hadoop-2.9.2/contrib/capacity-scheduler/*.jar -Dspark.deploy.recoveryMode=ZOOKEEPER - Dspark.deploy.zookeeper.url=linux01,linux02,linux03 -Dspark.deploy.zookeeper.dir=/spark -Xmx1g org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
linux01:   ========================================
linux01:   Unrecognized option: -
linux01:   Error: Could not create the Java Virtual Machine.
linux01:   Error: A fatal exception has occurred. Program will exit.
linux03:   Spark Command: /opt/servers/jdk1.8/bin/java -cp /opt/servers/spark-2.4.5/conf/:/opt/servers/spark-2.4.5/jars/*:/opt/servers/hadoop-2.9.2/etc/hadoop/:/opt/servers/hadoop-2.9.2/share/hadoop/common/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/common/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/*:/opt/servers/hadoop-2.9.2/contrib/capacity-scheduler/*.jar -Dspark.deploy.recoveryMode=ZOOKEEPER - Dspark.deploy.zookeeper.url=linux01,linux02,linux03 -Dspark.deploy.zookeeper.dir=/spark -Xmx1g org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
linux03:   ========================================
linux03:   Unrecognized option: -
linux03:   Error: Could not create the Java Virtual Machine.
linux03:   Error: A fatal exception has occurred. Program will exit.
linux02:   Spark Command: /opt/servers/jdk1.8/bin/java -cp /opt/servers/spark-2.4.5/conf/:/opt/servers/spark-2.4.5/jars/*:/opt/servers/hadoop-2.9.2/etc/hadoop/:/opt/servers/hadoop-2.9.2/share/hadoop/common/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/common/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/*:/opt/servers/hadoop-2.9.2/contrib/capacity-scheduler/*.jar -Dspark.deploy.recoveryMode=ZOOKEEPER - Dspark.deploy.zookeeper.url=linux01,linux02,linux03 -Dspark.deploy.zookeeper.dir=/spark -Xmx1g org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
linux02:   ========================================
linux02:   Unrecognized option: -
linux02:   Error: Could not create the Java Virtual Machine.
linux02:   Error: A fatal exception has occurred. Program will exit.
linux02: full log in /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux02.out
linux01: full log in /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out
linux03: full log in /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux03.out

Logs

[root@linux01 logs]# pwd
/opt/servers/spark-2.4.5/logs
[root@linux01 logs]# 
[root@linux01 logs]# 
[root@linux01 logs]# ls
spark-root-org.apache.spark.deploy.history.HistoryServer-1-linux01.out    spark-root-org.apache.spark.deploy.master.Master-1-linux01.out.3  spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out.2
spark-root-org.apache.spark.deploy.history.HistoryServer-1-linux01.out.1  spark-root-org.apache.spark.deploy.master.Master-1-linux01.out.4  spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out.3
spark-root-org.apache.spark.deploy.master.Master-1-linux01.out            spark-root-org.apache.spark.deploy.master.Master-1-linux01.out.5  spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out.4
spark-root-org.apache.spark.deploy.master.Master-1-linux01.out.1          spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out    spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out.5
spark-root-org.apache.spark.deploy.master.Master-1-linux01.out.2          spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out.1
[root@linux01 logs]# 
[root@linux01 logs]# cat spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out
Spark Command: /opt/servers/jdk1.8/bin/java -cp /opt/servers/spark-2.4.5/conf/:/opt/servers/spark-2.4.5/jars/*:/opt/servers/hadoop-2.9.2/etc/hadoop/:/opt/servers/hadoop-2.9.2/share/hadoop/common/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/common/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/hdfs/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/yarn/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/lib/*:/opt/servers/hadoop-2.9.2/share/hadoop/mapreduce/*:/opt/servers/hadoop-2.9.2/contrib/capacity-scheduler/*.jar -Dspark.deploy.recoveryMode=ZOOKEEPER - Dspark.deploy.zookeeper.url=linux01,linux02,linux03 -Dspark.deploy.zookeeper.dir=/spark -Xmx1g org.apache.spark.deploy.worker.Worker --webui-port 8081 spark://linux01:7077
========================================
Unrecognized option: -
Error: Could not create the Java Virtual Machine.
Error: A fatal exception has occurred. Program will exit.
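
The error comes straight from the logged Spark Command: the options read -Dspark.deploy.recoveryMode=ZOOKEEPER - Dspark.deploy.zookeeper.url=..., i.e. there is a space between the - and Dspark.deploy.zookeeper.url, so the JVM is handed a lone - and exits with "Unrecognized option: -". A quick way to make the orphaned dash visible (a hedged sketch, these commands are not from the original post) is to pull out the dash-prefixed tokens of the Worker log and to grep the configuration file that builds those options:

# list every space-separated "-" token in the Worker log; the orphaned "-" stands out next to the real -D flags
grep -o -e ' -[^ ]*' /opt/servers/spark-2.4.5/logs/spark-root-org.apache.spark.deploy.worker.Worker-1-linux01.out
# look for the "dash, space, capital D" pattern in the conf file
# (assumes the options are set in the default $SPARK_HOME/conf/spark-env.sh)
grep -n -e ' - D' /opt/servers/spark-2.4.5/conf/spark-env.sh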

Environment checks

  1. Java - OK

  2. ZooKeeper cluster - OK

    • threads look normal
    • the service starts normally
    • creating a node works (commands sketched after this list):
    create -s /zk-test 123
    
  3. Check Spark: spark-env.sh - this is where the problem turned out to be (see the summary below)

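For reference, checks 1 and 2 above can be reproduced from the shell roughly as follows (a sketch assuming the JDK and the stock zkServer.sh / zkCli.sh scripts are on the PATH and ZooKeeper listens on its default port 2181; only create -s /zk-test 123 appears in the original post):

java -version                    # check 1: the JDK answers
zkServer.sh status               # check 2: run on linux01/linux02/linux03; a healthy ensemble reports one leader and two followers
zkCli.sh -server linux01:2181    # opens an interactive client session; the next two lines are typed at its prompt
create -s /zk-test 123           # sequential test node is created
ls /                             # the new /zk-test... node shows up
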
PS: this one made me want to throw up!!!!

Summary

Whichever framework is misbehaving, go read that framework's configuration file - here, spark-env.sh. The "Unrecognized option: -" in the logs maps directly to the launch command: the daemon options were written as "- Dspark.deploy.zookeeper.url=..." with a space after the "-", so the JVM received a lone "-" as an argument and refused to start. Removing that space fixes the startup.
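
A minimal sketch of what the corrected entry could look like, assuming the options are exported through the standard SPARK_DAEMON_JAVA_OPTS variable in spark-env.sh (the original file is only shown as a screenshot), with the host list and /spark directory taken from the logged command:

# /opt/servers/spark-2.4.5/conf/spark-env.sh
# every option must be written as -Dkey=value, with no space after the leading "-"
export SPARK_DAEMON_JAVA_OPTS="-Dspark.deploy.recoveryMode=ZOOKEEPER -Dspark.deploy.zookeeper.url=linux01,linux02,linux03 -Dspark.deploy.zookeeper.dir=/spark"

After removing the space, running stop-all.sh and then start-all.sh again should bring the Master and Worker daemons up normally.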
