Spark 2.3.3 安装完成之后,执行启动脚本时报错,启动输出如下:
[hadoop@namenode1 sbin]$ ./start-all.sh
starting org.apache.spark.deploy.master.Master, logging to /home/hadoop/spark-2.3.3-bin-hadoop2.7/logs/spark-hadoop-org.apache.spark.deploy.master.Master-1-namenode1.out
datanode2: starting org.apache.spark.deploy.worker.Worker, logging to /home/hadoop/spark-2.3.3-bin-hadoop2.7/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-datanode2.out
datanode3: starting org.apache.spark.deploy.worker.Worker, logging to /home/hadoop/spark-2.3.3-bin-hadoop2.7/logs/spark-hadoop-org.apache.spark.deploy.worker.Worker-1-datanode3.out
datanode1: starting org.apache.spark.deploy.worker.Worker, logging to /home/hadoop/spark-2.3.3-bin-had