# Spark standalone cluster setup. For the Hadoop configuration this relies on
# (HADOOP_CONF_DIR below), see the separate Hadoop setup notes.
# NOTE(review): in an interactive runbook this opens a shell as rdato and the
# following commands are typed into it. If this file is ever run as a script,
# `su -` would block here and the rest would run as the ORIGINAL user — use
# `su - rdato -c '...'` or run the whole file as rdato instead.
su - rdato
cd /u01
tar -zxvf spark-2.1.1-bin-hadoop2.7.tgz
mv spark-2.1.1-bin-hadoop2.7 spark

# Copy the configuration templates shipped with Spark.
cp /u01/spark/conf/spark-env.sh.template /u01/spark/conf/spark-env.sh
cp /u01/spark/conf/slaves.template /u01/spark/conf/slaves

# Append the cluster environment to spark-env.sh.
# Delimiter is quoted so the heredoc body is taken literally (no expansion here).
cat >> /u01/spark/conf/spark-env.sh << 'EOF'
export JAVA_HOME=/usr/java/jdk1.8.0_131
export SCALA_HOME=/usr/share/scala
export SPARK_HOME=/u01/spark
export HADOOP_CONF_DIR=/u01/hadoop/etc/hadoop
EOF

# Register the worker hosts, one per line, in conf/slaves.
# (fixed: the original listed the hostnames as bare lines, which the shell
# would have tried to execute as commands instead of writing to the file)
# NOTE(review): the copied template still contains a `localhost` line; remove
# it if the master node should not also run a worker.
cat >> /u01/spark/conf/slaves << 'EOF'
sparkgc1
sparkgc2
sparkgc3
EOF

# Start the Spark cluster: the master on this node plus one worker per
# host listed in conf/slaves.
/u01/spark/sbin/start-all.sh

# On the standby node, start the backup master.
# (fixed: original said /u01/spark/sibn/start_master.sh — the directory is
# `sbin` and the script Spark ships is `start-master.sh`)
/u01/spark/sbin/start-master.sh

# Spark master web UI:
#   http://192.168.168.141:8080/