spark分布式集群搭建
1.解压缩
下载解压缩
https://spark.apache.org/downloads.html
2.环境变量
# spark environment
# Append to /etc/profile (all users) or ~/.bashrc, then `source` the file
# so the current shell picks it up.
export SPARK_HOME=/usr/local/spark-2.1.0
export PATH=$SPARK_HOME/bin:$PATH
3.spark-env.sh($SPARK_HOME/conf)
# conf/spark-env.sh — per-node runtime settings for the standalone cluster.
# Copy from spark-env.sh.template and distribute to every node.
export JAVA_HOME=/usr/local/jdk1.8.0_131
export SCALA_HOME=/usr/local/scala-2.11.12
# Spark 2.x: SPARK_MASTER_HOST supersedes the deprecated SPARK_MASTER_IP.
# Keep both so any older tooling that still reads SPARK_MASTER_IP works.
export SPARK_MASTER_HOST=192.168.6.137
export SPARK_MASTER_IP=192.168.6.137
# Total memory a Worker on this node may grant to executors.
export SPARK_WORKER_MEMORY=512m
export HADOOP_HOME=/usr/local/hadoop-2.7.3
# Lets Spark find the HDFS/YARN client configs — TODO confirm this matches
# your Hadoop install layout.
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
4.slaves($SPARK_HOME/conf)
# conf/slaves — one Worker hostname per line. Each name must resolve
# (e.g. via /etc/hosts) and the master needs password-less SSH to it.
spark1
spark2
spark3
5.启动
$SPARK_HOME/sbin/start-all.sh