搭建 Spark 前,必须先准备好并启动 Hadoop 集群。
# Scala runtime: point SCALA_HOME at the install directory and
# expose the scala/scalac command-line tools on PATH.
SCALA_HOME=/opt/scala-2.10.6
export SCALA_HOME
export PATH="$PATH:$SCALA_HOME/bin"
centos-master
centos-slave1
centos-slave2
#spark config
# Spark installation root; put the launcher scripts (bin/) and the
# daemon control scripts (sbin/) on PATH. Both are exported so that
# child processes (spark-submit, the daemons) inherit them.
export SPARK_HOME=/opt/spark-1.6.0
# Fix: the original read "PATH=PATH:..." — the missing '$' replaced the
# existing search path with the literal string "PATH", breaking every
# subsequent command lookup. It also lacked 'export'.
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
配置 conf/spark-env.sh(将以下内容写入该文件):
# Minimal conf/spark-env.sh for a standalone cluster.
export JAVA_HOME="/usr/java/latest"        # JDK used by master and workers
export SPARK_MASTER_IP="192.168.130.140"   # address the master binds to
export SPARK_MASTER_PORT="7077"            # master RPC port
export SPARK_WORKER_INSTANCES="1"          # worker processes per node
export SPARK_WORKER_CORES="1"              # cores granted to each worker
export SPARK_WORKER_MEMORY="1g"            # memory granted to each worker
分发到各个节点
# Copy the Spark distribution to each worker node under /opt.
for node in centos-slave1 centos-slave2; do
  scp -r /opt/spark-1.6.0 "$node":/opt
done
Spark standalone 模式启动(在 master 节点上执行):
# Starts the standalone master plus the workers; run on the master node.
# NOTE(review): presumably the workers come from conf/slaves — that file
# is not shown here, confirm it lists the slave hostnames.
/opt/spark-1.6.0/sbin/start-all.sh
# Scala runtime: point SCALA_HOME at the install directory and
# expose the scala/scalac command-line tools on PATH.
SCALA_HOME=/opt/scala-2.10.6
export SCALA_HOME
export PATH="$PATH:$SCALA_HOME/bin"
centos-master
centos-slave1
centos-slave2
#spark config
# Spark installation root; put the launcher scripts (bin/) and the
# daemon control scripts (sbin/) on PATH. Both are exported so that
# child processes (spark-submit, the daemons) inherit them.
export SPARK_HOME=/opt/spark-1.6.0
# Fix: the original read "PATH=PATH:..." — the missing '$' replaced the
# existing search path with the literal string "PATH", breaking every
# subsequent command lookup. It also lacked 'export'.
export PATH=$PATH:$SPARK_HOME/bin:$SPARK_HOME/sbin
配置 conf/spark-env.sh(将以下内容写入该文件):
# Minimal conf/spark-env.sh for a standalone cluster.
export JAVA_HOME="/usr/java/latest"        # JDK used by master and workers
export SPARK_MASTER_IP="192.168.130.140"   # address the master binds to
export SPARK_MASTER_PORT="7077"            # master RPC port
export SPARK_WORKER_INSTANCES="1"          # worker processes per node
export SPARK_WORKER_CORES="1"              # cores granted to each worker
export SPARK_WORKER_MEMORY="1g"            # memory granted to each worker
分发到各个节点
# Copy the Spark distribution to each worker node under /opt.
for node in centos-slave1 centos-slave2; do
  scp -r /opt/spark-1.6.0 "$node":/opt
done
Spark standalone 模式启动(在 master 节点上执行):
# Starts the standalone master plus the workers; run on the master node.
# NOTE(review): presumably the workers come from conf/slaves — that file
# is not shown here, confirm it lists the slave hostnames.
/opt/spark-1.6.0/sbin/start-all.sh