Spark安装(非ClouderaManager)
由于Cloudera Manager自带的Spark版本为1.6,所以此处单独安装
Spark-2.1.1
1. scala环境
# 1. Install Scala 2.11.11 on hd-26 (Spark 2.1.x builds against Scala 2.11).
scp scala-2.11.11.tgz hd-26:/usr/local/
# Unpack, create a version-independent symlink, and register SCALA_HOME.
# The /etc/profile append is guarded so re-running this step does not add
# duplicate export lines. A 'source /etc/profile' inside a non-interactive
# ssh session only affects that ephemeral shell, so it is omitted — new
# login shells pick the variable up automatically.
ssh hd-26 "cd /usr/local/; tar xf scala-2.11.11.tgz; \
rm -rf scala-2.11.11.tgz; ln -s scala-2.11.11 scala; \
grep -q 'SCALA_HOME=/usr/local/scala' /etc/profile || \
echo 'export SCALA_HOME=/usr/local/scala' >> /etc/profile;"
2. Spark安装
# Unpack Spark 2.1.1 under /opt/soft and expose it as /opt/spark.
cp spark-2.1.1-bin-hadoop2.6.tgz /opt/soft
cd /opt/soft
tar xf spark-2.1.1-bin-hadoop2.6.tgz
cd ..
ln -s soft/spark-2.1.1-bin-hadoop2.6/ spark
cd spark/conf
cp spark-env.sh.template spark-env.sh

# Write the worker list from scratch. Copying slaves.template and then
# appending would keep the template's uncommented 'localhost' entry,
# which would start an unintended Worker on this (master) machine. A
# quoted heredoc also avoids the trailing blank line that
# `echo -e "...\n"` produced.
cat > slaves <<'EOF'
hd-26
hd-27
hd-28
hd-30
EOF

# Standalone-cluster configuration, appended after the template copy.
# NOTE: the original notes had a stray trailing double-quote on the
# SCALA_HOME echo line, which made that command a shell syntax error.
cat >> spark-env.sh <<'EOF'
SPARK_EXECUTOR_CORES=2
SPARK_EXECUTOR_MEMORY=2G
SPARK_DRIVER_MEMORY=2G
SPARK_MASTER_HOST=hd-29
SPARK_MASTER_PORT=7077
SPARK_WORKER_CORES=4
SPARK_WORKER_MEMORY=2G
SPARK_WORKER_PORT=7078
JAVA_HOME=/usr/local/jdk1.8.0_77
SPARK_HOME=/opt/spark
HADOOP_CONF_DIR=/etc/hadoop/conf
SCALA_HOME=/usr/local/scala
EOF
# 3. Distribute the configured Spark tree to every worker listed in
# conf/slaves. The original steps only copied/linked on hd-26 and then
# appended SPARK_LOCAL_IP on hd-27 (where /opt/spark did not exist);
# hd-28 and hd-30 were listed as workers but never received Spark at all.
cd /opt/soft
for host in hd-26 hd-27 hd-28 hd-30; do
  scp -r spark-2.1.1-bin-hadoop2.6 "${host}:/opt/soft"
  ssh "$host" "cd /opt; ln -s soft/spark-2.1.1-bin-hadoop2.6/ spark"
  # Each worker binds its daemons to its own hostname.
  ssh "$host" "echo 'SPARK_LOCAL_IP=${host}' >> /opt/spark/conf/spark-env.sh"
done
# 4. Start the cluster from the master. The relative path assumes the
# current directory is still /opt/soft from the previous step, so this
# lands in /opt/spark/sbin.
cd ../spark/sbin
# Starts the standalone Master on this host (SPARK_MASTER_HOST=hd-29 above).
./start-master.sh
# ssh-es to every host listed in conf/slaves and starts a Worker there.
./start-slaves.sh