Spark deployment
Based on Hadoop HA and ZooKeeper
Extract the Scala package
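Before installing Spark it is worth confirming that the Hadoop HA and ZooKeeper services are already up; a quick jps check should show processes such as NameNode, DFSZKFailoverController and QuorumPeerMain (the exact list depends on how the cluster was laid out):
[root@master ~]# jps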
[root@master ~]# tar -zxvf /h3cu/scala-2.11.8.tgz -C /usr/local/src
Rename the extracted directory
[root@master ~]# mv /usr/local/src/scala-2.11.8/ /usr/local/src/scala
Configure the environment variables
[root@master ~]# vi /root/.bash_profile
Add the following lines:
export SCALA_HOME=/usr/local/src/scala
export PATH=$SCALA_HOME/bin:$PATH
[root@master ~]# source /root/.bash_profile
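To confirm the environment variables took effect, check the Scala version:
[root@master ~]# scala -version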
Start the Scala REPL
[root@master ~]# scala
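Any simple expression can be used to confirm the REPL evaluates correctly, for example:
scala> 1 + 1
res0: Int = 2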
Exit the REPL
:q
Extract the Spark installation package
[root@master ~]# tar -zxvf /h3cu/spark-2.0.0-bin-hadoop2.6.tgz -C /usr/local/src
Rename the directory to spark
[root@master ~]# mv /usr/local/src/spark-2.0.0-bin-hadoop2.6/ /usr/local/src/spark
[root@master ~]# vi /root/.bash_profile
Add the following lines:
export SPARK_HOME=/usr/local/src/spark
export PATH=$SPARK_HOME/bin:$PATH
[root@master ~]# source /root/.bash_profile
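A quick way to confirm the Spark binaries and environment variables are in place is to print the version:
[root@master ~]# spark-submit --version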
[root@master ~]# cd /usr/local/src/spark/conf/
[root@master conf]# cp spark-env.sh.template spark-env.sh
[root@master conf]# vi spark-env.sh
Add the following lines:
export JAVA_HOME=/usr/local/src/jdk1.8.0_144
export HADOOP_HOME=/usr/local/src/hadoop
export SCALA_HOME=/usr/local/src/scala
export SPARK_MASTER_IP=master
export SPARK_MASTER_PORT=7077
export SPARK_WORKER_CORES=1
export SPARK_WORKER_MEMORY=1G
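Since the cluster sits on HA Hadoop, Spark usually also needs to locate the Hadoop client configuration; a line commonly added here (path assumed from the HADOOP_HOME above) is:
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
With spark-env.sh in place, the standalone cluster can be started from the sbin directory, assuming a conf/slaves file listing the worker hostnames has been prepared:
[root@master conf]# /usr/local/src/spark/sbin/start-all.sh
[root@master conf]# jps
A Master process should appear on master and a Worker process on each node listed in conf/slaves; the master web UI is served on port 8080 by default.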
Start the Spark shell
[root@master conf]# cd /usr/local/src/spark/bin
[root@master bin]# ./spark-shell
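Note that ./spark-shell without a --master option runs against local[*] rather than the standalone master (unless spark.master is set in spark-defaults.conf); to attach to the cluster, launch it as ./spark-shell --master spark://master:7077. Inside the shell, a short job serves as a smoke test:
scala> sc.parallelize(1 to 100).reduce(_ + _)
res0: Int = 5050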