- JDK安装,我们采用jdk1.7.0_55
# --- JDK installation (jdk1.7.0_55) ---
tar xvzf jdk-7u55-linux-x64.tar.gz -C /usr/local
cd /usr/local
# Version-neutral symlink so /etc/profile does not hard-code the JDK version.
ln -s jdk1.7.0_55 jdk
# Edit /etc/profile (vim /etc/profile) and append the following lines:
export JAVA_HOME=/usr/local/jdk
# NOTE: the variable Java actually reads is CLASSPATH, not CLASS_PATH.
export CLASSPATH=$JAVA_HOME/lib:$JAVA_HOME/jre/lib
export PATH=$PATH:$JAVA_HOME/bin
source /etc/profile
- Scala 安装
-
# --- Scala installation (scala-2.10.3) ---
tar xvzf scala-2.10.3.tgz -C /usr/local
cd /usr/local
# Version-neutral symlink so /etc/profile does not hard-code the Scala version.
ln -s scala-2.10.3 scala
# Edit /etc/profile (vim /etc/profile) and append the following lines:
export SCALA_HOME=/usr/local/scala
export PATH=$PATH:$SCALA_HOME/bin
# Reload the profile so the new variables take effect in this shell.
source /etc/profile
- Spark安装
-
# --- Spark installation (spark-1.1.1-cdh4, standalone mode) ---
tar xvzf spark-1.1.1-cdh4.tgz -C /usr/local
cd /usr/local
# Version-neutral symlink so /etc/profile does not hard-code the Spark version.
ln -s spark-1.1.1-cdh4 spark
# Edit /etc/profile (vim /etc/profile) and append the following lines:
export SPARK_HOME=/usr/local/spark
export PATH=$PATH:$SPARK_HOME/bin
# Reload the profile so the new variables take effect in this shell.
source /etc/profile
cd /usr/local/spark/conf
mv spark-env.sh.template spark-env.sh
# Edit spark-env.sh (vim spark-env.sh) and add the following lines:
export SCALA_HOME=/usr/local/scala-2.10.3   # same dir as the /usr/local/scala symlink
export JAVA_HOME=/usr/local/jdk1.7.0_55     # same dir as the /usr/local/jdk symlink
export SPARK_MASTER_IP=localhost            # bind master to this host
export SPARK_WORKER_INSTANCES=2             # number of worker instances per node
export SPARK_MASTER_PORT=8070               # master RPC port (default 7077)
export SPARK_MASTER_WEBUI_PORT=8090         # master web UI port (default 8080)
export SPARK_WORKER_PORT=8092               # worker RPC port
export SPARK_WORKER_MEMORY=5000m            # memory each worker may use
- vi ./conf/slaves 每行一个worker的主机名
- 启动spark spark路径下执行:./sbin/start-all.sh
- 停止spark ./sbin/stop-all.sh
Ubuntu上Spark单独部署
最新推荐文章于 2023-02-21 20:24:14 发布