Scala and Spark Setup and Configuration

1. Notes

8. Fully Distributed Scala and Spark Installation - Mubu

Scala and Spark Setup and Configuration - Part 7 - Hadoop Big Data Fully Distributed Cluster (bilibili)

2. Document path

F:\hive_sql学习\hadoop01

3. Upload the installation files to the server

[root@hadoop001 ~]# cd /home/hhh/

[root@hadoop001 hhh]# ls

apache-hive-2.3.9-bin.tar.gz  mysql-8.0.31-1.el7.x86_64.rpm-bundle.tar  scala-2.12.15.tgz

hadoop-2.7.7.tar.gz           mysql-connector-j-8.0.31.tar.gz           spark-3.2.2-bin-hadoop2.7.tgz

[root@hadoop001 hhh]#

[root@hadoop001 hhh]#

Extract the Scala archive into the target directory


[root@hadoop001 hhh]# tar -zxvf scala-2.12.15.tgz -C /usr/local/software

scala-2.12.15/

scala-2.12.15/lib/

scala-2.12.15/lib/scala-compiler.jar

scala-2.12.15/lib/scalap-2.12.15.jar

scala-2.12.15/lib/scala-reflect.jar

scala-2.12.15/lib/scala-xml_2.12-1.0.6.jar

scala-2.12.15/lib/jline-2.14.6.jar

scala-2.12.15/lib/scala-parser-combinators_2.12-1.0.7.jar

scala-2.12.15/lib/scala-swing_2.12-2.0.3.jar

scala-2.12.15/lib/scala-library.jar

scala-2.12.15/doc/

scala-2.12.15/doc/licenses/

scala-2.12.15/doc/licenses/mit_jquery.txt

scala-2.12.15/doc/licenses/bsd_asm.txt

scala-2.12.15/doc/licenses/apache_jansi.txt

scala-2.12.15/doc/licenses/bsd_jline.txt

scala-2.12.15/doc/licenses/mit_tools.tooltip.txt

scala-2.12.15/doc/LICENSE.md

scala-2.12.15/doc/License.rtf

scala-2.12.15/doc/README

scala-2.12.15/doc/tools/

scala-2.12.15/doc/tools/scaladoc.html

scala-2.12.15/doc/tools/scalap.html

scala-2.12.15/doc/tools/css/

scala-2.12.15/doc/tools/css/style.css

scala-2.12.15/doc/tools/scala.html

scala-2.12.15/doc/tools/index.html

scala-2.12.15/doc/tools/images/

scala-2.12.15/doc/tools/images/scala_logo.png

scala-2.12.15/doc/tools/images/external.gif

scala-2.12.15/doc/tools/scalac.html

scala-2.12.15/doc/tools/fsc.html

scala-2.12.15/bin/

scala-2.12.15/bin/fsc

scala-2.12.15/bin/scalap.bat

scala-2.12.15/bin/scala

scala-2.12.15/bin/scaladoc.bat

scala-2.12.15/bin/fsc.bat

scala-2.12.15/bin/scala.bat

scala-2.12.15/bin/scaladoc

scala-2.12.15/bin/scalap

scala-2.12.15/bin/scalac

scala-2.12.15/bin/scalac.bat

scala-2.12.15/LICENSE

scala-2.12.15/man/

scala-2.12.15/man/man1/

scala-2.12.15/man/man1/scalac.1

scala-2.12.15/man/man1/scala.1

scala-2.12.15/man/man1/scaladoc.1

scala-2.12.15/man/man1/fsc.1

scala-2.12.15/man/man1/scalap.1

scala-2.12.15/NOTICE

[root@hadoop001 hhh]#

[root@hadoop001 hhh]#

Configure the environment variables

vi /etc/profile

  • export SCALA_HOME=/usr/local/software/scala-2.12.15
  • export PATH=$PATH:$SCALA_HOME/bin

  • Make the configuration take effect

source /etc/profile

  • Check whether the installation succeeded

scala -version

[root@hadoop001 hhh]# source /etc/profile

[root@hadoop001 hhh]#

[root@hadoop001 hhh]#

[root@hadoop001 hhh]# scala -version

Scala code runner version 2.12.15 -- Copyright 2002-2021, LAMP/EPFL and Lightbend, Inc.

[root@hadoop001 hhh]#

No problems at all.

Run Scala:

scala

To exit:

:quit

The scala startup banner shows the basic configuration information.
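A quick sanity check inside the REPL looks like this (an illustrative session; any expression will do):

[root@hadoop001 hhh]# scala

Welcome to Scala 2.12.15 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_161).

Type in expressions for evaluation. Or try :help.

scala> 1 + 1

res0: Int = 2

scala> :quit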

Spark installation

Extract the Spark archive into the target directory

tar -zxvf spark-3.2.2-bin-hadoop2.7.tgz -C /usr/local/software

Rename it:

cd /usr/local/software

mv spark-3.2.2-bin-hadoop2.7 spark3.2

cd /usr/local/software/spark3.2/conf

The commands are as follows:

[root@hadoop001 conf]# cp  spark-env.sh.template  spark-env.sh

[root@hadoop001 conf]#

[root@hadoop001 conf]#

[root@hadoop001 conf]# ll

total 44

-rw-r--r--. 1 hx   hx   1105 Jul 12  2022 fairscheduler.xml.template

-rw-r--r--. 1 hx   hx   2471 Jul 12  2022 log4j.properties.template

-rw-r--r--. 1 hx   hx   9141 Jul 12  2022 metrics.properties.template

-rw-r--r--. 1 hx   hx   1292 Jul 12  2022 spark-defaults.conf.template

-rwxr-xr-x. 1 root root 4428 Jul 31 13:49 spark-env.sh

-rwxr-xr-x. 1 hx   hx   4428 Jul 12  2022 spark-env.sh.template

-rw-r--r--. 1 hx   hx    865 Jul 12  2022 workers.template

[root@hadoop001 conf]#

Configure spark-env.sh by appending the following at the end (a verification sketch follows the list):

vi spark-env.sh

  • export JAVA_HOME=/usr/local/software/jdk1.8.0_161
  • export HADOOP_HOME=/usr/local/software/hadoop-2.7.7
  • export HADOOP_CONF_DIR=/usr/local/software/hadoop-2.7.7/etc/hadoop
  • export SPARK_MASTER_IP=hadoop001
  • export SPARK_MASTER_PORT=7077
  • export SPARK_WORKER_MEMORY=512m
  • export SPARK_WORKER_CORES=1
  • export SPARK_EXECUTOR_MEMORY=512m
  • export SPARK_EXECUTOR_CORES=1
  • export SPARK_WORKER_INSTANCES=1
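Note: SPARK_MASTER_IP is the legacy name for this setting; current Spark documentation uses SPARK_MASTER_HOST, so if the master does not bind to hadoop001, try that name instead. To confirm the exports landed at the end of the file, a quick check (a sketch; the tail should print the ten export lines above):

[root@hadoop001 conf]# tail -n 10 spark-env.sh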

The result:

[root@hadoop001 conf]# pwd

/usr/local/software/spark3.2/conf

Configure the workers (slaves): copy workers.template to workers, then set its contents to the worker hostnames:

cp workers.template workers

hadoop002

hadoop003
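Since workers.template ships with a default localhost entry, make sure that entry is replaced rather than appended to. A quick check of the final file (comment lines filtered out; output is illustrative):

[root@hadoop001 conf]# grep -v '^#' workers

hadoop002

hadoop003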

Configure the spark-defaults.conf file

cp spark-defaults.conf.template spark-defaults.conf

vi spark-defaults.conf

spark.master                     spark://hadoop001:7077

spark.eventLog.enabled           true

spark.eventLog.dir               hdfs://hadoop001:9000/spark-logs

spark.history.fs.logDirectory    hdfs://hadoop001:9000/spark-logs
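The hdfs://hadoop001:9000 prefix has to match fs.defaultFS in Hadoop's core-site.xml, or the event-log writes will fail. A quick way to check (the path assumes the hadoop-2.7.7 install used throughout this series):

[root@hadoop001 conf]# grep -A1 fs.defaultFS /usr/local/software/hadoop-2.7.7/etc/hadoop/core-site.xml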

Run on the hadoop002 node:

chmod 777 /usr/local/software

Run on the hadoop003 node:

chmod 777 /usr/local/software

Change the access permissions on spark3.2

Run on the hadoop002 node:

chown hx -R /usr/local/software/spark3.2

Note that the scp commands must be run first, on the hadoop001 node, to copy the directory over:

scp -r /usr/local/software/spark3.2 root@hadoop002:/usr/local/software

Once that finishes,

still on hadoop001:

scp -r /usr/local/software/spark3.2 root@hadoop003:/usr/local/software

Run on the hadoop003 node:

chown hx -R /usr/local/software/spark3.2
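A quick sanity check from hadoop001 that both copies landed (uses the same ssh access that scp relied on above):

[root@hadoop001 ~]# ssh root@hadoop002 ls /usr/local/software/spark3.2

[root@hadoop001 ~]# ssh root@hadoop003 ls /usr/local/software/spark3.2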

Start the cluster on hadoop001:

start-all.sh

If startup runs into problems, run stop-all.sh first and try again.

Create the spark-logs directory (run on hadoop001):

hdfs dfs -mkdir /spark-logs
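To verify, list the HDFS root; /spark-logs should appear in the output:

[root@hadoop001 spark3.2]# hdfs dfs -ls /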

[root@hadoop001 spark3.2]# pwd

/usr/local/software/spark3.2
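The pwd above is the Spark install directory, which is where the standalone daemons get started from; a sketch of that step, assuming Spark's own start-all.sh under sbin (not Hadoop's script of the same name):

[root@hadoop001 spark3.2]# sbin/start-all.sh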

Check the services: 4 on this node, 5 on the second node, and likewise on the third node, hadoop003 (a jps sketch follows).
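A quick way to count them is jps on each node (illustrative; the exact list depends on which Hadoop daemons run where, but the master should show a Master process alongside NameNode/ResourceManager, and hadoop002/hadoop003 a Worker alongside DataNode/NodeManager):

[root@hadoop001 spark3.2]# jps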

Visit http://192.168.153.21:8080 to access the master node. (My IP changed later, so the address became http://192.168.188.105:8080/.)

Postscript

[root@hadoop001 hadoop]# source /etc/profile

[root@hadoop001 hadoop]# scala

Welcome to Scala 2.12.15 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_161).

Type in expressions for evaluation. Or try :help.

scala>

[root@hadoop001 bin]# pwd

/usr/local/software/spark3.2/bin

[root@hadoop001 bin]#

[root@hadoop001 bin]# spark-shell

bash: spark-shell: command not found...

(The Spark bin directory is not on PATH, so the script has to be invoked with ./ from inside it.)

[root@hadoop001 bin]# ./spark-shell

Setting default log level to "WARN".

Exit command:
:quit

[root@hadoop001 bin]# mysql -uroot -p123456

[root@hadoop001 bin]# ./spark-shell --master local

Testing Spark locally; a minimal smoke test follows.
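An illustrative session inside the shell (sc is the SparkContext the shell creates automatically):

scala> sc.parallelize(1 to 100).sum()

res0: Double = 5050.0

scala> :quit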
