Base environment: CentOS 7.7
Installation directory for the build-environment software:
mkdir -p /export/server
Compiling and Installing Hadoop
-
Install the build dependencies
yum install gcc gcc-c++ make autoconf automake libtool curl lzo-devel zlib-devel openssl openssl-devel ncurses-devel snappy snappy-devel bzip2 bzip2-devel lzo lzo-devel lzop libXtst zlib -y
yum install -y doxygen cyrus-sasl* saslwrapper-devel*
-
Manually install CMake
# Remove the cmake installed via yum (its version is too old)
yum erase cmake
# Extract
tar zxvf CMake-3.19.4.tar.gz
# Build and install
cd /export/server/CMake-3.19.4
./configure
make && make install
# Verify
[root@node4 ~]# cmake -version
cmake version 3.19.4
# If the version is not displayed correctly, disconnect the SSH session and log in again
-
Manually install snappy
# Remove any previously installed snappy libraries
rm -rf /usr/local/lib/libsnappy*
rm -rf /lib64/libsnappy*
# Upload and extract
tar zxvf snappy-1.1.3.tar.gz
# Build and install
cd /export/server/snappy-1.1.3
./configure
make && make install
# Verify the installation
[root@node4 snappy-1.1.3]# ls -lh /usr/local/lib | grep snappy
-rw-r--r-- 1 root root 511K Nov  4 17:13 libsnappy.a
-rwxr-xr-x 1 root root  955 Nov  4 17:13 libsnappy.la
lrwxrwxrwx 1 root root   18 Nov  4 17:13 libsnappy.so -> libsnappy.so.1.3.0
lrwxrwxrwx 1 root root   18 Nov  4 17:13 libsnappy.so.1 -> libsnappy.so.1.3.0
-rwxr-xr-x 1 root root 253K Nov  4 17:13 libsnappy.so.1.3.0
-
Install and configure JDK 1.8
# Extract the archive
tar zxvf jdk-8u65-linux-x64.tar.gz
# Configure environment variables
vim /etc/profile

export JAVA_HOME=/export/server/jdk1.8.0_65
export PATH=$PATH:$JAVA_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar

# Reload the environment variable file
source /etc/profile
# Verify the installation
java -version
java version "1.8.0_65"
Java(TM) SE Runtime Environment (build 1.8.0_65-b17)
Java HotSpot(TM) 64-Bit Server VM (build 25.65-b01, mixed mode)
-
Install and configure Maven
# Extract the archive
tar zxvf apache-maven-3.5.4-bin.tar.gz
# Configure environment variables
vim /etc/profile

export MAVEN_HOME=/export/server/apache-maven-3.5.4
export MAVEN_OPTS="-Xms4096m -Xmx4096m"
export PATH=$MAVEN_HOME/bin:$PATH

source /etc/profile
# Verify the installation
[root@node4 ~]# mvn -v
Apache Maven 3.5.4
# Add the Aliyun mirror to speed up builds from inside China
vim /export/server/apache-maven-3.5.4/conf/settings.xml

<mirror>
  <id>alimaven</id>
  <name>aliyun maven</name>
  <url>http://maven.aliyun.com/nexus/content/groups/public/</url>
  <mirrorOf>central</mirrorOf>
</mirror>
-
Install ProtocolBuffer 3.7.1
# Remove any previously installed protobuf version
# Extract
tar zxvf protobuf-3.7.1.tar.gz
# Build and install
cd /export/server/protobuf-3.7.1
./autogen.sh
./configure
make && make install
# Verify the installation
[root@node4 protobuf-3.7.1]# protoc --version
libprotoc 3.7.1
-
Compile Hadoop
# Upload and extract the source package
tar zxvf hadoop-3.3.0-src.tar.gz
# Compile
cd /root/hadoop-3.3.0-src
mvn clean package -Pdist,native -DskipTests -Dtar -Dbundle.snappy -Dsnappy.lib=/usr/local/lib
# Flag descriptions:
# -Pdist,native : build the distribution along with the recompiled Hadoop native libraries
# -DskipTests   : skip the tests
# -Dtar         : package the result as a tarball
# -Dbundle.snappy : bundle snappy compression support (the official binaries do not include it)
# -Dsnappy.lib=/usr/local/lib : path where snappy is installed on the build machine
-
Path of the compiled packages
/root/hadoop-3.3.0-src/hadoop-dist/target
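Before moving on, it is worth confirming that the native libraries and snappy support actually made it into the build. A minimal sanity check, assuming the default hadoop-3.3.0.tar.gz name produced by -Dtar:
cd /root/hadoop-3.3.0-src/hadoop-dist/target
tar zxvf hadoop-3.3.0.tar.gz
# Should report hadoop: true and snappy: true among the native checks
hadoop-3.3.0/bin/hadoop checknative -a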
-
Hadoop Fully Distributed Installation
-
Cluster plan

Host     Roles
node1    NN DN RM NM
node2    SNN DN NM
node3    DN NM

(NN = NameNode, SNN = SecondaryNameNode, DN = DataNode, RM = ResourceManager, NM = NodeManager)
-
Base environment
# Hostname / hosts mapping
vim /etc/hosts

127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4
::1 localhost localhost.localdomain localhost6 localhost6.localdomain6
192.168.227.151 node1.itcast.cn node1
192.168.227.152 node2.itcast.cn node2
192.168.227.153 node3.itcast.cn node3

JDK 1.8 installation: upload jdk-8u65-linux-x64.tar.gz to /export/server/
cd /export/server/
tar zxvf jdk-8u65-linux-x64.tar.gz
# Configure environment variables
vim /etc/profile

export JAVA_HOME=/export/server/jdk1.8.0_65
export PATH=$PATH:$JAVA_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar

# Reload the environment variable file
source /etc/profile
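The JDK and the /etc/profile changes must exist on all three nodes. A minimal sketch for syncing them from node1 (scp will prompt for passwords until the SSH keys below are set up):
# Copy the unpacked JDK and profile to node2 and node3
scp -r /export/server/jdk1.8.0_65 root@node2:/export/server/
scp -r /export/server/jdk1.8.0_65 root@node3:/export/server/
scp /etc/profile root@node2:/etc/profile
scp /etc/profile root@node3:/etc/profile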
Cluster time synchronization
ntpdate ntp5.aliyun.com
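ntpdate is a one-shot sync; if clock drift is a concern, one common approach on CentOS 7 is a root cron entry on every node (a sketch, assuming the same Aliyun NTP server):
# Re-sync the clock hourly
echo "0 * * * * /usr/sbin/ntpdate ntp5.aliyun.com" >> /var/spool/cron/root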
Disable the firewall
firewall-cmd --state                 # check the firewall status
systemctl stop firewalld.service     # stop the firewalld service
systemctl disable firewalld.service  # disable firewalld at boot

Passwordless SSH login
# Generate a key pair on node1 (press Enter at every prompt)
ssh-keygen
# Copy node1's public key to node1, node2 and node3
ssh-copy-id node1
ssh-copy-id node2
ssh-copy-id node3
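To confirm the passwordless login works before continuing (a quick check):
# Each command should print the remote hostname without prompting for a password
ssh node2 hostname
ssh node3 hostname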
-
Upload the Hadoop package to node1:/export/server
hadoop-3.3.0-Centos7-64-with-snappy.tar.gz
tar zxvf hadoop-3.3.0-Centos7-64-with-snappy.tar.gz
-
Edit the configuration files (located under hadoop-3.3.0/etc/hadoop)
-
hadoop-env.sh
export JAVA_HOME=/export/server/jdk1.8.0_65
# Append the following at the end of the file
export HDFS_NAMENODE_USER=root
export HDFS_DATANODE_USER=root
export HDFS_SECONDARYNAMENODE_USER=root
export YARN_RESOURCEMANAGER_USER=root
export YARN_NODEMANAGER_USER=root
-
core-site.xml
<property>
  <name>fs.defaultFS</name>
  <value>hdfs://node1:8020</value>
</property>
<property>
  <name>hadoop.tmp.dir</name>
  <value>/export/data/hadoop-3.3.0</value>
</property>
<property>
  <name>hadoop.http.staticuser.user</name>
  <value>root</value>
</property>
<property>
  <name>hadoop.proxyuser.root.hosts</name>
  <value>*</value>
</property>
<property>
  <name>hadoop.proxyuser.root.groups</name>
  <value>*</value>
</property>
-
hdfs-site.xml
<property>
  <name>dfs.namenode.secondary.http-address</name>
  <value>node2:9868</value>
</property>
-
mapred-site.xml
<property>
  <name>mapreduce.framework.name</name>
  <value>yarn</value>
</property>
<property>
  <name>mapreduce.jobhistory.address</name>
  <value>node1:10020</value>
</property>
<property>
  <name>mapreduce.jobhistory.webapp.address</name>
  <value>node1:19888</value>
</property>
<property>
  <name>yarn.app.mapreduce.am.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
  <name>mapreduce.map.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
  <name>mapreduce.reduce.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
-
yarn-site.xml
<property>
  <name>yarn.resourcemanager.hostname</name>
  <value>node1</value>
</property>
<property>
  <name>yarn.nodemanager.aux-services</name>
  <value>mapreduce_shuffle</value>
</property>
<property>
  <name>yarn.nodemanager.pmem-check-enabled</name>
  <value>false</value>
</property>
<property>
  <name>yarn.nodemanager.vmem-check-enabled</name>
  <value>false</value>
</property>
<property>
  <name>yarn.log-aggregation-enable</name>
  <value>true</value>
</property>
<property>
  <name>yarn.log.server.url</name>
  <value>http://node1:19888/jobhistory/logs</value>
</property>
<property>
  <name>yarn.log-aggregation.retain-seconds</name>
  <value>604800</value>
</property>
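With log aggregation enabled as above, a finished application's logs can be fetched from the command line instead of digging through NodeManager-local directories; the application ID below is a placeholder:
# Pull the aggregated logs of a completed YARN application
yarn logs -applicationId application_1600000000000_0001
-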
workers
node1
node2
node3
-
Distribute and sync the Hadoop installation
cd /export/server
scp -r hadoop-3.3.0 root@node2:$PWD
scp -r hadoop-3.3.0 root@node3:$PWD
-
Add Hadoop to the environment variables (on all 3 machines)
vim /etc/profile

export HADOOP_HOME=/export/server/hadoop-3.3.0
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin

source /etc/profile
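A quick sanity check that the new PATH is active:
# Run on node1, node2 and node3; each should print the Hadoop 3.3.0 version banner
hadoop version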
-
Starting the Hadoop Cluster
-
(First start only) format the NameNode
hdfs namenode -format
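Formatting wipes HDFS metadata, so run it exactly once, and only on node1. One way to confirm it succeeded, assuming the NameNode keeps its metadata under the hadoop.tmp.dir configured above (the default dfs/name subdirectory):
# A successful format writes a VERSION file containing the new clusterID
cat /export/data/hadoop-3.3.0/dfs/name/current/VERSION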
-
One-command startup scripts
[root@node1 ~]# start-dfs.sh
Starting namenodes on [node1]
Last login: Thu Nov  5 10:44:10 CST 2020 on pts/0
Starting datanodes
Last login: Thu Nov  5 10:45:02 CST 2020 on pts/0
Starting secondary namenodes [node2]
Last login: Thu Nov  5 10:45:04 CST 2020 on pts/0

[root@node1 ~]# start-yarn.sh
Starting resourcemanager
Last login: Thu Nov  5 10:45:08 CST 2020 on pts/0
Starting nodemanagers
Last login: Thu Nov  5 10:45:44 CST 2020 on pts/0
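After both scripts finish, jps on each node should show the daemons from the cluster plan:
# Run jps on each node; expected daemons (PIDs will differ):
# node1: NameNode, DataNode, ResourceManager, NodeManager
# node2: SecondaryNameNode, DataNode, NodeManager
# node3: DataNode, NodeManager
jps
-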
Web UI pages
-
HDFS cluster: http://node1:9870/
-
YARN cluster: http://node1:8088/
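Once the UIs are reachable, a minimal HDFS read/write smoke test (the /tmp/smoketest path is just an illustrative choice):
hdfs dfs -mkdir -p /tmp/smoketest
hdfs dfs -put /etc/hosts /tmp/smoketest/
# The uploaded file should be listed back
hdfs dfs -ls /tmp/smoketest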
-
Error 1: running the MapReduce examples bundled with Hadoop 3 fails.
-
Error message
Error: Could not find or load main class org.apache.hadoop.mapreduce.v2.app.MRAppMaster
Please check whether your etc/hadoop/mapred-site.xml contains the below configuration:
<property>
  <name>yarn.app.mapreduce.am.env</name>
  <value>HADOOP_MAPRED_HOME=${full path of your hadoop distribution directory}</value>
</property>
<property>
  <name>mapreduce.map.env</name>
  <value>HADOOP_MAPRED_HOME=${full path of your hadoop distribution directory}</value>
</property>
<property>
  <name>mapreduce.reduce.env</name>
  <value>HADOOP_MAPRED_HOME=${full path of your hadoop distribution directory}</value>
</property>
-
Fix: add the following configuration to mapred-site.xml
<property>
  <name>yarn.app.mapreduce.am.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
  <name>mapreduce.map.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
<property>
  <name>mapreduce.reduce.env</name>
  <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
</property>
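With the properties in place and YARN restarted, re-running a bundled example verifies the fix; the jar path below assumes the standard 3.3.0 distribution layout:
# Estimate pi with 2 map tasks and 4 samples per map
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.3.0.jar pi 2 4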
-