Hadoop 单机部署:
1.配置国内yum源
[root@master ~]# vim /etc/yum.repos.d/CentOS-Base.repo
# CentOS-Base.repo
[base]
name=CentOS-Base
baseurl=http://mirrors.163.com/centos/7/os/$basearch/
gpgcheck=1
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
#released updates
[updates]
name=CentOS-7 - Updates - 163.com
#mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=updates
baseurl=http://mirrors.163.com/centos/7/updates/$basearch/
gpgcheck=1
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
#additional packages that may be useful
[extras]
name=CentOS-7 - Extras - 163.com
#mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=extras
baseurl=http://mirrors.163.com/centos/7/extras/$basearch/
gpgcheck=1
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
#additional packages that extend functionality of existing packages
[centosplus]
name=CentOS-7 - Plus - 163.com
baseurl=http://mirrors.163.com/centos/7/centosplus/$basearch/
gpgcheck=1
enabled=0
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
[root@master ~]# yum clean all
[root@master ~]# yum makecache
[root@master ~]# yum update -y
[root@master ~]# yum install -y vim
2.配置主机名和hosts
[root@master ~]# hostnamectl set-hostname master
[root@master ~]# vim /etc/hosts
192.168.87.133 master
3.关闭防火墙和selinux
[root@master ~]# systemctl stop firewalld
[root@master ~]# systemctl disable firewalld
[root@master ~]# vim /etc/sysconfig/selinux
SELINUX=disabled
[root@master ~]# setenforce 0
4.安装jdk
[root@master ~]# tar -zxvf jdk-9.0.4_linux-x64_bin.tar.gz
[root@master ~]# cp -r jdk-9.0.4/ /usr/local/java
[root@master ~]# vim /etc/profile.d/java.sh
export JAVA_HOME=/usr/local/java
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
export PATH=.:${JAVA_HOME}/bin:$PATH
[root@master ~]# source /etc/profile.d/java.sh
[root@master ~]# java -version
5.安装hadoop
[root@master ~]# groupadd hadoop
[root@master ~]# useradd -g hadoop -m -d /home/hadoop hadoop
[root@master ~]# echo "password" | passwd --stdin hadoop
[root@master ~]# echo "hadoop ALL = (root) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/hadoop
[root@master ~]# chmod 0440 /etc/sudoers.d/hadoop
[root@master ~]# tar -zxvf hadoop-3.0.0.tar.gz -C /usr/local/
[root@master ~]# ln -sv /usr/local/hadoop-3.0.0/ /usr/local/hadoop
[root@master ~]# chown -R hadoop.hadoop /usr/local/hadoop
[root@master ~]# vim /etc/profile.d/hadoop.sh
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib:$HADOOP_COMMON_LIB_NATIVE_DIR"
export PATH=.:${JAVA_HOME}/bin:${HADOOP_HOME}/bin:$PATH
[root@master ~]# . /etc/profile.d/hadoop.sh
[root@master ~]# hadoop version
6.创建一些目录
[root@master ~]# mkdir -p /data/dadoop/{tmp,var,dfs}
[root@master ~]# mkdir -p /data/dadoop/dfs/{name,data}
[root@master ~]# chown -R hadoop.hadoop /data/dadoop
7.配置hadoop
[root@master ~]# cd /usr/local/hadoop/etc/hadoop
[root@master ~]# su hadoop
[hadoop@master ~]$ ssh-keygen -t rsa -P ''
[hadoop@master ~]$ ssh-copy-id -i .ssh/id_rsa.pub hadoop@master
[hadoop@master hadoop]$ vim core-site.xml
<configuration>
<property>
<name>hadoop.tmp.dir</name>
<value>/data/dadoop/tmp</value>
<description>Abase for other temporary directories.</description>
</property>
<property>
<name>fs.default.name</name>
<value>hdfs://master:9000</value>
</property>
</configuration>
[hadoop@master hadoop]$ vim hdfs-site.xml
<configuration>
<property>
<name>dfs.name.dir</name>
<value>file:///data/dadoop/dfs/name</value>
<description>Path on the local filesystem where theNameNode stores the namespace.</description>
</property>
<property>
<name>dfs.data.dir</name>
<value>/data/dadoop/dfs/data</value>
<description>Comma separated list of paths on the localfilesystem of a DataNode where it should store its blocks.</description>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
<description>need not permissions</description>
</property>
</configuration>
[hadoop@master hadoop]$ vim mapred-site.xml
<configuration>
<property>
<name>mapred.job.tracker</name>
<value>master:9001</value>
</property>
<property>
<name>mapred.local.dir</name>
<value>/data/dadoop/var</value>
</property>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
[hadoop@master hadoop]$ vim yarn-site.xml
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>master</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>master:8030</value>
</property>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>master:8031</value>
</property>
<property>
<name>yarn.resourcemanager.address</name>
<value>master:8032</value>
</property>
<property>
<name>yarn.resourcemanager.admin.address</name>
<value>master:8033</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address</name>
<value>master:8088</value>
</property>
</configuration>
8.启动hadoop
[hadoop@master ~]$ hdfs namenode -format
[hadoop@master ~]$ /usr/local/hadoop/sbin/start-all.sh
1.配置国内yum源
[root@master ~]# vim /etc/yum.repos.d/CentOS-Base.repo
# CentOS-Base.repo
[base]
name=CentOS-Base
baseurl=http://mirrors.163.com/centos/7/os/$basearch/
gpgcheck=1
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
#released updates
[updates]
name=CentOS-7 - Updates - 163.com
#mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=updates
baseurl=http://mirrors.163.com/centos/7/updates/$basearch/
gpgcheck=1
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
#additional packages that may be useful
[extras]
name=CentOS-7 - Extras - 163.com
#mirrorlist=http://mirrorlist.centos.org/?release=7&arch=$basearch&repo=extras
baseurl=http://mirrors.163.com/centos/7/extras/$basearch/
gpgcheck=1
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
#additional packages that extend functionality of existing packages
[centosplus]
name=CentOS-7 - Plus - 163.com
baseurl=http://mirrors.163.com/centos/7/centosplus/$basearch/
gpgcheck=1
enabled=0
gpgkey=http://mirrors.163.com/centos/RPM-GPG-KEY-CentOS-7
[root@master ~]# yum clean all
[root@master ~]# yum makecache
[root@master ~]# yum update -y
[root@master ~]# yum install -y vim
2.配置主机名和hosts
[root@master ~]# hostnamectl set-hostname master
[root@master ~]# vim /etc/hosts
192.168.87.133 master
3.关闭防火墙和selinux
[root@master ~]# systemctl stop firewalld
[root@master ~]# systemctl disable firewalld
[root@master ~]# vim /etc/sysconfig/selinux
SELINUX=disabled
[root@master ~]# setenforce 0
4.安装jdk
[root@master ~]# tar -zxvf jdk-9.0.4_linux-x64_bin.tar.gz
[root@master ~]# cp -r jdk-9.0.4/ /usr/local/java
[root@master ~]# vim /etc/profile.d/java.sh
export JAVA_HOME=/usr/local/java
export JRE_HOME=$JAVA_HOME/jre
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
export PATH=.:${JAVA_HOME}/bin:$PATH
[root@master ~]# source /etc/profile.d/java.sh
[root@master ~]# java -version
5.安装hadoop
[root@master ~]# groupadd hadoop
[root@master ~]# useradd -g hadoop -m -d /home/hadoop hadoop
[root@master ~]# echo "password" | passwd --stdin hadoop
[root@master ~]# echo "hadoop ALL = (root) NOPASSWD:ALL" | sudo tee /etc/sudoers.d/hadoop
[root@master ~]# chmod 0440 /etc/sudoers.d/hadoop
[root@master ~]# tar -zxvf hadoop-3.0.0.tar.gz -C /usr/local/
[root@master ~]# ln -sv /usr/local/hadoop-3.0.0/ /usr/local/hadoop
[root@master ~]# chown -R hadoop.hadoop /usr/local/hadoop
[root@master ~]# vim /etc/profile.d/hadoop.sh
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib:$HADOOP_COMMON_LIB_NATIVE_DIR"
export PATH=.:${JAVA_HOME}/bin:${HADOOP_HOME}/bin:$PATH
[root@master ~]# . /etc/profile.d/hadoop.sh
[root@master ~]# hadoop version
6.创建一些目录
[root@master ~]# mkdir -p /data/dadoop/{tmp,var,dfs}
[root@master ~]# mkdir -p /data/dadoop/dfs/{name,data}
[root@master ~]# chown -R hadoop.hadoop /data/dadoop
7.配置hadoop
[root@master ~]# cd /usr/local/hadoop/etc/hadoop
[root@master ~]# su hadoop
[hadoop@master ~]$ ssh-keygen -t rsa -P ''
[hadoop@master ~]$ ssh-copy-id -i .ssh/id_rsa.pub hadoop@master
[hadoop@master hadoop]$ vim core-site.xml
<configuration>
<property>
<name>hadoop.tmp.dir</name>
<value>/data/dadoop/tmp</value>
<description>Abase for other temporary directories.</description>
</property>
<property>
<name>fs.default.name</name>
<value>hdfs://master:9000</value>
</property>
</configuration>
[hadoop@master hadoop]$ vim hdfs-site.xml
<configuration>
<property>
<name>dfs.name.dir</name>
<value>file:///data/dadoop/dfs/name</value>
<description>Path on the local filesystem where theNameNode stores the namespace.</description>
</property>
<property>
<name>dfs.data.dir</name>
<value>/data/dadoop/dfs/data</value>
<description>Comma separated list of paths on the localfilesystem of a DataNode where it should store its blocks.</description>
</property>
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<property>
<name>dfs.permissions</name>
<value>false</value>
<description>need not permissions</description>
</property>
</configuration>
[hadoop@master hadoop]$ vim mapred-site.xml
<configuration>
<property>
<name>mapred.job.tracker</name>
<value>master:9001</value>
</property>
<property>
<name>mapred.local.dir</name>
<value>/data/dadoop/var</value>
</property>
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
</configuration>
[hadoop@master hadoop]$ vim yarn-site.xml
<configuration>
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
<property>
<name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
<value>org.apache.hadoop.mapred.ShuffleHandler</value>
</property>
<property>
<name>yarn.resourcemanager.hostname</name>
<value>master</value>
</property>
<property>
<name>yarn.resourcemanager.scheduler.address</name>
<value>master:8030</value>
</property>
<property>
<name>yarn.resourcemanager.resource-tracker.address</name>
<value>master:8031</value>
</property>
<property>
<name>yarn.resourcemanager.address</name>
<value>master:8032</value>
</property>
<property>
<name>yarn.resourcemanager.admin.address</name>
<value>master:8033</value>
</property>
<property>
<name>yarn.resourcemanager.webapp.address</name>
<value>master:8088</value>
</property>
</configuration>
8.启动hadoop
[hadoop@master ~]$ hdfs namenode -format
[hadoop@master ~]$ /usr/local/hadoop/sbin/start-all.sh