1、设置虚拟机网络
vi /etc/sysconfig/network-scripts/ifcfg-eth0
ONBOOT从NO改成yes
2、安装java
tar -zxvf jdk-8u271-linux-x64.tar.gz
vi /etc/profile
# set java environment（注意：必须加 # 作为注释，否则 shell 会把这一行当作命令执行而报错）
JAVA_HOME=/home/jdk1.8.0_271
JRE_HOME=/home/jdk1.8.0_271/jre
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export JAVA_HOME JRE_HOME CLASS_PATH PATH
source /etc/profile
3、安装Hadoop
tar -zxvf hadoop-3.3.0.tar.gz
vi /etc/profile
export HADOOP_HOME=/home/hadoop-3.3.0
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
最终环境变量
# set java environment（同样必须加 # 注释前缀）
JAVA_HOME=/home/jdk1.8.0_271
JRE_HOME=/home/jdk1.8.0_271/jre
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
export HADOOP_HOME=/home/hadoop-3.3.0
export HBASE_HOME=/home/hbase-2.4.0
export JAVA_HOME JRE_HOME CLASS_PATH PATH
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HBASE_HOME/bin:$HBASE_HOME/sbin
cd /home/hadoop-3.3.0/etc/hadoop（这是配置文件所在目录，先进入该目录再分别编辑各配置文件）
vi core-site.xml
<property>
<name>hadoop.tmp.dir</name>
<value>/home/hadoop-3.3.0/tmp</value>
<description>Abase for other temporary directories.</description>
</property>
<property>
<name>fs.defaultFS</name><!-- Hadoop 3.x 中 fs.default.name 已废弃，推荐使用 fs.defaultFS -->
<value>hdfs://192.168.110.129:9000</value>
</property>
vi hadoop-env.sh
将export JAVA_HOME=${JAVA_HOME}
修改为:export JAVA_HOME=/home/jdk1.8.0_271
vi hdfs-site.xml
<property>
<name>dfs.namenode.name.dir</name><!-- dfs.name.dir 为旧属性名，Hadoop 3.x 推荐 dfs.namenode.name.dir -->
<value>/home/hadoop-3.3.0/dfs/name</value>
<description>Path on the local filesystem where theNameNode stores the namespace and transactions logs persistently.</description>
</property>
<property>
<name>dfs.datanode.data.dir</name><!-- dfs.data.dir 为旧属性名，Hadoop 3.x 推荐 dfs.datanode.data.dir -->
<value>/home/hadoop-3.3.0/dfs/data</value>
<description>Comma separated list of paths on the localfilesystem of a DataNode where it should store its blocks.</description>
</property>
<property>
<name>dfs.replication</name>
<value>2</value>
</property>
<property>
<name>dfs.permissions.enabled</name><!-- dfs.permissions 为旧属性名，Hadoop 3.x 推荐 dfs.permissions.enabled -->
<value>false</value>
<description>need not permissions</description>
</property>
注意：Hadoop 3.x 已自带 mapred-site.xml，无需执行 cp mapred-site.xml.template mapred-site.xml（该模板文件仅存在于 Hadoop 1.x/2.x）
vi mapred-site.xml
<propert