上传hadoop
sudo mkdir /usr/local/hadoop 创建文件夹放hadoop
sudo tar -zxf /home/mysql_node/下载/hadoop-2.7.7.tar.gz -C /usr/local/hadoop 解压hadoop
cd /usr/local/hadoop 进入解压目录
sudo mv hadoop-2.7.7/ hadoop 重命名
sudo chown -R hadoop:hadoop ./hadoop 修改文件权限
vim ~/.bashrc 配置环境变量文件(在文件末尾追加以下内容)
# Hadoop environment variables — append this block to ~/.bashrc, then run
# `source ~/.bashrc` so the current shell picks them up.
# HADOOP_HOME points at the extracted (and renamed) Hadoop 2.7.7 directory.
export HADOOP_HOME=/usr/local/hadoop/hadoop
export HADOOP_INSTALL=$HADOOP_HOME
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export HADOOP_COMMON_HOME=$HADOOP_HOME
export HADOOP_HDFS_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
# Directory holding Hadoop's native (JNI) libraries.
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
# Put bin/ (hadoop, hdfs, ...) and sbin/ (start-dfs.sh, ...) on PATH.
export PATH=$PATH:$HADOOP_HOME/sbin:$HADOOP_HOME/bin
source ~/.bashrc 使变量设置生效
cd /usr/local/hadoop/hadoop 到该目录下
./bin/hadoop version 检查 Hadoop 是否可用(成功则会显示 Hadoop 版本信息)
配置hadoop文件实现伪分布式
host文件配置
hostname 查看主机名(localhost.localdomain)
sudo vim /etc/hosts host文件配置
127.0.0.1 localhost.localdomain
IP地址 hadoop
cd /usr/local/hadoop/hadoop/etc/hadoop/ 到该目录下
$ vim core-site.xml
<!-- core-site.xml: pseudo-distributed configuration. -->
<!-- Base directory Hadoop uses for its temporary/working files. -->
<property>
<name>hadoop.tmp.dir</name>
<value>file:/usr/local/hadoop/hadoop/tmp</value>
<description>Abase for other temporary directories.</description>
</property>
<!-- Default filesystem URI: host "hadoop" must resolve via /etc/hosts
     (configured above); the NameNode listens on port 9000. -->
<property>
<name>fs.defaultFS</name>
<value>hdfs://hadoop:9000</value>
</property>
$ vim hdfs-site.xml
<!-- hdfs-site.xml: single-node (pseudo-distributed) setup,
     so each block is stored with only one replica. -->
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<!-- Local directory for NameNode metadata storage. -->
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/local/hadoop/hadoop/tmp/dfs/name</value>
</property>
<!-- Local directory for DataNode block storage. -->
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/local/hadoop/hadoop/tmp/dfs/data</value>
</property>
查看进程是否启动 jps
[hadoop@localhost hadoop]$ cd ../
[hadoop@localhost etc]$ cd ../
[hadoop@localhost hadoop]$ ./bin/hdfs namenode -format
[hadoop@localhost hadoop]$ /usr/local/hadoop/hadoop/sbin/start-dfs.sh
停止命令 $ /usr/local/hadoop/hadoop/sbin/stop-dfs.sh
配置yarn vim mapred-site.xml
[hadoop@localhost hadoop]$ cd /usr/local/hadoop/hadoop/etc/hadoop/
[hadoop@localhost hadoop]$ cp mapred-site.xml.template mapred-site.xml
[hadoop@localhost hadoop]$ vim mapred-site.xml
<!-- mapred-site.xml: use YARN as the MapReduce execution framework. -->
<property>
<name>mapreduce.framework.name</name>
<value>yarn</value>
</property>
修改配置文件yarn-site.xml
<!-- yarn-site.xml: enable the shuffle auxiliary service on the NodeManager,
     which MapReduce jobs need to move map output to reducers. -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
查看jdk版本
java -version
上传jdk
解压jdk
[root@bogon hadoop]# sudo mkdir /usr/local/java
[root@bogon hadoop]# sudo tar -zxf /home/mysql_node/下载/jdk-8u241-linux-x64.tar.gz -C /usr/local/java
重命名
[root@bogon hadoop]# cd /usr/local/java
[root@bogon java]# sudo mv jdk1.8.0_241/ jdk1.8
重新配置jdk
[root@bogon jdk1.8]# vim ~/.bashrc
export JAVA_HOME=/usr/local/java/jdk1.8
export PATH=$PATH:$JAVA_HOME/bin
资源生效
[root@bogon jdk1.8]# source ~/.bashrc
创建hadoop用户组
登录root用户并创建一个hadoop用户。
#useradd -m hadoop -s /bin/bash
修改hadoop用户密码,visudo 命令就是给 hadoop 用户增加管理员权限(先按一下键盘上的 ESC 键,然后输入 :98,找到 root ALL=(ALL) ALL,然后在这行下面增加一行内容:hadoop ALL=(ALL) ALL再保存(先按一下键盘上的 ESC 键,然后输入:wq)。这一步做完以后FinalShell终端切换hadoop用户。
[root@localhost hadoop]# passwd hadoop
Changing password for user hadoop.
New password:
BAD PASSWORD: it is too simplistic/systematic
BAD PASSWORD: is too simple
Retype new password:
passwd: all authentication tokens updated successfully.
[root@localhost hadoop]# visudo
ssh无密登录
cd ~/.ssh/ 到该目录下
ssh-keygen -t rsa 三次回车
cat id_rsa.pub >> authorized_keys 加入授权
chmod 600 ./authorized_keys 修改文件权限