一.Hadoop安装初始化工作
版本:
hadoop:2.7.1
hbase:1.3.1
hive:2.3.0
kylin:2.0.0
jdk:1.8.0
1.新建用户和组
> groupadd hadoop
> useradd -d /home/hadoop -g hadoop -m -s /bin/bash hadoop
> passwd hadoop
|
2.为该用户设置ssh免密登陆
> rm ~/.ssh/*
> cd ~/.ssh
> ssh-keygen -t rsa -P ""
> cat id_rsa.pub >> authorized_keys
> chmod 644 authorized_keys
> chmod 700 ~/.ssh/
> ssh localhost
#本机免密完成 下面是从本机访问其他机器
scp ~/.ssh/id_rsa.pub hadoop@hadoop-03:~/
#在hadoop-03机器上执行
cat ~/id_rsa.pub >> ~/.ssh/authorized_keys
|
3.安装jdk,配置环境变量
> vi /etc/profile
#set java environment
JAVA_HOME=/opt/jdk1.8.0_144
JRE_HOME=/opt/jdk1.8.0_144/jre
CLASS_PATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar:$JRE_HOME/lib
PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export JAVA_HOME JRE_HOME CLASS_PATH PATH
> source /etc/profile
|
二.Hadoop安装
> su hadoop
|
1.解压hadoop
> tar zxvf hadoop-2.7.1.tar.gz -C /opt/hadoop
|
2.设置环境变量
> vi ~/.bash_profile
# set hadoop path
export HADOOP_HOME=/opt/hadoop/hadoop-2.7.1
export PATH=$PATH:$HADOOP_HOME/bin
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib"
> source ~/.bash_profile
|
3.创建目录
> mkdir /opt/hadoop/hadoop-2.7.1/tmp
> mkdir /opt/hadoop/hadoop-2.7.1/hdfs
> mkdir /opt/hadoop/hadoop-2.7.1/hdfs/data
> mkdir /opt/hadoop/hadoop-2.7.1/hdfs/name
|
4.修改配置文件
a.core-site.xml
<property>
<name>fs.default.name</name>
<value>hdfs://hadoop-02:9000</value>
<description>HDFS的URI,文件系统://namenode标识:端口号</description>
</property>
<property>
<name>hadoop.tmp.dir</name>
<value>/opt/hadoop/hadoop-
2.7
.
|