Compiling and Installing Hadoop on Linux

Compilation

  • Download and extract the Hadoop source
    # Different versions of the Hadoop source can be downloaded; hadoop-2.8.1 is recommended.
hadoop:root:/opt/sourcecode:>ll
total 448328
-rw-r--r--  1 root root  34523353 Dec 11 07:02 hadoop-2.8.1-src.tar.gz
hadoop:root:/opt/sourcecode:>tar -xzvf hadoop-2.8.1-src.tar.gz
hadoop:root:/opt/sourcecode:>ll hadoop-2.8.1-src
total 220
-rw-rw-r--  1 root root 15623 May 23  2017 BUILDING.txt
drwxr-xr-x  4 root root  4096 Dec 11 15:17 dev-support
drwxr-xr-x  4 root root  4096 Dec 11 16:07 hadoop-assemblies
drwxr-xr-x  4 root root  4096 Dec 11 16:07 hadoop-build-tools
drwxrwxr-x  3 root root  4096 Dec 11 16:26 hadoop-client
drwxr-xr-x 11 root root  4096 Dec 11 16:12 hadoop-common-project
drwxr-xr-x  3 root root  4096 Dec 11 16:26 hadoop-dist
drwxr-xr-x  9 root root  4096 Dec 11 16:18 hadoop-hdfs-project
drwxr-xr-x 10 root root  4096 Dec 11 16:24 hadoop-mapreduce-project
drwxr-xr-x  4 root root  4096 Dec 11 16:07 hadoop-maven-plugins
drwxr-xr-x  3 root root  4096 Dec 11 16:26 hadoop-minicluster
drwxr-xr-x  4 root root  4096 Dec 11 16:07 hadoop-project
drwxr-xr-x  3 root root  4096 Dec 11 16:07 hadoop-project-dist
drwxr-xr-x 19 root root  4096 Dec 11 16:26 hadoop-tools
drwxr-xr-x  4 root root  4096 Dec 11 16:22 hadoop-yarn-project
-rw-rw-r--  1 root root 99253 May 23  2017 LICENSE.txt
-rw-rw-r--  1 root root 15915 May 23  2017 NOTICE.txt
drwxrwxr-x  2 root root  4096 Jun  2  2017 patchprocess
-rw-rw-r--  1 root root 20477 May 28  2017 pom.xml
-rw-r--r--  1 root root  1366 May 20  2017 README.txt
-rwxrwxr-x  1 root root  1841 May 23  2017 start-build-env.sh

# Check the Hadoop build requirements:

 hadoop:root:/opt/sourcecode/hadoop-2.8.1-src:>cat BUILDING.txt 
Build instructions for Hadoop

----------------------------------------------------------------------------------
Requirements:

* Unix System
* JDK 1.7+
* Maven 3.0 or later
* Findbugs 1.3.9 (if running findbugs)
* ProtocolBuffer 2.5.0
* CMake 2.6 or newer (if compiling native code), must be 3.0 or newer on Mac
* Zlib devel (if compiling native code)
* openssl devel (if compiling native hadoop-pipes and to get the best HDFS encryption performance)
* Linux FUSE (Filesystem in Userspace) version 2.6 or above (if compiling fuse_dfs)
* Internet connection for first build (to fetch all Maven and Hadoop dependencies)

----------------------------------------------------------------------------------
  • Install Java
    # Extract the JDK to /usr/java
hadoop:root:/home/hadoop:>cd /usr/java/
hadoop:root:/usr/java:>tar -xzvf jdk-8u45-linux-x64.gz

# Fix the owner and group

 hadoop:root:/usr/java:>chown -R root:root jdk1.8.0_45
hadoop:root:/usr/java:>ll
total 169216
drwxr-xr-x 8 root root      4096 Apr 10  2015 jdk1.8.0_45
-rw-r--r-- 1 root root 173271626 Dec 11 07:02 jdk-8u45-linux-x64.gz

# Add environment variables to /etc/profile

hadoop:root:/usr/java:>vi /etc/profile
export JAVA_HOME=/usr/java/jdk1.8.0_45
export PATH=$JAVA_HOME/bin:$PATH
hadoop:root:/usr/java:>source /etc/profile

hadoop:root:/usr/java:>java -version
java version "1.8.0_45"
Java(TM) SE Runtime Environment (build 1.8.0_45-b14)
Java HotSpot(TM) 64-Bit Server VM (build 25.45-b02, mixed mode)
  • Install Maven to /opt/software and configure environment variables
hadoop:root:/opt/software:>unzip apache-maven-3.3.9-bin.zip
hadoop:root:/opt/software:>vi /etc/profile
export MAVEN_HOME=/opt/software/apache-maven-3.3.9
export MAVEN_OPTS="-Xms256m -Xmx512m"
export PATH=$MAVEN_HOME/bin:$JAVA_HOME/bin:$PATH
hadoop:root:/opt/software:>source /etc/profile
hadoop:root:/opt/software:>mvn -version
Apache Maven 3.3.9 (bb52d8502b132ec0a5a3f4c09453c07478323dc5; 2015-11-10T11:41:47-05:00)
Maven home: /opt/software/apache-maven-3.3.9
Java version: 1.8.0_45, vendor: Oracle Corporation
Java home: /usr/java/jdk1.8.0_45/jre
Default locale: en_US, platform encoding: UTF-8
OS name: "linux", version: "2.6.32-431.el6.x86_64", arch: "amd64", family: "unix"
  • Build protobuf under /opt/software, install it to /usr/local/protobuf, and configure environment variables
hadoop:root:/opt/software:>tar -xzvf protobuf-2.5.0.tar.gz
hadoop:root:/opt/software:>yum install -y gcc gcc-c++ make cmake
hadoop:root:/opt/software:>cd protobuf-2.5.0
hadoop:root:/opt/software/protobuf-2.5.0:>./configure --prefix=/usr/local/protobuf
hadoop:root:/opt/software/protobuf-2.5.0:>make && make install
hadoop:root:/opt/software/protobuf-2.5.0:>vi /etc/profile
export PROTOC_HOME=/usr/local/protobuf
export PATH=$PROTOC_HOME/bin:$FINDBUGS_HOME/bin:$MAVEN_HOME/bin:$JAVA_HOME/bin:$PATH
hadoop:root:/opt/software/protobuf-2.5.0:>source /etc/profile
hadoop:root:/opt/software/protobuf-2.5.0:>protoc --version
libprotoc 2.5.0
  • Install Findbugs to /opt/software and configure environment variables
hadoop:root:/opt/software:>unzip findbugs-1.3.9.zip
hadoop:root:/opt/software:>vi /etc/profile 
export FINDBUGS_HOME=/opt/software/findbugs-1.3.9
export PATH=$FINDBUGS_HOME/bin:$MAVEN_HOME/bin:$JAVA_HOME/bin:$PATH
hadoop:root:/opt/software:>source /etc/profile
hadoop:root:/opt/software:>findbugs -version
1.3.9
  • Other dependencies
yum install -y openssl openssl-devel svn ncurses-devel zlib-devel libtool
yum install -y snappy snappy-devel bzip2 bzip2-devel lzo lzo-devel lzop autoconf automake
  • Build (note: with a normal network connection this takes roughly 2-3 hours)
 hadoop:root:/opt/software:>cd /opt/sourcecode/hadoop-2.8.1-src
hadoop:root:/opt/sourcecode/hadoop-2.8.1-src:>mvn clean package -Pdist,native -DskipTests -Dtar
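
After the build succeeds, the binary distribution lands under hadoop-dist/target; a quick check of the output (a minimal sketch, run after the Maven build finishes without errors):

# hadoop-2.8.1.tar.gz is the distribution tarball used in the installation steps below
ll /opt/sourcecode/hadoop-2.8.1-src/hadoop-dist/target/
# confirm the native libraries were built (the -Pnative profile)
ls /opt/sourcecode/hadoop-2.8.1-src/hadoop-dist/target/hadoop-2.8.1/lib/native/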

Installation

  • Create the user that will run Hadoop and grant it sudo privileges
 hadoop:root:/opt/sourcecode/hadoop-2.8.1-src:>useradd hadoop
 hadoop:root:/opt/sourcecode/hadoop-2.8.1-src:>id hadoop
uid=500(hadoop) gid=500(hadoop) groups=500(hadoop)
hadoop:root:/opt/sourcecode/hadoop-2.8.1-src:>vi /etc/sudoers
hadoop  ALL=(root)      NOPASSWD:ALL
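
To confirm the entry took effect, a quick check as the hadoop user (a minimal sketch; editing /etc/sudoers through visudo is the safer route, since it validates the syntax before saving):

su - hadoop
sudo -l        # should report (root) NOPASSWD: ALL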
  • Install Java (same steps as in the compilation section above)

  • Extract Hadoop

 hadoop:root:/opt/sourcecode/hadoop-2.8.1-src/hadoop-dist/target:>mv /opt/sourcecode/hadoop-2.8.1-src/hadoop-dist/target/hadoop-2.8.1.tar.gz /opt/software
hadoop:root:/opt/software:>tar -xzvf hadoop-2.8.1.tar.gz
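
The tarball extracts to /opt/software/hadoop-2.8.1, while the configuration steps below use /opt/software/hadoop; a symlink plus a change of ownership to the hadoop user bridges the two (a minimal sketch, assuming the paths above):

ln -s /opt/software/hadoop-2.8.1 /opt/software/hadoop
chown -R hadoop:hadoop /opt/software/hadoop-2.8.1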
  • Configuration files
    core-site.xml
hadoop:hadoop:/opt/software/hadoop:>vi /opt/software/hadoop/etc/hadoop/core-site.xml 

<configuration>
    <property>
            <name>fs.defaultFS</name>
            <value>hdfs://192.168.137.5:9000</value>
    </property>
</configuration>

hdfs-site.xml

<configuration>
    <property>
        <name>dfs.replication</name>
        <value>1</value>
    </property>
    <property>
        <name>dfs.namenode.secondary.http-address</name>
        <value>hadoop:50090</value>
    </property>
    <property>
        <name>dfs.namenode.secondary.https-address</name>
        <value>hadoop:50091</value>
    </property>
</configuration>

hadoop-env.sh

# The java implementation to use.
export JAVA_HOME=/usr/java/jdk1.8.0_45

slaves

 hadoop:hadoop:/opt/software/hadoop/etc/hadoop:>cat slaves 
hadoop
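
The hostname hadoop used in slaves and in the secondary NameNode addresses must resolve to this machine; a minimal /etc/hosts sketch, assuming 192.168.137.5 from core-site.xml is this host's IP:

192.168.137.5   hadoop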
  • Set up SSH trust (passwordless login) for the hadoop user
 hadoop:hadoop:/home/hadoop:>ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
hadoop:hadoop:/home/hadoop:>cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
hadoop:hadoop:/home/hadoop:>chmod 0600 ~/.ssh/authorized_keys
hadoop:hadoop:/home/hadoop/.ssh:>ll
total 16
-rw------- 1 hadoop hadoop  395 Dec 14 04:46 authorized_keys
-rw------- 1 hadoop hadoop 1675 Dec 14 04:43 id_rsa
-rw-r--r-- 1 hadoop hadoop  395 Dec 14 04:43 id_rsa.pub
-rw-r--r-- 1 hadoop hadoop 1182 Dec 14 04:59 known_hosts
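
A quick check that the trust relationship works, i.e. that login asks for no password (the first connection may still prompt to confirm the host key):

ssh hadoop date
ssh localhost date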
  • Format the NameNode
hadoop:hadoop:/opt/software/hadoop:>bin/hdfs namenode -format
  • Start HDFS
 hadoop:hadoop:/opt/software/hadoop:>sbin/start-dfs.sh 
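
Once start-dfs.sh returns, the single-node cluster can be verified (a minimal sketch; 50070 is the default Hadoop 2.x NameNode web UI port):

jps                                  # expect NameNode, DataNode and SecondaryNameNode
bin/hdfs dfs -mkdir -p /user/hadoop  # try a simple HDFS operation
bin/hdfs dfs -ls /
# the NameNode web UI should be reachable at http://192.168.137.5:50070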