服务器环境是 CentOS 7
1.安装mysql
# Remove the pre-installed MariaDB packages that conflict with MySQL.
rpm -qa|grep mariadb
rpm -e --nodeps mariadb-libs-5.5.56-2.el7.x86_64
rpm -e --nodeps mariadb-server-5.5.56-2.el7.x86_64
rpm -e --nodeps mariadb-5.5.56-2.el7.x86_64
# Unpack the MySQL 5.7 RPM bundle and install in dependency order.
tar -xvf mysql-5.7.24-1.el7.x86_64.rpm-bundle.tar
rpm -ivh mysql-community-common-5.7.24-1.el7.x86_64.rpm
rpm -ivh mysql-community-libs-5.7.24-1.el7.x86_64.rpm
rpm -ivh mysql-community-client-5.7.24-1.el7.x86_64.rpm
rpm -ivh mysql-community-server-5.7.24-1.el7.x86_64.rpm
# Start MySQL and enable it at boot.
# (The original repeated these two lines; duplicates removed.)
service mysqld start
chkconfig mysqld on
# Stop and disable the firewall so remote clients can reach the services.
# NOTE(review): acceptable for a lab box; open specific ports in production.
systemctl stop firewalld.service
systemctl disable firewalld.service
# MySQL 5.7 writes a temporary root password to the log on first start.
grep password /var/log/mysqld.log
# Log in with the temporary password found above (user made explicit).
mysql -uroot -p
重置密码:
-- Replace the temporary root password with a permanent one.
set password = password("3edcVFR$");
use mysql;
-- Allow root to connect from any host ('%') so Hive/remote tools can reach
-- MySQL. NOTE(review): this is a significant security relaxation — fine for a
-- lab machine, not for production.
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY '3edcVFR$' WITH GRANT OPTION;
flush privileges;
2.安装hadoop和hive
# Unpack Hive 2.3.4 into /usr/local and shorten the directory name to "hive"
# (the path /usr/local/hive is referenced by all later configuration).
tar -zxvf ./apache-hive-2.3.4-bin.tar.gz -C /usr/local/
cd /usr/local/
mv apache-hive-2.3.4-bin hive
# Append the environment variables shown below to /etc/profile.
vi /etc/profile
# /usr/local/hadoop is the Hadoop install directory; put its sbin and bin on
# PATH. (The original line listed sbin twice and omitted bin; hive/bin is
# added once below via HIVE_HOME.)
export PATH=$PATH:/usr/local/hadoop/sbin:/usr/local/hadoop/bin
export HIVE_HOME=/usr/local/hive
export PATH=$PATH:$HIVE_HOME/bin
# Reload the profile so the new variables take effect in this shell.
source /etc/profile
cd /usr/local/hive/conf
# Keep the shipped defaults around as hive-default.xml for reference;
# hive-site.xml is created fresh below with only the metastore overrides.
mv hive-default.xml.template hive-default.xml
vim hive-site.xml
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <!-- Hive metastore backed by the local MySQL instance.
       Note: a literal '&' is illegal in XML text and must be written
       as '&amp;' — the original value would fail to parse. -->
  <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true&amp;useSSL=false</value>
    <description>JDBC connect string for a JDBC metastore</description>
  </property>
  <!-- Connector/J 5.x driver class (matches the jar copied into hive/lib). -->
  <property>
    <name>javax.jdo.option.ConnectionDriverName</name>
    <value>com.mysql.jdbc.Driver</value>
    <description>Driver class name for a JDBC metastore</description>
  </property>
  <property>
    <name>javax.jdo.option.ConnectionUserName</name>
    <value>root</value>
    <description>username to use against metastore database</description>
  </property>
  <!-- NOTE(review): plaintext DB password in a config file — restrict the
       file's permissions, or use a dedicated low-privilege metastore user. -->
  <property>
    <name>javax.jdo.option.ConnectionPassword</name>
    <value>3edcVFR$</value>
    <description>password to use against metastore database</description>
  </property>
</configuration>
# Activate the environment template and point Hive at Hadoop and its own dirs.
mv hive-env.sh.template hive-env.sh
vim hive-env.sh
# Hadoop installation directory:
export HADOOP_HOME=/usr/local/hadoop
# Hive configuration directory:
export HIVE_CONF_DIR=/usr/local/hive/conf
# Hive auxiliary-jars (lib) directory:
export HIVE_AUX_JARS_PATH=/usr/local/hive/lib
安装java
# Unpack JDK 8 and move it to the location referenced by JAVA_HOME later.
tar -zxvf jdk-8u191-linux-x64.tar.gz
mv jdk1.8.0_191/ /usr/local/java
安装hadoop
# Unpack Hadoop 2.9.1 to the location referenced by HADOOP_HOME later.
tar -xzvf hadoop-2.9.1.tar.gz
mv hadoop-2.9.1 /usr/local/hadoop
ssh免密登录
# First connection creates ~/.ssh and records the host key.
ssh localhost
exit
cd ~/.ssh/
# Generate the key pair non-interactively (empty passphrase, default path)
# so the Hadoop sbin scripts can ssh to localhost without prompting.
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ./id_rsa.pub >> ./authorized_keys
# sshd refuses authorized_keys with group/world-writable permissions —
# without this, passwordless login silently fails on CentOS.
chmod 600 ./authorized_keys
vim /etc/profile
配置文件:
export JAVA_HOME=/usr/local/java
export HADOOP_HOME=/usr/local/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
# Point java.library.path at lib/native, consistent with hadoop-env.sh;
# the original used $HADOOP_HOME/lib, which is not where the native libs live.
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
# NOTE(review): '.' at the front of PATH is a security risk — consider removing.
export PATH=.:${JAVA_HOME}/bin:${HADOOP_HOME}/bin:$PATH
配置hadoop
# All Hadoop configuration files edited below live in this directory.
cd /usr/local/hadoop/etc/hadoop/
vim core-site.xml
<configuration>
  <!-- Base directory for Hadoop's working files; the dfs name/data dirs in
       hdfs-site.xml live underneath it. -->
  <property>
    <name>hadoop.tmp.dir</name>
    <value>file:/usr/local/hadoop/tmp</value>
    <description>Abase for other temporary directories.</description>
  </property>
  <!-- fs.defaultFS replaces the deprecated fs.default.name key.
       NOTE(review): "master" must resolve (e.g. via /etc/hosts); the rest of
       this guide uses localhost/192.168.31.27 — confirm the intended host. -->
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://master:9000</value>
  </property>
  <!-- Allow root to impersonate other users from any host/group, which
       HiveServer2 needs when submitting jobs on behalf of clients. -->
  <property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
  </property>
  <property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
  </property>
</configuration>
vi hdfs-site.xml
<configuration>
<!-- Single-node setup: keep only one replica of each block. -->
<property>
<name>dfs.replication</name>
<value>1</value>
</property>
<!-- NameNode metadata and DataNode block storage, both under the
     hadoop.tmp.dir tree configured in core-site.xml. -->
<property>
<name>dfs.namenode.name.dir</name>
<value>file:/usr/local/hadoop/tmp/dfs/name</value>
</property>
<property>
<name>dfs.datanode.data.dir</name>
<value>file:/usr/local/hadoop/tmp/dfs/data</value>
</property>
</configuration>
cp mapred-site.xml.template mapred-site.xml
vim mapred-site.xml
<!-- The <configuration> root element was missing in the original; Hadoop's
     config loader requires it, matching the other *-site.xml files. -->
<configuration>
  <!-- Local scratch space for MapReduce (directory created below).
       NOTE(review): mapred.local.dir is a legacy MRv1 key — confirm it is
       still wanted alongside YARN. -->
  <property>
    <name>mapred.local.dir</name>
    <value>/usr/local/hadoop/tmp/mapred</value>
  </property>
  <!-- Run MapReduce jobs on YARN. -->
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
vim yarn-site.xml
<configuration>
<!-- Auxiliary shuffle service NodeManagers must run so MapReduce
     jobs can transfer map output to reducers. -->
<property>
<name>yarn.nodemanager.aux-services</name>
<value>mapreduce_shuffle</value>
</property>
</configuration>
cd /usr/local/hadoop/etc/hadoop/
vim hadoop-env.sh
# JAVA_HOME must be set explicitly in hadoop-env.sh for the daemons
# started by the sbin scripts.
export JAVA_HOME=/usr/local/java
export HADOOP_COMMON_LIB_NATIVE_DIR=${HADOOP_HOME}/lib/native
export HADOOP_OPTS="-Djava.library.path=${HADOOP_HOME}/lib/native/"
# Create the local directories referenced by the config files above.
mkdir -p /usr/local/hadoop/tmp/mapred
mkdir -p /usr/local/hadoop/tmp/dfs/data
mkdir -p /usr/local/hadoop/tmp/dfs/name
# Format HDFS (first run only — reformatting wipes the namespace).
# The original ran "./hadoop namenode -format" from etc/hadoop, where no such
# binary exists; cd to the install root first and use bin/hdfs.
cd /usr/local/hadoop/
./bin/hdfs namenode -format
# Start HDFS and YARN.
./sbin/start-dfs.sh
./sbin/start-yarn.sh
#./sbin/mr-jobhistory-daemon.sh start historyserver
# To shut the cluster down later:
./sbin/stop-dfs.sh
./sbin/stop-yarn.sh
#./sbin/mr-jobhistory-daemon.sh stop historyserver
# Create Hive's warehouse, scratch, and log directories in HDFS,
# then open their permissions so any user can write to them.
hadoop fs -mkdir -p /user/hive/warehouse /user/hive/tmp /user/hive/log
hadoop fs -chmod -R 777 /user/hive/warehouse /user/hive/tmp /user/hive/log
将 MySQL JDBC 驱动 jar(mysql-connector-java-*.jar)复制到
/usr/local/hive/lib/
cd /usr/local/hive/bin
# Initialize the metastore schema in MySQL using the JDBC settings from
# hive-site.xml; run once before starting Hive for the first time.
schematool -initSchema -dbType mysql
请先确保 hadoop 已正常启动!
hive
-- Smoke test: create a database and table, insert a row.
show databases;
create database hds;
use hds;
create table test(id int ,name varchar(100));
insert into test values(1,'zz');
启动远程访问:
# Start the metastore and HiveServer2 services in the background.
hive --service metastore &
hive --service hiveserver2 &
# Verify HiveServer2 is listening on port 10000 (the port used by the
# JDBC URL below).
netstat -antpl|grep 10000
3.远程访问URL:jdbc:hive2://192.168.31.27:10000/hds
4.web访问地址:http://192.168.31.27:8088