Hive Installation

## Install the database (on a dedicated database node)

Database node: 192.168.x.121 (hostname mysql)

yum install mariadb-server -y

## Database configuration: /etc/my.cnf

[mysqld]
datadir=/var/lib/mysql
socket=/var/lib/mysql/mysql.sock
character_set_server = utf8
symbolic-links=0
init_connect='SET NAMES utf8' 
[mysqld_safe]
log-error=/var/log/mariadb/mariadb.log
pid-file=/var/run/mariadb/mariadb.pid
!includedir /etc/my.cnf.d
[client]
default-character-set=utf8
[mysql]
default-character-set=utf8



systemctl enable mariadb && systemctl start mariadb 
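To confirm that the utf8 settings in /etc/my.cnf took effect, a quick check on the database node (a fresh MariaDB install normally lets root in without a password; otherwise add -p):

mysql -e "SHOW VARIABLES LIKE 'character_set%';"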


## Enable remote access

grant all privileges on *.* to root@'%' identified by 'xxxxx@2018' with grant option;
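A minimal sketch of applying the grant from the shell on the database node (the password below is just the placeholder from above; replace it with your own):

mysql -uroot -p <<'SQL'
GRANT ALL PRIVILEGES ON *.* TO 'root'@'%' IDENTIFIED BY 'xxxxx@2018' WITH GRANT OPTION;
FLUSH PRIVILEGES;
SQL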


## Install the Hive server on the NameNode (i.e. the master)

wget http://mirror.bit.edu.cn/apache/hive/hive-2.3.4/apache-hive-2.3.4-bin.tar.gz
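The rest of this guide assumes the tarball is unpacked to /usr/local/hive; a minimal sketch:

tar -zxvf apache-hive-2.3.4-bin.tar.gz
mv apache-hive-2.3.4-bin /usr/local/hive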

## Set environment variables

export HIVE_HOME=/usr/local/hive
export PATH=$PATH:$HIVE_HOME/bin
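To make the variables persistent across logins, they can be appended to /etc/profile (an assumption; any shell profile file works) and reloaded:

cat >> /etc/profile <<'EOF'
export HIVE_HOME=/usr/local/hive
export PATH=$PATH:$HIVE_HOME/bin
EOF
source /etc/profile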

### Configure hive-site.xml
cd /usr/local/hive/conf 

cp hive-default.xml.template hive-site.xml 

## Create the Hive directories in HDFS

[root@master hadoop]# hdfs dfs -ls /
[root@master hadoop]# hdfs dfs -mkdir -p /usr/hive/warehouse
[root@master hadoop]# hdfs dfs -chmod 777 /usr/hive/warehouse
[root@master hadoop]# hdfs dfs -ls /usr/hive
Found 1 items
drwxrwxrwx   - root supergroup          0 2019-02-14 16:37 /usr/hive/warehouse
[root@master hadoop]# hdfs dfs -mkdir -p /tmp/hive
[root@master hadoop]# hdfs dfs -chmod 777 /tmp/hive
[root@master hadoop]# hdfs dfs -ls /tmp
Found 1 items
drwxrwxrwx   - root supergroup          0 2019-02-14 16:39 /tmp/hive


## Change the temporary directories referenced in /usr/local/hive/conf/hive-site.xml

mkdir /usr/local/hive/tmp

In hive-site.xml:

Replace every occurrence of ${system:java.io.tmpdir} with /usr/local/hive/tmp.
Replace every occurrence of ${system:user.name} with root.
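Because hive-site.xml runs to several thousand lines, the replacement is easiest with sed; a sketch using the paths above:

cd /usr/local/hive/conf
sed -i 's#\${system:java\.io\.tmpdir}#/usr/local/hive/tmp#g' hive-site.xml
sed -i 's#\${system:user\.name}#root#g' hive-site.xml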


## Download the MySQL JDBC connector jar (mysql-connector-java-5.1.47.jar)

wget https://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-5.1.47.tar.gz
tar -zxvf mysql-connector-java-5.1.47.tar.gz
cp mysql-connector-java-5.1.47/mysql-connector-java-5.1.47*.jar /usr/local/hive/lib/


### Edit the metastore database connection URL in hive-site.xml

 <property>
    <name>javax.jdo.option.ConnectionURL</name>
    <!-- <value>jdbc:derby:;databaseName=metastore_db;create=true</value> -->
    <value>jdbc:mysql://192.168.xxx:3306/hive?createDatabaseIfNotExist=true&amp;useUnicode=true&amp;characterEncoding=UTF-8</value>
    <description>
      JDBC connect string for a JDBC metastore.
      To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
      For example, jdbc:postgresql://myhost/db?ssl=true for postgres database.
    </description>
  </property>
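Before initializing the schema it is worth confirming that the master can actually reach the MySQL node over the network (assumes a mysql client is installed on the master; the host matches the database node above):

mysql -h 192.168.x.121 -P 3306 -uroot -p -e "SELECT 1;"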


## Change the JDBC driver class

<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
  <description>Driver class name for a JDBC metastore</description>
</property>


## Change the username

<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>root</value>
  <description>Username to use against metastore database</description>
</property>


## Change the password

<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>xxxmmme</value>
  <description>password to use against metastore database</description>
</property>


### Search for hive.metastore.schema.verification and set its value to false

<property>
  <name>hive.metastore.schema.verification</name>
  <value>false</value>
  <description>
    Enforce metastore schema version consistency.
    True: Verify that version information stored in is compatible with one from Hive jars.  Also disable automatic
          schema migration attempt. Users are required to manually migrate schema after Hive upgrade which ensures
          proper metastore schema migration. (Default)
    False: Warn if the version information stored in metastore doesn't match with one from in Hive jars.
  </description>
</property>

### Edit the Hive environment settings (hive-env.sh)

cp hive-env.sh.template hive-env.sh 

[root@master conf]# egrep -v '#|^$' hive-env.sh
export HADOOP_HOME=/usr/local/hadoop
export HIVE_CONF_DIR=/usr/local/hive/conf
export HIVE_AUX_JARS_PATH=/usr/local/hive/lib

## Initialize the metastore schema in MySQL

[root@master bin]# schematool -initSchema -dbType mysql
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]
Metastore connection URL:        jdbc:mysql://192.168.x.121:3306/hive?createDatabaseIfNotExist=true
Metastore Connection Driver :    com.mysql.jdbc.Driver
Metastore connection User:       root
Starting metastore schema initialization to 2.3.0
Initialization script hive-schema-2.3.0.mysql.sql
Initialization script completed
schemaTool completed
[root@master bin]# 
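The recorded schema version and connection settings can be double-checked with schematool afterwards:

schematool -info -dbType mysql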

## Check in the database that initialization completed and the table structures exist

mysql -h 192.168.x.x -uroot -p -e "show databases;"
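To list the metastore tables that schematool just created (the metastore database is named hive, as in the JDBC URL):

mysql -h 192.168.x.121 -uroot -p -e "SHOW TABLES FROM hive;"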

# Metastore database tweaks for Hive
1) Run the following 5 SQL statements in the metastore database (a combined shell version follows the list):

① Change the column comments and table comments to utf8:
alter table COLUMNS_V2 modify column COMMENT varchar(256) character set utf8;
alter table TABLE_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
② Change the partition field comments:
alter table PARTITION_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
alter table PARTITION_KEYS modify column PKEY_COMMENT varchar(4000) character set utf8;
③ Change the index comments:
alter table INDEX_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
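A sketch of applying all five statements in one go from the shell against the hive metastore database:

mysql -h 192.168.x.121 -uroot -p hive <<'SQL'
alter table COLUMNS_V2 modify column COMMENT varchar(256) character set utf8;
alter table TABLE_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
alter table PARTITION_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
alter table PARTITION_KEYS modify column PKEY_COMMENT varchar(4000) character set utf8;
alter table INDEX_PARAMS modify column PARAM_VALUE varchar(4000) character set utf8;
SQL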

## Start Hive

cd /usr/local/hive/bin

[root@master bin]# ./hive
which: no hbase in (/usr/local/java/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/usr/local/hadoop/bin:/usr/local/hive/bin:/root/bin)
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/hive/lib/log4j-slf4j-impl-2.6.2.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.apache.logging.slf4j.Log4jLoggerFactory]

Logging initialized using configuration in jar:file:/usr/local/hive/lib/hive-common-2.3.4.jar!/hive-log4j2.properties Async: true
Hive-on-MR is deprecated in Hive 2 and may not be available in the future versions. Consider using a different execution engine (i.e. spark, tez) or using Hive 1.X releases.
hive> 

 show functions;
 desc function sum;
 create database sbux;
 ## Load data

 load data local inpath '/usr/local/hive/student.dat' into table sbux.student;
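The load above assumes the target table already exists; a minimal hypothetical example (the student schema below is an assumption: tab-separated id and name), run from the shell with hive -e:

hive -e "
CREATE TABLE IF NOT EXISTS sbux.student (
  id   INT,
  name STRING
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t';
LOAD DATA LOCAL INPATH '/usr/local/hive/student.dat' INTO TABLE sbux.student;
"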


 
