将hbase中的内容写入到hive中
1、启动hive时遇到的问题
$ hive
Logging initialized using configuration in jar:file:/usr/local/hive/lib/hive-common-1.2.1.jar!/hive-log4j.properties
Exception in thread "main" java.lang.RuntimeException: java.lang.RuntimeException: Unable to instantiate org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient
原因:没有正常启动 Hive 的 Metastore Server 服务进程。解决方法是手动在后台启动:
nohup bin/hive --service metastore &
2、hive相关设置
-- NOTE: Hive comments use "--", not "//" — the original "//" lines are not
-- valid HiveQL and would fail when pasted into the CLI or run via -f.

-- Enable dynamic partitioning.
set hive.exec.dynamic.partition=true;
-- Allow all partition columns to be dynamic ("strict" requires at least one
-- static partition value).
set hive.exec.dynamic.partition.mode=nonstrict;
-- Disable automatic conversion of common joins into map joins.
set hive.auto.convert.join=false;
-- Max dynamic partitions a single mapper/reducer may create.
set hive.exec.max.dynamic.partitions.pernode=100;
-- Max dynamic partitions one statement may create in total.
set hive.exec.max.dynamic.partitions=1000;
-- Global cap on files created; tracked by a Hadoop counter, exceeding it
-- fails the job.
set hive.exec.max.created.files=100000;
3、创建hb_user外部表,映射到hbase的events_db:users表中
-- Define hivevar "db"; referenced below as ${db}.
set hivevar:db=events;

-- External table mapped onto the HBase table events_users via the HBase
-- storage handler. Dropping this table later removes only Hive metadata.
--
-- IMPORTANT: hbase.columns.mapping must contain NO whitespace between
-- entries — the Hive HBase integration treats spaces/newlines as part of
-- the column name, so the original multi-line mapping string was broken.
-- Entries map positionally to the Hive columns; ":key" binds the HBase
-- row key to user_id.
create external table ${db}.hb_user(
  user_id string,
  birth_year int,
  gender string,
  locale string,
  location string,
  time_zone string,
  joined_at string
)
stored by 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
with serdeproperties(
  'hbase.columns.mapping'=':key,profile:birthyear,profile:gender,region:locale,region:location,region:timezone,registration:joinedAt'
)
tblproperties('hbase.table.name'='events_users');
4、创建内部表user,将hb_user外部表的数据以orc格式存储到内部表中,提高查询效率
-- Materialize the HBase-backed table as ORC for faster scans.
-- "user" is a reserved keyword in Hive 1.2+ (HIVE-6617; this setup runs
-- hive-common-1.2.1), so the name must be backquoted to parse with the
-- default hive.support.sql11.reserved.keywords=true.
-- Columns are listed explicitly instead of "select *" so the copy does not
-- silently change if the source mapping is edited.
create table ${db}.`user`
stored as orc as
select
  user_id,
  birth_year,
  gender,
  locale,
  location,
  time_zone,
  joined_at
from ${db}.hb_user;
5、删除外部表
-- Safe cleanup: the table is EXTERNAL, so this removes only Hive metadata;
-- the underlying HBase table events_users is left intact.
drop table if exists ${db}.hb_user;
查看硬盘使用情况
# Filesystem-level free/used space, human-readable sizes.
df -h
# Per-directory usage one level deep, staying on this filesystem (-x skips
# other mount points).
du -h -x --max-depth=1
将mongodb中的内容写到hive中
1、依赖包
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongodb-driver</artifactId>
<version>3.12.7</version>
</dependency>
<dependency>
<groupId>org.mongodb.mongo-hadoop</groupId>
<artifactId>mongo-hadoop-core</artifactId>
<version>2.0.2</version>
</dependency>
<dependency>
<groupId>org.mongodb.mongo-hadoop</groupId>
<artifactId>mongo-hadoop-hive</artifactId>
<version>2.0.2</version>
</dependency>
2、将上述依赖的 jar 包从本地 maven 仓库复制到 hive 根目录下的 lib 目录下
本地仓库地址可以在 maven 的 settings.xml 文件中查看
3、创建外部表
-- External table backed by the MongoDB collection kgcdsj.train via the
-- mongo-hadoop storage handler.
-- FIX: Hive requires "with serdeproperties(...)" — the original
-- "with properties(...)" is not valid HiveQL and fails to parse.
-- mongo.columns.mapping maps each Hive column to its Mongo field name;
-- kept as compact single-line JSON.
create external table ${db}.mg_train(
  user_id string,
  event_id string,
  invited string,
  time_stamp string,
  interested string
)
stored by 'com.mongodb.hadoop.hive.MongoStorageHandler'
with serdeproperties(
  'mongo.columns.mapping'='{"user_id":"user","event_id":"event","invited":"invited","time_stamp":"timestamp","interested":"interested"}'
)
-- NOTE(review): credentials are embedded in plain text in mongo.uri; fine
-- for classroom notes, but do not commit real credentials like this.
tblproperties('mongo.uri'='mongodb://kgcuser:123456@192.168.232.211:27017/kgcdsj.train');
4、创建内部表
-- Materialize the Mongo-backed table as ORC for faster local queries.
-- Columns listed explicitly instead of "select *" so a later change to the
-- external mapping cannot silently alter this table's schema.
create table ${db}.mgtrain
stored as orc as
select
  user_id,
  event_id,
  invited,
  time_stamp,
  interested
from ${db}.mg_train;