1: 创建 Hive 与 HBase 的映射表（Hive 表存储于 HBase）
-- ORDER is a reserved word in HiveQL, so the database name must be back-quoted.
USE `order`;

-- Hive table backed by an HBase table of the same name.
-- First column maps to the HBase row key (stored as binary, "#b");
-- all remaining columns map to qualifiers in the `info` column family.
CREATE TABLE IF NOT EXISTS tmp_member_info_new_mysql (
    memberkey string,  -- HBase row key: lpad(uid,10,'0') + '_' + memberid (see load step below)
    memberid  string,
    uid       string,
    email     string,
    fullname  string,
    sex       string,
    birthday  string,
    province  string,
    area      string,
    edulevel  string,
    unitname  string,
    logdata   string,  -- presumably a yyyyMMdd log/load date — TODO confirm with upstream
    domain    string
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
    -- The mapping must be one whitespace-free token: the HBase SerDe does not
    -- trim, so embedded newlines/indentation would corrupt the qualifier names.
    "hbase.columns.mapping" = ":key#b,info:memberid,info:uid,info:email,info:fullname,info:sex,info:birthday,info:province,info:area,info:edulevel,info:unitname,info:logdata,info:domain"
)
TBLPROPERTIES ("hbase.table.name" = "tmp_member_info_new_mysql");
2: 导入数据到 Hive 和 HBase：
rowkey = lpad(uid,10,'0') + '_' + memberid（即 uid 在前、memberid 在后，与下方 SQL 的 concat_ws 一致）
-- Load the source Hive table into the HBase-backed table.
-- Row key = zero-padded uid + '_' + memberid; fixed-width padding keeps
-- HBase row keys in numeric uid order despite lexicographic sorting.
INSERT OVERWRITE TABLE tmp_member_info_new_mysql
SELECT
    concat_ws('_', lpad(uid, 10, '0'), memberid) AS memberkey,
    memberid, uid, email, fullname, sex,
    birthday, province, area, edulevel, unitname, logdata, domain
FROM `order`.tmp_member_info_new;  -- ORDER is reserved: back-quote the db name
3: 导出到对应的 HDFS 文件系统目录下（INSERT OVERWRITE DIRECTORY 会自动创建目标目录；如需预建，按实际带日期后缀的目录执行 hadoop fs -mkdir，例如 /user/cdel/tmp_member_info_new_mysql_20150420）
-- Dump rows dated on/before 2015-04-20 to HDFS.
-- Output uses Hive's default field delimiter \001 (matches the Sqoop step),
-- and NULL is written as the literal string \N.
INSERT OVERWRITE DIRECTORY '/user/cdel/tmp_member_info_new_mysql_20150420'
SELECT
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname, me.logdata, me.domain
FROM `order`.tmp_member_info_new_mysql me  -- ORDER is reserved: back-quote the db name
WHERE me.logdata <= '20150420';

-- Dump rows dated on/after 2015-04-21 (disjoint from the range above).
INSERT OVERWRITE DIRECTORY '/user/cdel/tmp_member_info_new_mysql_20150421'
SELECT
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname, me.logdata, me.domain
FROM `order`.tmp_member_info_new_mysql me
WHERE me.logdata >= '20150421';
4: 使用 Sqoop 导出到 MySQL 中（注意步骤编号：前一步已是 3）：
# Export the HDFS dumps into MySQL table member_info_new, one job per directory.
# NOTE(review): a plaintext --password is visible in shell history and the
# process list; prefer --password-file or -P in production. Kept here so the
# documented command still runs unchanged.
# Hive writes SQL NULL as the literal string \N; the --input-null-* options
# make Sqoop map it back to NULL instead of inserting the string "\N".
sqoop export --connect jdbc:mysql://10.223.0.61:3306/cdellog --username cdel --password hdcdel20141217 -m 2 \
  --table member_info_new \
  --columns "memberid,uid,email,fullname,sex,birthday,province,area,edulevel,unitname,logdata,domain" \
  --export-dir /user/cdel/tmp_member_info_new_mysql \
  --fields-terminated-by '\001' \
  --input-null-string '\\N' --input-null-non-string '\\N'

sqoop export --connect jdbc:mysql://10.223.0.61:3306/cdellog --username cdel --password hdcdel20141217 -m 2 \
  --table member_info_new \
  --columns "memberid,uid,email,fullname,sex,birthday,province,area,edulevel,unitname,logdata,domain" \
  --export-dir /user/cdel/tmp_member_info_new_mysql_20150420 \
  --fields-terminated-by '\001' \
  --input-null-string '\\N' --input-null-non-string '\\N'

sqoop export --connect jdbc:mysql://10.223.0.61:3306/cdellog --username cdel --password hdcdel20141217 -m 2 \
  --table member_info_new \
  --columns "memberid,uid,email,fullname,sex,birthday,province,area,edulevel,unitname,logdata,domain" \
  --export-dir /user/cdel/tmp_member_info_new_mysql_20150421 \
  --fields-terminated-by '\001' \
  --input-null-string '\\N' --input-null-non-string '\\N'
-- Deduplicate members, keeping the latest logdata per identical attribute set.
-- The original query used max(me.logdata) with no GROUP BY, which is invalid
-- HiveQL ("Expression not in GROUP BY key"); every non-aggregated column must
-- be listed. It also lacked a statement terminator.
SELECT
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname,
    max(me.logdata) AS logdata,
    me.domain
FROM `order`.tmp_member_info_new me  -- ORDER is reserved: back-quote the db name
WHERE me.uid IS NOT NULL AND me.uid != ''
GROUP BY
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname, me.domain;
-- ORDER is a reserved word in HiveQL, so the database name must be back-quoted.
USE `order`;

-- Hive table backed by an HBase table of the same name.
-- First column maps to the HBase row key (stored as binary, "#b");
-- all remaining columns map to qualifiers in the `info` column family.
CREATE TABLE IF NOT EXISTS tmp_member_info_new_mysql (
    memberkey string,  -- HBase row key: lpad(uid,10,'0') + '_' + memberid (see load step below)
    memberid  string,
    uid       string,
    email     string,
    fullname  string,
    sex       string,
    birthday  string,
    province  string,
    area      string,
    edulevel  string,
    unitname  string,
    logdata   string,  -- presumably a yyyyMMdd log/load date — TODO confirm with upstream
    domain    string
)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES (
    -- The mapping must be one whitespace-free token: the HBase SerDe does not
    -- trim, so embedded newlines/indentation would corrupt the qualifier names.
    "hbase.columns.mapping" = ":key#b,info:memberid,info:uid,info:email,info:fullname,info:sex,info:birthday,info:province,info:area,info:edulevel,info:unitname,info:logdata,info:domain"
)
TBLPROPERTIES ("hbase.table.name" = "tmp_member_info_new_mysql");
2: 导入数据到 Hive 和 HBase：
rowkey = lpad(uid,10,'0') + '_' + memberid（即 uid 在前、memberid 在后，与下方 SQL 的 concat_ws 一致）
-- Load the source Hive table into the HBase-backed table.
-- Row key = zero-padded uid + '_' + memberid; fixed-width padding keeps
-- HBase row keys in numeric uid order despite lexicographic sorting.
INSERT OVERWRITE TABLE tmp_member_info_new_mysql
SELECT
    concat_ws('_', lpad(uid, 10, '0'), memberid) AS memberkey,
    memberid, uid, email, fullname, sex,
    birthday, province, area, edulevel, unitname, logdata, domain
FROM `order`.tmp_member_info_new;  -- ORDER is reserved: back-quote the db name
3: 导出到对应的 HDFS 文件系统目录下（INSERT OVERWRITE DIRECTORY 会自动创建目标目录；如需预建，按实际带日期后缀的目录执行 hadoop fs -mkdir，例如 /user/cdel/tmp_member_info_new_mysql_20150420）
-- Dump rows dated on/before 2015-04-20 to HDFS.
-- Output uses Hive's default field delimiter \001 (matches the Sqoop step),
-- and NULL is written as the literal string \N.
INSERT OVERWRITE DIRECTORY '/user/cdel/tmp_member_info_new_mysql_20150420'
SELECT
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname, me.logdata, me.domain
FROM `order`.tmp_member_info_new_mysql me  -- ORDER is reserved: back-quote the db name
WHERE me.logdata <= '20150420';

-- Dump rows dated on/after 2015-04-21 (disjoint from the range above).
INSERT OVERWRITE DIRECTORY '/user/cdel/tmp_member_info_new_mysql_20150421'
SELECT
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname, me.logdata, me.domain
FROM `order`.tmp_member_info_new_mysql me
WHERE me.logdata >= '20150421';
4: 使用 Sqoop 导出到 MySQL 中（注意步骤编号：前一步已是 3）：
# Export the HDFS dumps into MySQL table member_info_new, one job per directory.
# NOTE(review): a plaintext --password is visible in shell history and the
# process list; prefer --password-file or -P in production. Kept here so the
# documented command still runs unchanged.
# Hive writes SQL NULL as the literal string \N; the --input-null-* options
# make Sqoop map it back to NULL instead of inserting the string "\N".
sqoop export --connect jdbc:mysql://10.223.0.61:3306/cdellog --username cdel --password hdcdel20141217 -m 2 \
  --table member_info_new \
  --columns "memberid,uid,email,fullname,sex,birthday,province,area,edulevel,unitname,logdata,domain" \
  --export-dir /user/cdel/tmp_member_info_new_mysql \
  --fields-terminated-by '\001' \
  --input-null-string '\\N' --input-null-non-string '\\N'

sqoop export --connect jdbc:mysql://10.223.0.61:3306/cdellog --username cdel --password hdcdel20141217 -m 2 \
  --table member_info_new \
  --columns "memberid,uid,email,fullname,sex,birthday,province,area,edulevel,unitname,logdata,domain" \
  --export-dir /user/cdel/tmp_member_info_new_mysql_20150420 \
  --fields-terminated-by '\001' \
  --input-null-string '\\N' --input-null-non-string '\\N'

sqoop export --connect jdbc:mysql://10.223.0.61:3306/cdellog --username cdel --password hdcdel20141217 -m 2 \
  --table member_info_new \
  --columns "memberid,uid,email,fullname,sex,birthday,province,area,edulevel,unitname,logdata,domain" \
  --export-dir /user/cdel/tmp_member_info_new_mysql_20150421 \
  --fields-terminated-by '\001' \
  --input-null-string '\\N' --input-null-non-string '\\N'
-- Deduplicate members, keeping the latest logdata per identical attribute set.
-- The original query used max(me.logdata) with no GROUP BY, which is invalid
-- HiveQL ("Expression not in GROUP BY key"); every non-aggregated column must
-- be listed. It also lacked a statement terminator.
SELECT
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname,
    max(me.logdata) AS logdata,
    me.domain
FROM `order`.tmp_member_info_new me  -- ORDER is reserved: back-quote the db name
WHERE me.uid IS NOT NULL AND me.uid != ''
GROUP BY
    me.memberid, me.uid, me.email, me.fullname, me.sex, me.birthday,
    me.province, me.area, me.edulevel, me.unitname, me.domain;