Sqoop import / export examples (MySQL <-> HDFS / Hive / HBase)

# Create the working database used by all Sqoop examples below.
create database test;
use test;

# EMP demo table (the classic scott/tiger schema, adapted to MySQL).
CREATE TABLE emp(
    empno    INT primary key,   # employee id
    ename    VARCHAR(50),       # employee name
    job      VARCHAR(50),       # job title
    mgr      INT,               # manager's empno (NULL for the president)
    hiredate varchar(50),       # hire date, stored as text
    sal      int,               # salary
    comm     int,               # commission, NULL when none
    deptno   int                # department id
);
# Seed the emp table with the 14 classic demo rows (single multi-row INSERT).
INSERT INTO emp (empno, ename, job, mgr, hiredate, sal, comm, deptno) VALUES
    (7369, 'SMITH',  'CLERK',     7902, '1980-12-17',  800, NULL, 20),
    (7499, 'ALLEN',  'SALESMAN',  7698, '1981-02-20', 1600,  300, 30),
    (7521, 'WARD',   'SALESMAN',  7698, '1981-02-22', 1250,  500, 30),
    (7566, 'JONES',  'MANAGER',   7839, '1981-04-02', 2975, NULL, 20),
    (7654, 'MARTIN', 'SALESMAN',  7698, '1981-09-28', 1250, 1400, 30),
    (7698, 'BLAKE',  'MANAGER',   7839, '1981-05-01', 2850, NULL, 30),
    (7782, 'CLARK',  'MANAGER',   7839, '1981-06-09', 2450, NULL, 10),
    (7788, 'SCOTT',  'ANALYST',   7566, '1987-04-19', 3000, NULL, 20),
    (7839, 'KING',   'PRESIDENT', NULL, '1981-11-17', 5000, NULL, 10),
    (7844, 'TURNER', 'SALESMAN',  7698, '1981-09-08', 1500,    0, 30),
    (7876, 'ADAMS',  'CLERK',     7788, '1987-05-23', 1100, NULL, 20),
    (7900, 'JAMES',  'CLERK',     7698, '1981-12-03',  950, NULL, 30),
    (7902, 'FORD',   'ANALYST',   7566, '1981-12-03', 3000, NULL, 20),
    (7934, 'MILLER', 'CLERK',     7782, '1982-01-23', 1300, NULL, 10);

== MySQL -> HDFS ==

# Import the MySQL table test.emp into HDFS as comma-delimited text.
# NULL values (string and non-string) are written as \N so Hive can read them;
# --delete-target-dir removes the target path first, making the job re-runnable.
sqoop import \
--connect jdbc:mysql://hadoop01:3306/test \
--username root \
--password 123 \
--table emp \
--null-string '\\N' \
--null-non-string '\\N' \
--fields-terminated-by ',' \
--target-dir hdfs://hadoop01:8020/sqoopdata/emp \
--delete-target-dir \
-m 1
# If the source table has no primary key, you MUST specify --split-by or -m:
#--split-by empno
#-m 1

== HDFS -> MySQL ==

# Note: before exporting to MySQL, the target table (emp2 here) must already
# exist — Sqoop export does not create it.
# --input-null-string/--input-null-non-string map the \N markers written by the
# import above back to SQL NULL.
sqoop export \
--connect jdbc:mysql://hadoop01:3306/test \
--username root \
--password 123 \
--table emp2 \
--input-null-string '\\N' \
--input-null-non-string '\\N' \
--export-dir hdfs://hadoop01:8020/sqoopdata/emp \
--input-fields-terminated-by ',' \
-m 1

== MySQL -> Hive ==

# Importing MySQL into Hive works in two steps under the hood:
# 1. Sqoop first copies the data from MySQL into HDFS.
# 2. It then loads those files into the Hive table (load).
# --hive-overwrite replaces the existing contents of sqoop.emp on each run.
sqoop import \
--connect jdbc:mysql://hadoop01:3306/test \
--username root \
--password 123 \
--table emp \
--hive-import \
--hive-overwrite \
--hive-table 'sqoop.emp' \
--delete-target-dir \
--fields-terminated-by ',' \
-m 1
# Import into a specific Hive partition (dt='2021-09-15'); the target Hive
# table must be a partitioned table.
# Fix: the JDBC host was 'hadoop' while every other command in this file uses
# 'hadoop01' — corrected for consistency with the rest of the examples.
sqoop import \
--connect jdbc:mysql://hadoop01:3306/test \
--username root \
--password 123 \
--table emp \
--hive-import \
--hive-overwrite \
--hive-table 'sqoop.emp' \
--delete-target-dir \
--fields-terminated-by ',' \
--hive-partition-key 'dt' \
--hive-partition-value '2021-09-15' \
-m 1

== Hive -> MySQL ==

# Essentially this is just exporting the Hive table's warehouse files on HDFS
# back into MySQL (the target table emp3 must already exist in MySQL).
# The --export-dir points at the Hive warehouse location of sqoop.db/emp.
sqoop export \
--connect jdbc:mysql://hadoop01:3306/test \
--username root \
--password 123 \
--table emp3 \
--export-dir hdfs://hadoop01:8020/user/hive/warehouse/sqoop.db/emp \
--input-fields-terminated-by ',' \
-m 1

== MySQL -> HBase ==

# Import the MySQL table into HBase table 'ns1:emp', storing all columns under
# the 'base_info' column family, keyed by empno.
# NOTE(review): --hbase-create-table creates the table if it is missing, but
# the 'ns1' namespace may need to be created beforehand — verify on the cluster.
sqoop import \
--connect jdbc:mysql://hadoop01:3306/test \
--username root \
--password 123 \
--table emp \
--hbase-create-table \
--hbase-table 'ns1:emp' \
--column-family 'base_info' \
--hbase-row-key 'empno' \
-m 1
  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值