目录
1.列出mysql中有哪些数据库
# List every database on the MySQL server at localhost:3306.
sqoop list-databases --connect jdbc:mysql://localhost:3306 --username root --password 123456
2.列出MySQL中的某个数据库有哪些数据表:
# List the tables of the database named in the JDBC URL.
# NOTE(review): the URL here has no database path (jdbc:mysql://localhost:3306);
# list-tables normally needs one, e.g. .../3306/emp — confirm against the server.
sqoop list-tables --connect jdbc:mysql://localhost:3306 --username root --password 123456
3.从MySQL数据库服务器中的emp表导入HDFS:
说明：`\` 表示命令换行续行；`emp` 是数据库名，`departments` 是表名。
# Import the MySQL table `departments` (database `emp`) into HDFS.
# Output lands under the default target dir /user/<user>/departments.
# Fixed: `--m 1` -> `--num-mappers 1`; the documented flags are `-m` / `--num-mappers`.
# NOTE(review): a cleartext --password on the command line leaks via shell history
# and `ps`; prefer -P (prompt) or --password-file in real use.
sqoop import \
  --connect jdbc:mysql://localhost:3306/emp \
  --username root \
  --password 123456 \
  --table departments \
  --num-mappers 1
导入成功
查看数据
[root@hadoop sqoop]# hdfs dfs -cat /user/root/departments/part-m-00000
10,Adm,200,1700
20,Mar,201,1800
30,Pur,114,1700
40,Hum,203,2400
50,Shi,121,1500
60,IT,103,1400
70,Pub,204,2700
80,Sal,145,2500
90,Exe,100,1700
100,Fin,108,1700
110,Acc,205,1700
120,Tre,null,1700
130,Cor,null,1700
140,Con,null,1700
150,Sha,null,1700
160,Ben,null,1700
170,Man,null,1700
180,Con,null,1700
190,Con,null,1700
200,Ope,null,1700
210,IT ,null,1700
220,NOC,null,1700
230,IT ,null,1700
240,Gov,null,1700
250,Ret,null,1700
260,Rec,null,1700
270,Pay,null,1700
4.RDBMS到HIVE
# Import the MySQL table `admin` (database `emp`) directly into Hive.
# Fixed: the original put Chinese annotations right after the trailing `\`
# (e.g. `\库名`), which makes the backslash escape the first character of the
# annotation instead of the newline — the command is cut off mid-way.
# Annotations must live on their own `#` comment lines. Also removed the
# dangling `\` after the last option.
#   --hive-import     : load into Hive instead of plain HDFS
#   --split-by id     : split the work by the `id` column
#   --num-mappers 1   : run a single map task
sqoop import \
  --connect jdbc:mysql://localhost:3306/emp \
  --username root \
  --password 123456 \
  --table admin \
  --hive-import \
  --split-by id \
  --num-mappers 1
到hive中去查看
show tables;
5.导入表数据子集
# Import only the subset of `employees` matching the --where predicate
# into the HDFS directory /l0412.
# Fixed: inline annotations after the continuation `\` (e.g. `\子集条件声明`)
# break the command — the backslash escapes the annotation's first character,
# not the newline. Comments moved to their own lines.
#   --where "department_id='60'" : row filter pushed into the SELECT
#   --target-dir /l0412          : explicit HDFS output directory
sqoop import \
  --connect jdbc:mysql://localhost:3306/emp \
  --username root \
  --password 123456 \
  --where "department_id='60'" \
  --target-dir /l0412 \
  --table employees \
  --num-mappers 1
导入成功
查看数据
[root@hadoop sqoop]# hdfs dfs -cat /l0412/part-m-00000
103,Alexander,Hunold,AHUNOLD,590.423.4567,IT_PROG,9000.0,null,102,60,1992-04-03 00:00:00.0
104,Bruce,Ernst,BERNST,590.423.4568,IT_PROG,6000.0,null,103,60,1992-04-03 00:00:00.0
105,David,Austin,DAUSTIN,590.423.4569,IT_PROG,4800.0,null,103,60,1998-03-03 00:00:00.0
106,Valli,Pataballa,VPATABAL,590.423.4560,IT_PROG,4800.0,null,103,60,1998-03-03 00:00:00.0
107,Diana,Lorentz,DLORENTZ,590.423.5567,IT_PROG,4200.0,null,103,60,1998-03-03 00:00:00.0
6.Sqoop数据导出
创建数据
[root@hadoop dool]# vim emps.txt
1201,gopal,manager,50000,TP
1202,manisha,preader,50000,TP
1203,kalil,phpdev,30000,AC
1204,prasanth,phpdev,30000,AC
1205,kranthi,admin,20000,TP
1206,satishp,grpdes,20000,GR
上传数据到hdfs上
# Upload the local data file to HDFS.
# Fixed: destination was misspelled "epms.txt"; keep the name consistent
# with the local file so the later export step can reference it.
hdfs dfs -put emps.txt /user/root/emps.txt
需要手动创建mysql中的目标表
-- Target table for the Sqoop export.
-- Column order must match the comma-separated fields of the exported
-- HDFS file (id,name,deg,salary,dept — e.g. "1201,gopal,manager,50000,TP").
CREATE TABLE employee (
id INT NOT NULL PRIMARY KEY,
NAME VARCHAR(20),
deg VARCHAR(20),
salary INT,
dept VARCHAR(10));
然后执行导出命令
# Export the HDFS data file back into the MySQL table `employee`.
# Fixed: --export-dir pointed at /user/root/, the whole home directory —
# that also contains the earlier departments import output (4 columns),
# which cannot map onto the 5-column employee table and would fail the export.
# Point at the data file itself instead.
# NOTE(review): assumes the file was uploaded as /user/root/emps.txt;
# the earlier put command in these notes misspelled it "epms.txt" — verify.
sqoop export \
  --connect jdbc:mysql://localhost:3306/emp \
  --username root \
  --password 123456 \
  --table employee \
  --export-dir /user/root/emps.txt