Big Data Technology: Sqoop configuration examples (option files for `sqoop --options-file`)
hdfstomysql
export
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123
--table
hdfstomysql
--columns
id,name,age
-m
1
--export-dir
hdfs://mycluster/hdfstomysql
mysqltohive
import
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123
--target-dir
/sqoop/THive
--delete-target-dir
--as-textfile
-m
1
--table
T_P
--columns
id,name,age
--hive-import
--hive-overwrite
--hive-table
T_hive
mysqltohdfs
import
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123
--target-dir
/sqoop/T1
--delete-target-dir
--as-textfile
-m
1
--table
T_P
--columns
id,name,age
------------------------
import
--connect
jdbc:mysql://node3:3306/test
--username
root
--password
123456
--query
"select p.id,name,age,c.card_id,date_format(c.create_date,'%Y-%m-%d') as c_date from t_person p join t_id_card c on p.id=c.p_id where p.age>17 and $CONDITIONS"
--target-dir
/sqoop/test3
--delete-target-dir
--as-textfile
-m
1
--null-string
''
--null-non-string
''
--hive-import
--hive-overwrite
--create-hive-table
--hive-table
t_person_card
--hive-partition-key
day
--hive-partition-value
'2016-07-03'
-------------------------
export
--connect
jdbc:mysql://node3:3306/test
--username
root
--password
123456
--table
t_person
--columns
id,name,age
-m
1
--export-dir
hdfs://laoxiao/test
--------------------------------
import
--connect
jdbc:mysql://node3:3306/test
--username
root
--password
123456
--table
t_person
--columns
id,name,age
--where
1=1
--target-dir
/sqoop/test1
--delete-target-dir
--as-textfile
-m
1
--null-string
''
--null-non-string
''
----------------------------
import
--connect
jdbc:mysql://node3:3306/test
--username
root
--password
123456
--query
"select p.id,name,age,c.card_id,date_format(c.create_date,'%Y-%m-%d') as c_date from t_person p join t_id_card c on p.id=c.p_id where p.age>17 and $CONDITIONS"
--target-dir
/sqoop/test2
--delete-target-dir
--as-textfile
-m
1
--null-string
''
--null-non-string
''