1.sqoop2-shell执行shell脚本
vim /opt/temp/sqoop2_shell.sh
#!/bin/bash
# Purpose: run a prepared sqoop2-shell batch script after Kerberos login.
# Requires: /opt/temp/bigdata_env (cluster env), /opt/temp/user.keytab.
set -euo pipefail

# Load cluster environment variables
source /opt/temp/bigdata_env

# Kerberos security authentication; abort instead of running the job unauthenticated
kinit -kt /opt/temp/user.keytab userName || { echo "kinit failed" >&2; exit 1; }

# Execute the sqoop2-shell batch job script
/opt/sqoop2/sqoop2-shell /opt/temp/sqoop2_shell_job.sh
vim /opt/temp/sqoop2_shell_job.sh
# Show the parameter help for "create connection"
create connection -c 1 --help
# Create the connection (use --name, consistent with "delete connection --name" below)
create connection -c 1 --name oracle-connection --connector-connection-connectionString jdbc:oracle:thin:@192.168.0.1:1521:schemaName --connector-connection-jdbcDriver oracle.jdbc.driver.OracleDriver --connector-connection-username root --connector-connection-password root123
# Delete the connection
#delete connection --name oracle-connection
# Show the parameter help for "create job"
create job -t import -xn oracle-connection --help
# Create the job; -xn must reference the connection name created above
create job -t import -xn oracle-connection --connector-table-sql select * from schemaName.tableName where \${CONDITIONS} --connector-table-needPartition false --fields-terminated-by '\\\t' --framework-output-outputDirectory /user/hive/warehouse/ods.db/tableName --framework-output-storageType HDFS --framework-throttling-extractors 3 --framework-output-fileType TEXT_FILE --queue root.default --name oracle_job_tableName
# Delete the job
#delete job --name oracle_job_tableName
# Start the job; -n must match the job name given by --name above (-s: synchronous)
start job -n oracle_job_tableName -s
或者使用 -c 参数单独执行某一条命令:
# Show the parameter help for "create connection"
/opt/sqoop2/sqoop2-shell -c "create connection -c 1 --help"
# Create the connection (use --name, consistent with the delete command below)
/opt/sqoop2/sqoop2-shell -c "create connection -c 1 --name oracle-connection --connector-connection-connectionString jdbc:oracle:thin:@192.168.0.1:1521:schemaName --connector-connection-jdbcDriver oracle.jdbc.driver.OracleDriver --connector-connection-username root --connector-connection-password root123"
# Delete the connection
#/opt/sqoop2/sqoop2-shell -c "delete connection --name oracle-connection"
2.loader-shell执行job
将第1步创建的job当做模板,批量串行地对多个table执行导入,所有参数值都可以动态传入shell脚本
vim loader-shell.sh
#!/bin/bash
# Purpose: submit a Loader import job directly via submit_job.sh, using the job
# from section 1 as a template; all parameter values can be passed in dynamically.
set -euo pipefail

# Load cluster environment variables
source /opt/temp/bigdata_env

# Kerberos security authentication; abort instead of submitting unauthenticated
kinit -kt /opt/temp/user.keytab userName || { echo "kinit failed" >&2; exit 1; }

# Submit the import job.
# BUGFIX: the SQL is single-quoted so that ${CONDITIONS} reaches Loader as a
# literal placeholder; inside double quotes bash would expand it to an empty
# string (or fail under set -u), producing "where" with no condition.
/opt/sqoop2/shell-client/submit_job.sh -n "oracle_job_tableName" -u y -jobType import -connectorType rdb -sql 'select * from schemaName.tableName where ${CONDITIONS}' -frameworkType hdfs -extractors 3 -outputDirectory "/user/hive/warehouse/ods.db/tableName"