#!/usr/bin/env bash
#
# Sanity-check import: pulls min(date_key), max(date_key) and total row count
# from SQL Server table raw.t_fact_tld_header_source (DWBI2_Delivery_PHHS via
# the jTDS driver) into the HDFS staging directory
# /apps/hive/warehouse/staging/ph/raw_t_fact_tld_header_source.
#
# NOTE: --delete-target-dir wipes the target directory before each run.
set -euo pipefail

# NOTE(review): plaintext DB password on the command line is visible in `ps`
# output and shell history — prefer `--password-file <hdfs-path>` or `-P`
# (interactive prompt). Left as-is pending a managed credentials file.
sqoop import \
  -Dmapreduce.job.queuename=root.mda \
  --connect 'jdbc:jtds:sqlserver://SGH1PDWBIDBV06.cn.yumchina.com:55650/SQL2008_DWBI06;domain=YumChina;databasename=DWBI2_Delivery_PHHS;sendStringParametersAsUnicode=false' \
  --username serv-bda-prd \
  --password "%h\wd\4+" \
  --query " select min(date_key) as mindate,max(date_key) as maxdate,count(*) as total from raw.t_fact_tld_header_source where \$CONDITIONS " \
  --mapreduce-job-name sqoop_import_raw_t_fact_tld_header_source \
  --delete-target-dir \
  --target-dir "/apps/hive/warehouse/staging/ph/raw_t_fact_tld_header_source" \
  --null-non-string '\\N' --null-string '\\N' --fields-terminated-by '\034' \
  --hive-drop-import-delims \
  -m 1
# Notes on the flags above:
#   \$CONDITIONS        — required placeholder in free-form --query imports;
#                         escaped so the shell passes the literal $CONDITIONS
#                         through for Sqoop to substitute split predicates.
#   '\034'              — field separator is ASCII 0x1C (file separator),
#                         chosen to avoid collisions with data content.
#   '\\N'               — Hive's default NULL token for both string and
#                         non-string columns.
#   -m 1                — single mapper: the aggregate query returns one row,
#                         so parallel splits (and --split-by) are unnecessary.
# Sqoop uploads the query result to HDFS.
# (Page metadata from the original source: latest recommended article published 2022-12-02 15:16:48.)