sqoop导入导出测试

usage: sqoop COMMAND [ARGS]

Available commands:
  codegen            Generate code to interact with database records
  create-hive-table  Import a table definition into Hive
  eval               Evaluate a SQL statement and display the results
  export             Export an HDFS directory to a database table
  help               List available commands
  import             Import a table from a database to HDFS
  import-all-tables  Import tables from a database to HDFS
  import-mainframe   Import datasets from a mainframe server to HDFS
  job                Work with saved jobs
  list-databases     List available databases on a server
  list-tables        List available tables in a database
  merge              Merge results of incremental imports
  metastore          Run a standalone Sqoop metastore
  version            Display version information


mysql测试表 :
-- MySQL test table: recreate from scratch so each test run starts clean.
-- IF EXISTS makes the script idempotent (original plain DROP fails on first run).
drop table if exists test.test;
create table test.test
(
-- surrogate key; AUTO_INCREMENT supplies ids 1..3 for the seed inserts below.
-- (display width "(20)" dropped — deprecated in MySQL 8 and has no effect on storage)
id bigint NOT NULL AUTO_INCREMENT,
name varchar(50) DEFAULT NULL COMMENT '名称',
-- original had a trailing comma after this line, which is a syntax error in MySQL
PRIMARY KEY (`id`)
) CHARSET = utf8mb4 COMMENT = '测试表';

-- Seed three rows; ids 1..3 are assigned by AUTO_INCREMENT, same as the
-- original three single-row inserts.
insert into test.test (name)
values ('one'),
       ('two'),
       ('three');


select * from test.test;
id	name
1	one
2	two
3	three

mysql->hdfs
# MySQL -> HDFS: free-form-query import into a dated, Hive-partition-style directory.
# (Comments must stay above the command: a '#' line would break the '\' continuation.)
#  - -Dorg...allow_text_splitter=true : allows splitting on a text column; moot here since -m 1.
#  - --driver com.mysql.jdbc.Driver   : forces Sqoop's generic JDBC code path.
#  - --fields-terminated-by '|'       : output field delimiter; the query below also
#    replace()-s any '|' inside the data so the delimiter stays unambiguous.
#  - --null-string / --null-non-string '\\N' : write NULLs as \N (Hive's null marker).
#  - --delete-target-dir              : makes re-runs idempotent by wiping the target first.
#  - --query ... where \$CONDITIONS   : $CONDITIONS is mandatory in free-form queries;
#    escaped so the shell does not expand it.
# NOTE(review): --hive-delims-replacement and --map-column-hive normally take effect with
# --hive-import, which is not used here — confirm they do anything in this HDFS-only import.
/sqoop_tool/sqoop-1.4.7.bin__hadoop-2.6.0/bin/sqoop import \
-Dorg.apache.sqoop.splitter.allow_text_splitter=true \
--connect jdbc:mysql://XXXX:2883/test \
--username user \
--password 'passwd' \
--driver com.mysql.jdbc.Driver \
--fields-terminated-by '|' \
--null-string '\\N' \
--null-non-string '\\N' \
--hive-delims-replacement ' ' \
--outdir /data/sqoopcode \
--delete-target-dir \
--fetch-size 200 \
--map-column-hive ID=String,NAME=String \
--target-dir /user/hive/warehouse/bak.db/test/etl_date=20200920 \
--query "select replace(t.ID,'|',' '),replace(t.NAME,'|',' ') from test.test t where \$CONDITIONS" \
-m 1

hadoop fs -cat /user/hive/warehouse/bak.db/test/etl_date=20200920/*
1|one
2|two
3|three


hdfs->mysql

全量写入:
# Full-load prep: empty the target MySQL table before re-exporting everything.
# 'sqoop eval' runs the given statement directly against the database —
# TRUNCATE here is destructive and intentional (full overwrite semantics).
/sqoop_tool/sqoop-1.4.7.bin__hadoop-2.6.0/bin/sqoop eval \
-Dorg.apache.sqoop.splitter.allow_text_splitter=true \
--connect jdbc:mysql://XXXX:2883/test \
--username user \
--password 'passwd' \
--query "truncate table test.test"

# HDFS -> MySQL: full export of the partition directory written by the import above.
#  - --input-fields-terminated-by '|' and --input-null-* '\\N' must mirror the
#    delimiters/null markers used at import time, or rows are misparsed.
#  - --export-dir : source files on HDFS (same etl_date=20200920 path as the import).
#  - --table test --columns ID,NAME : target table and column order matching the files.
#    (MySQL is case-insensitive on column names, so ID/NAME match id/name.)
# NOTE(review): --export-dir ends with '/*' — the mysql import transcript suggests this
# worked, but sqoop docs show a plain directory path; confirm the glob is accepted.
/sqoop_tool/sqoop-1.4.7.bin__hadoop-2.6.0/bin/sqoop export \
-Dorg.apache.sqoop.splitter.allow_text_splitter=true \
--connect jdbc:mysql://XXXX:2883/test \
--username user \
--password 'passwd' \
--input-fields-terminated-by '|' \
--input-lines-terminated-by '\n' \
--input-null-string '\\N' \
--input-null-non-string '\\N' \
--outdir /data/sqoopcode \
--export-dir /user/hive/warehouse/bak.db/test/etl_date=20200920/* \
--table test \
--columns ID,NAME \
-m 1

增量写入:
hadoop fs -get /user/hive/warehouse/bak.db/test/etl_date=20200920/* ./

hadoop fs -put -f part-m-00000 /user/hive/warehouse/bak.db/test/etl_date=20200920/

hadoop fs -cat /user/hive/warehouse/bak.db/test/etl_date=20200920/*
1|oneone
4|four

select * from test.test;
id	name
1	oneone
2	two
3	three
4	four

oracle测试表 :
-- Oracle test table (counterpart of the MySQL test.test above).
drop table cxpj.test2;
create table cxpj.test2
(
id int NOT NULL,
name varchar2(50) DEFAULT NULL ,
-- NOTE(review): the double-quoted name "pk_id" is stored case-sensitively by Oracle;
-- it must always be referenced as "pk_id" (quoted, lowercase). An unquoted name
-- would fold to PK_ID and be easier to work with — confirm the quoting is intentional.
CONSTRAINT "pk_id" PRIMARY KEY (id)
) ;

-- Seed three rows. Oracle (pre-23c) has no multi-row VALUES list, so the idiomatic
-- single-statement seed is SELECT ... FROM dual chained with UNION ALL.
-- Fixes vs. original: adds the missing statement terminator, names the target
-- columns explicitly, and uses numeric literals for the INT id column instead of
-- relying on implicit '1' -> 1 string conversion.
insert into cxpj.test2 (id, name)
select 1, 'one' from dual
 union all
select 2, 'two' from dual
 union all
select 3, 'three' from dual;

oracle->hdfs

# Oracle -> HDFS: same free-form-query import pattern as the MySQL case above.
#  - --connect uses a full TNS descriptor; every parenthesis is backslash-escaped
#    so the shell does not treat it as a subshell.
#  - No --driver flag: Sqoop picks its built-in Oracle connector from the JDBC URL.
#  - Delimiter, null-marker, and \$CONDITIONS handling are identical to the MySQL import.
# NOTE(review): --hive-delims-replacement / --map-column-hive without --hive-import —
# same caveat as the MySQL import; confirm they have any effect here.
/sqoop_tool/sqoop-1.4.7.bin__hadoop-2.6.0/bin/sqoop import \
-Dorg.apache.sqoop.splitter.allow_text_splitter=true \
--connect jdbc:oracle:thin:@\(DESCRIPTION=\(ADDRESS=\(PROTOCOL=TCP\)\(HOST=XXXX\)\(PORT=1521\)\)\(CONNECT_DATA=\(SERVER=DEDICATED\)\(SERVICE_NAME=TPISFXTDEV\)\)\) \
--username user \
--password 'passwd' \
--fields-terminated-by '|' \
--null-string '\\N' \
--null-non-string '\\N' \
--hive-delims-replacement ' ' \
--outdir /data/sqoopcode \
--delete-target-dir \
--fetch-size 200 \
--map-column-hive ID=String,NAME=String \
--target-dir /user/hive/warehouse/bak.db/test2/etl_date=20200920 \
--query "select replace(t.ID,'|',' '),replace(t.NAME,'|',' ') from cxpj.test2 t where \$CONDITIONS" \
-m 1


hadoop fs -cat /user/hive/warehouse/bak.db/test2/etl_date=20200920/*
1|one|1
2|two|2
3|three|3


hdfs->oracle
# HDFS -> Oracle: upsert export (no TRUNCATE step, unlike the MySQL full load).
#  - --update-key ID --update-mode allowinsert : rows matching on ID are updated,
#    non-matching rows are inserted (upsert semantics).
#  - Input delimiters / null markers mirror the import settings, as they must.
#  - --table CXPJ.TEST2 in uppercase: unquoted Oracle identifiers fold to uppercase,
#    so this matches the cxpj.test2 created above.
# NOTE(review): --export-dir ends with '/*' — confirm sqoop accepts the glob
# rather than a plain directory path (same caveat as the MySQL export).
/sqoop_tool/sqoop-1.4.7.bin__hadoop-2.6.0/bin/sqoop export \
-Dorg.apache.sqoop.splitter.allow_text_splitter=true \
--connect jdbc:oracle:thin:@\(DESCRIPTION=\(ADDRESS=\(PROTOCOL=TCP\)\(HOST=XXXX\)\(PORT=1521\)\)\(CONNECT_DATA=\(SERVER=DEDICATED\)\(SERVICE_NAME=TPISFXTDEV\)\)\) \
--username user \
--password 'passwd' \
--input-fields-terminated-by '|' \
--input-lines-terminated-by '\n' \
--input-null-string '\\N' \
--input-null-non-string '\\N' \
--outdir /data/sqoopcode \
--update-mode allowinsert \
--update-key ID \
--export-dir /user/hive/warehouse/bak.db/test2/etl_date=20200920/* \
--table CXPJ.TEST2 \
--columns ID,NAME \
-m 1

  • 2
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值