Sqoop 1.99.4 Java API 操作示例(HDFS 导出到 MySQL)

1 packageorg.admln.sqoopOperate;2

3 importorg.apache.sqoop.client.SqoopClient;4 importorg.apache.sqoop.model.MFromConfig;5 importorg.apache.sqoop.model.MJob;6 importorg.apache.sqoop.model.MLink;7 importorg.apache.sqoop.model.MLinkConfig;8 importorg.apache.sqoop.model.MSubmission;9 importorg.apache.sqoop.model.MToConfig;10 importorg.apache.sqoop.submission.counter.Counter;11 importorg.apache.sqoop.submission.counter.CounterGroup;12 importorg.apache.sqoop.submission.counter.Counters;13 importorg.apache.sqoop.validation.Status;14

15 public classHDFSToMysql {16 public static voidmain(String[] args) {17 sqoopTransfer();18 }19 public static voidsqoopTransfer() {20 //初始化

21 String url = "http://hadoop:12000/sqoop/";22 SqoopClient client = newSqoopClient(url);23

24 //创建一个源链接 HDFS

25 long fromConnectorId = 1;26 MLink fromLink =client.createLink(fromConnectorId);27 fromLink.setName("HDFS connector");28 fromLink.setCreationUser("admln");29 MLinkConfig fromLinkConfig =fromLink.getConnectorLinkConfig();30 fromLinkConfig.getStringInput("linkConfig.uri").setValue("hdfs://hadoop:8020/");31 Status fromStatus =client.saveLink(fromLink);32 if(fromStatus.canProceed()) {33 System.out.println("创建HDFS Link成功,ID为: " +fromLink.getPersistenceId());34 } else{35 System.out.println("创建HDFS Link失败");36 }37 //创建一个目的地链接 JDBC

38 long toConnectorId = 2;39 MLink toLink =client.createLink(toConnectorId);40 toLink.setName("JDBC connector");41 toLink.setCreationUser("admln");42 MLinkConfig toLinkConfig =toLink.getConnectorLinkConfig();43 toLinkConfig.getStringInput("linkConfig.connectionString").setValue("jdbc:mysql://hadoop:3306/hive");44 toLinkConfig.getStringInput("linkConfig.jdbcDriver").setValue("com.mysql.jdbc.Driver");45 toLinkConfig.getStringInput("linkConfig.username").setValue("hive");46 toLinkConfig.getStringInput("linkConfig.password").setValue("hive");47 Status toStatus =client.saveLink(toLink);48 if(toStatus.canProceed()) {49 System.out.println("创建JDBC Link成功,ID为: " +toLink.getPersistenceId());50 } else{51 System.out.println("创建JDBC Link失败");52 }53

54 //创建一个任务

55 long fromLinkId =fromLink.getPersistenceId();56 long toLinkId =toLink.getPersistenceId();57 MJob job =client.createJob(fromLinkId, toLinkId);58 job.setName("HDFS to MySQL job");59 job.setCreationUser("admln");60 //设置源链接任务配置信息

61 MFromConfig fromJobConfig =job.getFromJobConfig();62 fromJobConfig.getStringInput("fromJobConfig.inputDirectory").setValue("/out/aboutyunLog/HiveExport/ipstatistical/data");63

64 //创建目的地链接任务配置信息

65 MToConfig toJobConfig =job.getToJobConfig();66 toJobConfig.getStringInput("toJobConfig.schemaName").setValue("aboutyunlog");67 toJobConfig.getStringInput("toJobConfig.tableName").setValue("ipstatistical");68 //toJobConfig.getStringInput("fromJobConfig.partitionColumn").setValue("id");69 //set the driver config values70 //MDriverConfig driverConfig = job.getDriverConfig();71 //driverConfig.getStringInput("throttlingConfig.numExtractors").setValue("3");//这句还没弄明白

72 Status status =client.saveJob(job);73 if(status.canProceed()) {74 System.out.println("JOB创建成功,ID为: "+job.getPersistenceId());75 } else{76 System.out.println("JOB创建失败。");77 }78

79 //启动任务

80 long jobId =job.getPersistenceId();81 MSubmission submission =client.startJob(jobId);82 System.out.println("JOB提交状态为 : " +submission.getStatus());83 while(submission.getStatus().isRunning() && submission.getProgress() != -1) {84 System.out.println("进度 : " + String.format("%.2f %%", submission.getProgress() * 100));85 //三秒报告一次进度

86 try{87 Thread.sleep(3000);88 } catch(InterruptedException e) {89 e.printStackTrace();90 }91 }92 System.out.println("JOB执行结束... ...");93 System.out.println("Hadoop任务ID为 :" +submission.getExternalId());94 Counters counters =submission.getCounters();95 if(counters != null) {96 System.out.println("计数器:");97 for(CounterGroup group : counters) {98 System.out.print("\t");99 System.out.println(group.getName());100 for(Counter counter : group) {101 System.out.print("\t\t");102 System.out.print(counter.getName());103 System.out.print(": ");104 System.out.println(counter.getValue());105 }106 }107 }108 if(submission.getExceptionInfo() != null) {109 System.out.println("JOB执行异常,异常信息为 : " +submission.getExceptionInfo());110 }111 System.out.println("HDFS通过sqoop传输数据到MySQL统计执行完毕");112 }113 }

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值