Importing MySQL data into HDFS with the Sqoop client Java API
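The program below uses the Sqoop 2 (1.99.x) client Java API to transfer a MySQL table into HDFS. It connects to the Sqoop server's REST endpoint, creates a JDBC (FROM) link and an HDFS (TO) link, wires them into a job, starts the job, polls its progress, and finally prints the Hadoop counters and any exception info.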

package com.hadoop.recommend;

import org.apache.sqoop.client.SqoopClient;
import org.apache.sqoop.model.MDriverConfig;
import org.apache.sqoop.model.MFromConfig;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MLinkConfig;
import org.apache.sqoop.model.MSubmission;
import org.apache.sqoop.model.MToConfig;
import org.apache.sqoop.submission.counter.Counter;
import org.apache.sqoop.submission.counter.CounterGroup;
import org.apache.sqoop.submission.counter.Counters;
import org.apache.sqoop.validation.Status;

public class MysqlToHDFS {

    public static void main(String[] args) {
        sqoopTransfer();
    }

    public static void sqoopTransfer() {
        // Initialization: point the client at the Sqoop 2 server's REST
        // endpoint (12000 is the default Sqoop 2 server port).
        String url = "http://master:12000/sqoop/";
        SqoopClient client = new SqoopClient(url);

        // Create the source (FROM) link using the generic JDBC connector.
        long fromConnectorId = 2;
        MLink fromLink = client.createLink(fromConnectorId);
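        // The connector IDs used here (2 = generic JDBC, 1 = HDFS) are
        // installation-specific. If unsure, the registered connectors can be
        // listed first -- a sketch assuming the 1.99.x client API:
        //
        //   for (org.apache.sqoop.model.MConnector c : client.getConnectors()) {
        //       System.out.println(c.getPersistenceId() + " -> " + c.getUniqueName());
        //   }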

        // Configure the JDBC link: connection string, driver class, and
        // MySQL credentials (an empty root password here).
        fromLink.setName("JDBC connector");
        fromLink.setCreationUser("hadoop");
        MLinkConfig fromLinkConfig = fromLink.getConnectorLinkConfig();
        fromLinkConfig.getStringInput("linkConfig.connectionString").setValue("jdbc:mysql://master:3306/hive");
        fromLinkConfig.getStringInput("linkConfig.jdbcDriver").setValue("com.mysql.jdbc.Driver");
        fromLinkConfig.getStringInput("linkConfig.username").setValue("root");
        fromLinkConfig.getStringInput("linkConfig.password").setValue("");
        Status fromStatus = client.saveLink(fromLink);
        if (fromStatus.canProceed()) {
            System.out.println("JDBC link created, ID: " + fromLink.getPersistenceId());
        } else {
            System.out.println("Failed to create JDBC link");
        }

        // Create the destination (TO) link using the HDFS connector.
        long toConnectorId = 1;
        MLink toLink = client.createLink(toConnectorId);
        toLink.setName("HDFS connector");
        toLink.setCreationUser("hadoop");
        MLinkConfig toLinkConfig = toLink.getConnectorLinkConfig();
        toLinkConfig.getStringInput("linkConfig.uri").setValue("hdfs://master:9000/");
        Status toStatus = client.saveLink(toLink);
        if (toStatus.canProceed()) {
            System.out.println("HDFS link created, ID: " + toLink.getPersistenceId());
        } else {
            System.out.println("Failed to create HDFS link");
        }
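        // saveLink() persists each link on the Sqoop server and assigns the
        // persistence IDs that createJob() is wired up with below.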

        // Create a job connecting the two links.
        long fromLinkId = fromLink.getPersistenceId();
        long toLinkId = toLink.getPersistenceId();
        MJob job = client.createJob(fromLinkId, toLinkId);
        job.setName("MySQL to HDFS job");
        job.setCreationUser("hadoop");

        // FROM-side job config: which table to read and how to split it.
        MFromConfig fromJobConfig = job.getFromJobConfig();
        fromJobConfig.getStringInput("fromJobConfig.schemaName").setValue("sqoop");
        fromJobConfig.getStringInput("fromJobConfig.tableName").setValue("sqoop");
        fromJobConfig.getStringInput("fromJobConfig.partitionColumn").setValue("id");

        // TO-side job config: where in HDFS to write the output.
        MToConfig toJobConfig = job.getToJobConfig();
        toJobConfig.getStringInput("toJobConfig.outputDirectory").setValue("/user/hdfs/recommend");

        // Driver config: throttle the transfer to 3 parallel extractors.
        MDriverConfig driverConfig = job.getDriverConfig();
        driverConfig.getStringInput("throttlingConfig.numExtractors").setValue("3");
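        // numExtractors bounds how many parallel extract (map) tasks read
        // from MySQL, split on the partition column set above. The write side
        // has an analogous "throttlingConfig.numLoaders" knob (an assumption
        // from the 1.99.x throttling config; not set here).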

        Status status = client.saveJob(job);
        if (status.canProceed()) {
            System.out.println("Job created, ID: " + job.getPersistenceId());
        } else {
            System.out.println("Failed to create job.");
        }

        // Start the job and poll until it finishes.
        long jobId = job.getPersistenceId();
        MSubmission submission = client.startJob(jobId);
        System.out.println("Job submission status: " + submission.getStatus());
        while (submission.getStatus().isRunning() && submission.getProgress() != -1) {
            System.out.println("Progress: " + String.format("%.2f %%", submission.getProgress() * 100));
            // Report progress every three seconds.
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            // Refresh the submission from the server; without this the loop
            // would spin forever on a stale status.
            submission = client.getJobStatus(jobId);
        }

System.out.println("JOB执行结束... ...");

System.out.println("Hadoop任务ID为 :" + submission.getExternalId());

Counters counters = submission.getCounters();

if(counters != null) {

System.out.println("计数器:");

for(CounterGroup group : counters) {

System.out.print("\t");

System.out.println(group.getName());

for(Counter counter : group) {

System.out.print("\t\t");

System.out.print(counter.getName());

System.out.print(": ");

System.out.println(counter.getValue());

}

}

}

if(submission.getExceptionInfo() != null) {

System.out.println("JOB执行异常,异常信息为 : " +submission.getExceptionInfo());

}

System.out.println("MySQL通过sqoop传输数据到HDFS统计执行完毕");

}

}
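To confirm that the transfer actually landed in HDFS, the output directory can be listed afterwards. The sketch below uses the standard Hadoop FileSystem API; the NameNode URI (hdfs://master:9000/) and output directory (/user/hdfs/recommend) are the values configured in the job above, while the VerifyImport class name is just illustrative.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class VerifyImport {

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Connect to the same NameNode the HDFS link points at.
        FileSystem fs = FileSystem.get(URI.create("hdfs://master:9000/"), conf);
        // List whatever the Sqoop job wrote into the output directory.
        for (FileStatus file : fs.listStatus(new Path("/user/hdfs/recommend"))) {
            System.out.println(file.getPath() + "\t" + file.getLen() + " bytes");
        }
        fs.close();
    }
}

The equivalent shell check is hdfs dfs -ls /user/hdfs/recommend.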
