调用本地
1.本地执行ktr文件(转换)
/**
 * Runs a local Kettle transformation (.ktr) file using the demo path.
 *
 * Kept parameterless for backward compatibility; delegates to
 * {@link #runTransfer(String)}. Callers with their own file should use the
 * overload directly.
 */
public static void runTransfer() {
    runTransfer("E:\\kettle\\data-integration\\demo\\demo1.ktr");
}

/**
 * Runs the transformation stored in the given .ktr file.
 *
 * @param ktrPath absolute path of the transformation file (suffix .ktr),
 *                e.g. "E:\\kettle\\data-integration\\demo\\demo1.ktr"
 */
public static void runTransfer(String ktrPath) {
    Trans trans = null;
    try {
        // Initialize the Kettle environment. KettleEnvironment.init() already
        // performs EnvUtil.environmentInit() internally, so the previous extra
        // call was redundant and has been removed.
        KettleEnvironment.init();
        // Transformation meta object built from the .ktr file.
        TransMeta transMeta = new TransMeta(ktrPath);
        // Create and execute the transformation.
        trans = new Trans(transMeta);
        trans.execute(null);
        // Block until the transformation has finished.
        trans.waitUntilFinished();
        // Surface execution errors instead of finishing silently.
        if (trans.getErrors() > 0) {
            throw new Exception(
                    "There are errors during transformation exception!(传输过程中发生异常)");
        }
    } catch (Exception e) {
        // Preserve original behavior: report and swallow.
        // NOTE(review): consider propagating or logging via a logger instead.
        e.printStackTrace();
    }
}
2.本地执行kjb(JOB)
/**
 * Runs a Kettle job (.kjb) file stored on the local file system.
 *
 * @param params  variable values passed into the job; up to three entries are
 *                mapped, in order, to the job variables "id", "content" and
 *                "file" (referenced inside the script as ${id} etc.). A null
 *                or shorter array simply leaves the remaining variables unset
 *                (previously this threw ArrayIndexOutOfBoundsException).
 * @param jobPath absolute path of the job script,
 *                e.g. "D:\\kettle\\my_job.kjb"
 */
public static void runJob(String[] params, String jobPath) {
    try {
        KettleEnvironment.init();
        // jobPath is the path (including file name) of the job script.
        JobMeta jobMeta = new JobMeta(jobPath, null);
        Job job = new Job(null, jobMeta);
        // Pass variables to the job script; inside the script they are read
        // as ${variableName}.
        String[] variableNames = {"id", "content", "file"};
        if (params != null) {
            for (int i = 0; i < variableNames.length && i < params.length; i++) {
                job.setVariable(variableNames[i], params[i]);
            }
        }
        job.start();
        job.waitUntilFinished();
        // Surface execution errors instead of finishing silently.
        if (job.getErrors() > 0) {
            throw new Exception(
                    "There are errors during job exception!(执行job发生异常)");
        }
    } catch (Exception e) {
        // Preserve original behavior: report and swallow.
        e.printStackTrace();
    }
}
调用服务器
1.服务器执行ktr文件(转换)
/**
 * Executes a transformation stored in a database repository.
 *
 * @param transName name of the transformation inside the repository
 *                  directory "/test1"
 * @throws KettleException if environment/repository setup fails or the
 *                         transformation finishes with errors (previously
 *                         errors were only printed to stderr despite the
 *                         declared checked exception)
 */
public static void executeTrans(String transName) throws KettleException {
    // Initialize the Kettle environment.
    KettleEnvironment.init();
    // Repository object; still empty until init() below.
    KettleDatabaseRepository repository = new KettleDatabaseRepository();
    // Repository database connection, analogous to creating a repository in
    // Spoon. Argument order (per the original note): connection name (need not
    // match the name configured in Kettle), db type, access type, host,
    // database name, port, user, password.
    DatabaseMeta dataMeta = new DatabaseMeta("192.168.0.250", "Mysql", "Native(JDBC)",
            "192.168.0.250", "kettle-test", "3306", "root", "root");
    // Repository meta object; id/name/description may be chosen freely.
    KettleDatabaseRepositoryMeta kettleDatabaseMeta =
            new KettleDatabaseRepositoryMeta("ETL", "ETL", "ETL description", dataMeta);
    // Configure and connect to the repository.
    repository.init(kettleDatabaseMeta);
    repository.connect("admin", "admin");
    try {
        // Locate the directory holding the transformation — important step.
        RepositoryDirectoryInterface directory = repository.findDirectory("/test1");
        // Load the transformation meta from the repository.
        TransMeta transMeta =
                ((Repository) repository).loadTransformation(transName, directory, null, true, null);
        // Create and execute the transformation.
        Trans trans = new Trans(transMeta);
        trans.execute(null);
        // Block until it finishes.
        trans.waitUntilFinished();
        if (trans.getErrors() > 0) {
            // Fail loudly instead of merely printing to stderr.
            throw new KettleException("Transformation run Failure!");
        }
        System.out.println("Transformation run successfully!");
    } finally {
        // Always release the repository connection (was leaked before).
        repository.disconnect();
    }
}
2.调用服务器的job
/**
 * Executes a job stored in a database repository.
 *
 * The original version wrapped everything in a try with an EMPTY catch block,
 * so every failure — including its own thrown RuntimeException — was silently
 * swallowed. Errors now propagate as the declared {@link KettleException}.
 *
 * @param jobName name of the job inside the repository directory "/test1"
 * @throws KettleException if environment/repository setup fails or the job
 *                         finishes with errors
 */
public static void executeJob(String jobName) throws KettleException {
    // Initialize the Kettle environment.
    KettleEnvironment.init();
    // Repository object; still empty until init() below.
    KettleDatabaseRepository repository = new KettleDatabaseRepository();
    // Repository database connection, analogous to creating a repository in
    // Spoon: connection name, db type, access type, host, database name,
    // port, user, password.
    DatabaseMeta dataMeta = new DatabaseMeta("192.168.0.250", "Mysql", "Native(JDBC)",
            "192.168.0.250", "kettle-test", "3306", "root", "root");
    // Repository meta object; id/name/description may be chosen freely.
    KettleDatabaseRepositoryMeta kettleDatabaseMeta =
            new KettleDatabaseRepositoryMeta("ETL", "ETL", "ETL description", dataMeta);
    // Configure and connect to the repository.
    repository.init(kettleDatabaseMeta);
    repository.connect("admin", "admin");
    try {
        // Locate the directory holding the job — important step.
        RepositoryDirectoryInterface directory = repository.findDirectory("/test1");
        // Load the job meta from the repository.
        JobMeta jobMeta = ((Repository) repository).loadJob(jobName, directory, null, null);
        // Create and start the job, then block until it finishes.
        Job job = new Job(repository, jobMeta);
        job.start();
        job.waitUntilFinished();
        if (job.getErrors() > 0) {
            // Message corrected: this is a job, not a transformation.
            throw new KettleException("There were errors during job execution.");
        }
    } finally {
        // Always release the repository connection (was leaked before).
        repository.disconnect();
    }
}