1. First, build a .ktr transformation in the Kettle GUI (Spoon), as shown in the figure below. The Kettle version used here is the official 7.1 release.
2. Add the following dependencies to your project. The first few can be pulled in directly from Maven; the remaining jars can be found inside the Kettle installation and are referenced here with system scope (an alternative to system scope is sketched after the dependency list).
<!-- kettle -->
<dependency>
    <groupId>org.apache.commons</groupId>
    <artifactId>commons-vfs2</artifactId>
    <version>2.0</version>
</dependency>
<dependency>
    <groupId>com.google.guava</groupId>
    <artifactId>guava</artifactId>
    <version>27.0.1-jre</version>
</dependency>
<dependency>
    <groupId>org.scannotation</groupId>
    <artifactId>scannotation</artifactId>
    <version>1.0.3</version>
</dependency>
<dependency>
    <groupId>dom4j</groupId>
    <artifactId>dom4j</artifactId>
</dependency>
<dependency>
    <groupId>com.kettle</groupId>
    <artifactId>pentaho-vfs-browser</artifactId>
    <version>7.1.0.0</version>
    <scope>system</scope>
    <systemPath>${project.basedir}/lib/pentaho-vfs-browser-7.1.0.0-12.jar</systemPath>
</dependency>
<dependency>
    <groupId>com.kettle</groupId>
    <artifactId>kettle-engine</artifactId>
    <version>7.1.0.0</version>
    <scope>system</scope>
    <systemPath>${project.basedir}/lib/kettle-engine-7.1.0.0-12.jar</systemPath>
</dependency>
<dependency>
    <groupId>com.kettle</groupId>
    <artifactId>kettle-core</artifactId>
    <version>7.1.0.0</version>
    <scope>system</scope>
    <systemPath>${project.basedir}/lib/kettle-core-7.1.0.0-12.jar</systemPath>
</dependency>
<dependency>
    <groupId>com.kettle</groupId>
    <artifactId>metastore</artifactId>
    <version>7.1.0.0</version>
    <scope>system</scope>
    <systemPath>${project.basedir}/lib/metastore-7.1.0.0-12.jar</systemPath>
</dependency>
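Note: system-scoped dependencies are usually not packaged into the final artifact by build plugins, so if the application is deployed as a runnable jar the Kettle jars may be missing at runtime. A minimal alternative sketch, assuming you first install each jar from the Kettle distribution into your local Maven repository (e.g. mvn install:install-file -Dfile=lib/kettle-engine-7.1.0.0-12.jar -DgroupId=com.kettle -DartifactId=kettle-engine -Dversion=7.1.0.0 -Dpackaging=jar), is to declare them as ordinary dependencies; the coordinates below simply mirror the ones chosen above, and the other jars follow the same pattern:
<dependency>
    <groupId>com.kettle</groupId>
    <artifactId>kettle-engine</artifactId>
    <version>7.1.0.0</version>
</dependency>
<dependency>
    <groupId>com.kettle</groupId>
    <artifactId>kettle-core</artifactId>
    <version>7.1.0.0</version>
</dependency>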
3. Write the Java class KettleQuartz.java (the file name must match the public class KettleQuartz below) and run its main method; it executes successfully.
import java.util.Map;
import java.util.UUID;

import org.pentaho.di.core.KettleEnvironment;
import org.pentaho.di.core.util.EnvUtil;
import org.pentaho.di.job.Job;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class KettleQuartz {

    protected static final Logger logger_info = LoggerFactory.getLogger(KettleQuartz.class);

    public static void main(String[] args) {
        runKettleTransfer(null, "C:\\dems\\kettle\\dems.ktr");
    }

    /**
     * Run a Kettle job (.kjb file).
     *
     * @param initKettleParam variables passed to the job; read in the script as ${variableName}
     * @param kjbFilePath     path of the .kjb file
     * @return true if the job finished without errors, false otherwise
     */
    public static boolean runKettleJob(Map<String, String> initKettleParam, String kjbFilePath) {
        String uuid = UUID.randomUUID().toString();
        logger_info.info("ExecKettleUtil@runKettleJob:" + uuid + " {kjbFilePath:" + kjbFilePath + "}");
        try {
            KettleEnvironment.init();
            // Load the job definition from the .kjb file
            JobMeta jobMeta = new JobMeta(kjbFilePath, null);
            Job job = new Job(null, jobMeta);
            // Set job variables; the script reads them as ${variableName}
            if (initKettleParam != null) {
                for (String variableName : initKettleParam.keySet()) {
                    job.setVariable(variableName, initKettleParam.get(variableName));
                }
            }
            job.start();
            job.waitUntilFinished();
            if (job.getErrors() > 0) {
                logger_info.info("ExecKettleUtil@runKettleJob:" + uuid + " execution failed");
                return false;
            }
            logger_info.info("ExecKettleUtil@runKettleJob:" + uuid + " execution succeeded");
            return true;
        } catch (Exception e) {
            logger_info.error("ExecKettleUtil@runKettleJob:" + uuid, e);
            return false;
        }
    }

    /**
     * Run a Kettle transformation (.ktr file).
     *
     * @param initKettleParam variables passed to the transformation; read in the script as ${variableName}
     * @param ktrFilePath     path of the .ktr file
     * @return true if the transformation finished without errors, false otherwise
     */
    public static boolean runKettleTransfer(Map<String, String> initKettleParam, String ktrFilePath) {
        Trans trans = null;
        String uuid = UUID.randomUUID().toString();
        logger_info.info("ExecKettleUtil@runKettleTransfer:" + uuid + " {ktrFilePath:" + ktrFilePath + "}");
        try {
            // Initialize the Kettle environment
            KettleEnvironment.init();
            EnvUtil.environmentInit();
            TransMeta transMeta = new TransMeta(ktrFilePath);
            // Create the transformation from its metadata
            trans = new Trans(transMeta);
            // Set transformation variables; the script reads them as ${variableName}
            if (initKettleParam != null) {
                for (String variableName : initKettleParam.keySet()) {
                    trans.setVariable(variableName, initKettleParam.get(variableName));
                }
            }
            // Execute the transformation
            trans.execute(null);
            // Wait until the transformation finishes
            trans.waitUntilFinished();
            if (trans.getErrors() > 0) {
                logger_info.info("ExecKettleUtil@runKettleTransfer:" + uuid + " execution failed");
                return false;
            }
            logger_info.info("ExecKettleUtil@runKettleTransfer:" + uuid + " execution succeeded");
            return true;
        } catch (Exception e) {
            logger_info.error("ExecKettleUtil@runKettleTransfer:" + uuid, e);
            return false;
        }
    }
}
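For reference, a minimal usage sketch of the class above; the variable name demoDate and its value are hypothetical placeholders and only make sense if your own .ktr script reads ${demoDate} somewhere:

import java.util.HashMap;
import java.util.Map;

public class KettleQuartzDemo {
    public static void main(String[] args) {
        // Hypothetical variable: the .ktr script would reference it as ${demoDate}
        Map<String, String> params = new HashMap<>();
        params.put("demoDate", "2019-01-01");
        // Run the transformation and check whether it finished without errors
        boolean ok = KettleQuartz.runKettleTransfer(params, "C:\\dems\\kettle\\dems.ktr");
        System.out.println("transformation finished, success = " + ok);
    }
}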