hi,
发现没有将 Job 的 id 记录到 Trans 的日志表中。
Job在运行时,会将Job运行时的Id,传递给Trans,见代码
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException{
....
// Create the transformation from meta-data
//
Trans trans = new Trans(transMeta);
// Pass the socket repository as early as possible...
//
trans.setSocketRepository(parentJob.getSocketRepository());
if (parentJob.getJobMeta().isBatchIdPassed())
{
trans.setPassedBatchId(parentJob.getPassedBatchId());
}
但写日志表时根本没有这个栏位。
在执行 Trans 类的 public boolean endProcessing(String status) 方法时写日志表,中间部分代码如下:
if (!Const.isEmpty(transMeta.getLogTable())) {
ldb.writeLogRecord(transMeta.getLogTable(), transMeta.isBatchIdUsed(), getBatchId(),
false, transMeta.getName(), status, result.getNrLinesRead(), result
.getNrLinesWritten(), result.getNrLinesUpdated(),
result.getNrLinesInput() + result.getNrFilesRetrieved(), result
.getNrLinesOutput(), result.getNrErrors(), startDate, endDate,
logDate, depDate, currentDate, log_string);
}
org.pentaho.di.core.database.Database类的
public void writeLogRecord(String logtable, boolean use_id, long id,
boolean job, String name, String status, long read, long written,
long updated, long input, long output, long errors,
java.util.Date startdate, java.util.Date enddate,
java.util.Date logdate, java.util.Date depdate,
java.util.Date replayDate, String log_string)
throws KettleDatabaseException {
checkConnection();
boolean update = use_id && log_string != null
&& !status.equalsIgnoreCase(LOG_STATUS_START);
RowMetaInterface rowMeta;
if (job) {
rowMeta = getJobLogrecordFields(update, use_id, !Const
.isEmpty(log_string));
} else {
rowMeta = getTransLogrecordFields(update, use_id, !Const
.isEmpty(log_string));
}
if (update) {
String sql = "UPDATE " + logtable + " SET ";
for (int i = 0; i < rowMeta.size() - 1; i++) // Without ID_JOB or
// ID_BATCH
{
ValueMetaInterface valueMeta = rowMeta.getValueMeta(i);
if (i > 0) {
sql += ", ";
}
sql += databaseMeta.quoteField(valueMeta.getName()) + "=? ";
}
sql += "WHERE ";
if (job) {
sql += databaseMeta.quoteField("ID_JOB") + "=? ";
} else {
sql += databaseMeta.quoteField("ID_BATCH") + "=? ";
}
Object[] data = new Object[] { status, Long.valueOf(read),
Long.valueOf(written), Long.valueOf(updated),
Long.valueOf(input), Long.valueOf(output),
Long.valueOf(errors), startdate, enddate, logdate, depdate,
replayDate, log_string, Long.valueOf(id), };
execStatement(sql, rowMeta, data);
} else {
String sql = "INSERT INTO " + logtable + " ( ";
for (int i = 0; i < rowMeta.size(); i++) {
ValueMetaInterface valueMeta = rowMeta.getValueMeta(i);
if (i > 0)
sql += ", ";
sql += databaseMeta.quoteField(valueMeta.getName());
}
sql += ") VALUES(";
for (int i = 0; i < rowMeta.size(); i++) {
if (i > 0)
sql += ", ";
sql += "?";
}
sql += ")";
try {
pstmt = connection.prepareStatement(databaseMeta.stripCR(sql));
List<Object> data = new ArrayList<Object>();
if (job) {
if (use_id) {
data.add(Long.valueOf(id));
}
data.add(name);
} else {
if (use_id) {
data.add(Long.valueOf(id));
}
data.add(name);
}
data.add(status);
data.add(Long.valueOf(read));
data.add(Long.valueOf(written));
data.add(Long.valueOf(updated));
data.add(Long.valueOf(input));
data.add(Long.valueOf(output));
data.add(Long.valueOf(errors));
data.add(startdate);
data.add(enddate);
data.add(logdate);
data.add(depdate);
data.add(replayDate);
if (!Const.isEmpty(log_string)) {
data.add(log_string);
}
setValues(rowMeta, data.toArray(new Object[data.size()]));
pstmt.executeUpdate();
pstmt.close();
pstmt = null;
} catch (SQLException ex) {
throw new KettleDatabaseException(
"Unable to write log record to log table " + logtable,
ex);
}
}
}