import org.apache.hadoop.hive.ql.hooks.HookContext; // imported dependency package/class
@Override
public void run(final HookContext hookContext) throws Exception {
    // Hive post-execution hook entry point. A hook must never break the query it
    // observes, so every failure path below is caught, logged, and swallowed.
    try {
        // Snapshot the relevant parts of HookContext into a HiveEventContext so the
        // async executor thread does not race with Hive mutating the context afterwards.
        final HiveEventContext event = new HiveEventContext();
        event.setInputs(hookContext.getInputs());
        event.setOutputs(hookContext.getOutputs());
        event.setHookType(hookContext.getHookType());

        // Resolve the UGI: prefer the one Hive attached to the context, otherwise
        // fall back to the current process UGI.
        final UserGroupInformation ugi =
            hookContext.getUgi() == null ? Utils.getUGI() : hookContext.getUgi();
        event.setUgi(ugi);
        // NOTE(review): getUser is deliberately handed the raw (possibly null) context
        // UGI rather than the resolved 'ugi' above — presumably getUser has its own
        // fallback; confirm before "fixing" this.
        event.setUser(getUser(hookContext.getUserName(), hookContext.getUgi()));
        event.setOperation(OPERATION_MAP.get(hookContext.getOperationName()));

        // Hoisted: the query plan is read four times below.
        final QueryPlan queryPlan = hookContext.getQueryPlan();
        event.setQueryId(queryPlan.getQueryId());
        event.setQueryStr(queryPlan.getQueryStr());
        event.setQueryStartTime(queryPlan.getQueryStartTime());
        event.setQueryType(queryPlan.getQueryPlan().getQueryType());
        event.setLineageInfo(hookContext.getLinfo());

        if (executor == null) {
            // Synchronous mode: process the event on the caller's thread.
            collect(event);
            notifyAsPrivilegedAction(event);
        } else {
            // Asynchronous mode: hand off to the hook's thread pool.
            executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        // collect() must execute as the query's user, not as the
                        // hook process user, hence the doAs wrapper.
                        ugi.doAs(new PrivilegedExceptionAction<Object>() {
                            @Override
                            public Object run() throws Exception {
                                collect(event);
                                return event;
                            }
                        });
                        notifyAsPrivilegedAction(event);
                    } catch (Throwable e) {
                        LOG.error("Atlas hook failed due to error ", e);
                    }
                }
            });
        }
    } catch (Throwable t) {
        LOG.error("Submitting to thread pool failed due to error ", t);
    }
}