// In-memory buffer of pending log entries awaiting delivery to Kafka.
// Thread-safe wrapper, but note: iteration over it still requires external
// synchronization per the Collections.synchronizedMap contract.
public static Map<String, IsliCodeLog> logMap = Collections
.synchronizedMap(new HashMap<String, IsliCodeLog>());
// Kafka topic name, read from configuration.
// NOTE(review): assumes the "kafka_log_topic" key exists — confirm what
// SystemConfig.getString returns when the key is missing.
public static String myTopic = SystemConfig.getString("kafka_log_topic");
// Service code stamped onto each log entry before it is sent.
public static String scCode = SystemConfig.getString("code");
/**
 * Pushes all buffered log entries to Kafka.
 *
 * 1. Entries are sent to the Kafka broker asynchronously.
 * 2. If delivery fails (e.g. Kafka is down, which send() does not report
 *    synchronously), the per-record callback persists the entry to the
 *    database instead.
 *
 * Uses tryLock so that overlapping invocations are skipped rather than
 * queued; the method returns immediately when another push is in progress.
 */
public void pushLogToKafka() {
if (logLock.tryLock()) {
Producer<String, String> producer = null;
try {
if (!logMap.isEmpty()) {
producer = new KafkaUtil().getProducer();
// Snapshot the entries while holding the map's monitor: iterating
// a Collections.synchronizedMap view directly is not thread-safe,
// and PushLogToKafkaCallback removes entries concurrently, which
// could otherwise raise ConcurrentModificationException mid-loop.
java.util.List<Map.Entry<String, IsliCodeLog>> entries;
synchronized (logMap) {
entries = new java.util.ArrayList<Map.Entry<String, IsliCodeLog>>(
logMap.entrySet());
}
LOGGER.info(" logMap.size=======>>>>>" + entries.size());
for (Map.Entry<String, IsliCodeLog> entry : entries) {
IsliCodeLog log = entry.getValue();
if (null != log) {
log.setServiceCode(scCode);// stamp the service code
ProducerRecord<String, String> record = new ProducerRecord<String, String>(
myTopic, log.getIsliCode(),
JsonUtils.toJson(log));
// Asynchronous publish: when the broker is down send()
// does not throw here; the callback receives the failure
// and falls back to inserting into the database.
producer.send(record,
new PushLogToKafkaCallback(entry));
}
}
}
} catch (Exception e) {
// Pass the throwable so the full stack trace is logged,
// not just the (possibly null) message.
LOGGER.error("[ParseIsliCodeService.pushLogToKafka] error:", e);
} finally {
// close() flushes outstanding async sends before releasing resources.
if (null != producer) {
producer.close();
}
logLock.unlock();
}
}
}
/**
* 日志消息推送给kafka回调函数类
*
* @author wuJH
*
*/
class PushLogToKafkaCallback implements Callback {
private Map.Entry<String, IsliCodeLog> entry;
PushLogToKafkaCallback(Map.Entry<String, IsliCodeLog> entry) {
this.entry = entry;
}
@Override
public void onCompletion(RecordMetadata metadata, Exception e) {
try {
if (null != e) {
parseIsliCodeLogDAO.insertIsliCodeLog(entry.getValue());
LOGGER.error("PushLogToKafkaCallback[onCompletion] error:"
+ e.getMessage());
}
} catch (Exception ex) {
LOGGER.error("PushLogToKafkaCallback[onCompletion] catch_error:"
+ ex.getMessage());
} finally {
String key = entry.getKey();
if (logMap.containsKey(key)) {
logMap.remove(entry.getKey());// 删除MAP
}
}
}
}
// In-memory buffer of pending log entries awaiting delivery to Kafka.
// Thread-safe wrapper, but note: iteration over it still requires external
// synchronization per the Collections.synchronizedMap contract.
public static Map<String, IsliCodeLog> logMap = Collections
.synchronizedMap(new HashMap<String, IsliCodeLog>());
// Kafka topic name, read from configuration.
// NOTE(review): assumes the "kafka_log_topic" key exists — confirm what
// SystemConfig.getString returns when the key is missing.
public static String myTopic = SystemConfig.getString("kafka_log_topic");
// Service code stamped onto each log entry before it is sent.
public static String scCode = SystemConfig.getString("code");
/**
 * Pushes all buffered log entries to Kafka.
 *
 * 1. Entries are sent to the Kafka broker asynchronously.
 * 2. If delivery fails (e.g. Kafka is down, which send() does not report
 *    synchronously), the per-record callback persists the entry to the
 *    database instead.
 *
 * Uses tryLock so that overlapping invocations are skipped rather than
 * queued; the method returns immediately when another push is in progress.
 */
public void pushLogToKafka() {
if (logLock.tryLock()) {
Producer<String, String> producer = null;
try {
if (!logMap.isEmpty()) {
producer = new KafkaUtil().getProducer();
// Snapshot the entries while holding the map's monitor: iterating
// a Collections.synchronizedMap view directly is not thread-safe,
// and PushLogToKafkaCallback removes entries concurrently, which
// could otherwise raise ConcurrentModificationException mid-loop.
java.util.List<Map.Entry<String, IsliCodeLog>> entries;
synchronized (logMap) {
entries = new java.util.ArrayList<Map.Entry<String, IsliCodeLog>>(
logMap.entrySet());
}
LOGGER.info(" logMap.size=======>>>>>" + entries.size());
for (Map.Entry<String, IsliCodeLog> entry : entries) {
IsliCodeLog log = entry.getValue();
if (null != log) {
log.setServiceCode(scCode);// stamp the service code
ProducerRecord<String, String> record = new ProducerRecord<String, String>(
myTopic, log.getIsliCode(),
JsonUtils.toJson(log));
// Asynchronous publish: when the broker is down send()
// does not throw here; the callback receives the failure
// and falls back to inserting into the database.
producer.send(record,
new PushLogToKafkaCallback(entry));
}
}
}
} catch (Exception e) {
// Pass the throwable so the full stack trace is logged,
// not just the (possibly null) message.
LOGGER.error("[ParseIsliCodeService.pushLogToKafka] error:", e);
} finally {
// close() flushes outstanding async sends before releasing resources.
if (null != producer) {
producer.close();
}
logLock.unlock();
}
}
}
/**
* 日志消息推送给kafka回调函数类
*
* @author wuJH
*
*/
class PushLogToKafkaCallback implements Callback {
private Map.Entry<String, IsliCodeLog> entry;
PushLogToKafkaCallback(Map.Entry<String, IsliCodeLog> entry) {
this.entry = entry;
}
@Override
public void onCompletion(RecordMetadata metadata, Exception e) {
try {
if (null != e) {
parseIsliCodeLogDAO.insertIsliCodeLog(entry.getValue());
LOGGER.error("PushLogToKafkaCallback[onCompletion] error:"
+ e.getMessage());
}
} catch (Exception ex) {
LOGGER.error("PushLogToKafkaCallback[onCompletion] catch_error:"
+ ex.getMessage());
} finally {
String key = entry.getKey();
if (logMap.containsKey(key)) {
logMap.remove(entry.getKey());// 删除MAP
}
}
}
}