前言:
org.hibernate.event.spi包中有许多用于ORM映射对象的操作监听器,如:
PostLoadEventListener,PostInsertEventListener, PostUpdateEventListener, PostDeleteEventListener
我们可利用这些监听器达到当对象信息发生变化时,记录对应日志的业务需求;并结合kafka消息队列,实现异步非阻塞记录日志信息。
下面用一个实例来说明监听器用法(记录实体对象变更信息保存至ES中方便查询):
项目需要集成kafka:
<!--pom.xml添加kafka依赖-->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
kafka Producer:
1、kafka消息生产程序配置生产者信息:
# 配置文件添加kafka生产者配置
kafka:
bootstrap-servers: 10.25.193.113:9092,10.25.193.114:9092,10.25.193.116:9092
producer:
batch-size: 100
acks: 1
key-serializer: org.apache.kafka.common.serialization.StringSerializer
value-serializer: org.apache.kafka.common.serialization.StringSerializer
retries: 3
buffer-memory: 4194304
audit.log.topic: audit-log-topic
2、编写组件类实现PostInsertEventListener, PostUpdateEventListener, PostDeleteEventListener接口:
@Component @Slf4j
public class AuditLogListener implements PostInsertEventListener, PostUpdateEventListener, PostDeleteEventListener {
private static final long serialVersionUID = 8801230423811868063L;
private static final String OP_TYPE_INSERT = "insert";
private static final String OP_TYPE_UPDATE = "update";
private static final String OP_TYPE_DELETE = "delete";
@Value("${audit.log.topic}")
private String auditLogTopic;
@Autowired
private ObjectMapper objectMapper;
@Autowired
private KafkaTemplate<String, String> kafkaTemplate;
/**
* 实现PostInsertEventListener接口方法
*/
@Override
public void onPostInsert(PostInsertEvent event) {
Object object = event.getEntity();
if(requireLogEvent(object)) {
try {
LoggerEntity entity = createEntity(object, OP_TYPE_INSERT, event.getId(), null);
String message = objectMapper.writeValueAsString(entity);
if(log.isInfoEnabled()) {
log.info("send message to kafka: {}", message);
}
kafkaTemplate.send(auditLogTopic, message);
} catch (IOException e) {
log.error("process entity post insert error", e);
}
}
}
/**
* 实现PostUpdateEventListener接口方法
*/
@Override
public void onPostUpdate(PostUpdateEvent event) {
Object object = event.getEntity();
if(requireLogEvent(object)) {
try {
int[] dirtyProperties = event.getDirtyProperties();
Object[] oldState = event.getOldState();
Object[] state = event.getState();
String[] names = event.getPersister().getPropertyNames();
Map<String, Object> details = Maps.newHashMap();
for (int index : dirtyProperties) {
String name = names[index];
Object oldVal = oldState[index];
Object newVal = state[index];
details.put(name, new Object[] {oldVal, newVal});
}
details.put("entityId", event.getId());
LoggerEntity entity = createEntity(object, OP_TYPE_UPDATE, event.getId(), objectMapper.writeValueAsString(details));
String message = objectMapper.writeValueAsString(entity);
if(log.isInfoEnabled()) {
log.info("send message to kafka: {}", message);
}
kafkaTemplate.send(auditLogTopic, message);
} catch (Exception e) {
log.error("post update event error, entity id: " + event.getId(), e);
}
}
}
/**
* 实现PostDeleteEventListener接口方法
*/
@Override
public void onPostDelete(PostDeleteEvent event) {
Object object = event.getEntity();
if(requireLogEvent(object)) {
try {
LoggerEntity entity = createEntity(object, OP_TYPE_DELETE, event.getId(), null);
String message = objectMapper.writeValueAsString(entity);
if(log.isInfoEnabled()) {
log.info("send message to kafka: {}", message);
}
kafkaTemplate.send(auditLogTopic, message);
} catch (Exception e) {
log.error("process entity post delete error", e);
}
}
}
/**
* 结合NoAuditLog自定义注解判断对象是否需要记录日志
*/
private boolean requireLogEvent(Object object) {
NoAuditLog noAuditLog = AnnotationUtils.findAnnotation(object.getClass(), NoAuditLog.class);
return Objects.isNull(noAuditLog);
}
/**
* 构建日志对象信息,用于kafka发送消息体
*/
private LoggerEntity createEntity(Object o, String opType, Serializable id, String detail) {
LoggerEntity entity = new LoggerEntity();
// 为日志对象字段设置值
entity.set...
return entity;
}
}
/**
 * Marker annotation: place on an entity class to exclude it from audit logging.
 * Retained at runtime so the listener can detect it via reflection.
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface NoAuditLog { }
3、注册监听:
import com.example.listener.AuditLogListener; // adjust to the actual package of AuditLogListener
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;
@Component
public class HibernateListenerInitializer {

    @Autowired
    private EntityManagerFactory entityManagerFactory;
    @Autowired
    private AuditLogListener auditLogListener;

    /**
     * Registers the audit listener for the post-insert/update/delete event groups
     * once the Spring context has wired this bean.
     */
    @PostConstruct
    public void initListener() {
        EventListenerRegistry listenerRegistry = entityManagerFactory
                .unwrap(SessionFactoryImplementor.class)
                .getServiceRegistry()
                .getService(EventListenerRegistry.class);
        // append (rather than replace) so listeners registered elsewhere are preserved
        listenerRegistry.getEventListenerGroup(EventType.POST_INSERT).appendListener(auditLogListener);
        listenerRegistry.getEventListenerGroup(EventType.POST_UPDATE).appendListener(auditLogListener);
        listenerRegistry.getEventListenerGroup(EventType.POST_DELETE).appendListener(auditLogListener);
    }
}
kafka Consumer:
1、kafka消息消费程序配置消费者信息:
# 配置文件添加kafka消费者配置
kafka:
bootstrap-servers: 10.25.193.113:9092,10.25.193.114:9092,10.25.193.116:9092
consumer.group-id: group_id_dev
listener.concurrency: 6
consumer.auto-offset-reset: latest
# consumer.auto-commit-interval: 5000
consumer.enable-auto-commit: false
consumer.max-poll-records: 10
audit.log.topic: audit-log-topic
2、消费者程序编写消费“audit-log-topic”topic的程序:
@Component @Slf4j
public class AuditLogConsumer {

    @Autowired
    private ObjectMapper objectMapper;
    // ElasticSearch Repository interface (pick whatever datastore suits your logging needs)
    @Autowired
    private EsLoggerRepository repository;

    /**
     * Consumes messages from the audit-log topic, deserializes them and saves them to ES.
     * A message that fails to parse is logged and skipped so consumption continues.
     */
    @KafkaListener(topics = {"${audit.log.topic}"})
    public void consume(String message) {
        if (log.isInfoEnabled()) {
            log.info("received audit log message: {}", message);
        }
        LoggerEsEntity entity = null;
        try {
            entity = objectMapper.readValue(message, LoggerEsEntity.class);
        } catch (IOException e) {
            log.error("parse message error, message: " + message, e);
        }
        if (Objects.nonNull(entity)) {
            entity.setLoggerId(UUID.randomUUID().toString());
            if (log.isInfoEnabled()) {
                // pass the entity itself: SLF4J invokes toString() lazily, and only when INFO is enabled
                log.info("save es log==> {}", entity);
            }
            repository.save(entity);
        }
    }
}
上面这个例子很容易理解,总体流程为:监听器监听到对象信息发生变更->生产发送kafka消息->消费程序消费kafka消息并保存日志