使用Hibernate事件监听机制并结合kafka异步记录对象修改日志

前言

org.hibernate.event.spi包中有许多用于ORM映射对象的操作监听器,如:
PostLoadEventListener,PostInsertEventListener, PostUpdateEventListener, PostDeleteEventListener
我们可利用这些监听器达到当对象信息发生变化时,记录对应日志的业务需求;并结合kafka消息队列,实现异步非阻塞记录日志信息。

下面用一个实例来说明监听器用法(记录实体对象变更信息保存至ES中方便查询):

项目需要集成kafka:

<!--pom.xml添加kafka依赖-->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>

kafka Producer:
1、kafka消息生产程序配置生产者信息:

# 配置文件添加kafka生产者配置
# 注意:若使用Spring Boot的spring-kafka自动配置,以下键应位于 spring.kafka.* 之下;此处为裸 kafka: 前缀,需确认项目中有对应的自定义配置绑定
kafka:
  bootstrap-servers: 10.25.193.113:9092,10.25.193.114:9092,10.25.193.116:9092
  producer:
    batch-size: 100
    acks: 1
    key-serializer: org.apache.kafka.common.serialization.StringSerializer
    value-serializer: org.apache.kafka.common.serialization.StringSerializer
    retries: 3
    buffer-memory: 4194304
audit.log.topic: audit-log-topic

2、编写组件类实现PostInsertEventListener, PostUpdateEventListener, PostDeleteEventListener接口:

@Component @Slf4j
public class AuditLogListener implements PostInsertEventListener, PostUpdateEventListener, PostDeleteEventListener {
    private static final long serialVersionUID = 8801230423811868063L;

    private static final String OP_TYPE_INSERT = "insert";
    private static final String OP_TYPE_UPDATE = "update";
    private static final String OP_TYPE_DELETE = "delete";

    @Value("${audit.log.topic}")
    private String auditLogTopic;

    @Autowired
    private ObjectMapper objectMapper;
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;
	
	/**
 	 * 实现PostInsertEventListener接口方法
 	 */
	@Override
    public void onPostInsert(PostInsertEvent event) {
        Object object = event.getEntity();
        if(requireLogEvent(object)) {
            try {
                LoggerEntity entity = createEntity(object, OP_TYPE_INSERT, event.getId(), null);

                String message = objectMapper.writeValueAsString(entity);
                if(log.isInfoEnabled()) {
                    log.info("send message to kafka: {}", message);
                }
                kafkaTemplate.send(auditLogTopic, message);
            } catch (IOException e) {
                log.error("process entity post insert error", e);
            }
        }
    }
    
    /**
 	 * 实现PostUpdateEventListener接口方法
 	 */
    @Override
    public void onPostUpdate(PostUpdateEvent event) {
        Object object = event.getEntity();

        if(requireLogEvent(object)) {
            try {
                int[] dirtyProperties = event.getDirtyProperties();
                Object[] oldState = event.getOldState();
                Object[] state = event.getState();
                String[] names = event.getPersister().getPropertyNames();

                Map<String, Object> details = Maps.newHashMap();
                for (int index : dirtyProperties) {
                    String name = names[index];

                    Object oldVal = oldState[index];
                    Object newVal = state[index];

                    details.put(name, new Object[] {oldVal, newVal});
                }
                details.put("entityId", event.getId());

                LoggerEntity entity = createEntity(object, OP_TYPE_UPDATE, event.getId(), objectMapper.writeValueAsString(details));

                String message = objectMapper.writeValueAsString(entity);
                if(log.isInfoEnabled()) {
                    log.info("send message to kafka: {}", message);
                }
                kafkaTemplate.send(auditLogTopic, message);
            } catch (Exception e) {
                log.error("post update event error, entity id: " + event.getId(), e);
            }
        }
    }
    
    /**
 	 * 实现PostDeleteEventListener接口方法
 	 */
    @Override
    public void onPostDelete(PostDeleteEvent event) {
        Object object = event.getEntity();
        if(requireLogEvent(object)) {
            try {
                LoggerEntity entity = createEntity(object, OP_TYPE_DELETE, event.getId(), null);

                String message = objectMapper.writeValueAsString(entity);
                if(log.isInfoEnabled()) {
                    log.info("send message to kafka: {}", message);
                }
                kafkaTemplate.send(auditLogTopic, message);
            } catch (Exception e) {
                log.error("process entity post delete error", e);
            }
        }
    }

	/**
 	 * 结合NoAuditLog自定义注解判断对象是否需要记录日志
 	 */
    private boolean requireLogEvent(Object object) {
        NoAuditLog noAuditLog = AnnotationUtils.findAnnotation(object.getClass(), NoAuditLog.class);
        return Objects.isNull(noAuditLog);
    }

	/**
 	 * 构建日志对象信息,用于kafka发送消息体
 	 */
    private LoggerEntity createEntity(Object o, String opType, Serializable id, String detail) {
        LoggerEntity entity = new LoggerEntity();
        // 为日志对象字段设置值
		entity.set...
        return entity;
    }
}
/**
 * Opt-out marker: place on an entity class to exclude it from audit logging.
 * Checked at runtime by the audit listener via AnnotationUtils, so retention
 * must be RUNTIME and the target is restricted to types.
 */
@Target({ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
public @interface NoAuditLog { }

3、注册监听:

import AuditLogListener;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.event.service.spi.EventListenerRegistry;
import org.hibernate.event.spi.EventType;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;

@Component
public class HibernateListenerInitializer {

    @Autowired
    private EntityManagerFactory entityManagerFactory;
    @Autowired
    private AuditLogListener auditLogListener;

    /**
     * Wires the audit listener into Hibernate's event system once the Spring
     * context is ready. The JPA EntityManagerFactory is unwrapped to the
     * native SessionFactory in order to reach the EventListenerRegistry
     * service, and the same listener bean is appended for the insert, update
     * and delete post-events.
     */
    @PostConstruct
    public void initListener() {
        EventListenerRegistry registry = entityManagerFactory
                .unwrap(SessionFactoryImplementor.class)
                .getServiceRegistry()
                .getService(EventListenerRegistry.class);

        registry.getEventListenerGroup(EventType.POST_INSERT).appendListener(auditLogListener);
        registry.getEventListenerGroup(EventType.POST_UPDATE).appendListener(auditLogListener);
        registry.getEventListenerGroup(EventType.POST_DELETE).appendListener(auditLogListener);
    }
}

kafka Consumer:
1、kafka消息消费程序配置消费者信息:

# 配置文件添加kafka消费者配置
# 注意:若使用Spring Boot的spring-kafka自动配置,以下键应位于 spring.kafka.* 之下;另外 enable-auto-commit: false 时需确认容器的ack模式配置,否则偏移量不会提交
kafka:
  bootstrap-servers: 10.25.193.113:9092,10.25.193.114:9092,10.25.193.116:9092
  consumer.group-id: group_id_dev
  listener.concurrency: 6
  consumer.auto-offset-reset: latest
  # consumer.auto-commit-interval: 5000
  consumer.enable-auto-commit: false
  consumer.max-poll-records: 10
audit.log.topic: audit-log-topic

2、消费者程序编写消费“audit-log-topic”topic的程序:

@Component @Slf4j
public class AuditLogConsumer {

    @Autowired
    private ObjectMapper objectMapper;
    // Audit-log store — an ElasticSearch repository here; swap in any
    // repository implementation to persist the records elsewhere.
    @Autowired
    private EsLoggerRepository repository;

    /**
     * Consumes one audit-log message from the configured topic, deserializes
     * it into a LoggerEsEntity, assigns a fresh random logger id and saves it.
     * Messages that fail JSON parsing are logged and skipped so the listener
     * keeps consuming.
     */
    @KafkaListener(topics = {"${audit.log.topic}"})
    public void consume(String message) {
        if (log.isInfoEnabled()) {
            log.info("received audit log message: {}", message);
        }

        LoggerEsEntity entity;
        try {
            entity = objectMapper.readValue(message, LoggerEsEntity.class);
        } catch (IOException e) {
            log.error("parse message error, message: " + message, e);
            return;
        }

        entity.setLoggerId(UUID.randomUUID().toString());
        log.info("save es log==> {}", entity.toString());
        repository.save(entity);
    }
}

上面这个例子很容易理解,总体流程为:监听器监听到对象信息发生变更->生产发送kafka消息->消费程序消费kafka消息并保存日志

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值