spring启动过程中动态从数据库中加载KafkaListener方式

背景

因为我们的业务场景是需要不断接入kafka消费的,为了避免频繁改动代码,做到灵活配置的目的,将topic存储到数据库中,启动过程中加载这些topic,所以启动过程中需要采用字节码生成class,然后用class再实例化一个对象,返回给spring,交给spring管理。

KafkaListenerAnnotationBeanPostProcessor实现了BeanPostProcessor接口,并重写了postProcessAfterInitialization方法,当我们的bean交给spring管理的时候,就会命中这个方法,进行@KafkaListener 注解解析,并启动监听topic

三个字节码工具

  1. asm 易用性不高

  2. javassist,javassist不支持方法参数的泛型签名

  3. bytebuddy 我们采用这个,因为@KafkaListener注解的方法需要接受一个泛型的参数,例如:

    public void test(List<ConsumerRecord<String, String>> records, Acknowledgment ack) {
           doSome()
    }

bytebuddy 泛型文档

bytebuddy泛型操作文档链接 https://www.jianshu.com/p/628b7128904a https://www.jianshu.com/p/89050a744178

bytebuddy返回值 https://blog.csdn.net/CaptHua/article/details/123195024

添加注解 https://www.jianshu.com/p/e07d1f84b8d7

Bytebuddy代码实操

@Configuration
public class BinlogKafkaConsumerConfig {
    private static final Logger log = LoggerFactory.getLogger(BinlogKafkaConsumerConfig.class);

    // Name of the class generated at runtime and the method names involved in delegation.
    public final static String GEN_CLASS_NAME = "com.consumer.BinlogByGenCodeConsumer";
    public final static String METHOD_GET_ID_KEY_DATA_REQ_VO_MAP = "getIdKeyDataReqVOMap";
    public final static String SUPER_METHOD_GET_ID_KEY_DATA_REQ_VO_MAP = "getIdKeyDataReqVOMapByBinlog";
    public final static String SUPER_METHOD_RECEIVE_COMMON_MSG_BY_GEN_CODE = "receiveCommonMsgByGenCode";
    public final static String METHOD_FORMAT_GET_ID_KEY_DATA_REQ_VO_MAP = "receiveMsgByGenCode%s";

    // The original excerpt referenced `dao`, `gitLabConfig` and `log` without declaring them;
    // declared here so the class is self-contained (field types inferred from usage — confirm).
    @Autowired
    private FcpTopicConfigInfoDao fcpTopicConfigInfoDao;
    @Autowired
    private GitLabConfig gitLabConfig;

    /**
     * Registers the generated consumer as a Spring bean so that
     * KafkaListenerAnnotationBeanPostProcessor resolves its @KafkaListener methods
     * and starts the listeners.
     *
     * @return the generated consumer instance, or null when no topics are configured
     */
    @Bean
    public Object binlogKafkaConsumerGenObj() {
        try {
            return doRun();
        } catch (Exception e) {
            // Log with the full cause — throwError only carries the message text.
            log.error("failed to build generated kafka consumer", e);
            AssertUtils.throwError(e.getMessage());
            // throwError always throws, but the compiler cannot prove it; the original
            // code was missing a return here and would not compile.
            return null;
        }
    }

    /**
     * Builds a subclass of BaseDataConsumer at runtime, defining one annotated
     * @KafkaListener method per topic row maintained in the database.
     *
     * @return the instantiated generated consumer, or null when the table is empty
     * @throws Exception on bytecode generation / reflection failures
     */
    public BaseDataConsumer doRun() throws Exception {
        // Load the topic configuration maintained in the database.
        List<FcpTopicConfigInfo> fcpTopicConfigInfoList = fcpTopicConfigInfoDao.listAllValidTopicConfigInfo();
        if (CollectionUtils.isEmpty(fcpTopicConfigInfoList)) {
            return null;
        }
        // Generic parameter type: List<ConsumerRecord<String, String>>.
        TypeDescription.Generic listConsumerRecordGeneric = TypeDescription.Generic.Builder
                .parameterizedType(TypeDescription.ForLoadedType.of(List.class), TypeDescription.Generic.Builder
                        .parameterizedType(ConsumerRecord.class, String.class, String.class).build()).build();
        // Subclass the base consumer so generated methods can delegate to it.
        // (The original snippet subclassed BaseConsumer.class, which did not match the
        // declared Builder<BaseDataConsumer> type.)
        DynamicType.Builder<BaseDataConsumer> definition = new ByteBuddy()
                .subclass(BaseDataConsumer.class)
                .name(GEN_CLASS_NAME)
                // Route this method to the corresponding parent method, forwarding both args.
                .method(named(METHOD_GET_ID_KEY_DATA_REQ_VO_MAP))
                .intercept(MethodCall.invoke(named(SUPER_METHOD_GET_ID_KEY_DATA_REQ_VO_MAP)).withArgument(0, 1));
        int i = 0;
        for (FcpTopicConfigInfo fcpTopicConfigInfo : fcpTopicConfigInfoList) {
            // Unique method name per topic: config id plus a running index.
            String methodName = String.format(METHOD_FORMAT_GET_ID_KEY_DATA_REQ_VO_MAP, fcpTopicConfigInfo.getId() + i);
            // public void <methodName>(List<ConsumerRecord<String, String>>, Acknowledgment)
            definition = definition.defineMethod(methodName, void.class, Modifier.PUBLIC)
                    .withParameters(listConsumerRecordGeneric, TypeDescription.ForLoadedType.of(Acknowledgment.class))
                    // The @KafkaListener contract fixes the two parameters supplied by kafka;
                    // the parent method needs a third, constant argument, so forward params
                    // 0 and 1 by index and append the constant on the stack.
                    .intercept(MethodCall.invoke(named(SUPER_METHOD_RECEIVE_COMMON_MSG_BY_GEN_CODE)).with(
                            new MethodCall.ArgumentLoader.ForMethodParameter.Factory(0),
                            new MethodCall.ArgumentLoader.ForMethodParameter.Factory(1),
                            // NOTE(review): systemIdConstantValue is not defined anywhere in this
                            // excerpt — presumably an injected JavaConstant; confirm its origin.
                            new MethodCall.ArgumentLoader.ForStackManipulation(systemIdConstantValue.toStackManipulation(), systemIdConstantValue.getTypeDescription()))
                    )
                    // Attach the @KafkaListener annotation built from this row's config.
                    .visit(new MemberAttributeExtension.ForMethod()
                            .annotateMethod(getKafkaListenerParam(fcpTopicConfigInfo))
                            .on(named(methodName)));
            i++;
        }
        // Unloaded is AutoCloseable — close it once the class has been loaded.
        try (DynamicType.Unloaded<BaseDataConsumer> make = definition.make()) {
            DynamicType.Loaded<BaseDataConsumer> load = make.load(getClass().getClassLoader());
            if (SystemUtils.isWindows() && StrUtil.isNotBlank(gitLabConfig.getByteBuddyGenPath())) {
                // Dump the generated .class file locally for debugging (Windows dev boxes only).
                File file = new File(gitLabConfig.getByteBuddyGenPath());
                load.saveIn(file);
            }
            BaseDataConsumer baseDataConsumer = load.getLoaded().getDeclaredConstructor().newInstance();
            log.info("加载topic完毕,本次共加载size:{}", fcpTopicConfigInfoList.size());
            return baseDataConsumer;
        }
    }

    /**
     * Builds the @KafkaListener AnnotationDescription for one topic config row.
     * Validates required fields and derives concurrency from partition count
     * divided by machine count (minimum 1).
     *
     * @param fcpTopicConfigInfo topic configuration loaded from the database
     * @return the annotation description to attach to a generated method
     */
    public AnnotationDescription getKafkaListenerParam(FcpTopicConfigInfo fcpTopicConfigInfo) {
        String binlogId = fcpTopicConfigInfo.getBinlogId();
        String topics = fcpTopicConfigInfo.getTopics();
        String groupId = fcpTopicConfigInfo.getGroupId();
        AssertUtils.isTrue(Objects.nonNull(fcpTopicConfigInfo.getAutoStartup()), "autoStartup 不能为空");
        boolean autoStartup = BooleanUtil.toBoolean(fcpTopicConfigInfo.getAutoStartup().toString());
        // One listener thread per partition owned by this machine, at least 1.
        int concurrency = fcpTopicConfigInfo.getPartitionNum() / gitLabConfig.getMachineNum();
        if (concurrency < 1) {
            concurrency = 1;
        }
        String colonyName = fcpTopicConfigInfo.getColonyName();
        AssertUtils.isTrue(StrUtil.isNotBlank(binlogId), "binlogId 不能为空");
        AssertUtils.isTrue(StrUtil.isNotBlank(topics), "topics 不能为空");
        AssertUtils.isTrue(StrUtil.isNotBlank(groupId), "groupId 不能为空");
        AssertUtils.isTrue(StrUtil.isNotBlank(colonyName), "colonyName 不能为空");
        log.info("kafka配置topic:{},concurrency:{},binlogId:{},colonyName:{}", fcpTopicConfigInfo.getTopics(), concurrency, binlogId, colonyName);
        return AnnotationDescription
                .Builder.ofType(KafkaListener.class)
                .define("id", binlogId)
                .defineArray("topics", topics.split(","))
                .define("groupId", groupId)
                .define("concurrency", String.valueOf(concurrency))
                .define("autoStartup", String.valueOf(autoStartup))
                // Each cluster ("colony") has its own container factory bean.
                .define("containerFactory", colonyName + "KafkaListenerContainerFactory")
                .build();
    }
}

Javassist

目前的缺陷就是方法参数不能带泛型签名,导致kafka接收消息时,List&lt;ConsumerRecord&lt;String, String&gt;&gt; 类型的参数会报错

@Configuration
public class BinlogKafkaConsumerConfig {
    private static final Logger log = LoggerFactory.getLogger(BinlogKafkaConsumerConfig.class);

    // Class pool used to look up and edit classes (referenced by `log` was also
    // missing from the original excerpt — declared above).
    static ClassPool pool = ClassPool.getDefault();
    // The CtClass we append generated listener methods to.
    static CtClass ctClass;


    @Autowired
    private FcpTopicConfigInfoDao fcpTopicConfigInfoDao;

    static {
        try {
            // Import so the generated method bodies can reference SyncTypeEnum by simple name.
            pool.importPackage("com.common.enums.SyncTypeEnum");
            ctClass = pool.get("com.consumer.BinlogByGenCodeConsumer");
        } catch (Exception e) {
            log.warn("获取类失败", e);
        }
    }

    /**
     * Placeholder bean. The javassist route was abandoned because method parameters
     * lose their generic signature (List&lt;ConsumerRecord&lt;String, String&gt;&gt; becomes a raw
     * List), which breaks kafka message binding — hence the commented-out wiring below.
     */
    @Bean
    public Object binlogKafkaConsumerGenObj() {
        //启动随系统注册数据源
        log.info("开始加载表中所有配置信息");
        //try {
        //    doRun();
        //    Class<?> aClass = ctClass.toClass();
        //    return aClass.newInstance();
        //} catch (Exception e) {
        //    log.warn("加载表中所有配置信息,报错", e);
        //    AssertUtils.throwError(e.getMessage());
        //}
        return new Object();
    }

    /**
     * Loads every valid topic config row and generates one listener method per row.
     *
     * @throws Exception on javassist compilation / class-file errors
     */
    public void doRun() throws Exception {
        List<FcpTopicConfigInfo> fcpTopicConfigInfoList = fcpTopicConfigInfoDao.listAllValidTopicConfigInfo();
        if (CollectionUtils.isEmpty(fcpTopicConfigInfoList)) {
            log.info("没有要加载的配置信息,return");
            return;
        }
        log.info("开始加载topic,本次共需加载size:{}", fcpTopicConfigInfoList.size());
        for (FcpTopicConfigInfo fcpTopicConfigInfo : fcpTopicConfigInfoList) {
            log.info("开始加载topic,fcpTopicConfigInfo:{}", fcpTopicConfigInfo);
            generateAddMethod(fcpTopicConfigInfo.getBinlogId(),
                    fcpTopicConfigInfo.getTopic(),
                    fcpTopicConfigInfo.getGroupId(),
                    fcpTopicConfigInfo.getConcurrency(),
                    BooleanUtil.toBoolean(fcpTopicConfigInfo.getAutoStartup().toString()),
                    fcpTopicConfigInfo.getColonyName());
        }
        log.info("加载topic完毕,本次共加载size:{}", fcpTopicConfigInfoList.size());
    }

    // Manual smoke test for method generation.
    public static void main(String[] args) throws Exception {
        generateAddMethod("id1", "1", "1", 1, false, "1");
    }

    /**
     * Copies the template "receive" method, renames it, rewrites its body to delegate
     * to dealKafkaData with the given binlogId, attaches a @KafkaListener annotation,
     * and adds the method to the class.
     *
     * @param binlogId    unique listener id, also appended to the method name
     * @param topics      comma-separated topic list
     * @param groupId     kafka consumer group
     * @param concurrency listener thread count
     * @param autoStartup whether the listener starts with the context
     * @param colonyName  cluster name, prefix of the container-factory bean name
     * @throws Exception on javassist failures
     */
    public static void generateAddMethod(
            String binlogId,
            String topics,
            String groupId,
            Integer concurrency,
            Boolean autoStartup,
            String colonyName
    ) throws Exception {
        AssertUtils.isTrue(StrUtil.isNotBlank(binlogId), "binlogId 不能为空");
        AssertUtils.isTrue(StrUtil.isNotBlank(topics), "topics 不能为空");
        AssertUtils.isTrue(StrUtil.isNotBlank(groupId), "groupId 不能为空");
        AssertUtils.isTrue(Objects.nonNull(concurrency), "concurrency 不能为空");
        AssertUtils.isTrue(Objects.nonNull(autoStartup), "autoStartup 不能为空");
        AssertUtils.isTrue(StrUtil.isNotBlank(colonyName), "colonyName 不能为空");

        // Clone the hand-written "receive" template method under a new name.
        String methodName = "receive";
        CtMethod declaredMethod = ctClass.getDeclaredMethod(methodName);
        ConstPool cp = declaredMethod.getMethodInfo().getConstPool();
        ClassMap classMap = new ClassMap();
        CtMethod copy = CtNewMethod.copy(declaredMethod, ctClass, classMap);
        copy.setName(methodName + binlogId);

        // $1 = List of records, $2 = Acknowledgment (javassist parameter placeholders).
        String body = "{" +
                "        log.info(\"========== BinlogDataConsumer ==========拉取数: {}\", String.valueOf($1.size()));" +
                "        long start = System.currentTimeMillis();" +
                "        this.dealKafkaData($1, SyncTypeEnum.BINLOG.getCode(), String.valueOf(\"" + binlogId + "\"));" +
                "        $2.acknowledge();" +
                "        long costTime = System.currentTimeMillis() - start;" +
                "        log.info(\"消费时间: {} ms, 平均每笔耗时: {} ms\", String.valueOf(costTime), String.valueOf(costTime / $1.size()));   " +
                "  }";
        copy.setBody(body);
        MethodInfo methodInfo = copy.getMethodInfo();
        // Build the runtime-visible @KafkaListener annotation.
        AnnotationsAttribute methodAnnotationsAttribute = new AnnotationsAttribute(cp, AnnotationsAttribute.visibleTag);
        // javassist's Annotation constructor takes a dotted class name (it converts
        // it to the JVM descriptor itself); the original used the slash form.
        Annotation methodAnnotation = new Annotation("org.springframework.kafka.annotation.KafkaListener", cp);
        methodAnnotation.addMemberValue("id", new StringMemberValue(binlogId, cp));
        methodAnnotation.addMemberValue("topics", getTopicArrayMemberValue(topics, cp));
        methodAnnotation.addMemberValue("groupId", new StringMemberValue(groupId, cp));
        methodAnnotation.addMemberValue("concurrency", new StringMemberValue(concurrency.toString(), cp));
        methodAnnotation.addMemberValue("autoStartup", new StringMemberValue(autoStartup.toString(), cp));
        methodAnnotation.addMemberValue("containerFactory", new StringMemberValue(colonyName + "KafkaListenerContainerFactory", cp));
        methodAnnotationsAttribute.addAnnotation(methodAnnotation);
        methodInfo.addAttribute(methodAnnotationsAttribute);
        // Add the finished method to the class.
        ctClass.addMethod(copy);
        // TODO(review): hard-coded absolute Windows path — only usable on one dev
        // machine; should come from configuration like the ByteBuddy variant's
        // gitLabConfig.getByteBuddyGenPath().
        ctClass.writeFile("D:\\GitProject\\devops\\consumer\\src\\main\\java\\com\\consumer\\runner");
    }

    /**
     * Splits a comma-separated topic string into the array member value required
     * for the annotation's "topics" attribute.
     */
    private static ArrayMemberValue getTopicArrayMemberValue(String topics, ConstPool cp) {
        ArrayMemberValue arrayMemberValue = new ArrayMemberValue(cp);
        String[] topicArray = topics.split(",");
        MemberValue[] memberValue = new MemberValue[topicArray.length];
        for (int i = 0; i < topicArray.length; i++) {
            memberValue[i] = new StringMemberValue(topicArray[i], cp);
        }
        arrayMemberValue.setValue(memberValue);
        return arrayMemberValue;
    }
}

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值