Optimizing the Existing Dynamic Data Source

A few days ago another team hit a problem caused by how our dynamic data source was implemented. Method A calls method B, and the two methods use different dynamic data sources; when B finished, A's data source was not restored and the default one was used instead. Digging in, we found that the current data source name is kept in a ThreadLocal, the default data source is used whenever that ThreadLocal is empty, and the AOP advice clears the ThreadLocal after the annotated method returns. So when B's advice finished, it wiped the value A had set, and A fell back to the default. A colleague's workaround was to run B on a new thread, but that felt wrong to me, so I implemented a fix with a stack. The idea is simple: keep a Stack in the ThreadLocal, push the data source name whenever an annotated method is entered, and pop it when the method finishes. A quick sketch of the scenario first, then the code.
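
To make the failure concrete, here is a minimal sketch of the scenario. ServiceA, ServiceB, and the data source names ds_a and ds_b are hypothetical; B sits in a separate bean so the call goes through the Spring proxy and the aspect actually fires:

// ServiceA.java (hypothetical)
import com.cheche365.dictonary.datatrans.datatrans.annotion.DyDataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class ServiceA {

    @Autowired
    private ServiceB serviceB;

    @DyDataSource("ds_a")
    public void methodA() {
        // queries here run against ds_a
        serviceB.methodB();
        // with a single-value ThreadLocal, B's advice has already cleared the
        // value by this point, so the rest of A silently uses the default
        // data source; with the stack it is ds_a again
    }
}

// ServiceB.java (hypothetical)
import com.cheche365.dictonary.datatrans.datatrans.annotion.DyDataSource;
import org.springframework.stereotype.Service;

@Service
public class ServiceB {

    @DyDataSource("ds_b")
    public void methodB() {
        // queries here run against ds_b
    }
}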

The configuration properties class

package com.cheche365.dictonary.datatrans.datatrans.config;

import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;

import java.util.ArrayList;
import java.util.List;

@Component
@ConfigurationProperties(prefix = "spring.datasource.druid.dy")
public class DataSourceConfigs {

    private List<DataSourceConfig> thirdDatasources = new ArrayList<>();

    public List<DataSourceConfig> getThirdDatasources() {
        return thirdDatasources;
    }

    public void setThirdDatasources(List<DataSourceConfig> thirdDatasources) {
        this.thirdDatasources = thirdDatasources;
    }

    public static class DataSourceConfig {
        private String username;
        private String password;
        private String url;
        private String connectionProperties;
        private String filters;
        private String dataSourceName;
        private Integer clientId;
        private String driverClassName;

        public String getDriverClassName() {
            return driverClassName;
        }

        public void setDriverClassName(String driverClassName) {
            this.driverClassName = driverClassName;
        }

        public String getUsername() {
            return username;
        }

        public void setUsername(String username) {
            this.username = username;
        }

        public String getPassword() {
            return password;
        }

        public void setPassword(String password) {
            this.password = password;
        }

        public String getUrl() {
            return url;
        }

        public void setUrl(String url) {
            this.url = url;
        }

        public String getConnectionProperties() {
            return connectionProperties;
        }

        public void setConnectionProperties(String connectionProperties) {
            this.connectionProperties = connectionProperties;
        }

        public String getFilters() {
            return filters;
        }

        public void setFilters(String filters) {
            this.filters = filters;
        }

        public String getDataSourceName() {
            return dataSourceName;
        }

        public void setDataSourceName(String dataSourceName) {
            this.dataSourceName = dataSourceName;
        }

        public Integer getClientId() {
            return clientId;
        }

        public void setClientId(Integer clientId) {
            this.clientId = clientId;
        }
    }
}
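
For reference, the thirdDatasources list binds to configuration along these lines (a hypothetical application.yml fragment; names, URLs, and credentials are made up):

spring:
  datasource:
    druid:
      dy:
        third-datasources:
          - data-source-name: ds_a
            client-id: 1
            driver-class-name: com.mysql.cj.jdbc.Driver
            url: jdbc:mysql://localhost:3306/db_a
            username: user_a
            password: secret
            connection-properties: useUnicode=true;characterEncoding=utf8
          - data-source-name: ds_b
            client-id: 2
            driver-class-name: com.mysql.cj.jdbc.Driver
            url: jdbc:mysql://localhost:3306/db_b
            username: user_b
            password: secret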

DataSourceHolder

package com.cheche365.dictonary.datatrans.datatrans.config;

import lombok.extern.slf4j.Slf4j;

import javax.sql.DataSource;
import java.util.Stack;

/**
 * @author sunyan
 * @date 2022/11/21 15:03
 * @description
 */
@Slf4j
public class DataSourceHolder {

    // A ThreadLocal is per-thread state, so no synchronization is needed on
    // these methods.
    private static final ThreadLocal<Stack<String>> DATA_SOURCE_HOLDER = new ThreadLocal<>();

    /**
     * Push the data source name when an annotated method is entered.
     */
    public static void setDataSource(String dataSource) {
        if (DATA_SOURCE_HOLDER.get() == null) {
            DATA_SOURCE_HOLDER.set(new Stack<>());
        }
        DATA_SOURCE_HOLDER.get().push(dataSource);
    }

    /**
     * Return the data source of the innermost annotated method still running,
     * or null for the default. Peek rather than pop: the routing data source
     * may look the key up several times while the method executes.
     */
    public static String getDataSource() {
        Stack<String> stack = DATA_SOURCE_HOLDER.get();
        return stack == null || stack.empty() ? null : stack.peek();
    }

    /**
     * Pop when an annotated method exits, so the caller's data source becomes
     * current again; remove the ThreadLocal once the stack is empty.
     */
    public static void clearDataSource() {
        Stack<String> stack = DATA_SOURCE_HOLDER.get();
        if (stack != null && !stack.empty()) {
            stack.pop();
        }
        if (stack == null || stack.empty()) {
            log.info("Data source stack is empty, removing the ThreadLocal");
            DATA_SOURCE_HOLDER.remove();
        }
    }
}
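
Push on entry, peek while running, pop on exit gives exactly the nesting behavior the fix needs. A quick illustration of the stack semantics (ds_a and ds_b are hypothetical names):

DataSourceHolder.setDataSource("ds_a");   // aspect enters A
DataSourceHolder.getDataSource();         // "ds_a"
DataSourceHolder.setDataSource("ds_b");   // aspect enters nested B
DataSourceHolder.getDataSource();         // "ds_b" while inside B
DataSourceHolder.clearDataSource();       // B returns, pops "ds_b"
DataSourceHolder.getDataSource();         // "ds_a" again, A is restored
DataSourceHolder.clearDataSource();       // A returns, ThreadLocal removed
DataSourceHolder.getDataSource();         // null, back to the default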

The default and target data source beans

package com.cheche365.dictonary.datatrans.datatrans.config;

import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.StringUtils;

import javax.sql.DataSource;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;


/**
 * @author sunyan
 * @date 2022/11/15 11:16
 * @description
 */
@Configuration
@Slf4j
public class DefaultConfig {
    @Bean
    DataSource defaultDataSource() {
        // Built from the standard spring.datasource.druid.* properties.
        return DruidDataSourceBuilder.create().build();
    }


    @Bean("targetDataSources")
    Map<Object, Object> targetDataSources(DataSourceConfigs dataSourceConfigs){
        Map<Object, Object> currentMap = new ConcurrentHashMap<>();
        dataSourceConfigs.getThirdDatasources().forEach(dataSourceConfig->{
            DruidDataSource dataSource = DruidDataSourceBuilder.create().build();
            dataSource.setDbType("mysql");
            dataSource.setDriverClassName(dataSourceConfig.getDriverClassName());
            dataSource.setUsername(dataSourceConfig.getUsername());
            dataSource.setPassword(dataSourceConfig.getPassword());
            dataSource.setUrl(dataSourceConfig.getUrl());
            Properties connectionProperties = new Properties();
            if(StringUtils.hasLength(dataSourceConfig.getConnectionProperties())){
                String[] connectionPropertiesValue = dataSourceConfig.getConnectionProperties().split("=");
                connectionProperties.setProperty(connectionPropertiesValue[0], connectionPropertiesValue[1]);
            }
            dataSource.setConnectProperties(connectionProperties);
            currentMap.put(dataSourceConfig.getDataSourceName(),dataSource);
        });

        return currentMap;
    }
}

The Spring routing data source

package com.cheche365.dictonary.datatrans.datatrans.config;

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.util.Map;

/**
 * @author sunyan
 * @date 2022/11/21 14:38
 * @description
 */
@Component
@Primary
public class MyRoutingDataSource extends AbstractRoutingDataSource {

    public MyRoutingDataSource(DataSource defaultDataSource,
                               @Qualifier("targetDataSources") Map<Object, Object> targetDataSources) {
        super.setDefaultTargetDataSource(defaultDataSource);
        super.setTargetDataSources(targetDataSources);
    }

    @Override
    protected Object determineCurrentLookupKey() {
        // The contract expects a lookup key, not a DataSource object.
        // Returning null makes AbstractRoutingDataSource fall back to its
        // default target on its own.
        return DataSourceHolder.getDataSource();
    }
}
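
Note that AbstractRoutingDataSource resolves the target map in afterPropertiesSet() and falls back to the default target whenever the lookup key is null (or unmatched, with the default lenient fallback), so returning null above is all that is needed to select the default data source.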

The Spring aspect

package com.cheche365.dictonary.datatrans.datatrans.aop;

import com.cheche365.dictonary.datatrans.datatrans.annotion.DyDataSource;
import com.cheche365.dictonary.datatrans.datatrans.config.DataSourceHolder;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.stereotype.Component;

/**
 * @author sunyan
 * @date 2022/11/21 15:38
 * @description
 */
@Component
@Aspect
public class DataSourceAop {

    @Pointcut("@annotation(com.cheche365.dictonary.datatrans.datatrans.annotion.DyDataSource)")
    public void pointCut() {
    }

    @Around("pointCut()&&@annotation(dataSource)")
    public void around( ProceedingJoinPoint joinPoint,DyDataSource dataSource) throws Throwable {
        String dataSourceName = dataSource.value();
        DataSourceHolder.setDataSource(dataSourceName);

        joinPoint.proceed();
        DataSourceHolder.clearDataSource();
    }
}
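
The @DyDataSource annotation itself never appears in the post; a minimal definition consistent with the package and the aspect's use of value() would be:

package com.cheche365.dictonary.datatrans.datatrans.annotion;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Marks a method that should run against the named dynamic data source.
 */
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface DyDataSource {

    /**
     * The routing key, matching a dataSourceName registered in targetDataSources.
     */
    String value();
}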
