Spring Boot multi-data-source configuration (e.g. MySQL and Hive) with the Druid connection pool

Four classes are required in total:

1. DataSourceConfig

package unified.content.config.datasource;

import com.alibaba.druid.pool.DruidDataSource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;

/**
 * @date 2019/10/11 15:27
 * @description Registers the MySQL and Hive data sources and the dynamic routing data source.
 */
@Slf4j
@Configuration
public class DataSourceConfig {

    @Value("${spring.datasource.driver-class-name}")
    private String driverClassName;
    @Value("${spring.datasource.url}")
    private String url;
    @Value("${spring.datasource.username}")
    private String userName;
    @Value("${spring.datasource.password}")
    private String password;

    @Value("${spring.datasource.hive.driver-class-name}")
    private String hiveDriverClassName;
    @Value("${spring.datasource.hive.url}")
    private String hiveJdbcUrl;
    @Value("${spring.datasource.hive.username}")
    private String hiveUsername;
    @Value("${spring.datasource.hive.password}")
    private String hivePassword;

    @Bean(name = "datasourceMysql")
    public DataSource getDataSourceMysql() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName(driverClassName);
        dataSource.setUrl(url);
        dataSource.setUsername(userName);
        dataSource.setPassword(password);
        log.info("------------datasourceMysql dataSource.getUrl(): {}", dataSource.getUrl());
        return dataSource;
    }

    @Bean(name = "datasourceHive")
    public DataSource getDataSourceHive() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName(hiveDriverClassName);
        dataSource.setUrl(hiveJdbcUrl);
        dataSource.setUsername(hiveUsername);
        dataSource.setPassword(hivePassword);
        dataSource.setTestWhileIdle(true);
        log.info("------------------------datasourceHive dataSource.getUrl(): {}", dataSource.getUrl());
        return dataSource;
    }

    /**
     * Dynamic data source: switches between the target data sources at runtime via AOP.
     */
    @Primary
    @Bean(name = "dynamicDataSource")
    public DataSource dynamicDataSource() {
        DynamicDataSource dynamicDataSource = new DynamicDataSource();
        // default data source
        dynamicDataSource.setDefaultTargetDataSource(getDataSourceMysql());
        // register all target data sources
        Map<Object, Object> dsMap = new HashMap<>();
        dsMap.put(DynamicDataSource.DS_MYSQL, getDataSourceMysql());
        dsMap.put(DynamicDataSource.DS_HIVE, getDataSourceHive());
        dynamicDataSource.setTargetDataSources(dsMap);
        return dynamicDataSource;
    }

}
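
Because dynamicDataSource is marked @Primary, MyBatis auto-configuration builds its SqlSessionFactory on top of the routing data source, so every mapper call goes through determineCurrentLookupKey(). If you prefer to wire the factory explicitly, a minimal sketch (the mapper location is illustrative, not from the original project):

package unified.content.config.datasource;

import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;

import javax.sql.DataSource;

@Configuration
public class MyBatisConfig {

    @Bean
    public SqlSessionFactory sqlSessionFactory(
            @Qualifier("dynamicDataSource") DataSource dataSource) throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        // all SqlSessions route through the dynamic data source
        factoryBean.setDataSource(dataSource);
        // mapper location is illustrative; adjust to the project layout
        factoryBean.setMapperLocations(new PathMatchingResourcePatternResolver()
                .getResources("classpath:mapper/*.xml"));
        return factoryBean.getObject();
    }
}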

2. DataSourceContextHolder

package unified.content.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.springframework.util.ObjectUtils;

/**
 * @date 2019/10/11 15:55
 * @description Holds the data source key for the current thread.
 */
@Slf4j
public class DataSourceContextHolder {
    /**
     * default data source key
     */
    public static final String DEFAULT_DS = "mysql";

    private static final ThreadLocal<String> CONTEXT_HOLDER = new ThreadLocal<>();

    // set the data source key for the current thread
    public static void setDB(String dbType) {
        log.info("setDB -> dbType = [{}]", dbType);
        CONTEXT_HOLDER.set(dbType);
    }

    // get the data source key for the current thread (falls back to the default)
    public static String getDB() {
        return ObjectUtils.isEmpty(CONTEXT_HOLDER.get()) ? DEFAULT_DS : CONTEXT_HOLDER.get();
    }

    /**
     * clear the data source key for the current thread
     */
    public static void clearDB() {
        CONTEXT_HOLDER.remove();
    }
}
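
Besides the AOP switching shown in class 4 below, the holder can also be driven by hand around a call; a minimal sketch (hiveDao is a hypothetical bean):

    DataSourceContextHolder.setDB(DynamicDataSource.DS_HIVE);
    try {
        // every JDBC call on this thread now routes to the Hive data source
        hiveDao.listTables();
    } finally {
        // always restore the default; worker threads are pooled and reused,
        // so a leftover key would leak into the next request
        DataSourceContextHolder.clearDB();
    }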

3. DynamicDataSource

package unified.content.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;

/**
 * @date 2019/10/11 15:53
 * @description Routing data source that looks up the key held in DataSourceContextHolder.
 */
@Slf4j
public class DynamicDataSource extends AbstractRoutingDataSource {

    public static final String DS_MYSQL = "mysql";

    public static final String DS_HIVE = "hive";


    @Override
    protected Object determineCurrentLookupKey() {
        log.info("the dynamic DataSource is: {}", DataSourceContextHolder.getDB());
        return DataSourceContextHolder.getDB();
    }
}

4. DynamicDataSourceAspect

package unified.content.config.datasource;

import lombok.extern.slf4j.Slf4j;
import org.apache.ibatis.session.SqlSessionFactory;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

/**
 * @date 2019/10/11 15:59
 * @description Switches the data source around DAO calls via AOP.
 */
@Slf4j
@Component
@Aspect
public class DynamicDataSourceAspect {

    @Resource
    private SqlSessionFactory sqlSessionFactory;

    /**
     * Switch to the Hive data source before any method in the Hive DAO package runs.
     */
    @Before("execution(* unified.content.dao.hive.*.*(..))")
    public void beforeSwitchDS(JoinPoint point) {
        DataSourceContextHolder.setDB(DynamicDataSource.DS_HIVE);
        // map Hive result sets by column name rather than column label
        sqlSessionFactory.getConfiguration().setUseColumnLabel(Boolean.FALSE);
    }

    /**
     * Restore the default data source after any DAO method returns.
     */
    @After("execution(* unified.content.dao.*.*.*(..))")
    public void afterSwitchDS(JoinPoint point) {
        DataSourceContextHolder.clearDB();
        sqlSessionFactory.getConfiguration().setUseColumnLabel(Boolean.TRUE);
    }

}
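
With the aspect in place, every method of any bean under unified.content.dao.hive is routed to Hive automatically. A hypothetical MyBatis mapper that the @Before pointcut would match:

package unified.content.dao.hive;

import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Select;

import java.util.List;
import java.util.Map;

// hypothetical mapper: calls are intercepted by DynamicDataSourceAspect
// and executed against the Hive data source
@Mapper
public interface HiveSampleMapper {

    @Select("show tables")
    List<Map<String, Object>> showTables();
}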

Note: the Spring Boot entry class must be annotated with:

@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class})
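
A minimal entry class (the class name is illustrative):

package unified.content;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;

// exclude DataSourceAutoConfiguration so Spring Boot does not try to build
// its own DataSource around the manually defined Druid beans
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class})
public class Application {

    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
}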

Otherwise the application fails to start with a circular-dependency error:

***************************
APPLICATION FAILED TO START
***************************

Description:

The dependencies of some of the beans in the application context form a cycle:

   dynamicDataSourceAspect
      ↓
   sqlSessionFactory defined in class path resource [org/mybatis/spring/boot/autoconfigure/MybatisAutoConfiguration.class]
┌─────┐
|  dynamicDataSource defined in class path resource [unified/content/config/datasource/DataSourceConfig.class]
↑     ↓
|  datasourceMysql defined in class path resource [unified/content/config/datasource/DataSourceConfig.class]
↑     ↓
|  org.springframework.boot.autoconfigure.jdbc.DataSourceInitializerInvoker

 

After adding the exclusion, the application starts normally.

This covers the MySQL + Hive multi-data-source setup; other data sources such as Impala can be added the same way, as sketched below.
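
For Impala, only a routing key, a Druid bean and a map entry need to be added; a hedged sketch (the driver class and property/field names are assumptions, not from the original setup):

    // in DynamicDataSource, one more routing key:
    public static final String DS_IMPALA = "impala";

    // in DataSourceConfig, one more Druid bean, with fields bound from
    // hypothetical spring.datasource.impala.* properties:
    @Bean(name = "datasourceImpala")
    public DataSource getDataSourceImpala() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName(impalaDriverClassName); // e.g. "com.cloudera.impala.jdbc41.Driver"
        dataSource.setUrl(impalaJdbcUrl);                     // e.g. "jdbc:impala://host:21050/default"
        dataSource.setUsername(impalaUsername);
        dataSource.setPassword(impalaPassword);
        return dataSource;
    }

    // plus one more entry in dynamicDataSource():
    // dsMap.put(DynamicDataSource.DS_IMPALA, getDataSourceImpala());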

 

Druid configuration (application-dev.properties):


# Druid connection pool settings
spring.datasource.type=com.alibaba.druid.pool.DruidDataSource
spring.datasource.druid.initial-size=1
spring.datasource.druid.min-idle=3
spring.datasource.druid.max-active=20
spring.datasource.druid.max-wait=6000
spring.datasource.druid.time-between-eviction-runs-millis=60000
spring.datasource.druid.min-evictable-idle-time-millis=30000
spring.datasource.druid.validation-query=select 1
spring.datasource.druid.test-while-idle=true
spring.datasource.druid.test-on-borrow=false
spring.datasource.druid.test-on-return=false
spring.datasource.druid.pool-prepared-statements=true
spring.datasource.druid.max-pool-prepared-statement-per-connection-size=20
spring.datasource.druid.connection-error-retry-attempts=0
spring.datasource.druid.break-after-acquire-failure=true

# mysql
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
spring.datasource.url=jdbc:mysql://172.28.xx.xxx:3306/xxxx?serverTimezone=Asia/Shanghai&useUnicode=true&characterEncoding=utf8&zeroDateTimeBehavior=convertToNull&allowMultiQueries=true
spring.datasource.username=xxx
spring.datasource.password=xxx


# Hive settings
spring.datasource.hive.driver-class-name=org.apache.hive.jdbc.HiveDriver
spring.datasource.hive.url=jdbc:hive2://172.16.xx.xx:xxxx/default
spring.datasource.hive.username=hive
spring.datasource.hive.password=xxx
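
Note that the spring.datasource.druid.* pool settings above are not picked up automatically by data sources created with new DruidDataSource(); druid-spring-boot-starter only binds them to its own auto-configured bean, which typically backs off once custom DataSource beans are defined. A minimal way to bind them onto the manual beans is @ConfigurationProperties on the @Bean method (relaxed binding maps e.g. initial-size to setInitialSize); a sketch of the MySQL bean with binding added:

    // in DataSourceConfig (requires
    // org.springframework.boot.context.properties.ConfigurationProperties)
    @Bean(name = "datasourceMysql")
    @ConfigurationProperties(prefix = "spring.datasource.druid")
    public DataSource getDataSourceMysql() {
        DruidDataSource dataSource = new DruidDataSource();
        dataSource.setDriverClassName(driverClassName);
        dataSource.setUrl(url);
        dataSource.setUsername(userName);
        dataSource.setPassword(password);
        return dataSource;
    }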

 

Related Maven dependencies, from the pom:

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>druid-spring-boot-starter</artifactId>
            <version>${druid.version}</version>
        </dependency>

        <!-- hive-jdbc -->
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-jdbc</artifactId>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.3</version>
            <exclusions>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>servlet-api</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.logging.log4j</groupId>
                    <artifactId>log4j-slf4j-impl</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <!-- hbase -->
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop</artifactId>
            <version>${spring.hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>servlet-api</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>log4j</groupId>
                    <artifactId>log4j</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.springframework.data</groupId>
            <artifactId>spring-data-hadoop-hbase</artifactId>
            <version>${spring.hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>servlet-api</artifactId>
                </exclusion>
            </exclusions>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>1.3.0</version>
            <exclusions>
                <exclusion>
                    <groupId>org.slf4j</groupId>
                    <artifactId>slf4j-log4j12</artifactId>
                </exclusion>
            </exclusions>
        </dependency>


        <!-- mybatis & mysql -->
        <dependency>
            <groupId>org.mybatis.spring.boot</groupId>
            <artifactId>mybatis-spring-boot-starter</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.47</version>
        </dependency>

 

Appendix:

If only a Hive connection is needed (without the dynamic routing), it can be configured as follows:

package unified.content.config;

import com.alibaba.druid.pool.DruidDataSource;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import javax.sql.DataSource;

/**
 * Standalone Hive data source configuration, for when the dynamic routing above is not needed.
 */
@Configuration
public class HiveDruidConfig {

    @Value("${spring.datasource.hive.url}")
    private String url;
    @Value("${spring.datasource.hive.username}")
    private String username;
    @Value("${spring.datasource.hive.password}")
    private String password;
    @Value("${spring.datasource.hive.driver-class-name}")
    private String driverClassName;


    @Bean(name = "hiveDataSource")
    @Qualifier("hiveDataSource")
    public DataSource hiveDataSource() {
        DruidDataSource datasource = new DruidDataSource();
        datasource.setUrl(url);
        datasource.setUsername(username);
        datasource.setPassword(password);
        datasource.setDriverClassName(driverClassName);
        return datasource;
    }

    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hiveDataSource") DataSource dataSource) {
        return new JdbcTemplate(dataSource);
    }
}

CRUD examples:

package unified.content.controller;

import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.dao.DataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Test controller exercising the Hive data source.
 */
@Slf4j
@RestController
@RequestMapping("/hive")
public class HiveController {


    @Autowired
    @Qualifier("datasourceHive")
    private DataSource hiveDataSource;

    @Autowired
    @Qualifier("hiveJdbcTemplate")
    private JdbcTemplate hiveJdbcTemplate;

    @RequestMapping("/table/show")
    public List<String> showtables() {
        List<String> list = new ArrayList<String>();
        Statement statement = null;
        try {
            statement = hiveDataSource.getConnection().createStatement();
            String sql = "show tables";
            log.info("Running: " + sql);
            ResultSet res = statement.executeQuery(sql);

//            List<Map<String, Object>> maps = hiveJdbcTemplate.queryForList(sql);

            while (res.next()) {
                list.add(res.getString(1));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return list;
    }

    /**
     * Describe the columns of a table in the Hive database.
     */
    @RequestMapping("/table/describe")
    public List<String> describeTable(String tableName) throws SQLException {
        List<String> list = new ArrayList<>();
        String sql = "describe " + tableName;
        log.info("Running: " + sql);
        try (Connection conn = hiveDataSource.getConnection();
             Statement statement = conn.createStatement();
             ResultSet res = statement.executeQuery(sql)) {
            while (res.next()) {
                list.add(res.getString(1));
            }
        }
        return list;
    }

    /**
     * Query all rows from the given table.
     */
    @RequestMapping("/table/select")
    public List<String> selectFromTable(String tableName) throws SQLException {
        String sql = "select * from " + tableName;
        log.info("Running: " + sql);
        List<String> list = new ArrayList<>();
        try (Connection conn = hiveDataSource.getConnection();
             Statement statement = conn.createStatement();
             ResultSet res = statement.executeQuery(sql)) {
            int count = res.getMetaData().getColumnCount();
            while (res.next()) {
                // join all columns of the row with spaces
                StringBuilder row = new StringBuilder();
                for (int i = 1; i <= count; i++) {
                    if (i > 1) {
                        row.append(' ');
                    }
                    row.append(res.getString(i));
                }
                log.info(row.toString());
                list.add(row.toString());
            }
        }
        return list;
    }


    /**
     * Query via the injected JdbcTemplate.
     */
    @RequestMapping("/table/select2")
    public String select() {
        String sql = "select * from HIVE_TEST";
        List<Map<String, Object>> rows = hiveJdbcTemplate.queryForList(sql);
        for (Map<String, Object> row : rows) {
            log.info(String.format("%s\t%s", row.get("key"), row.get("value")));
        }
        return "Done";
    }


    /**
     * Example: create a new table.
     */
    @RequestMapping("/table/create")
    public String createTable() {
        StringBuilder sql = new StringBuilder("CREATE TABLE IF NOT EXISTS ");
        sql.append("user_sample");
        sql.append("(user_num BIGINT, user_name STRING, user_gender STRING, user_age INT) ");
        sql.append("ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' "); // field/line delimiters
        sql.append("STORED AS TEXTFILE"); // store as plain text

        log.info("Running: " + sql);
        String result = "Create table successfully...";
        try {
            hiveJdbcTemplate.execute(sql.toString());
        } catch (DataAccessException dae) {
            result = "Create table encountered an error: " + dae.getMessage();
            log.error(result);
        }
        return result;
    }

    /**
     * Example: load data from a file on the Hive server's local filesystem into a Hive table.
     */
    @RequestMapping("/table/load")
    public String loadIntoTable() {
        String filepath = "/home/hadoop/user_sample.txt";
        String sql = "load data local inpath '" + filepath + "' into table user_sample";
        String result = "Load data into table successfully...";
        try {
            hiveJdbcTemplate.execute(sql);
        } catch (DataAccessException dae) {
            result = "Load data into table encountered an error: " + dae.getMessage();
            log.error(result);
        }
        return result;
    }

    /**
     * Example: insert a row into a Hive table.
     */
    @RequestMapping("/table/insert")
    public String insertIntoTable() {
        String sql = "INSERT INTO TABLE user_sample(user_num,user_name,user_gender,user_age) VALUES(888,'Plum','M',32)";
        String result = "Insert into table successfully...";
        try {
            hiveJdbcTemplate.execute(sql);
        } catch (DataAccessException dae) {
            result = "Insert into table encountered an error: " + dae.getMessage();
            log.error(result);
        }
        return result;
    }

    /**
     * Example: drop a table.
     */
    @RequestMapping("/table/delete")
    public String delete(String tableName) {
        String sql = "DROP TABLE IF EXISTS " + tableName;
        String result = "Drop table successfully...";
        log.info("Running: " + sql);
        try {
            hiveJdbcTemplate.execute(sql);
        } catch (DataAccessException dae) {
            result = "Drop table encountered an error: " + dae.getMessage();
            log.error(result);
        }
        return result;
    }


    @RequestMapping("/table/delete2")
    public String delete() {
        StringBuffer sql = new StringBuffer("DROP TABLE IF EXISTS HIVE_TEST");
        log.info(sql.toString());
        hiveJdbcTemplate.execute(sql.toString());
        return "Done";
    }


}

 
