application.yml 配置文件:
spring:
  # Spring Boot MVC view resolution (JSP pages under /WEB-INF/)
  mvc:
    view:
      prefix: /WEB-INF/
      suffix: .jsp
  # Request/response encoding (spring.http.encoding.* — pre-Boot-2.1 property names)
  http:
    encoding.force: true
    encoding.charset: UTF-8
    encoding.enabled: true
  tomcat:
    uri-encoding: UTF-8
  application:
    name: dev-manager
  profiles:
    active: dev
  datasource:
    # Bound by MultipleDBConfig via prefix "spring.datasource.hikari".
    # Despite the key name, the pool type configured here is Druid, not Hikari.
    hikari:
      type: com.alibaba.druid.pool.DruidDataSource
      driver-class-name: com.mysql.cj.jdbc.Driver
      # NOTE(review): DruidDataSource exposes "url" rather than "jdbcUrl" — confirm
      # this key actually binds with the Druid version in use, otherwise the URL
      # is silently ignored by @ConfigurationProperties.
      jdbcUrl: jdbc:mysql://localhost:3306/test?useUnicode=true&characterEncoding=utf8&autoReconnect=true&allowMultiQueries=true
      username: root
      password: admin123
      # initial / minimum / maximum pool size
      initialSize: 3
      min-idle: 3          # was "minidle", which does not bind to Druid's minIdle
      maxActive: 18
      # max wait for a connection (ms)
      maxWait: 60000
      # idle-connection eviction interval (ms)
      timeBetweenEvictionRunsMillis: 60000
      validationQuery: SELECT 1 FROM dual
      # monitoring filters; removing them disables SQL stats in the Druid console
      filters: stat,wall,log4j
    # Druid-starter specific settings
    druid:
      initial-size: 1
      max-active: 20
      min-idle: 1
      test-on-borrow: true
      stat-view-servlet:
        allow: true
  jpa.database: MYSQL
# Custom Hive datasource (bound by MultipleDBConfig via prefix "hive")
hive:
  jdbcUrl: jdbc:hive2://localhost:10000/test
  type: com.alibaba.druid.pool.DruidDataSource
  driver-class-name: org.apache.hive.jdbc.HiveDriver
  username: hive
  password: hive
# Custom Phoenix datasource (bound by MultipleDBConfig via prefix "phoenix")
phoenix:
  enable: true
  jdbcUrl: jdbc:phoenix:localhost:2181/hbase
  type: com.alibaba.druid.pool.DruidDataSource
  driver-class-name: org.apache.phoenix.jdbc.PhoenixDriver
  username:
  password:
  default-auto-commit: true
  schema.isNamespaceMappingEnabled: true
  schema.mapSystemTablesToNamespace: true
配置类:为各数据库添加 DataSource 与对应的 JdbcTemplate
package com.bridge.common;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.web.bind.annotation.CrossOrigin;
import javax.sql.DataSource;
/**
 * Registers one Druid {@link DataSource} and one {@link JdbcTemplate} per backing
 * store (MySQL, Phoenix, Hive). Each DataSource is populated from the matching
 * section of application.yml via {@code @ConfigurationProperties}.
 *
 * @author wxuan
 * @since 2019-04-19
 */
@Configuration
public class MultipleDBConfig {

    /**
     * MySQL DataSource, bound from "spring.datasource.hikari.*".
     * Marked {@code @Primary} so unqualified DataSource injection points get MySQL.
     * NOTE(review): the YAML sets type=DruidDataSource under a "hikari" key, and
     * Druid exposes "url" rather than "jdbcUrl" — verify the URL actually binds.
     */
    @Bean(name = "mysql")
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource.hikari")
    public DataSource mysqlDataSource() {
        return DataSourceBuilder.create().build();
    }

    /** JdbcTemplate backed by the MySQL DataSource. */
    @Bean(name = "mysqlJdbcTemplate")
    public JdbcTemplate mysqlJdbcTemplate(@Qualifier("mysql") DataSource dsMysql) {
        return new JdbcTemplate(dsMysql);
    }

    /** Phoenix DataSource, bound from the top-level "phoenix.*" properties. */
    @Bean(name = "phoenix")
    @ConfigurationProperties(prefix = "phoenix")
    public DataSource phoenixDataSource() {
        return DataSourceBuilder.create().build();
    }

    /** JdbcTemplate backed by the Phoenix DataSource. */
    @Bean(name = "phoenixJdbcTemplate")
    public JdbcTemplate phoenixJdbcTemplate(@Qualifier("phoenix")
    DataSource dsPhoenix) {
        return new JdbcTemplate(dsPhoenix);
    }

    /** Hive DataSource, bound from the top-level "hive.*" properties. */
    @Bean(name = "hive")
    @ConfigurationProperties(prefix = "hive")
    public DataSource hiveDataSource() {
        return DataSourceBuilder.create().build();
    }

    /** JdbcTemplate backed by the Hive DataSource. */
    @Bean(name = "hiveJdbcTemplate")
    public JdbcTemplate hiveJdbcTemplate(@Qualifier("hive")
    DataSource dsHive) {
        return new JdbcTemplate(dsHive);
    }
}
使用样例(在 DAO 中通过 @Qualifier 注入指定的 JdbcTemplate):
package com.bridge.web.mapper;
import com.bridge.web.util.Kafka;
import com.tkmapper.BaseDao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jdbc.core.BeanPropertyRowMapper;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import tk.mybatis.mapper.common.BaseMapper;
/**
 * Data access for {@link Kafka} rows, demonstrating the same lookup against two
 * different datasources through qualified {@link JdbcTemplate} beans.
 */
@Repository
public class KafkaMapper {

    /** Shared lookup SQL; both datasources expose an identically-shaped Kafka table. */
    private static final String SELECT_BY_ID = "select * from Kafka where id = ?";

    @Autowired
    @Qualifier("hiveJdbcTemplate")
    JdbcTemplate hiveJdbcTemplate;

    @Autowired
    @Qualifier("mysqlJdbcTemplate")
    JdbcTemplate mysqlJdbcTemplate;

    /**
     * Loads one Kafka row by primary key from MySQL.
     *
     * @param val primary-key value
     * @return the matching row
     * @throws org.springframework.dao.EmptyResultDataAccessException if no row matches
     */
    public Kafka selectByPrimaryKey(Integer val) {
        // Diamond-parameterized mapper avoids the raw-type unchecked warning
        // present in the original (new BeanPropertyRowMapper(Kafka.class)).
        RowMapper<Kafka> rowMapper = new BeanPropertyRowMapper<>(Kafka.class);
        return mysqlJdbcTemplate.queryForObject(SELECT_BY_ID, rowMapper, val);
    }

    /**
     * Loads one Kafka row by primary key from Hive.
     *
     * @param var1 primary-key value
     * @return the matching row
     * @throws org.springframework.dao.EmptyResultDataAccessException if no row matches
     */
    public Kafka selectByPrimaryKeyHive(String var1) {
        RowMapper<Kafka> rowMapper = new BeanPropertyRowMapper<>(Kafka.class);
        return hiveJdbcTemplate.queryForObject(SELECT_BY_ID, rowMapper, var1);
    }
}
项目中添加 hbase-site.xml(注意:文件名须为小写的 hbase-site.xml)
<?xml version="1.0"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
<!-- Enable Phoenix schema-to-HBase-namespace mapping; must match the server-side
     setting or Phoenix connections fail. (Original comment said "enable grouping",
     which does not match this property.) -->
<property>
<name>phoenix.schema.isNamespaceMappingEnabled</name>
<value>true</value>
<description>命名空间开启</description>
</property>
<!-- WAL codec required by Phoenix secondary-index support -->
<property>
<name>hbase.regionserver.wal.codec</name>
<value>org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec</value>
<description>二级索引支持</description>
</property>
</configuration>