SpringBoot单体项目同时配置Mysql,Hive,ClickHouse多个数据源
1、导入pom依赖
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-ldap</artifactId>
</dependency>
<!-- jdbc -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-jdbc</artifactId>
</dependency>
<!-- druid -->
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.2.8</version>
<exclusions>
<exclusion>
<groupId>com.sun</groupId>
<artifactId>tools</artifactId>
</exclusion>
<exclusion>
<groupId>com.sun</groupId>
<artifactId>jconsole</artifactId>
</exclusion>
</exclusions>
</dependency>
<!-- mybatis -->
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>2.1.1</version>
</dependency>
<!-- mysql-jdbc -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.46</version>
</dependency>
<!-- 添加hive依赖 注意,下面在exclusions里面的包必须排除掉,否则启动报错-->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>3.1.0</version>
<exclusions>
<exclusion>
<groupId>javax.jdo</groupId>
<artifactId>jdo2-api</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
</exclusion>
<exclusion>
<groupId>org.eclipse.jetty</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<artifactId>servlet-api</artifactId>
<groupId>javax.servlet</groupId>
</exclusion>
</exclusions>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.httpcomponents/httpclient -->
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpclient</artifactId>
<version>4.5.6</version>
</dependency>
<!-- restTemplate utils dependency -->
<dependency>
<groupId>com.arronlong</groupId>
<artifactId>httpclientutil</artifactId>
<version>1.0.4</version>
</dependency>
2、添加Configuration类
(1)、DataSourceConfig
@Configuration
public class DataSourceConfig {

    /**
     * HIVE metadata datasource (MySQL database backing the Hive metastore).
     * <p>
     * This is the ONLY bean marked {@code @Primary}: unqualified {@code DataSource}
     * injection points (including Spring Boot auto-configuration) resolve to it.
     * The original code also marked {@code hiveMetaSecondDataSource} as
     * {@code @Primary}, which makes the context fail on startup with
     * {@code NoUniqueBeanDefinitionException} ("more than one 'primary' bean").
     * <p>
     * Pool settings are bound from the given prefix by {@code @ConfigurationProperties};
     * injection points can still use {@code @Qualifier("hiveMetaDataSource")} because a
     * qualifier falls back to matching the bean name.
     */
    @Primary
    @Bean(name = "hiveMetaDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hive-meta")
    public DataSource hiveMetaDataSource() {
        return new DruidDataSource();
    }

    /**
     * Second HIVE metadata datasource (another metastore MySQL database).
     * Deliberately NOT {@code @Primary} — see {@link #hiveMetaDataSource()}.
     */
    @Bean(name = "hiveMetaSecondDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hive-meta-second")
    public DataSource hiveMetaSecondDataSource() {
        return new DruidDataSource();
    }

    /**
     * Hive data datasource (HiveServer2 via the Hive JDBC driver).
     */
    @Bean(name = "hiveDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.hive")
    public DataSource hiveDataSource() {
        return new DruidDataSource();
    }

    /**
     * Tablex metadata datasource (MySQL).
     */
    @Bean(name = "tablexMetaDataSource")
    @ConfigurationProperties(prefix = "spring.datasource.druid.tablex-meta")
    public DataSource tablexMetaDataSource() {
        return new DruidDataSource();
    }

    /**
     * JdbcTemplate over the Hive metastore datasource.
     * FIXME: marked "unused" in the original source — confirm before removing.
     *
     * @param hiveMetaDataSource the primary metastore datasource
     * @return a JdbcTemplate bound to that datasource
     */
    @Bean
    public JdbcTemplate hiveMetaJdbcTemplate(@Qualifier("hiveMetaDataSource") DataSource hiveMetaDataSource) {
        return new JdbcTemplate(hiveMetaDataSource);
    }

    /**
     * JdbcTemplate over the second Hive metastore datasource.
     *
     * @param hiveMetaSecondDataSource the second metastore datasource
     * @return a JdbcTemplate bound to that datasource
     */
    @Bean
    public JdbcTemplate hiveMetaSecondJdbcTemplate(@Qualifier("hiveMetaSecondDataSource") DataSource hiveMetaSecondDataSource) {
        return new JdbcTemplate(hiveMetaSecondDataSource);
    }

    /**
     * JdbcTemplate over the Hive (HiveServer2) datasource.
     *
     * @param hiveDataSource the Hive JDBC datasource
     * @return a JdbcTemplate bound to that datasource
     */
    @Bean
    public JdbcTemplate hiveTemplate(@Qualifier("hiveDataSource") DataSource hiveDataSource) {
        return new JdbcTemplate(hiveDataSource);
    }

    /**
     * JdbcTemplate over the tablex metadata datasource.
     *
     * @param tablexMetaDataSource the tablex MySQL datasource
     * @return a JdbcTemplate bound to that datasource
     */
    @Bean
    public JdbcTemplate tablexMetaTemplate(@Qualifier("tablexMetaDataSource") DataSource tablexMetaDataSource) {
        return new JdbcTemplate(tablexMetaDataSource);
    }
}
(2)、HIVE元数据配置类HiveTabMetaConfig
@Configuration
@MapperScan(
        basePackages = "com.xxxx.xxxx.mapper.hivemeta",
        sqlSessionFactoryRef = "hiveTabMetaSqlSessionFactory",
        sqlSessionTemplateRef = "hiveTabMetaSqlSessionTemplate"
)
public class HiveTabMetaConfig {

    /** Hive metastore datasource produced by DataSourceConfig. */
    private final DataSource hiveMetaDataSource;

    public HiveTabMetaConfig(@Qualifier("hiveMetaDataSource") DataSource hiveMetaDataSource) {
        this.hiveMetaDataSource = hiveMetaDataSource;
    }

    /**
     * MyBatis session factory for the Hive metastore, loading mapper XML
     * from classpath*:mapper/hivemeta/.
     */
    @Bean(name = "hiveTabMetaSqlSessionFactory")
    public SqlSessionFactory hiveTabMetaSqlSessionFactory() throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(hiveMetaDataSource);
        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        factoryBean.setMapperLocations(resolver.getResources("classpath*:mapper/hivemeta/*.xml"));
        return factoryBean.getObject();
    }

    /** Thread-safe SqlSession used by the mappers scanned above. */
    @Bean
    public SqlSessionTemplate hiveTabMetaSqlSessionTemplate() throws Exception {
        return new SqlSessionTemplate(hiveTabMetaSqlSessionFactory());
    }

    /** Transaction manager bound to the Hive metastore datasource. */
    @Bean(name= "hiveTabMetaTransactionManager")
    public DataSourceTransactionManager hiveTabMetaTransactionManager() {
        return new DataSourceTransactionManager(hiveMetaDataSource);
    }
}
(3)、另一个HIVE元数据配置类HiveTabMetaSecondConfig
@Configuration
@MapperScan(
        basePackages = "com.xxx.xxx.mapper.hivemetasecond",
        sqlSessionFactoryRef = "hiveTabMetaSecondSqlSessionFactory",
        sqlSessionTemplateRef = "hiveTabMetaSecondSqlSessionTemplate"
)
public class HiveTabMetaSecondConfig {

    /** Second Hive metastore datasource produced by DataSourceConfig. */
    private final DataSource hiveMetaSecondDataSource;

    public HiveTabMetaSecondConfig(@Qualifier("hiveMetaSecondDataSource") DataSource hiveMetaSecondDataSource) {
        this.hiveMetaSecondDataSource = hiveMetaSecondDataSource;
    }

    /**
     * MyBatis session factory for the second Hive metastore, loading mapper XML
     * from classpath*:mapper/hivemetasecond/ (kept in a separate package from the
     * first metastore's mappers so the two sets do not collide).
     */
    @Bean(name = "hiveTabMetaSecondSqlSessionFactory")
    public SqlSessionFactory hiveTabMetaSecondSqlSessionFactory() throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(hiveMetaSecondDataSource);
        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        factoryBean.setMapperLocations(resolver.getResources("classpath*:mapper/hivemetasecond/*.xml"));
        return factoryBean.getObject();
    }

    /** Thread-safe SqlSession used by the mappers scanned above. */
    @Bean
    public SqlSessionTemplate hiveTabMetaSecondSqlSessionTemplate() throws Exception {
        return new SqlSessionTemplate(hiveTabMetaSecondSqlSessionFactory());
    }

    /** Transaction manager bound to the second Hive metastore datasource. */
    @Bean(name= "hiveTabMetaSecondTransactionManager")
    public DataSourceTransactionManager hiveTabMetaSecondTransactionManager() {
        return new DataSourceTransactionManager(hiveMetaSecondDataSource);
    }
}
注意:两个HIVE元数据的xml文件要分成两个包,否则另一个无法识别。
(4)、HIVE配置类HiveConfig
@Configuration
@MapperScan(
        basePackages = "com.xxx.xxx.mapper.hive",
        sqlSessionFactoryRef = "hiveSqlSessionFactory",
        sqlSessionTemplateRef = "hiveSqlSessionTemplate"
)
public class HiveConfig {

    /** Hive (HiveServer2) datasource produced by DataSourceConfig. */
    private final DataSource hiveDataSource;

    public HiveConfig(@Qualifier("hiveDataSource") DataSource hiveDataSource) {
        this.hiveDataSource = hiveDataSource;
    }

    /**
     * MyBatis session factory for Hive queries, loading mapper XML
     * from classpath*:mapper/hive/.
     */
    @Bean
    public SqlSessionFactory hiveSqlSessionFactory() throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(hiveDataSource);
        PathMatchingResourcePatternResolver resolver = new PathMatchingResourcePatternResolver();
        factoryBean.setMapperLocations(resolver.getResources("classpath*:mapper/hive/*.xml"));
        return factoryBean.getObject();
    }

    /** Thread-safe SqlSession used by the mappers scanned above. */
    @Bean
    public SqlSessionTemplate hiveSqlSessionTemplate() throws Exception {
        return new SqlSessionTemplate(hiveSqlSessionFactory());
    }

    /** Transaction manager bound to the Hive datasource. */
    @Bean(name= "hiveTransactionManager")
    public DataSourceTransactionManager hiveTransactionManager() {
        return new DataSourceTransactionManager(hiveDataSource);
    }
}
(5)、Mysql配置类TablexMetaConfig
@Configuration
@MapperScan(
        basePackages = "com.xxx.xxx.mapper.tablex",
        sqlSessionFactoryRef = "tablexMetaSqlSessionFactory",
        sqlSessionTemplateRef = "tablexMetaSqlSessionTemplate"
)
public class TablexMetaConfig {

    /** Tablex metadata (MySQL) datasource produced by DataSourceConfig. */
    private final DataSource tablexMetaDataSource;

    // Fixed the constructor parameter typo "tablexMetaMetaDataSource" (doubled "Meta");
    // the @Qualifier, not the parameter name, drives injection, so behavior is unchanged.
    public TablexMetaConfig(@Qualifier("tablexMetaDataSource") DataSource tablexMetaDataSource) {
        this.tablexMetaDataSource = tablexMetaDataSource;
    }

    /**
     * MyBatis session factory for the tablex metadata database, loading mapper XML
     * from classpath*:mapper/tablex/.
     */
    @Bean
    public SqlSessionFactory tablexMetaSqlSessionFactory() throws Exception {
        SqlSessionFactoryBean factoryBean = new SqlSessionFactoryBean();
        factoryBean.setDataSource(tablexMetaDataSource);
        factoryBean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:mapper/tablex/*.xml"));
        return factoryBean.getObject();
    }

    /** Transaction manager bound to the tablex metadata datasource. */
    @Bean(name= "tablexMetaDataTransactionManager")
    public DataSourceTransactionManager tablexMetaDataTransactionManager() {
        return new DataSourceTransactionManager(tablexMetaDataSource);
    }

    /** Thread-safe SqlSession used by the mappers scanned above. */
    @Bean
    public SqlSessionTemplate tablexMetaSqlSessionTemplate() throws Exception {
        return new SqlSessionTemplate(tablexMetaSqlSessionFactory());
    }
}
(6)、ClickHouse一般使用http请求发送http配置类RestTemplateConfig
@Configuration
public class RestTemplateConfig {

    /**
     * RestTemplate used for ClickHouse HTTP queries.
     * Swaps the default StringHttpMessageConverter (ISO-8859-1) for a UTF-8 one
     * so Chinese text in responses is not mangled.
     */
    @Bean("restTemplate")
    public RestTemplate restTemplate() {
        RestTemplate restTemplate = new RestTemplate(clientHttpRequestFactory());
        List<HttpMessageConverter<?>> converters = restTemplate.getMessageConverters();
        // Remove EVERY StringHttpMessageConverter. The original code only checked the
        // first converter in the list (normally a ByteArrayHttpMessageConverter), so
        // the ISO-8859-1 converter was never actually removed.
        Iterator<HttpMessageConverter<?>> iterator = converters.iterator();
        while (iterator.hasNext()) {
            if (iterator.next() instanceof StringHttpMessageConverter) {
                iterator.remove();
            }
        }
        converters.add(new StringHttpMessageConverter(Charset.forName("UTF-8")));
        return restTemplate;
    }

    /** Request factory backed by the pooled Apache HttpClient below. */
    @Bean
    public HttpComponentsClientHttpRequestFactory clientHttpRequestFactory() {
        HttpComponentsClientHttpRequestFactory factory = new HttpComponentsClientHttpRequestFactory();
        factory.setHttpClient(httpClient());
        return factory;
    }

    /**
     * Pooled Apache HttpClient: 10s timeout, pool of 400 total / 100 per-route
     * connections, up to 10 retries.
     *
     * @throws IllegalStateException if the client cannot be built. The original code
     *         swallowed the exception and returned null, which surfaced later as a
     *         harder-to-diagnose NullPointerException in clientHttpRequestFactory().
     */
    @Bean
    public CloseableHttpClient httpClient() {
        try {
            return HCB.custom()
                    .timeout(10000)
                    .pool(400, 100)
                    .retry(10, true)
                    .build();
        } catch (HttpProcessException e) {
            throw new IllegalStateException("Failed to build HttpClient for ClickHouse RestTemplate", e);
        }
    }

    /**
     * Builds a FastJson message converter (pretty print, null-to-default serialization,
     * "yyyy-MM-dd HH:mm:ss" dates, UTF-8 JSON media type).
     * <p>
     * FIXME: this method has no {@code @Bean} annotation and its result is never added
     * to any RestTemplate — it is currently dead code. Either register it on the
     * RestTemplate above or remove it.
     *
     * @return a configured FastJson HTTP message converter
     */
    public HttpMessageConverter fastJsonHttpMessageConverters() {
        FastJsonHttpMessageConverter fastConvert = new FastJsonHttpMessageConverter();
        FastJsonConfig fastJsonConfig = new FastJsonConfig();
        fastJsonConfig.setSerializerFeatures(SerializerFeature.PrettyFormat,
                SerializerFeature.WriteNullStringAsEmpty,
                SerializerFeature.WriteNullNumberAsZero,
                SerializerFeature.WriteNullListAsEmpty,
                SerializerFeature.WriteMapNullValue,
                SerializerFeature.DisableCheckSpecialChar);
        // 24-hour date format; without this, dates serialize as epoch timestamps.
        fastJsonConfig.setDateFormat("yyyy-MM-dd HH:mm:ss");
        // UTF-8 media type avoids mojibake for Chinese content.
        List<MediaType> fastMediaTypes = Lists.newArrayList();
        fastMediaTypes.add(MediaType.APPLICATION_JSON_UTF8);
        fastConvert.setSupportedMediaTypes(fastMediaTypes);
        fastConvert.setFastJsonConfig(fastJsonConfig);
        return fastConvert;
    }
}
(7)、Clickhouse工具类,封装http Post请求
@Component
@Data
@AllArgsConstructor
public class ClickHouseUtil {

    @Autowired
    @Qualifier("restTemplate")
    RestTemplate restTemplate;

    /** ClickHouse HTTP endpoint, e.g. http://host:8123 */
    @Value("${clickhouse.url}")
    private String chUrl;

    @Value("${clickhouse.username}")
    private String userName;

    @Value("${clickhouse.password}")
    private String password;

    // No-arg constructor kept so Spring instantiates the bean and uses field injection
    // (the Lombok all-args constructor is not annotated for autowiring).
    public ClickHouseUtil() {
    }

    /**
     * Executes a SQL statement against ClickHouse over HTTP.
     *
     * @param sql the SQL text sent as the POST body
     * @return true when ClickHouse answers HTTP 200, false otherwise
     * @throws MyException if the HTTP call itself fails
     */
    public boolean sendPostRequest(String sql) {
        ResponseEntity<String> response = execute(sql);
        return response.getStatusCode().value() == 200;
    }

    /**
     * Executes a SQL statement against ClickHouse over HTTP and returns the raw body.
     *
     * @param sql the SQL text sent as the POST body
     * @return the response body (may be null for empty responses)
     * @throws MyException if the HTTP call itself fails
     */
    public String sendPostRequest1(String sql) {
        return execute(sql).getBody();
    }

    /**
     * Shared POST logic: the two public methods previously duplicated ~25 lines each.
     * The request body is the raw SQL string, so the entity is typed as
     * HttpEntity&lt;String&gt; (the original HttpEntity&lt;MultiValueMap&gt; generic
     * did not match the String body it carried).
     */
    private ResponseEntity<String> execute(String sql) {
        HttpEntity<String> httpEntity = new HttpEntity<>(sql, basicAuthHeaders());
        try {
            return restTemplate.exchange(chUrl, HttpMethod.POST, httpEntity, String.class);
        } catch (Exception e) {
            e.printStackTrace();
            // NOTE(review): MyException(e.getMessage()) drops the cause chain — if
            // MyException has a (String, Throwable) constructor, prefer passing e.
            throw new MyException(e.getMessage());
        }
    }

    /** Builds headers with HTTP Basic auth (user:password, base64) and JSON content type. */
    private HttpHeaders basicAuthHeaders() {
        HttpHeaders headers = new HttpHeaders();
        String credentials = userName + ":" + password;
        // encodeToString avoids the platform-default-charset new String(byte[]) call;
        // base64 output is pure ASCII either way.
        String basicAuth = Base64.getEncoder().encodeToString(credentials.getBytes(Charset.forName("US-ASCII")));
        headers.set("Authorization", "Basic " + basicAuth);
        headers.setContentType(MediaType.APPLICATION_JSON);
        return headers;
    }
}
(8)、yml配置文件
application.yml
spring:
########## WebStatFilter 配置(采集 web 监控数据)。注意:druid-spring-boot-starter 要求这些键位于 spring.datasource.druid.web-stat-filter 之下,直接放在 spring: 下会被忽略 ##########
web-stat-filter:
enabled: true # 启动 StatFilter
url-pattern: /* # 过滤所有url
exclusions: "*.js,*.gif,*.jpg,*.png,*.css,*.ico,/druid/*" # 排除一些不必要的url
session-stat-enable: true # 开启session统计功能
session-stat-max-count: 1000 # session的最大个数,默认100
## druid
datasource:
druid:
stat-view-servlet:
enabled: true
url-pattern: /druid/*
reset-enable: true
login-username: admin
login-password: zhangx102
allow: # 设置允许指定 ip 可以访问 druid 后台监控界面
profiles:
active: dev
application-dev.yml
# CLICKHOUSE DATASOURCE CONFIG
clickhouse:
url: http://xx.xx.x.xx:9092   # NOTE: ClickHouse 的 HTTP 接口默认端口是 8123(9092 是 Kafka 的默认端口),请确认此端口
username: xxxx
password: xxxx
## 数据源配置
datasource:
druid:
### HIVE META CONFIG
hive-meta:
name: HIVE-META
url: jdbc:mysql://xx.xx.x.xx:3306/xxxx?characterEncoding=utf8&useSSL=false
username: xxxx
password: xxxx
driver-class-name: com.mysql.jdbc.Driver
initialSize: 3
maxActive: 10
maxWait: 60000
minIdle: 1
timeBetweenEvictionRunsMillis: 60000
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
validationQuery: SELECT 1
validation-query-timeout: 500
filters: stat,wall
updatetableparam.path: /hiveServing/addUpdateTableParams
updatecolumncomment.path: /hiveServing/updateColumnOfHiveColumn
hive-meta-second:
name: HIVE-META-ANOTHER
url: jdbc:mysql://xx.xx.x.xx:3306/xxxx?characterEncoding=utf8&useSSL=false
username: xxxx
password: xxxx
driver-class-name: com.mysql.jdbc.Driver
initialSize: 3
maxActive: 10
maxWait: 60000
minIdle: 1
timeBetweenEvictionRunsMillis: 60000
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
validationQuery: SELECT 1
validation-query-timeout: 500
filters: stat,wall
updatetableparam.path: /hiveServing/addUpdateTableParams
updatecolumncomment.path: /hiveServing/updateColumnOfHiveColumn
tablex-meta:
name: tablex-meta
url: jdbc:mysql://xx.xx.x.xx:3306/tablemanagement?characterEncoding=utf8&useSSL=false
username: xxxx
password: xxxx
driver-class-name: com.mysql.jdbc.Driver
initialSize: 5
maxActive: 10
maxWait: 60000
minIdle: 1
timeBetweenEvictionRunsMillis: 60000
minEvictableIdleTimeMillis: 300000
testWhileIdle: true
testOnBorrow: true
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
validationQuery: SELECT 1
validation-query-timeout: 500
filters: stat,wall
hive: #hive数据源
url: jdbc:hive2://xx.xx.x.xx:2181,xx.xx.x.xx:2181,xx.xx.x.xx:2181/xxxx;serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2
username: xxxx
password: xxxx
driver-class-name: org.apache.hive.jdbc.HiveDriver
initialSize: 1
minIdle: 1
maxIdle: 5
maxActive: 50
maxWait: 10000
timeBetweenEvictionRunsMillis: 10000
minEvictableIdleTimeMillis: 300000
validationQuery: select 'x'
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
poolPreparedStatements: true
maxOpenPreparedStatements: 20
# 打开PSCache,并且指定每个连接上PSCache的大小
maxPoolPreparedStatementPerConnectionSize: 20
connectionErrorRetryAttempts: 0
breakAfterAcquireFailure: false