需求:前端录入外部数据源,数据库类型多样,录入的数据源需要保存在系统中
本文只是一个单元测试,用于测试方案可行性,完整业务实现可以参考另一篇文章
1,引入依赖
<dependency>
<groupId>org.springframework.data</groupId>
<artifactId>spring-data-mongodb</artifactId>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.2.12</version>
</dependency>
<!--外部数据库-->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.11</version>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.5.0</version>
</dependency>
<dependency>
<groupId>com.microsoft.sqlserver</groupId>
<artifactId>mssql-jdbc</artifactId>
<version>9.4.1.jre8</version>
</dependency>
<dependency>
<groupId>org.xerial</groupId>
<artifactId>sqlite-jdbc</artifactId>
</dependency>
2,多数据源demo
支持mysql,sqlserver,postgresql,sqlite,支持初始化时加载和动态新增
import com.alibaba.druid.pool.DruidDataSourceFactory;
import javax.sql.DataSource;
import java.sql.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
 * Multi-datasource demo: MySQL, SQL Server, PostgreSQL and SQLite.
 * Pools are registered once at class-initialization time; more can be
 * added dynamically at runtime (see {@link #main}).
 *
 * @Date 2022/12/1 11:31
 * @Author 余乐
 **/
public class ConnectAllData {

    // Pool registry keyed by data-source name. ConcurrentHashMap because new
    // sources may be registered at runtime while queries are in flight.
    private static final ConcurrentHashMap<String, DataSource> hashMap = new ConcurrentHashMap<>();

    // FIX: this was an instance initializer ("{ ... }"), so every
    // `new ConnectAllData()` re-created both pools and overwrote the registry
    // entries, leaking connections. The original comment already said
    // "static block" -- make it actually static so it runs exactly once.
    // (In production, load persisted configs from an ApplicationRunner instead.)
    static {
        System.out.println("初始化代码块加载已有的数据源");
        try {
            DataSource mysqlDataSource = createdDataSource(
                    "com.mysql.cj.jdbc.Driver",
                    "jdbc:mysql://IP:PORT/tpst-mms?useUnicode=true&characterEncoding=utf-8&useSSL=false&serverTimezone=UTC",
                    "用户名", "密码", "mysql");
            hashMap.put("mysql", mysqlDataSource);
            // SQLite is a single-file database; the file is read from the
            // classpath here: resources/sqlite/test.db
            DataSource sqliteDataSource = createdDataSource(
                    "org.sqlite.JDBC",
                    "jdbc:sqlite::resource:sqlite/test.db",
                    null, null, "sqlite");
            hashMap.put("sqlite", sqliteDataSource);
            System.out.println("已有数据源加载完毕:mysql,sqlite");
        } catch (Exception e) {
            // Best effort: keep the class loadable even if one source is down.
            System.out.println(e.toString());
        }
    }

    /**
     * Fetches a registered pool, failing fast with a descriptive message
     * instead of a bare NullPointerException when the source was never added.
     *
     * @param name registry key, e.g. "mysql", "postgres"
     * @return the pooled DataSource
     * @throws IllegalStateException if no source was registered under that name
     */
    private static DataSource requireDataSource(String name) {
        DataSource dataSource = hashMap.get(name);
        if (dataSource == null) {
            throw new IllegalStateException("datasource not registered: " + name);
        }
        return dataSource;
    }

    /**
     * Queries the default MySQL source, prints the VARCHAR cells of every row
     * and returns the column names.
     *
     * @param sql select statement to run
     * @return column names in result-set order
     * @throws Exception on SQL errors or if the "mysql" source is missing
     */
    public List<String> getUsers(String sql) throws Exception {
        List<String> names = new ArrayList<>();
        // try-with-resources closes ResultSet/Statement/Connection on every path.
        // FIX: the original ran the query TWICE (once via PreparedStatement for
        // metadata, once through an extra Statement that was never closed).
        try (Connection connection = requireDataSource("mysql").getConnection();
             PreparedStatement statement = connection.prepareStatement(sql);
             ResultSet resultSet = statement.executeQuery()) {
            // Column type name per 0-based column index, used to pick the getter.
            HashMap<Integer, String> valType = new HashMap<>();
            ResultSetMetaData mateRsmd = resultSet.getMetaData();
            int columnCount = mateRsmd.getColumnCount();
            for (int i = 0; i < columnCount; i++) {
                int index = i + 1; // JDBC column indexes are 1-based
                valType.put(i, mateRsmd.getColumnTypeName(index));
                names.add(mateRsmd.getColumnName(index));
            }
            System.out.println("表头信息 : " + names.toString());
            System.out.println("数据信息 : ");
            while (resultSet.next()) {
                List<String> rowdata = new ArrayList<>();
                // FIX: loop bound was "i <= names.size()" -- an off-by-one that
                // probed one column past the end on every row.
                for (int i = 0; i < columnCount; i++) {
                    if ("VARCHAR".equals(valType.get(i))) {
                        rowdata.add(resultSet.getString(i + 1));
                    }
                }
                System.out.println(rowdata.toString());
            }
        }
        return names;
    }

    /**
     * Queries the dynamically added PostgreSQL source ("postgres"), prints the
     * VARCHAR cells of every row and returns "column:TYPE" headers.
     *
     * @param sql select statement to run
     * @return "columnName:UPPERCASE_TYPE" entries in result-set order
     * @throws Exception on SQL errors or if the "postgres" source is missing
     */
    public List<String> getDcsGroup(String sql) throws Exception {
        List<String> names = new ArrayList<>();
        try (Connection connection = requireDataSource("postgres").getConnection();
             PreparedStatement statement = connection.prepareStatement(sql);
             // FIX: single execution -- the original ran the SQL twice and
             // leaked the second Statement.
             ResultSet resultSet = statement.executeQuery()) {
            HashMap<Integer, String> valType = new HashMap<>();
            ResultSetMetaData mateRsmd = resultSet.getMetaData();
            int columnCount = mateRsmd.getColumnCount();
            for (int i = 0; i < columnCount; i++) {
                int index = i + 1;
                String columnName = mateRsmd.getColumnName(index);
                String columnType = mateRsmd.getColumnTypeName(index).toUpperCase();
                valType.put(i, columnType);
                names.add(columnName + ":" + columnType);
            }
            System.out.println("表头信息 : " + names.toString());
            System.out.println("数据信息 : ");
            while (resultSet.next()) {
                List<String> rowdata = new ArrayList<>();
                // FIX: was "i <= names.size()" (off-by-one).
                for (int i = 0; i < columnCount; i++) {
                    if ("VARCHAR".equals(valType.get(i))) {
                        rowdata.add(resultSet.getString(i + 1));
                    }
                }
                System.out.println(rowdata.toString());
            }
        }
        return names;
    }

    /**
     * Queries the dynamically added SQL Server source ("mssql"), prints the
     * VARCHAR/INT cells of every row and returns "column:TYPE:label" headers.
     *
     * @param sql select statement to run
     * @return "columnName:UPPERCASE_TYPE:columnLabel" entries in result-set order
     * @throws Exception on SQL errors or if the "mssql" source is missing
     */
    public List<String> getMssqlData(String sql) throws Exception {
        List<String> names = new ArrayList<>();
        try (Connection connection = requireDataSource("mssql").getConnection();
             PreparedStatement statement = connection.prepareStatement(sql);
             // FIX: single execution instead of metadata query + leaked Statement.
             ResultSet resultSet = statement.executeQuery()) {
            HashMap<Integer, String> valType = new HashMap<>();
            ResultSetMetaData mateRsmd = resultSet.getMetaData();
            int columnCount = mateRsmd.getColumnCount();
            for (int i = 0; i < columnCount; i++) {
                int index = i + 1;
                String columnName = mateRsmd.getColumnName(index);
                String columnType = mateRsmd.getColumnTypeName(index).toUpperCase();
                valType.put(i, columnType);
                names.add(columnName + ":" + columnType + ":" + mateRsmd.getColumnLabel(index));
            }
            System.out.println("表头信息 : " + names.toString());
            System.out.println("数据信息 : ");
            while (resultSet.next()) {
                List<Object> rowdata = new ArrayList<>();
                // FIX: was "i <= names.size()" (off-by-one).
                for (int i = 0; i < columnCount; i++) {
                    if ("VARCHAR".equals(valType.get(i))) {
                        rowdata.add(resultSet.getString(i + 1));
                    } else if ("INT".equals(valType.get(i))) {
                        rowdata.add(resultSet.getInt(i + 1));
                    }
                }
                System.out.println(rowdata.toString());
            }
        }
        return names;
    }

    /**
     * Queries the SQLite source, prints the TEXT/INT cells of every row and
     * returns "column:TYPE" headers.
     *
     * @param sql select statement to run
     * @return "columnName:UPPERCASE_TYPE" entries in result-set order
     * @throws Exception on SQL errors or if the "sqlite" source is missing
     */
    public List<String> getSqlite(String sql) throws Exception {
        List<String> names = new ArrayList<>();
        try (Connection connection = requireDataSource("sqlite").getConnection();
             PreparedStatement statement = connection.prepareStatement(sql);
             // FIX: single execution instead of metadata query + leaked Statement.
             ResultSet resultSet = statement.executeQuery()) {
            HashMap<Integer, String> valType = new HashMap<>();
            ResultSetMetaData mateRsmd = resultSet.getMetaData();
            int columnCount = mateRsmd.getColumnCount();
            for (int i = 0; i < columnCount; i++) {
                int index = i + 1;
                String columnName = mateRsmd.getColumnName(index);
                String columnType = mateRsmd.getColumnTypeName(index).toUpperCase();
                valType.put(i, columnType);
                names.add(columnName + ":" + columnType);
            }
            System.out.println("表头信息 : " + names.toString());
            System.out.println("数据信息 : ");
            while (resultSet.next()) {
                List<Object> rowdata = new ArrayList<>();
                // FIX: was "i <= names.size()" (off-by-one).
                for (int i = 0; i < columnCount; i++) {
                    if ("TEXT".equals(valType.get(i))) {
                        rowdata.add(resultSet.getString(i + 1));
                    } else if ("INT".equals(valType.get(i))) {
                        rowdata.add(resultSet.getInt(i + 1));
                    }
                }
                System.out.println(rowdata.toString());
            }
        }
        return names;
    }

    /**
     * Builds a Druid pooled DataSource.
     *
     * @param drive          JDBC driver class name
     * @param url            JDBC connection URL
     * @param username       user name (may be null, e.g. for SQLite)
     * @param password       password (may be null)
     * @param datasourceName pool name; also used for per-source tweaks
     * @return a configured pooled DataSource
     * @throws Exception if Druid rejects the configuration
     */
    public static DataSource createdDataSource(String drive, String url, String username,
                                               String password, String datasourceName) throws Exception {
        // FIX: raw Map replaced with a parameterized one.
        Map<String, String> map = new HashMap<>();
        map.put(DruidDataSourceFactory.PROP_DRIVERCLASSNAME, drive);
        map.put(DruidDataSourceFactory.PROP_URL, url);
        map.put(DruidDataSourceFactory.PROP_USERNAME, username);
        map.put(DruidDataSourceFactory.PROP_PASSWORD, password);
        // Physical connections opened at pool initialization.
        map.put(DruidDataSourceFactory.PROP_INITIALSIZE, "5");
        // Minimum idle connections kept in the pool.
        map.put(DruidDataSourceFactory.PROP_MINIDLE, "5");
        // Maximum active connections.
        map.put(DruidDataSourceFactory.PROP_MAXACTIVE, "15");
        // Max wait for a connection, in milliseconds.
        map.put(DruidDataSourceFactory.PROP_MAXWAIT, "60000");
        // Idle-eviction check interval, in milliseconds.
        map.put(DruidDataSourceFactory.PROP_TIMEBETWEENEVICTIONRUNSMILLIS, "60000");
        // wall = SQL-injection defence, stat = monitoring; SQLite does not support them.
        if (!"sqlite".equals(datasourceName)) {
            map.put(DruidDataSourceFactory.PROP_FILTERS, "wall,stat");
        }
        map.put(DruidDataSourceFactory.PROP_NAME, datasourceName);
        return DruidDataSourceFactory.createDataSource(map);
    }

    /**
     * Demo entry point.
     * 1. In production, first query the full table/column catalog with one SQL;
     * 2. when the SELECT list is explicit, iterating the header is unnecessary;
     * 3. dynamically added sources should also be persisted to the system's own
     *    database after being put into hashMap, so they reload on restart.
     *
     * @param args unused
     * @throws Exception on any connection or query failure
     */
    public static void main(String[] args) throws Exception {
        ConnectAllData connectAllData = new ConnectAllData();
        System.out.println("------------------查询已有数据源mysql的数据-----------------------");
        connectAllData.getUsers("select * from sso_user");
        System.out.println("------------------查询已有数据源sqlite的数据-----------------------");
        connectAllData.getSqlite("select * from tfxq_gl_rygl_rwqd");
        System.out.println("------------------添加数据源 postgres----------------------");
        DataSource plsqlDataSource2 = createdDataSource("org.postgresql.Driver",
                "jdbc:postgresql://IP:PORT/sim_zhsn?useUnicode=true&characterEncoding=UTF-8&useSSL=false&serverTimezone=Asia/Shanghai&zeroDateTimeBehavior=CONVERT_TO_NULL&allowMultiQueries=true",
                "用户名", "密码", "postgres");
        hashMap.put("postgres", plsqlDataSource2);
        System.out.println("新增数据源加载完毕:postgres");
        System.out.println("------------------查询新增postgres的数据-----------------------");
        connectAllData.getDcsGroup("select * from adcs_group limit 2");
        System.out.println("------------------添加数据源 mssql----------------------");
        DataSource plsqlDataSource3 = createdDataSource("com.microsoft.sqlserver.jdbc.SQLServerDriver",
                "jdbc:sqlserver://IP:PORT;DatabaseName=test",
                "用户名", "密码", "mssql");
        hashMap.put("mssql", plsqlDataSource3);
        System.out.println("新增数据源加载完毕:mssql");
        System.out.println("------------------查询 mssql的数据-----------------------");
        connectAllData.getMssqlData("select * from test");
    }
}
3,运行结果
初始化代码块加载已有的数据源
已有数据源加载完毕:mysql,sqlite
------------------查询已有数据源mysql的数据-----------------------
表头信息 : [userId, loginName, name, role, telephone, email]
数据信息 :
[227227b7cab74b00b82c8f7394a04997, cloudwalk, cloudwalk, 普通用户, 18939246730, 112333@qq.com]
[415cabd8c4a348aeaea4a109abdce6ea, zhsnMxgl2022, zhsnMxgl2022, 管理员, 13096350718, jianglixiang1@cloudwalk.com]
[c405574ede8f433fb4931b0538fd3008, zhcs_admin, zhcs_admin, 普通用户, 18012423334, ]
------------------查询已有数据源sqlite的数据-----------------------
10:53:58.423 [main] ERROR com.alibaba.druid.pool.DruidDataSource - testWhileIdle is true, validationQuery not set
表头信息 : [jd:TEXT, yj:TEXT, ej:TEXT, sj:TEXT, sij:TEXT, wj:TEXT]
数据信息 :
[籍田街道, 58634, 25864, 25634, 25863, 25634]
[正兴街道, 32487, 32087, 32408, 34087, 34087]
[兴隆街道, 3287, 3287, 3487, 3287, 3487]
[万安街道, 42768, 4768, 43768, 42768, 32768]
[煎茶街道, 43768, 43768, 42768, 42768, 42768]
------------------添加数据源 postgres----------------------
新增数据源加载完毕:postgres
------------------查询新增postgres的数据-----------------------
表头信息 : [id:VARCHAR, parentid:VARCHAR, name:VARCHAR, update_time:VARCHAR]
数据信息 :
[12380582-2b6a-462c-a21a-c93773eaff80, 743be7a1-ce72-4b7e-a6d0-fc70f675eb9a, 森林防火, 2022-10-28 02:00:00]
[4e1eef9c-44b3-464d-9b5d-5fc9f6336961, 169a26be-262c-4798-be1b-1bf9d1e9ed6a, 湖畔路北段与宝塘一街交叉口, 2022-10-28 02:00:00]
------------------添加数据源 mssql----------------------
新增数据源加载完毕:mssql
------------------查询 mssql的数据-----------------------
表头信息 : [id:VARCHAR:id, name:VARCHAR:name, age:INT:age]
数据信息 :
[a2afaw2, ??, 43]
进程已结束,退出代码0
4, 查询全库表字段信息
sqlserver
SELECT
d.name 表名,
a.name 字段名,
b.name 类型,
isnull( g.[value], ' ' ) AS 说明
FROM syscolumns a
LEFT JOIN systypes b ON a.xtype= b.xusertype
INNER JOIN sysobjects d ON a.id= d.id
AND d.xtype= 'U'
AND d.name<> 'dtproperties'
LEFT JOIN syscomments e ON a.cdefault= e.id
LEFT JOIN sys.extended_properties g ON a.id= g.major_id
AND a.colid= g.minor_id
WHERE
b.name IS NOT NULL
ORDER BY a.colorder