// Druid: streaming reads from a database
package data.deal.handles;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.pool.DruidDataSourceFactory;
import com.alibaba.druid.pool.DruidPooledConnection;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import pool.DBPasswordCenterAgency;
import pool.DbPoolConnection;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileWriter;
import java.io.InputStream;
import java.sql.*;
import java.util.*;
/**
* @description: 流式读取数据库
*/
/**
 * Streams rows out of a database table through a Druid connection pool,
 * serializes each row to JSON, and appends the rows into per-slot text files
 * (slot = hash of a chosen column modulo {@code splitCount}).
 *
 * <p>NOTE(review): only {@code druidDataSource1} is ever initialized here;
 * {@code getConnection(2)} is guarded but will always fail until a second
 * data source is wired up — confirm against the rest of the project.
 */
public class SqlRunner {
    public static Logger logger = Logger.getLogger(SqlRunner.class);
    private static DbPoolConnection dbPoolConnection = null;
    private static DruidDataSource druidDataSource1 = null;
    private static DruidDataSource druidDataSource2 = null;
    // Optional Oracle module/action tags applied to each pooled connection.
    public static String action1 = "";
    public static String action2 = "";
    // NOTE(review): duplicates `logger` (same category); kept for backward compatibility.
    public static Logger LOGGER = Logger.getLogger(SqlRunner.class);

    /**
     * Builds {@code druidDataSource1} from {@code config/DBCon.properties}.
     * When the file carries no password, one is fetched from the password
     * center and injected into the pool. Failures are logged, not thrown.
     */
    public static void Init() {
        try {
            Properties properties1 = loadPropertiesFile("config/DBCon.properties");
            logger.info("数据为: " + properties1.getProperty("url"));
            druidDataSource1 = (DruidDataSource) DruidDataSourceFactory.createDataSource(properties1);
            action1 = properties1.getProperty("action");
            String password = properties1.getProperty("password");
            if (password == null) {
                // Password not stored in the file: resolve it from the central agency.
                password = DBPasswordCenterAgency.GetPassword(
                        properties1.getProperty("dbSource"), properties1.getProperty("username"));
                druidDataSource1.setPassword(password);
            }
        } catch (Exception e) {
            // Log the full stack trace instead of just the message (previously lost).
            logger.error("DBCon.properties配置失败", e);
        }
    }

    /**
     * Loads a properties file from disk.
     *
     * @param fullFile path to the properties file; must be non-null and non-empty
     * @return the loaded properties; {@code null} if the file could not be opened,
     *         possibly empty if loading failed midway (matches original behavior)
     * @throws IllegalArgumentException if {@code fullFile} is null or empty
     */
    private static Properties loadPropertiesFile(String fullFile) {
        if ((fullFile == null) || (fullFile.equals(""))) {
            throw new IllegalArgumentException("Properties file path can not be null" + fullFile);
        }
        Properties p = null;
        // try-with-resources replaces the original catch+finally double-close.
        try (InputStream inputStream = new FileInputStream(new File(fullFile))) {
            p = new Properties();
            p.load(inputStream);
        } catch (Exception e) {
            logger.error("Failed to load properties file: " + fullFile, e);
        }
        return p;
    }

    /** Lazily creates the shared {@link DbPoolConnection} singleton. */
    public static synchronized DbPoolConnection getInstance() {
        if (dbPoolConnection == null) {
            dbPoolConnection = new DbPoolConnection();
        }
        return dbPoolConnection;
    }

    /** Lazily initializes the primary Druid pool. */
    public static synchronized void getPool() {
        if (druidDataSource1 == null) {
            Init();
        }
    }

    /**
     * Borrows a pooled connection and, when an action tag is configured,
     * registers it via DBMS_APPLICATION_INFO.
     *
     * @param sourceType 1 for the primary pool, 2 for the secondary pool
     * @return a pooled connection, or {@code null} for an unknown
     *         {@code sourceType} (preserved original behavior)
     * @throws SQLException if the pool is unavailable or the call fails
     */
    public static DruidPooledConnection getConnection(int sourceType) throws SQLException {
        getPool();
        DruidPooledConnection connection = null;
        switch (sourceType) {
            case 1:
                if (druidDataSource1 == null) {
                    // Init() failed earlier; fail with a clear message instead of an NPE.
                    throw new SQLException("druidDataSource1 is not initialized (check config/DBCon.properties)");
                }
                connection = druidDataSource1.getConnection();
                setAction(connection, action1);
                break;
            case 2:
                if (druidDataSource2 == null) {
                    throw new SQLException("druidDataSource2 is not initialized");
                }
                connection = druidDataSource2.getConnection();
                setAction(connection, action2);
                break;
        }
        return connection;
    }

    /**
     * Tags the session with an Oracle action name. Uses a bind parameter
     * (no string concatenation) and closes the statement (previously leaked).
     */
    private static void setAction(DruidPooledConnection connection, String action) throws SQLException {
        if (!isNotEmpty(action)) {
            return;
        }
        try (CallableStatement st = connection.prepareCall("{ call DBMS_APPLICATION_INFO.SET_ACTION(?) }")) {
            st.setString(1, action);
            st.execute();
        }
    }

    /** @return true when {@code input} is non-null and not only whitespace. */
    public static boolean isNotEmpty(String input) {
        return (input != null) && (!"".equals(input.trim()));
    }

    /**
     * Streams every row of {@code XXX.TB_OBJECT_<tableName>}, serializes each
     * row to JSON (dates as yyyyMMdd), and appends rows to
     * {@code <baseFolder><slot>/<tableName>.txt}, where
     * {@code slot = abs(hash(row[calField]) % splitCount)}. Rows with a blank
     * {@code calField} are skipped. Buffered rows are flushed whenever a slot
     * reaches {@code maxCount}, and again at the end.
     *
     * <p>Read errors are logged (with stack trace) and swallowed, matching the
     * original best-effort behavior.
     *
     * @param tableName  suffix of the table to read (interpolated into the SQL)
     * @param calField   column whose value determines the slot
     * @param splitCount number of slots; must be positive
     * @param maxCount   per-slot buffer size before a flush to disk
     * @param baseFolder output root; slot subfolders are created beneath it
     */
    public static void downLoadData(String tableName, String calField, int splitCount,
                                    int maxCount, String baseFolder) throws Exception {
        if (splitCount <= 0) {
            throw new IllegalArgumentException("splitCount must be positive: " + splitCount);
        }
        Map<Integer, Set<String>> dataMap = new HashMap<>();
        String sql = String.format("SELECT * FROM XXX.TB_OBJECT_%s", tableName);
        SqlRunner.getInstance();
        int count = 0;
        // try-with-resources guarantees rs/ps/conn are closed even if
        // setAutoCommit or executeQuery throws (previously leaked the connection).
        try (DruidPooledConnection conn = getConnection(1)) {
            conn.setAutoCommit(false); // required for a streaming forward-only cursor
            try (PreparedStatement ps = conn.prepareStatement(
                    sql,
                    ResultSet.TYPE_FORWARD_ONLY,
                    ResultSet.CONCUR_READ_ONLY,
                    ResultSet.CLOSE_CURSORS_AT_COMMIT)) {
                ps.setFetchSize(2000);
                try (ResultSet rs = ps.executeQuery()) {
                    // Metadata is loop-invariant; fetch it once, not per row.
                    ResultSetMetaData rd = rs.getMetaData();
                    int columnCount = rd.getColumnCount();
                    while (rs.next()) {
                        count++;
                        if (count % 10000 == 0) {
                            LOGGER.info(tableName + " has load " + count);
                        }
                        JSONObject jsonObject = readRow(rs, rd, columnCount);
                        String calValue = jsonObject.getString(calField);
                        if (StringUtils.isBlank(calValue)) {
                            continue; // cannot compute a slot without the hash column
                        }
                        String dataJson = JSON.toJSONStringWithDateFormat(
                                jsonObject, "yyyyMMdd", SerializerFeature.WriteDateUseDateFormat);
                        int slot = Math.abs(calValue.hashCode() % splitCount);
                        bufferRow(dataMap, slot, dataJson, maxCount, baseFolder, tableName);
                    }
                }
            }
            // Final flush of all partially filled slot buffers.
            for (Map.Entry<Integer, Set<String>> entry : dataMap.entrySet()) {
                appendFile(baseFolder, tableName, entry.getKey(), entry.getValue());
                entry.getValue().clear();
            }
        } catch (Exception e) {
            // Keep best-effort semantics but no longer drop the stack trace.
            LOGGER.error("read:" + tableName + " error", e);
        }
        LOGGER.info(tableName + "has load over:" + count);
    }

    /** Copies the current result-set row into a JSONObject keyed by column label. */
    private static JSONObject readRow(ResultSet rs, ResultSetMetaData rd, int columnCount)
            throws SQLException {
        JSONObject jsonObject = new JSONObject();
        for (int i = 1; i <= columnCount; i++) {
            // Read by index: faster and unambiguous when labels collide.
            jsonObject.put(rd.getColumnLabel(i), rs.getObject(i));
        }
        return jsonObject;
    }

    /** Adds a row to its slot buffer, flushing the buffer once it reaches maxCount. */
    private static void bufferRow(Map<Integer, Set<String>> dataMap, int slot, String dataJson,
                                  int maxCount, String baseFolder, String tableName) throws Exception {
        Set<String> set = dataMap.get(slot);
        if (set == null) {
            set = new HashSet<>();
            dataMap.put(slot, set);
        }
        set.add(dataJson);
        if (set.size() >= maxCount) {
            appendFile(baseFolder, tableName, slot, set);
            set.clear();
        }
    }

    /**
     * Appends a batch of rows to {@code <baseFolder><slot>/<tableName>.txt},
     * creating the slot directory on demand.
     */
    private static void appendFile(String baseFolder, String tableName, int slot, Set<String> set)
            throws Exception {
        String folderPath = baseFolder + slot + File.separator;
        File folder = new File(folderPath);
        // Second exists() tolerates a concurrent mkdirs by another thread.
        if (!folder.exists() && !folder.mkdirs() && !folder.exists()) {
            throw new Exception("Failed to create directory: " + folderPath);
        }
        writeFile(set, folderPath + tableName + ".txt");
    }

    /**
     * Appends each element of {@code collection} as one line to the file
     * {@code name}, using the platform line separator.
     */
    public static void writeFile(Collection<?> collection, String name) throws Exception {
        // try-with-resources: the writer no longer leaks if writeLines throws.
        try (FileWriter fw = new FileWriter(name, true)) {
            IOUtils.writeLines(collection, System.getProperty("line.separator"), fw);
            fw.flush();
        }
    }
}
// Corrections are welcome if you spot any mistakes.