// Java Presto JDBC driver: run queries and write the results to Excel files.
package cn.strivepine.hive;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import cn.hutool.poi.excel.ExcelUtil;
import cn.hutool.poi.excel.ExcelWriter;
import cn.hutool.setting.dialect.Props;
import java.io.File;
import java.io.IOException;
import java.sql.*;
import java.util.List;
public class HiveQuery {
    private static final Log log = LogFactory.get();

    /**
     * Executes {@code sql} through the JDBC driver configured in
     * {@code QueryHive.properties} and writes the entire result set — a header
     * row of column names followed by one row per record — to
     * {@code <working-dir>/<file.path>/<filename>.xlsx}.
     *
     * @param filename output file name without the {@code .xlsx} extension
     * @param sql      the query to execute
     * @throws SQLException if the connection or query fails
     * @throws IOException  if the output directory cannot be resolved
     */
    public void query(String filename, String sql) throws SQLException, IOException {
        Props props = new Props(this.getClass().getClassLoader().getResource("QueryHive.properties"));
        // Output directory is resolved relative to the current working directory.
        File directory = new File("");
        String filePath = directory.getCanonicalPath() + File.separatorChar + props.getProperty("file.path");
        File outputDir = new File(filePath);
        if (!outputDir.exists()) {
            outputDir.mkdirs();
        }
        String url = props.getProperty("query.jdbc.url");
        String user = props.getProperty("query.jdbc.user");
        String password = props.getProperty("query.jdbc.password");
        try {
            Class.forName(props.getProperty("query.jdbc.prestodriver"));
        } catch (ClassNotFoundException e) {
            log.error("driver error!", e);
            System.exit(1);
        }
        List<List<String>> rows = CollUtil.newArrayList();
        // try-with-resources closes the connection, statement, result set and
        // writer on every path. The original leaked the Connection entirely and
        // only closed the writer when no exception was thrown.
        try (Connection connection = DriverManager.getConnection(url, user, password);
             Statement stmt = connection.createStatement();
             ResultSet rs = stmt.executeQuery(sql);
             ExcelWriter writer = ExcelUtil.getWriter(filePath + "/" + filename + ".xlsx")) {
            // Hoisted out of the loops: metadata is invariant across rows.
            ResultSetMetaData meta = rs.getMetaData();
            int columnCount = meta.getColumnCount();
            // JDBC columns are 1-based and INCLUSIVE of columnCount; the original
            // loop condition `i < columnCount` silently dropped the last column
            // from the header and from every data row.
            List<String> header = CollUtil.newArrayList();
            for (int i = 1; i <= columnCount; i++) {
                header.add(meta.getColumnName(i));
            }
            rows.add(header);
            while (rs.next()) {
                List<String> row = CollUtil.newArrayList();
                for (int i = 1; i <= columnCount; i++) {
                    row.add(rs.getString(i));
                }
                rows.add(row);
            }
            // `true` writes the first list entry as a styled header row.
            writer.write(rows, true);
        }
    }
}
package cn.strivepine;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import cn.hutool.log.Log;
import cn.hutool.log.LogFactory;
import cn.hutool.setting.dialect.Props;
import cn.strivepine.hive.HiveQuery;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import com.typesafe.config.ConfigValue;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
public class Main {
    private static final Log log = LogFactory.get();

    /**
     * Entry point. Reads a HOCON config file whose path is supplied via the
     * {@code CONF_FILE} system property, then runs every entry of its "sql"
     * section concurrently on a fixed pool of 10 threads; each entry's key
     * names the Excel output file and its value is the SQL to run.
     *
     * @param args unused
     * @throws InterruptedException if the wait for the pool is interrupted
     */
    public static void main(String[] args) throws InterruptedException {
        String confFile = System.getProperty("CONF_FILE");
        Config config = null;
        if (StrUtil.isNotBlank(confFile)) {
            config = ConfigFactory.parseFile(new File(confFile));
        } else {
            // Original message was truncated ("-DCONF_FILE='' ") — show the full invocation.
            System.out.println("usage: java -DCONF_FILE=<path-to-config> -jar <app>.jar");
            System.exit(0);
        }
        Config sql = config.getConfig("sql");
        HiveQuery queryHive = new HiveQuery();
        ExecutorService executorService = Executors.newFixedThreadPool(10);
        for (Map.Entry<String, ConfigValue> entry : sql.entrySet()) {
            String key = entry.getKey();
            String value = sql.getString(key);
            // Submit directly — no need to buffer Runnables in a list first.
            executorService.submit(() -> {
                try {
                    queryHive.query(key, value);
                } catch (SQLException e) {
                    // Pass the throwable to the logger so the stack trace is kept
                    // (replaces printStackTrace() + a log call that dropped `e`).
                    log.error(e, "SQL ERROR ! Please check key:{},value:{}", key, value);
                } catch (IOException e) {
                    log.error(e);
                }
            });
        }
        executorService.shutdown();
        // Block until every submitted query has finished.
        executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
    }
}