package com.qjzh.bigdata.api.hadoop.hive.dao.imp;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import com.qjzh.bigdata.api.hadoop.hive.dao.HiveDaoI;
import com.qjzh.bigdata.api.utils.PropertiesUtils;
public class HiveDaoService implements HiveDaoI {

	private static final Logger log = Logger.getLogger(HiveDaoService.class);

	/** Hive JDBC driver class, registered via Class.forName() in openHiveJdbc(). */
	private static String driverName = "org.apache.hive.jdbc.HiveDriver";
	// Connection settings loaded once from the properties file (static block below).
	private static String hdfs_url = null;
	private static String mainDir = null;
	private static String hive_url = null;
	private static String us = null;
	private static String pas = null;

	// Per-call JDBC handles: each public method opens them via openHiveJdbc() and
	// releases them via finish(). NOTE(review): one instance is therefore NOT safe
	// for concurrent use — simultaneous calls clobber each other's conn/pstmt.
	private Connection conn = null;
	private Statement pstmt = null;

	static {
		try {
			hdfs_url = PropertiesUtils.getProValue("hdfs_url");
			mainDir = PropertiesUtils.getProValue("mainDir");
			hive_url = PropertiesUtils.getProValue("hive2.jdbc.url");
			us = PropertiesUtils.getProValue("hive2_user");
			pas = PropertiesUtils.getProValue("hive2_pass");
		} catch (Exception e) {
			// Was e.printStackTrace(); route through the class logger so the failure
			// is visible in the application log. Later connections will still fail
			// with null settings, which surfaces the problem to callers.
			log.error("failed to load Hive/HDFS configuration", e);
		}
	}

	/**
	 * Creates an EXTERNAL Hive table from the given column definitions.
	 *
	 * @param tableName    table name (trusted input — concatenated into DDL, see note below)
	 * @param columnMap    columns as &lt;name : Hive type&gt;
	 * @param initFilePath HDFS data path appended to hdfs_url + mainDir, or null for none
	 * @param isReplace    whether to drop an existing table of the same name first
	 * @param isJson       whether to store rows with the openx JsonSerDe
	 * @return true when all statements executed without throwing
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public boolean createTable(String tableName, Map<String, String> columnMap, String initFilePath, boolean isReplace,
			boolean isJson) throws Exception {
		this.openHiveJdbc();
		try {
			if (isReplace) {
				pstmt.execute("drop table if exists " + tableName);
			}
			// NOTE(review): identifiers cannot be bound as JDBC parameters, so the DDL
			// is built by concatenation; callers must not pass untrusted names/types.
			StringBuilder sqlSb = new StringBuilder("create EXTERNAL table " + tableName + " (");
			for (Entry<String, String> entry : columnMap.entrySet()) {
				sqlSb.append(' ').append(entry.getKey()).append(' ').append(entry.getValue()).append(',');
			}
			if (!columnMap.isEmpty()) {
				sqlSb.deleteCharAt(sqlSb.length() - 1); // drop the trailing comma only if one was written
			}
			sqlSb.append(" ) ");
			if (isJson) {
				sqlSb.append(" Row Format Serde 'org.openx.data.jsonserde.JsonSerDe' ");
			} else {
				sqlSb.append(" stored as textfile "); // fixed: was misspelled "textfie", invalid Hive DDL
			}
			if (initFilePath != null) {
				sqlSb.append(" location '" + hdfs_url + mainDir + initFilePath + "'");
			}
			log.info("table: " + sqlSb); // was System.out.println
			pstmt.execute(sqlSb.toString());
			return true;
		} finally {
			this.finish(null); // always release the connection, even when a statement throws
		}
	}

	/**
	 * Creates a table by executing a caller-supplied CREATE statement.
	 *
	 * @param tableName table dropped first when {@code isReplace} is true
	 * @param sql       complete CREATE TABLE statement to execute
	 * @param isReplace whether to drop an existing table of the same name first
	 * @return true when the statements executed without throwing
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public boolean createTable(String tableName, String sql, boolean isReplace) throws Exception {
		this.openHiveJdbc();
		try {
			if (isReplace) {
				pstmt.execute("drop table if exists " + tableName);
			}
			pstmt.execute(sql);
			return true;
		} finally {
			this.finish(null);
		}
	}

	/**
	 * Drops the given table if it exists.
	 *
	 * @param tableName table name (trusted input)
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public void dropTable(String tableName) throws Exception {
		this.openHiveJdbc();
		try {
			pstmt.execute("drop table if exists " + tableName);
		} finally {
			this.finish(null);
		}
	}

	/**
	 * Drops every table in the list, sharing one connection for the batch.
	 *
	 * @param tableList table names (trusted input)
	 * @throws Exception on driver, connection, or SQL failure; tables already
	 *                   dropped before the failure stay dropped
	 */
	public void dropTable(List<String> tableList) throws Exception {
		this.openHiveJdbc();
		try {
			for (String tableName : tableList) {
				pstmt.execute("drop table if exists " + tableName);
			}
		} finally {
			this.finish(null);
		}
	}

	/**
	 * Runs a query and returns the first column of the LAST row, or null when
	 * the result set is empty.
	 *
	 * @param sql query to execute (trusted input)
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public String find(String sql) throws Exception {
		String resultStr = null;
		this.openHiveJdbc();
		ResultSet res = null;
		try {
			res = pstmt.executeQuery(sql);
			while (res.next()) {
				resultStr = res.getString(1); // deliberately keeps the last row seen
			}
		} finally {
			this.finish(res);
		}
		return resultStr;
	}

	/**
	 * Runs a query and returns the first column of every row.
	 *
	 * @param sql query to execute (trusted input)
	 * @return list of first-column values, empty when no rows match
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public List<String> findStringList(String sql) throws Exception {
		List<String> resultStr = new ArrayList<String>();
		this.openHiveJdbc();
		ResultSet res = null;
		try {
			res = pstmt.executeQuery(sql);
			while (res.next()) {
				resultStr.add(res.getString(1));
			}
		} finally {
			this.finish(res);
		}
		return resultStr;
	}

	/**
	 * Checks whether a table exists via {@code show tables 'name'}.
	 *
	 * @param tableName exact table name pattern
	 * @return the matching table name, or null when it does not exist
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public String showTable(String tableName) throws Exception {
		String result = null;
		String sql = "show tables '" + tableName + "'";
		this.openHiveJdbc();
		ResultSet res = null;
		try {
			res = pstmt.executeQuery(sql);
			if (res.next()) {
				result = res.getString(1);
			}
		} finally {
			this.finish(res);
		}
		return result;
	}

	/**
	 * Returns the table schema as a map of column name to Hive type.
	 *
	 * @param tableName table to describe
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public Map<String, String> describeTable(String tableName) throws Exception {
		Map<String, String> resultMap = new HashMap<String, String>();
		String sql = "describe " + tableName;
		// Fixed: the original executed the query BEFORE opening the connection,
		// which dereferenced a null (or stale) statement.
		this.openHiveJdbc();
		ResultSet res = null;
		try {
			res = pstmt.executeQuery(sql);
			while (res.next()) {
				String propertyname = res.getString(1);
				String type = res.getString(2);
				resultMap.put(propertyname, type);
			}
		} finally {
			this.finish(res);
		}
		return resultMap;
	}

	/**
	 * Runs a query expected to yield one row and returns it as a column->value map.
	 * When several rows come back, later rows overwrite earlier ones.
	 *
	 * @param sql query to execute (trusted input)
	 * @return map keyed by unqualified column name (the part after the last '.');
	 *         empty when no rows match
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public Map<String, Object> findSimpleResult(String sql) throws Exception {
		this.openHiveJdbc();
		Map<String, Object> map = new HashMap<String, Object>();
		ResultSet resultSet = null;
		try {
			resultSet = pstmt.executeQuery(sql);
			ResultSetMetaData metaData = resultSet.getMetaData();
			int col_len = metaData.getColumnCount();
			String[] cols = unqualifiedColumnNames(metaData, col_len);
			while (resultSet.next()) {
				for (int i = 0; i < col_len; i++) {
					map.put(cols[i], resultSet.getObject(i + 1));
				}
			}
		} finally {
			this.finish(resultSet);
		}
		return map;
	}

	/**
	 * Runs a query and returns every row as a column->value map.
	 *
	 * @param sql query to execute (trusted input)
	 * @return one map per row, keyed by unqualified column name
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public List<Map<String, Object>> findModeResult(String sql) throws Exception {
		this.openHiveJdbc();
		List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
		ResultSet resultSet = null;
		try {
			resultSet = pstmt.executeQuery(sql);
			ResultSetMetaData metaData = resultSet.getMetaData();
			int col_len = metaData.getColumnCount();
			String[] cols = unqualifiedColumnNames(metaData, col_len);
			while (resultSet.next()) {
				Map<String, Object> map = new HashMap<String, Object>();
				for (int i = 0; i < col_len; i++) {
					Object cols_value = resultSet.getObject(i + 1);
					log.debug(cols[i] + "->" + cols_value); // was System.out.println on every cell
					map.put(cols[i], cols_value);
				}
				list.add(map);
			}
		} finally {
			this.finish(resultSet);
		}
		return list;
	}

	/**
	 * Runs a query expected to yield one row and maps it onto a bean of type T
	 * via reflection. Bean field names must match the unqualified column names.
	 * When several rows come back, the bean holds the last row.
	 *
	 * @param sql query to execute (trusted input)
	 * @param cls bean class with a no-arg constructor and fields named after columns
	 * @return populated bean, or null when no rows match
	 * @throws Exception on driver/connection/SQL failure, or when reflection fails
	 *                   (missing field, missing constructor, incompatible type)
	 */
	public <T> T findSimpleRefResult(String sql, Class<T> cls) throws Exception {
		this.openHiveJdbc();
		T resultObject = null;
		ResultSet resultSet = null;
		try {
			resultSet = pstmt.executeQuery(sql);
			ResultSetMetaData metaData = resultSet.getMetaData();
			int col_len = metaData.getColumnCount();
			String[] cols = unqualifiedColumnNames(metaData, col_len);
			while (resultSet.next()) {
				// Replaces deprecated cls.newInstance(); still requires a no-arg ctor.
				resultObject = cls.getDeclaredConstructor().newInstance();
				for (int i = 0; i < col_len; i++) {
					Field field = cls.getDeclaredField(cols[i]);
					field.setAccessible(true); // allow access to private bean fields
					field.set(resultObject, resultSet.getObject(i + 1));
				}
			}
		} finally {
			this.finish(resultSet);
		}
		return resultObject;
	}

	/**
	 * Runs a query and maps every row onto a bean of type T via reflection.
	 * Bean field names must match the unqualified column names.
	 *
	 * @param sql query to execute (trusted input)
	 * @param cls bean class with a no-arg constructor and fields named after columns
	 * @return one populated bean per row, empty when no rows match
	 * @throws Exception on driver/connection/SQL failure, or when reflection fails
	 */
	public <T> List<T> findMoreRefResult(String sql, Class<T> cls) throws Exception {
		this.openHiveJdbc();
		List<T> list = new ArrayList<T>();
		ResultSet resultSet = null;
		try {
			resultSet = pstmt.executeQuery(sql);
			ResultSetMetaData metaData = resultSet.getMetaData();
			int col_len = metaData.getColumnCount();
			String[] cols = unqualifiedColumnNames(metaData, col_len);
			while (resultSet.next()) {
				T resultObject = cls.getDeclaredConstructor().newInstance();
				for (int i = 0; i < col_len; i++) {
					Object cols_value = resultSet.getObject(i + 1);
					if (cols_value == null) {
						// Preserved from the original: null becomes "". NOTE(review): this
						// throws IllegalArgumentException for non-String fields — confirm
						// every mapped bean only uses String fields for nullable columns.
						cols_value = "";
					}
					Field field = cls.getDeclaredField(cols[i]);
					field.setAccessible(true); // allow access to private bean fields
					field.set(resultObject, cols_value);
				}
				list.add(resultObject);
			}
		} finally {
			this.finish(resultSet);
		}
		return list;
	}

	/**
	 * Strips any "table." qualifier from each column name, keeping the part
	 * after the last dot (Hive reports columns as "alias.column").
	 */
	private static String[] unqualifiedColumnNames(ResultSetMetaData metaData, int col_len) throws SQLException {
		String[] cols = new String[col_len];
		for (int i = 0; i < col_len; i++) {
			String cn = metaData.getColumnName(i + 1);
			cols[i] = cn.substring(cn.lastIndexOf('.') + 1);
		}
		return cols;
	}

	/**
	 * Registers the Hive driver and opens a fresh connection/statement into the
	 * conn/pstmt fields.
	 *
	 * @throws Exception when the driver class is missing or the connection fails
	 */
	private void openHiveJdbc() throws Exception {
		log.info("HiveDaoService.openHiveJdbc start");
		Class.forName(driverName);
		log.info("hive_url = " + hive_url + ", user = " + us);
		// Security fix: the password is intentionally no longer logged.
		conn = DriverManager.getConnection(hive_url, us, pas);
		pstmt = conn.createStatement();
		log.info("HiveDaoService.openHiveJdbc end");
	}

	/**
	 * Closes the result set, statement, and connection. Each close is attempted
	 * even when an earlier one throws (the original leaked pstmt/conn whenever
	 * res.close() failed).
	 *
	 * @param res result set to close first, or null when the call produced none
	 * @throws Exception the first close failure, after the remaining closes ran
	 */
	private void finish(ResultSet res) throws Exception {
		log.info("HiveDaoService.finish start");
		try {
			if (res != null) {
				res.close();
			}
		} finally {
			try {
				if (pstmt != null) {
					pstmt.close();
				}
			} finally {
				if (conn != null) {
					conn.close();
				}
			}
		}
		log.info("HiveDaoService.finish end");
	}

	/** Ad-hoc smoke test: dumps one table as a list of row maps. */
	public static void main(String[] args) throws Exception {
		HiveDaoService a = new HiveDaoService();
		// Fixed: the original called a.openHiveJdbc() here, leaking a connection
		// that findModeResult() immediately replaced with its own.
		String sql = "select * from t_macc_macc_20160101";
		List<Map<String, Object>> rs = a.findModeResult(sql);
		System.out.println(rs);
	}
}
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import com.qjzh.bigdata.api.hadoop.hive.dao.HiveDaoI;
import com.qjzh.bigdata.api.utils.PropertiesUtils;
// NOTE(review): this entire class is a verbatim duplicate of the HiveDaoService
// declared earlier in this file, and the import block immediately above it sits
// after a closing class brace — both are illegal Java, so the file cannot
// compile in this state. This second copy looks like an accidental paste and
// should be deleted outright; the comments below flag its defects only so that
// whoever reconciles the two copies keeps the corrected one.
public class HiveDaoService implements HiveDaoI {
	private static final Logger log = Logger.getLogger(HiveDaoService.class);
	// Hive JDBC driver class name, registered via Class.forName in openHiveJdbc().
	private static String driverName = "org.apache.hive.jdbc.HiveDriver";
	// Connection settings read once from the properties file in the static block.
	private static String hdfs_url = null;
	private static String mainDir = null;
	private static String hive_url = null;
	private static String us = null;
	private static String pas = null;
	// Per-call JDBC handles; opened by openHiveJdbc() and closed by finish().
	// NOTE(review): shared mutable fields — one instance is not thread-safe.
	private Connection conn = null;
	private Statement pstmt = null;
	static {
		try {
			hdfs_url = PropertiesUtils.getProValue("hdfs_url");
			mainDir = PropertiesUtils.getProValue("mainDir");
			hive_url = PropertiesUtils.getProValue("hive2.jdbc.url");
			us = PropertiesUtils.getProValue("hive2_user");
			pas = PropertiesUtils.getProValue("hive2_pass");
		} catch (Exception e) {
			// NOTE(review): a config failure is only printed; later connection
			// attempts will fail with null settings.
			e.printStackTrace();
		}
	}
	/**
	 * Creates an EXTERNAL Hive table from the given column definitions.
	 *
	 * @param tableName
	 *            table name (concatenated into DDL — trusted input only)
	 * @param columnMap
	 *            columns as &lt;name : Hive type&gt;
	 * @param initFilePath
	 *            HDFS data path appended to hdfs_url + mainDir, or null for none
	 * @param isReplace
	 *            whether to drop an existing table of the same name first
	 * @param isJson
	 *            whether to store rows with the openx JsonSerDe
	 * @return true when all statements executed without throwing
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public boolean createTable(String tableName, Map<String, String> columnMap, String initFilePath, boolean isReplace,
			boolean isJson) throws Exception {
		boolean isRun = false;
		this.openHiveJdbc();
		// NOTE(review): finish() below is skipped when any statement throws,
		// leaking the connection opened above.
		if (isReplace) {
			pstmt.execute("drop table if exists " + tableName);
		}
		StringBuffer sqlSb = new StringBuffer("create EXTERNAL table " + tableName + " (");
		for (Entry<String, String> entry : columnMap.entrySet()) {
			String key = entry.getKey();
			String value = entry.getValue();
			sqlSb.append(" " + key + " " + value + ",");
		}
		// Drops the trailing comma. NOTE(review): with an empty columnMap this
		// deletes the opening '(' instead.
		sqlSb.deleteCharAt(sqlSb.length() - 1);
		sqlSb.append(" ) ");
		if (isJson) {
			sqlSb.append(" Row Format Serde 'org.openx.data.jsonserde.JsonSerDe' ");
		} else {
			// BUG(review): "textfie" is a typo for "textfile" — this DDL is rejected by Hive.
			sqlSb.append(" stored as textfie ");
		}
		if (initFilePath != null) {
			sqlSb.append(" location '" + hdfs_url + mainDir + initFilePath + "'");
		}
		System.out.println("table:" + sqlSb.toString());
		pstmt.execute(sqlSb.toString());
		this.finish(null);
		isRun = true;
		return isRun;
	}
	/**
	 * Creates a table by executing a caller-supplied CREATE statement,
	 * optionally dropping an existing table of the same name first.
	 */
	public boolean createTable(String tableName, String sql, boolean isReplace) throws Exception {
		boolean isRun = false;
		this.openHiveJdbc();
		if (isReplace) {
			pstmt.execute("drop table if exists " + tableName);
		}
		pstmt.execute(sql);
		this.finish(null);
		isRun = true;
		return isRun;
	}
	/** Drops the given table if it exists. */
	public void dropTable(String tableName) throws Exception {
		this.openHiveJdbc();
		pstmt.execute("drop table if exists " + tableName);
		this.finish(null);
	}
	/** Drops every table in the list, sharing one connection for the batch. */
	public void dropTable(List<String> tableList) throws Exception {
		this.openHiveJdbc();
		for (String tableName : tableList) {
			pstmt.execute("drop table if exists " + tableName);
		}
		this.finish(null);
	}
	/**
	 * Runs a query and returns the first column of the LAST row, or null when
	 * the result set is empty.
	 */
	public String find(String sql) throws Exception {
		String resultStr = null;
		this.openHiveJdbc();
		ResultSet res = pstmt.executeQuery(sql);
		while (res.next()) {
			resultStr = res.getString(1);
		}
		this.finish(res);
		return resultStr;
	}
	/** Runs a query and returns the first column of every row. */
	public List<String> findStringList(String sql) throws Exception {
		List<String> resultStr = new ArrayList<String>();
		this.openHiveJdbc();
		ResultSet res = pstmt.executeQuery(sql);
		while (res.next()) {
			resultStr.add(res.getString(1));
		}
		this.finish(res);
		return resultStr;
	}
	/**
	 * Checks whether a table exists via "show tables 'name'"; returns the
	 * matching table name or null.
	 */
	public String showTable(String tableName) throws Exception {
		String result = null;
		String sql = "show tables '" + tableName + "'";
		this.openHiveJdbc();
		ResultSet res = pstmt.executeQuery(sql);
		if (res.next()) {
			result = res.getString(1);
		}
		this.finish(res);
		return result;
	}
	/** Returns the table schema as a map of column name to Hive type. */
	public Map<String, String> describeTable(String tableName) throws Exception {
		Map<String, String> resultMap = new HashMap<String, String>();
		String sql = "describe " + tableName;
		// BUG(review): the query runs BEFORE openHiveJdbc() below, so pstmt is
		// null (or a closed statement from a previous call) at this point.
		ResultSet res = pstmt.executeQuery(sql);
		this.openHiveJdbc();
		while (res.next()) {
			String propertyname = res.getString(1);
			String type = res.getString(2);
			resultMap.put(propertyname, type);
		}
		this.finish(res);
		return resultMap;
	}
	/**
	 * Runs a query expected to yield one row and returns it as a map keyed by
	 * unqualified column name (part after the last '.'); later rows overwrite
	 * earlier ones.
	 *
	 * @param sql query to execute
	 * @return column->value map, empty when no rows match
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public Map<String, Object> findSimpleResult(String sql) throws Exception {
		this.openHiveJdbc();
		Map<String, Object> map = new HashMap<String, Object>();
		int index = 1; // NOTE(review): unused
		ResultSet resultSet = pstmt.executeQuery(sql); // run the query
		ResultSetMetaData metaData = resultSet.getMetaData();
		int col_len = metaData.getColumnCount();
		String[] cols = new String[col_len];
		for (int i = 0; i < col_len; i++) {
			// Strip any "table." qualifier, keeping the bare column name.
			String cn = metaData.getColumnName(i + 1);
			cols[i] = cn.substring(cn.lastIndexOf(".") + 1, cn.length());
		}
		while (resultSet.next()) {
			for (int i = 0; i < col_len; i++) {
				String cols_name = metaData.getColumnName(i + 1);
				Object cols_value = resultSet.getObject(cols_name);
				map.put(cols[i], cols_value);
			}
		}
		this.finish(null); // NOTE(review): resultSet is never closed explicitly
		return map;
	}
	/**
	 * Runs a query and returns every row as a column->value map.
	 *
	 * @param sql query to execute
	 * @return one map per row, keyed by unqualified column name
	 * @throws Exception on driver, connection, or SQL failure
	 */
	public List<Map<String, Object>> findModeResult(String sql) throws Exception {
		this.openHiveJdbc();
		List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
		int index = 1; // NOTE(review): unused
		ResultSet resultSet = pstmt.executeQuery(sql);
		ResultSetMetaData metaData = resultSet.getMetaData();
		int col_len = metaData.getColumnCount();
		String[] cols = new String[col_len];
		for (int i = 0; i < col_len; i++) {
			String cn = metaData.getColumnName(i + 1);
			cols[i] = cn.substring(cn.lastIndexOf(".") + 1, cn.length());
		}
		while (resultSet.next()) {
			Map<String, Object> map = new HashMap<String, Object>();
			for (int i = 0; i < col_len; i++) {
				String cols_name = metaData.getColumnName(i + 1);
				Object cols_value = resultSet.getObject(cols_name);
				System.out.println(cols_name + "->" + cols_value); // NOTE(review): debug print on every cell
				map.put(cols[i], cols_value);
			}
			list.add(map);
		}
		this.finish(null);
		return list;
	}
	/**
	 * Runs a query expected to yield one row and maps it onto a bean of type T
	 * via reflection; bean field names must match the unqualified column names.
	 * When several rows come back, the bean holds the last row.
	 *
	 * @param sql query to execute
	 * @param cls bean class with a no-arg constructor
	 * @return populated bean, or null when no rows match
	 * @throws Exception on SQL or reflection failure
	 */
	public <T> T findSimpleRefResult(String sql, Class<T> cls) throws Exception {
		this.openHiveJdbc();
		T resultObject = null;
		int index = 1; // NOTE(review): unused
		ResultSet resultSet = pstmt.executeQuery(sql);
		ResultSetMetaData metaData = resultSet.getMetaData();
		int col_len = metaData.getColumnCount();
		String[] cols = new String[col_len];
		for (int i = 0; i < col_len; i++) {
			String cn = metaData.getColumnName(i + 1);
			cols[i] = cn.substring(cn.lastIndexOf(".") + 1, cn.length());
		}
		while (resultSet.next()) {
			// Create an instance reflectively (cls.newInstance() is deprecated).
			resultObject = cls.newInstance();
			for (int i = 0; i < col_len; i++) {
				String cols_name = metaData.getColumnName(i + 1);
				Object cols_value = resultSet.getObject(cols_name);
				Field field = cls.getDeclaredField(cols[i]);
				field.setAccessible(true); // allow access to private bean fields
				field.set(resultObject, cols_value);
			}
		}
		this.finish(null);
		return resultObject;
	}
	/**
	 * Runs a query and maps every row onto a bean of type T via reflection;
	 * bean field names must match the unqualified column names.
	 *
	 * @param sql query to execute
	 * @param cls bean class with a no-arg constructor
	 * @return one populated bean per row, empty when no rows match
	 * @throws Exception on SQL or reflection failure
	 */
	public <T> List<T> findMoreRefResult(String sql, Class<T> cls) throws Exception {
		this.openHiveJdbc();
		List<T> list = new ArrayList<T>();
		int index = 1; // NOTE(review): unused
		ResultSet resultSet = pstmt.executeQuery(sql);
		ResultSetMetaData metaData = resultSet.getMetaData();
		int col_len = metaData.getColumnCount();
		String[] cols = new String[col_len];
		for (int i = 0; i < col_len; i++) {
			String cn = metaData.getColumnName(i + 1);
			cols[i] = cn.substring(cn.lastIndexOf(".") + 1, cn.length());
		}
		while (resultSet.next()) {
			// Create an instance reflectively (cls.newInstance() is deprecated).
			T resultObject = cls.newInstance();
			for (int i = 0; i < col_len; i++) {
				String cols_name = metaData.getColumnName(i + 1);
				Object cols_value = resultSet.getObject(cols_name);
				if (cols_value == null) {
					// NOTE(review): replacing null with "" throws for non-String
					// fields in field.set below — confirm nullable columns only
					// map to String fields. Also inconsistent with
					// findSimpleRefResult, which passes null through.
					cols_value = "";
				}
				Field field = cls.getDeclaredField(cols[i]);
				field.setAccessible(true); // allow access to private bean fields
				field.set(resultObject, cols_value);
			}
			list.add(resultObject);
		}
		this.finish(null);
		return list;
	}
	/**
	 * Registers the Hive driver and opens a fresh connection/statement into
	 * the conn/pstmt fields.
	 */
	private void openHiveJdbc() throws Exception {
		log.info("HiveDaoService.openHiveJdbc start");
		log.info("driverName = "+ driverName);
		Class.forName(driverName);
		log.info("hive_url = "+ hive_url);
		log.info("us = "+ us);
		// SECURITY(review): the password is written to the log here.
		log.info("pas = "+ pas);
		conn = DriverManager.getConnection(hive_url,us,pas);
		log.info("conn = "+ conn);
		pstmt = conn.createStatement();
		log.info("pstmt = "+ pstmt);
		log.info("HiveDaoService.openHiveJdbc end");
	}
	/**
	 * Closes the result set (when given), statement, and connection.
	 * NOTE(review): if an earlier close throws, the later handles leak.
	 */
	private void finish(ResultSet res) throws Exception {
		log.info("HiveDaoService.finish start");
		if (res != null) {
			res.close();
		}
		if (pstmt != null) {
			pstmt.close();
		}
		if (conn != null) {
			conn.close();
		}
		log.info("HiveDaoService.finish end");
	}
	/** Ad-hoc smoke test: dumps one table as a list of row maps. */
	public static void main(String[] args) throws Exception {
		HiveDaoService a = new HiveDaoService();
		// NOTE(review): this open is redundant — findModeResult opens its own
		// connection, so the one created here is leaked.
		a.openHiveJdbc();
		String sql = "select * from t_macc_macc_20160101";
		List<Map<String, Object>> rs = a.findModeResult(sql);
		System.out.println(rs);
	}
}