第一次使用hadoop,记录下工具类,也供大家参考一下。
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * JDBC utility for running statements against Hive (HiveServer2).
 *
 * <p>Connection settings are injected by Spring through {@code @Value} setters
 * into static fields so the static helper methods can reach them.
 * NOTE(review): this works only after the Spring context has initialized this
 * component; calling the static methods before that leaves URL/USER/PASSWORD null.
 *
 * <p>Every method opens a fresh connection per call and releases all JDBC
 * resources (ResultSet included) via try-with-resources, which closes them in
 * the correct reverse order (ResultSet, then Statement, then Connection).
 */
@Component
public class HiveJdbcUtil {
    private static final Logger log = LoggerFactory.getLogger(HiveJdbcUtil.class);

    // Driver class name as a literal: same value as before, but without a
    // compile-time dependency on the Hive driver jar; loaded in getConn().
    private static final String DRIVERNAME = "org.apache.hive.jdbc.HiveDriver";

    private static String URL;
    private static String USER;
    private static String PASSWORD;

    @Value("${hive-url}")
    private void setURL(String url) {
        HiveJdbcUtil.URL = url;
    }

    @Value("${hive-user}")
    private void setUSER(String user) {
        HiveJdbcUtil.USER = user;
    }

    @Value("${hive-password}")
    private void setPASSWORD(String password) {
        HiveJdbcUtil.PASSWORD = password;
    }

    /**
     * Executes a callable statement (script), e.g. {@code CALL my_proc()}.
     *
     * @param sql the CALL statement to run
     * @return the result of {@link CallableStatement#execute()}
     * @throws Exception if connecting or executing fails
     */
    public static boolean call(String sql) throws Exception {
        try (Connection conn = getConn();
             CallableStatement stmt = conn.prepareCall(sql)) {
            return stmt.execute();
        }
    }

    /**
     * Executes an UPDATE statement and logs the affected-row count.
     *
     * @param sql the update statement to run
     * @throws Exception if connecting or executing fails
     */
    public static void update(String sql) throws Exception {
        try (Connection conn = getConn();
             Statement stmt = conn.createStatement()) {
            log.info("update data,sql:{}", sql);
            int i = stmt.executeUpdate(sql);
            log.info("update data,result:{}", i);
        }
    }

    /**
     * Executes an INSERT statement and logs the affected-row count.
     *
     * @param sql the insert statement to run
     * @throws Exception if connecting or executing fails
     */
    public static void insertData(String sql) throws Exception {
        try (Connection conn = getConn();
             Statement stmt = conn.createStatement()) {
            log.info("insert data,sql:{}", sql);
            int i = stmt.executeUpdate(sql);
            log.info("insert data,result:{}", i);
        }
    }

    /**
     * Runs a count-style query and returns the first column of the first row.
     *
     * @param sql a query whose first column of the first row is a count
     * @return the count, or {@code null} if the query returned no rows
     * @throws Exception if connecting or executing fails
     */
    public static Long countData(String sql) throws Exception {
        try (Connection conn = getConn();
             Statement stmt = conn.createStatement()) {
            log.info("Running:" + sql);
            // Local ResultSet (was a shared static field): thread-safe and
            // closed by try-with-resources instead of leaking.
            try (ResultSet rs = stmt.executeQuery(sql)) {
                if (rs.next()) {
                    return rs.getLong(1);
                }
            }
        }
        return null;
    }

    /**
     * Runs a detail query and maps each row to an {@link ECostDetailDto}.
     * Columns are read positionally (JDBC indices start at 1, not 0) in the
     * order id, groupid, jobid, billingreference, companycode, eccfiscalyear,
     * eccdocumentnumber, eccpostingdate, eccdocumentitemno, profit-center
     * amount/currency, company amount/currency, transaction amount/currency,
     * itemid — the query's column order must match.
     *
     * @param detailSql the detail query to run
     * @return the mapped rows; empty list if the query returned none
     * @throws Exception if connecting or executing fails
     */
    public static List<ECostDetailDto> selectDetail(String detailSql) throws Exception {
        try (Connection conn = getConn();
             Statement stmt = conn.createStatement()) {
            log.info("select data,sql:{}", detailSql);
            try (ResultSet rs = stmt.executeQuery(detailSql)) {
                List<ECostDetailDto> list = new ArrayList<ECostDetailDto>();
                while (rs.next()) {
                    ECostDetailDto eDto = new ECostDetailDto();
                    eDto.setId(rs.getString(1));
                    eDto.setGroupid(rs.getString(2));
                    eDto.setJobid(rs.getString(3));
                    eDto.setBillingreference(rs.getString(4));
                    eDto.setCompanycode(rs.getString(5));
                    eDto.setEccfiscalyear(rs.getString(6));
                    eDto.setEccdocumentnumber(rs.getString(7));
                    eDto.setEccpostingdate(rs.getDate(8));
                    eDto.setEccdocumentitemno(rs.getString(9));
                    eDto.setProfitcenteramount(rs.getDouble(10));
                    eDto.setProfitcentercurrency(rs.getString(11));
                    eDto.setCompanyamount(rs.getDouble(12));
                    eDto.setCompanycurrency(rs.getString(13));
                    eDto.setTransactionamount(rs.getDouble(14));
                    eDto.setTransactioncurrency(rs.getString(15));
                    eDto.setItemid(rs.getString(16));
                    list.add(eDto);
                }
                return list;
            }
        }
    }

    /**
     * Opens a new Hive connection using the injected URL/user/password.
     *
     * @return a fresh JDBC connection; the caller is responsible for closing it
     * @throws ClassNotFoundException if the Hive driver jar is not on the classpath
     * @throws SQLException if the connection cannot be established
     */
    public static Connection getConn() throws ClassNotFoundException,
            SQLException {
        Class.forName(DRIVERNAME);
        Connection conn = DriverManager.getConnection(URL, USER, PASSWORD);
        return conn;
    }
}
注意点:ResultSet中取值是从1开始的,不是0.
sql使用的是HiveQL(语法与标准SQL类似),记录两条:
CALL rdl_cecc_bsis_detail_active() //call脚本
SELECT * FROM rdl_cecc_bsis_detail //查询,不要库名,直接查表
配置文件中添加:
hive-url=${HIVE-URL:jdbc:hive2\://111.111.111.111:10000/dataBaseName}
hive-driver-class-name=org.apache.hive.jdbc.HiveDriver
hive-user=${HIVE-USER}
hive-password=${HIVE-PASSWORD}
另外,手动导入inceptor-driver-5.2.2.jar,这个jar包可以在网上搜到,也可以下载我上传的资源。暂时没有找到依赖配置