package org.apache.hadoop.hive.client;
import java.io.UnsupportedEncodingException;
import java.sql.SQLException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.DriverManager;
import org.apache.hadoop.hive.client.util.ReadFileUtil;
/**
 *
 * Utility class that talks to a Hive server and executes SQL (compiled to MapReduce jobs).
 *
 * Start the metastore service: hive --service metastore
 * [hduser@master conf]$ netstat -ano | grep 7777
 * tcp 0 0 0.0.0.0:7777 0.0.0.0:* LISTEN off (0.00/0/0)
 * tcp 0 0 10.1.21.77:7777 10.1.21.77:42259 ESTABLISHED keepalive (1691.89/0/0)
 * tcp 0 0 10.1.21.77:7777 10.1.21.77:38764 ESTABLISHED keepalive (7049.14/0/0)
 * tcp 0 0 10.1.21.77:7777 10.1.21.77:38763 ESTABLISHED keepalive (6876.57/0/0)
 * tcp 0 0 10.1.21.77:42259 10.1.21.77:7777 ESTABLISHED off (0.00/0/0)
 * tcp 0 0 10.1.21.77:38763 10.1.21.77:7777 ESTABLISHED off (0.00/0/0)
 * tcp 0 0 10.1.21.77:38764 10.1.21.77:7777 ESTABLISHED off (0.00/0/0)
 *
 * Start the server that exposes JDBC/Thrift to external clients:
 * hive --service hiveserver (listens on port 10000 by default)
 * [hduser@master conf]$ netstat -ano | grep 10000
 * tcp 0 0 0.0.0.0:10000 0.0.0.0:* LISTEN off (0.00/0/0)
 * [hduser@master conf]$
 *
 */
public class HiveJdbcClient {
    // The HiveServer1 driver was "org.apache.hadoop.hive.jdbc.HiveDriver";
    // this client targets HiveServer2 (jdbc:hive2:// URLs).
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";

    // NOTE(review): host/port/database are hard-coded; consider reading them
    // from args or a configuration file.
    private static final String JDBC_URL = "jdbc:hive2://10.1.253.45:10000/default";

    /**
     * Entry point: connects to HiveServer2, runs {@code show tables} against
     * the {@code default} database, and prints each table name to stdout.
     *
     * @param args unused
     * @throws SQLException if connecting or executing the query fails
     */
    public static void main(String[] args) throws SQLException {
        try {
            Class.forName(driverName);
        } catch (ClassNotFoundException e) {
            // The Hive JDBC driver jar is missing from the classpath;
            // nothing useful can be done, so report and exit.
            e.printStackTrace();
            System.exit(1);
        }
        // try-with-resources guarantees the connection, statement, and result
        // set are closed even when the query throws. A connection failure now
        // propagates as SQLException (declared on main) instead of being
        // swallowed and causing an NPE on a null Connection later.
        try (Connection con = DriverManager.getConnection(JDBC_URL, "", "");
                Statement stmt = con.createStatement();
                ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}
// NOTE(review): removed an accidental byte-identical duplicate of the entire
// file (import statements plus a second HiveJdbcClient class) that had been
// pasted after the closing brace above. It made the file uncompilable:
// import declarations may not follow a type declaration, and a class may not
// be declared twice in one compilation unit. No content is lost — the same
// text remains in the first copy at the top of the file.