Straight to the code. Remember to start the Hive JDBC service first: hive --service hiveserver
package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveDemo {

    static {
        // Register the Hive JDBC driver (legacy HiveServer1 driver)
        try {
            Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        // Open a connection to the default database
        Connection conn = DriverManager.getConnection("jdbc:hive://hadoop:10000/default", "", "");
        Statement st = conn.createStatement();
        String tableName = "u1_data";

        // Drop the table if it already exists
        st.executeQuery("drop table " + tableName);

        // Create the table as a tab-delimited text file
        ResultSet rs = st.executeQuery("create table " + tableName + "("
                + "userid int,"
                + "movieid int,"
                + "rating int,"
                + "city string,"
                + "viewTime string"
                + ") "
                + "row format delimited "
                + "fields terminated by '\t' "
                + "stored as textfile");

        // List all tables
        String sql = "show tables";
        System.out.println("running:" + sql);
        rs = st.executeQuery(sql);
        if (rs.next()) {
            System.out.println(rs.getString(1));
        }

        // Describe the table schema
        sql = "describe " + tableName;
        System.out.println("running:" + sql);
        rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }

        // Load data from HDFS into the table
        String filePath = "hdfs://hadoop:9000/input";
        sql = "load data inpath '" + filePath + "' overwrite into table " + tableName;
        System.out.println("running:" + sql);
        rs = st.executeQuery(sql);

        // Query the first 5 rows
        sql = "select * from " + tableName + " limit 5";
        System.out.println("running:" + sql);
        rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(3) + "\t" + rs.getString(4));
        }

        // Count the rows
        sql = "select count(*) from " + tableName;
        System.out.println("running:" + sql);
        rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }

        // Release resources
        rs.close();
        st.close();
        conn.close();
    }
}
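The code above targets the legacy HiveServer1 service (hive --service hiveserver). If your cluster runs HiveServer2 instead (started with hiveserver2 or hive --service hiveserver2), the driver class and JDBC URL scheme are different. Below is a minimal connection sketch assuming HiveServer2 listens on hadoop:10000 with no authentication configured; adjust the host, port, user, and password to match your deployment.

// Minimal HiveServer2 sketch (assumes hadoop:10000, no authentication configured).
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveServer2Demo {
    public static void main(String[] args) throws Exception {
        // HiveServer2 uses a different driver class and the jdbc:hive2:// URL scheme
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection("jdbc:hive2://hadoop:10000/default", "", "");
        Statement st = conn.createStatement();
        // Simple smoke test: list the tables in the default database
        ResultSet rs = st.executeQuery("show tables");
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
        rs.close();
        st.close();
        conn.close();
    }
}

In both cases the client classpath needs the hive-jdbc jar plus its Hadoop/Thrift dependencies; the exact jar names vary by Hive version.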
Original post: http://www.cnblogs.com/jsunday/p/3872763.html