package com.wacai.stanlee.util;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * Minimal demo of querying Hive over JDBC (HiveServer2).
 *
 * <p>Loads the HiveServer2 driver, opens a connection, runs a single
 * SELECT and prints the first two columns of each row, tab-separated.
 *
 * @author yishou
 * @date 2021/5/17
 */
public class HiveJdbc {
    // Hive >= 0.11.0 uses the HiveServer2 driver below; earlier releases used
    // "org.apache.hadoop.hive.jdbc.HiveDriver" with a "jdbc:hive:" URL scheme.
    private static final String driverName = "org.apache.hive.jdbc.HiveDriver";
    // HiveServer2 JDBC URL: jdbc:hive2://<host>:<port>/<database>
    private static final String url = "jdbc:hive2://172.16.48.24:10000/tmp";

    /**
     * Entry point: connects to Hive, runs the query and prints the result.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        String sql = "select * from ods.1030leads";
        try {
            Class.forName(driverName);
            // try-with-resources closes ResultSet, Statement and Connection in
            // reverse order even when an exception is thrown (the original
            // version leaked all three).
            try (Connection con = DriverManager.getConnection(url, "qa_conn", "qa_conn");
                 Statement stmt = con.createStatement();
                 ResultSet res = stmt.executeQuery(sql)) {
                while (res.next()) {
                    // Fix: the original printed column 1 twice; print columns 1 and 2.
                    System.out.println(res.getString(1) + "\t" + res.getString(2));
                }
            }
        } catch (ClassNotFoundException e) {
            System.out.println("没有找到驱动类");
            e.printStackTrace();
        } catch (SQLException e) {
            System.out.println("连接Hive的信息有问题");
            e.printStackTrace();
        }
    }
}
// Example: operating Hive via the Java JDBC API.
// (Trailing page-scrape text — "latest recommended article published 2023-05-08" — converted to a comment so the file compiles.)