1、pom依赖
<!-- Hive JDBC driver plus the server-side artifacts it transitively needs
     (exec/metastore/cli/common/service/shims). All share one ${hive.version}
     property so the client libraries stay in lockstep with the cluster. -->
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-exec</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-metastore</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-cli</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-common</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-service</artifactId>
<version>${hive.version}</version>
</dependency>
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-shims</artifactId>
<version>${hive.version}</version>
</dependency>
<!-- HCatalog core, required for table/schema access through the metastore. -->
<dependency>
<groupId>org.apache.hive.hcatalog</groupId>
<artifactId>hive-hcatalog-core</artifactId>
<version>${hive.version}</version>
</dependency>
<!-- Thrift fb303 service base classes used by the metastore Thrift client. -->
<dependency>
<groupId>org.apache.thrift</groupId>
<artifactId>libfb303</artifactId>
<version>0.9.3</version>
</dependency>
<!-- MySQL driver for the metastore's backing relational database. -->
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>5.1.47</version>
</dependency>
2、代码编写
/**
 * Demonstrates three ways to talk to Hive:
 * 1) query table rows over JDBC (HiveServer2, port 10000),
 * 2) read a table's column schema via {@code DESCRIBE},
 * 3) read database/table/partition metadata via the metastore Thrift API (port 9083).
 *
 * @param args unused
 * @throws Exception if the driver cannot be loaded or any Hive call fails
 */
public static void main(String[] args) throws Exception {
    // Explicit driver registration (optional with JDBC 4+ service loading,
    // kept because some shaded Hive driver jars lack the service descriptor).
    Class.forName("org.apache.hive.jdbc.HiveDriver");

    // try-with-resources closes connection/statement/result sets even when a
    // query throws; the original only closed them on the happy path and also
    // leaked a second, never-closed Statement created for the DESCRIBE query.
    try (Connection con = DriverManager.getConnection("jdbc:hive2://IP:10000/default", "hive", "hive");
         Statement st = con.createStatement()) {

        // 1) Query rows from a Hive table.
        try (ResultSet rs = st.executeQuery("SELECT * from ods.dept")) {
            while (rs.next()) {
                System.out.println(rs.getString(1) + "," + rs.getString(2));
            }
        }

        // 2) Query the table structure; LinkedHashMap preserves column order.
        Map<String, String> allColumnsAndType = new LinkedHashMap<>();
        // executeQuery never returns null, so the original null check was dead code.
        try (ResultSet rs = st.executeQuery("describe " + "src_config_cs.src_paramter")) {
            while (rs.next()) {
                String colName = rs.getString("col_name");
                // DESCRIBE emits a blank col_name line before the partition /
                // extended-info section -- stop at the end of the column list.
                if (StringUtils.isBlank(colName)) {
                    break;
                }
                allColumnsAndType.put(colName, rs.getString("data_type"));
                System.out.println(colName + "\t" + rs.getString("data_type"));
            }
        }

        // 3) Query table metadata through the metastore Thrift client.
        HiveConf hiveConf = new HiveConf();
        hiveConf.set("hive.metastore.uris", "thrift://IP:9083");
        HiveMetaStoreClient hiveMetaStoreClient = new HiveMetaStoreClient(hiveConf);
        // The client is not AutoCloseable in older Hive versions, so close it
        // in a finally block instead of try-with-resources.
        try {
            // Database name defaults to "default" when not specified.
            Database database = hiveMetaStoreClient.getDatabase("src");

            // List every table in the "ods" database.
            for (String tableName : hiveMetaStoreClient.getAllTables("ods")) {
                System.out.println(tableName);
            }

            // Fetch the partition columns of one table.
            Table table = hiveMetaStoreClient.getTable("ods", "ods_merchant");
            List<FieldSchema> schemas = table.getSd().getCols();
            if (table.isSetPartitionKeys()) {
                List<FieldSchema> partitionKeys = table.getPartitionKeys();
                for (FieldSchema key : partitionKeys) {
                    key.setComment("partition key");
                }
                // Append partition columns so schemas holds the full column list
                // (getSd().getCols() alone excludes partition keys).
                schemas.addAll(partitionKeys);
                for (FieldSchema key : partitionKeys) {
                    System.out.println(key);
                }
            }
        } finally {
            hiveMetaStoreClient.close();
        }
    }
}