Hadoop - Operating Hive from Java 2020-01-05

Table of Contents

Hadoop_Hive

1. Operating Hive from Java

Code

Output

2. Hive Notes


Hadoop_Hive

1. Operating Hive from Java

Code

package com.lius.hive.javaOperation;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

import org.apache.log4j.Logger;

/**
 * Hive JDBC client
 * @author Administrator
 *
 */
public class hiveJdbcCli {

	private static String driverName = "org.apache.hive.jdbc.HiveDriver";
	private static String url = "jdbc:hive2://hadoop1:10000";
	private static String user = "root";
	private static String password ="123456";
	private static String sql = "";
	private static Connection conn;
	private static Statement stmt;
	private static ResultSet rs;
//	private static final Logger log = Logger.getLogger(hiveJdbcCli.class);
	private static String tableName = "t_hive3";
	public static void main(String[] args) throws SQLException {
		
		try {
			conn = getConn();
			//Step 1: drop the table if it already exists
			dropTable(conn);
			//Step 2: create the table
			createTable(conn,tableName);
			//Step 3: show the created table
			showTables(conn,tableName);
			//Step 4: describe the table
			describeTables(conn,tableName);
			//Step 5: load data into the table
//			loadData(conn,tableName,"/usr/lius/hive/nums/20200105/nums.txt");
			loadLocalData(conn,tableName,"/root/nums.txt");
			//Step 6: run a select * query
			selectData(conn,tableName);
			//Step 7: run a regular Hive count query
			countData(conn,tableName);
		}catch (Exception e) {
			e.printStackTrace();
		}finally {
			if(conn!=null) {
				conn.close();
			}
			
		}
	
	}
/**
 * Step 7: run a regular Hive count query
 * @param conn
 * @param tableName
 * @throws SQLException
 */
private static void countData(Connection conn, String tableName) throws SQLException {
		sql = String.format("select count(1) from %s  ", tableName);
		rs = executeQuery();
		printResultSet(rs);
}

/**
 * Step 6: run a select * query
 * @param conn
 * @param tableName
 * @throws SQLException
 */
private static void selectData(Connection conn, String tableName) throws SQLException {
		sql = String.format("select * from %s", tableName);
		rs = executeQuery();
		printResultSet(rs);
}

/**
 * Step 5 (HDFS variant): load data into the table from an HDFS path
 * @param conn
 * @param tableName
 * @param hdfsPath
 * @throws SQLException
 */
private static void loadData(Connection conn, String tableName,String hdfsPath) throws SQLException {
		sql = String.format("load data  inpath '%s' into table %s",hdfsPath,tableName);
		execute();
}

/**
 * Step 5: load data into the table from the local file system
 * @param conn
 * @param tableName
 * @param localPath
 * @throws SQLException
 */
private static void loadLocalData(Connection conn, String tableName,String localPath) throws SQLException {
	sql = String.format("load data local inpath '%s' into table %s",localPath,tableName);
	execute();
}


/**
 * Step 4: describe the table
 * @param conn
 * @param tableName
 * @throws SQLException
 */
private static void describeTables(Connection conn, String tableName) throws SQLException {
		sql = String.format("describe  %s", tableName);
		rs = executeQuery();
		printResultSet(rs);
}

/**
 * Step 3: show the created table
 * @param conn
 * @param tableName
 * @throws SQLException
 */
private static void showTables(Connection conn, String tableName) throws SQLException {
		sql = String.format("show tables '%s'", tableName);
		rs = executeQuery();
		printResultSet(rs);
}

/**
 * Step 2: create the table
 * @param conn
 * @param tableName
 * @throws SQLException
 */
private static void createTable(Connection conn, String tableName) throws SQLException {
	sql = String.format("create table %s(a int,b int,c int) row format delimited fields TERMINATED by '.'", tableName);
	execute();
}

/**
 * Step 1: drop the table if it already exists
 * @param conn
 * @return the table name
 * @throws SQLException
 */
private static String dropTable(Connection conn) throws SQLException {
	sql = String.format("drop table if exists %s", tableName);
	execute();
	return tableName;
}


/**
 * Execute a SQL statement that returns no result set
 * @throws SQLException
 */
private static void execute() throws SQLException {
	stmt = conn.createStatement();
	printSQL();
	stmt.execute(sql);
	stmt.close();
}
/**
 * Execute a SQL query and return its result set
 * @return
 * @throws SQLException
 */
private static ResultSet executeQuery() throws SQLException {
	stmt = conn.createStatement();
	printSQL();
	rs = stmt.executeQuery(sql);
	return rs;
}

/**
 * Print each row of the result set
 * @param rs
 * @throws SQLException
 */
private static void printResultSet(ResultSet rs) throws SQLException {

	ResultSetMetaData md = rs.getMetaData();
	int counts = md.getColumnCount();
	int x = 1;
	while(rs.next()){
		StringBuffer sbf = new StringBuffer();
		for(int i = 1;i <= counts;i++) {
			sbf.append(" "+rs.getObject(md.getColumnName(i)));
		}
		System.out.println("row"+(x++)+" ==> "+sbf);
	}
	stmt.close();
}

/**
 * Print the SQL about to be run
 */
private static void printSQL() {
	System.out.println("Running SQL:"+sql);
}

/**
 * Open a JDBC connection to HiveServer2
 * @return
 * @throws ClassNotFoundException
 * @throws SQLException
 */
private static Connection getConn() throws ClassNotFoundException, SQLException {
	Class.forName(driverName);
	Connection conn = DriverManager.getConnection(url, user, password);
	return conn;
}
}
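
For reference, the same query path can also be written with try-with-resources so that the Connection, Statement, and ResultSet are closed automatically instead of living in shared static fields. This is only a minimal sketch assuming the same HiveServer2 URL and credentials as above; the class name HiveQueryExample is illustrative and not part of the original project.

package com.lius.hive.javaOperation;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class HiveQueryExample {
	public static void main(String[] args) throws SQLException {
		String url = "jdbc:hive2://hadoop1:10000";
		// With JDBC 4+ the hive-jdbc driver on the classpath is discovered automatically,
		// so an explicit Class.forName call is optional.
		try (Connection conn = DriverManager.getConnection(url, "root", "123456");
		     Statement stmt = conn.createStatement();
		     ResultSet rs = stmt.executeQuery("select count(1) from t_hive3")) {
			while (rs.next()) {
				System.out.println("count = " + rs.getLong(1));
			}
		}
	}
}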

Output

 SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder".
SLF4J: Defaulting to no-operation (NOP) logger implementation
SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details.
Running SQL:drop table if exists t_hive3
Running SQL:create table t_hive3(a int,b int,c int) row format delimited fields TERMINATED by '.'
Running SQL:show tables 't_hive3'
row1 ==>  t_hive3
Running SQL:describe  t_hive3
row1 ==>  a int 
row2 ==>  b int 
row3 ==>  c int 
Running SQL:load data local inpath '/root/nums.txt' into table t_hive3
Running SQL:select * from t_hive3
row1 ==>  1 2 3
row2 ==>  4 5 6
row3 ==>  7 8 9
Running SQL:select count(1) from t_hive3  
row1 ==>  3
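
Given the '.' field delimiter in the create table statement and the three rows printed above, /root/nums.txt presumably contains three lines of the form:

1.2.3
4.5.6
7.8.9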

2. Hive Notes

--Create table statements
create external table people
(
id int,
sex string,
name string
)
partitioned by (logdate string, hour string)
row format delimited fields terminated by ',';

||

create table t_hive(a int,b int,c int) row format delimited fields terminated by '.';


--Append data
    --from HDFS
            alter table people add if not exists partition (logdate='20200105',hour='00') location '/usr/lius/hive/people/20200105';
            ||
            load data inpath '/usr/lius/hive/nums/20200105/nums.txt' overwrite into table t_hive;
    --from the local file system
            load data local inpath '../path' overwrite into table t_hive;
--Conditional query (filter on a partition column)
select * from people where logdate = '20200105';


--UDF (user-defined function)

class helloUDF extends UDF {
    public String evaluate(String sex, String name) {...}
}

add jar xx.jar;
create temporary function helloworld as 'com.lius.hive.helloUDF';
select helloworld(people.sex,people.name) from people;
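
A fuller sketch of such a UDF class is shown below. The package and class name follow the 'com.lius.hive.helloUDF' path used above; the greeting it returns is illustrative, and the old-style org.apache.hadoop.hive.ql.exec.UDF base class (from hive-exec) is assumed.

package com.lius.hive;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

public class helloUDF extends UDF {
	// Called once per row; returns null if either column is null.
	public Text evaluate(Text sex, Text name) {
		if (sex == null || name == null) {
			return null;
		}
		return new Text("hello " + sex.toString() + " " + name.toString());
	}
}

After packaging this class into xx.jar, the add jar / create temporary function / select sequence above makes it callable as helloworld(...).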


--Save query results to a local directory
insert overwrite local directory '../path'  select * from people;


--Match table names by pattern
show tables '*t*';


--Add a column
alter table t_hive add columns(d int);

--Describe the table structure
describe t_hive;

--Rename a table

alter table t_hive rename to t_hive1;


--Create a table and populate it from another table
create table t_hive2 as select * from t_hive;

--Copy the table structure without importing data
create table t_hive3 like t_hive;

--Import data from another table
insert overwrite table t_hive3 select * from t_hive2;

--HiveQL query examples
from (select b,c as c2 from t_hive3) t select t.b,t.c2 limit 2;
select * from t_hive3 limit 2;
