Hive安装配置使用及java api调用

mysql -u root -proot

建立hive用户,hive数据库,并授权。

create database hive;

grant all on hive.* to hive@'%'  identified by 'hive';

grant all on hive.* to hive@'localhost'  identified by 'hive';

flush privileges; 

 

退出mysql 

exit

 

验证hive用户

mysql -u hive -phive

show databases;

看到如下反馈信息,则说明创建成功

mysql> show databases;

+--------------------+

| Database          |

+--------------------+

| information_schema |

| hive              |

| test              |

+--------------------+

3 rows in set (0.00 sec)

 

退出mysql

exit

 

三,安装hive

1,解压安装包

cd  ~

tar -zxvf apache-hive-1.0.1-bin.tar.gz

2,建立软连接

ln -s apache-hive-1.0.1-bin hive

3,添加环境变量

vi  .bash_profile

导入下面的环境变量

export HIVE_HOME=/home/Hadoop/software/hive

export PATH=$PATH:$HIVE_HOME/bin

 

使其有效

source .bash_profile

 

4,从模板复制出 hive-site.xml 配置文件

cp hive/conf/hive-default.xml.template hive/conf/hive-site.xml

编辑hive-site.xml修改以下参数:

<property>

   <name>javax.jdo.option.ConnectionURL</name>

   <value>jdbc:mysql://localhost:3306/hive</value>

</property>

 

<property>

   <name>javax.jdo.option.ConnectionDriverName</name>

   <value>com.mysql.jdbc.Driver</value>

</property>

 

<property>

   <name>javax.jdo.option.ConnectionPassword</name>

   <value>hive</value>

</property>

 

<property>

   <name>hive.hwi.listen.port</name>

   <value>9999</value>

   <description>This is the port the Hive Web Interface will listen on</description>

</property>



<property>

   <name>datanucleus.autoCreateSchema</name>

   <value>true</value>

</property>

 

<property>

   <name>datanucleus.fixedDatastore</name>

   <value>false</value>

</property>



  <property>

    <name>javax.jdo.option.ConnectionUserName</name>

    <value>hive</value>

    <description>Username to use against metastoredatabase</description>

  </property>



  <property>

    <name>hive.exec.local.scratchdir</name>

    <value>/home/hdpsrc/hive/iotmp</value>

    <description>Local scratch space for Hivejobs</description>

  </property>

  <property>

    <name>hive.downloaded.resources.dir</name>

    <value>/home/hdpsrc/hive/iotmp</value>

    <description>Temporary local directory for addedresources in the remote file system.</description>

  </property>

  <property>

    <name>hive.querylog.location</name>

    <value>/home/hdpsrc/hive/iotmp</value>

    <description>Location of Hive run time structured logfile</description>

  </property>

 

 

5,拷贝mysql-connector-java-5.1.6-bin.jar 到hive 的lib下面

cp /home/hdpsrc/Desktop/mysql-connector-java-5.1.6-bin.jar /home/hdpsrc/hive/lib/

 

 

 

 

hive常用命令     

 

#创建新表

 

hive> CREATE TABLE t_hive (a int, b int, c int) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';

 

#导入数据 t_hive.txt 到表 t_hive

 

hive> LOAD DATA LOCAL INPATH '/home/hadoop/software/test/t_hive.txt' OVERWRITE INTO TABLE t_hive;

 

#正则匹配表名

 

hive>show tables '*t*';

 

#增加一个字段

 

hive> ALTER TABLE t_hive ADD COLUMNS (new_col String);

 

#重命令表名

 

hive> ALTER TABLE t_hive RENAME TO t_hadoop;

 

#HDFS加载数据

 

hive> LOAD DATA INPATH '/user/hive/warehouse/t_hive/t_hive.txt' OVERWRITE INTO TABLE t_hive2;

 

#从其他表导入数据

 

hive> INSERT OVERWRITE TABLE t_hive2 SELECT * FROM t_hive ;

 

#创建表并从其他表导入数据

 

hive> CREATE TABLE t_hive AS SELECT * FROM t_hive2 ;

 

#仅复制表结构不导数据

 

hive> CREATE TABLE t_hive3 LIKE t_hive;

 

#通过Hive导出到本地文件系统

 

hive> INSERT OVERWRITE LOCAL DIRECTORY '/tmp/t_hive' SELECT * FROM t_hive;

 

#Hive查询HiveQL

 

from ( select b,c as c2 from t_hive) t select t.b, t.c2 limit 2;

 

select b,c from t_hive limit 2;

 

#创建视图

 

hive> CREATE VIEW v_hive AS SELECT a,b FROM t_hive;

 

#删表

 

drop table if exists t_hft;

 

#创建分区表

 

-- Rebuild the tick-data table from scratch, partitioned by trading day.
DROP TABLE IF EXISTS t_hft;

CREATE TABLE t_hft (
    SecurityID  STRING,
    tradeTime   STRING,
    PreClosePx  DOUBLE
)
PARTITIONED BY (tradeDate INT)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';

 

#导入分区数据

 

hive> load data local inpath '/home/BlueBreeze/data/t_hft_1.csv' overwrite into table t_hft partition(tradeDate=20130627);

 

#查看分区表

 

hive> SHOW PARTITIONS t_hft;

 

 

api调用:

 

nohup hive --service hiveserver2 &

或者bin/hive --service hiveserver -p 10002

 

代表你已经成功的在端口为10002(默认的端口是10000)启动了hiveserver服务。这时候,你就可以通过Java代码来连接hiveserver

问题1:无法启动,因为用的是hive2,应该使用命令 hive --service hiveserver2

问题2:No suitable driver found for jdbc:hive://192.168.184.169:10000/default。原因:因为用的版本是 hive-1.0.1,url 应该改成 jdbc:hive2://192.168.184.169:10000/default 即可

 

代码如下:

import java.sql.Connection; 

import java.sql.DriverManager; 

import java.sql.ResultSet; 

import java.sql.SQLException; 

import java.sql.Statement; 

 

import org.apache.log4j.Logger; 

 

 /**

  * Hive版本:1.0.1

  *

 * HiveJavaApi

 

 启动hive的远程服务接口命令行执行:bin/hive --service hiveserver2 &

 

 * @author ycblus

*/ 

 public class HiveJdbcClient { 

   static Logger log =Logger.getLogger("HiveJdbcClient");

  

   private static String driverName= "org.apache.hive.jdbc.HiveDriver"; 

   private static String url ="jdbc:hive2://192.168.184.158:10000/default"; 

   private static String user ="hadoop";  //主机的用户名

   private static String password ="hadoop";  //远程主机密码

   private static String sql =""; 

   private static ResultSetres; 

 

   public static void main(String[]args) { 

      

       Connection conn = null; 

       Statement stmt = null; 

       try { 

           conn = getConn(); 

           stmt =conn.createStatement(); 

 

           String tableName ="t_hive";

           // 第一步:存在就先删除 

           dropTable(stmt); 

 

           // 第二步:不存在就创建 

           createTable(stmt,tableName); 

 

           // 第三步:查看创建的表 

           showTables(stmt,tableName); 

 

           // 执行describe table操作 

           describeTables(stmt,tableName); 

 

           // 执行load data into table操作 

           loadData(stmt,tableName); 

 

           // 执行 select * query 操作 

           //selectData(stmt,tableName); 

 

           // 执行 regular hive query 统计操作 

           countData(stmt,tableName);

 

       } catch(ClassNotFoundException e) { 

           e.printStackTrace(); 

           log.error(driverName +" not found!", e); 

           System.exit(1); 

       } catch (SQLException e){ 

           e.printStackTrace(); 

           log.error("Connectionerror!", e); 

           System.exit(1); 

       } finally { 

           try {

               if(res != null){

                   res.close();

               }

              

               if (stmt != null){ 

                   stmt.close(); 

               }

              

               if (conn != null){ 

                  conn.close();  

               }

           } catch (SQLException e){ 

               e.printStackTrace();

           

       

   }

   private static voidcountData(Statement stmt, String tableName) 

           throws SQLException{ 

       sql = "select count(1)from " + tableName; 

       log.info("Running:"+ sql); 

       res =stmt.executeQuery(sql); 

       log.info("执行“regular hive query”运行结果:"); 

       while (res.next()) { 

           log.info("count------>" + res.getString(1)); 

       

   

 

   private static voidselectData(Statement stmt, String tableName) 

           throws SQLException{ 

       sql = "select * from" + tableName; 

       log.info("Running:"+ sql); 

       res =stmt.executeQuery(sql); 

       log.info("执行 select * query 运行结果:"); 

       while (res.next()) { 

           log.info(res.getString(1)+ "\t" + res.getString(2)); 

       

   

 

    private static void loadData(Statement stmt,String tableName) 

            throws SQLException{ 

        String filepath ="/home/hadoop/software/test/t_hive.txt"; 

        sql = "load data localinpath '" + filepath + "' into table " 

                + tableName; 

       log.info("Running:" + sql); 

        stmt.execute(sql); 

    

 

    private static voiddescribeTables(Statement stmt, String tableName) 

            throws SQLException{ 

        sql = "describe "+ tableName; 

       log.info("Running:" + sql); 

        res =stmt.executeQuery(sql); 

        log.info("执行 describe table 运行结果:"); 

        while (res.next()) { 

           log.info(res.getString(1) + "\t" + res.getString(2)); 

        

    

 

    private static voidshowTables(Statement stmt, String tableName) 

            throws SQLException{ 

        sql = "show tables'" + tableName + "'"; 

       log.info("Running:" + sql); 

        res =stmt.executeQuery(sql); 

        log.info("执行 show tables 运行结果:"); 

        if (res.next()) { 

           log.info(res.getString(1)); 

        

    

 

    private static voidcreateTable(Statement stmt, String tableName) 

            throws SQLException{ 

        log.info("执行 create tables:"+tableName); 

        sql = "create table" 

                + tableName 

                + " (a string,b string,c string)  row format delimitedfields terminated by '+'"; 

        stmt.execute(sql); 

    

 

    private static StringdropTable(Statement stmt) throws SQLException { 

        // 创建的表名 

        String tableName ="t_hive"; 

        sql = "drop table" + tableName; 

        stmt.execute(sql); 

        return tableName; 

    

 

    private static ConnectiongetConn() throws ClassNotFoundException, 

            SQLException { 

       Class.forName(driverName); 

        Connection conn =DriverManager.getConnection(url, user, password); 

        return conn; 

    

 

 

用到的jar包如下:



  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值