1.下载hive
http://archive.apache.org/dist/hive/ （官方下载地址，选择所需版本，如 apache-hive-1.2.1-bin.tar.gz）
2.解压
3.修改配置文件
1)cp hive-default.xml.template hive-site.xml
2) 修改hive-site.xml
<property>
<name>javax.jdo.option.ConnectionURL</name> <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true</value>
<description>JDBC connect string for a JDBC metastore</description>
</property>
<property>
<name>javax.jdo.option.ConnectionDriverName</name>
<value>com.mysql.jdbc.Driver</value>
<description>Driver class name for a JDBC metastore</description>
</property>
<property>
<name>javax.jdo.option.ConnectionUserName</name>
<value>hive</value>
<description>username to use against metastore database</description>
</property>
<property>
<name>javax.jdo.option.ConnectionPassword</name>
<value>hive</value>
<description>password to use against metastore database</description>
</property>
4.复制最新jline-2.12.jar到hadoop目录下
cp /usr/local/hive/lib/jline-2.12.jar /usr/local/hadoop/share/hadoop/yarn/lib/
5.添加mysql用户
create user 'hive'@'%' identified by 'hive';
grant all on *.* to 'hive'@'%';
6.配置环境变量
export HIVE_HOME=/usr/local/hive
export PATH=$PATH:/usr/local/hive/bin
7.启动hive
hive
备注:
1.jdbc开发:
1).启动hiveServer
hive --service hiveserver2 --hiveconf hive.server2.thrift.port=10000
2).pom.xml中添加hive-jdbc-1.2.1.jar
<dependency>
<groupId>org.apache.hive</groupId>
<artifactId>hive-jdbc</artifactId>
<version>1.2.1</version>
</dependency>
3).代码
package hive;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
/**
 * Demonstrates basic Hive operations over JDBC against a HiveServer2
 * instance: create an external partitioned table, inspect it, load data
 * from HDFS and from the local filesystem, query it, and drop it.
 */
public class HiveJDBCTest {
    public static final String URL = "jdbc:hive2://192.168.126.129:10000/default";
    private static final String DRIVER = "org.apache.hive.jdbc.HiveDriver";
    private static final String PASSWORD = "hive";
    private static final String USER = "hive";

    public static void main(String[] args) {
        // Load the Hive JDBC driver first; without it there is no point
        // attempting a connection, so bail out early on failure.
        try {
            Class.forName(DRIVER);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            return;
        }
        // try-with-resources guarantees the Connection and Statement are
        // closed even when a statement fails (the original leaked both).
        try (Connection conn = DriverManager.getConnection(URL, USER, PASSWORD);
             Statement st = conn.createStatement()) {
            // create table — the partition column is "datetime" so it matches
            // the drop-partition and load statements below (the original
            // declared "ds" here, which made every partition operation fail)
            String sql = "create external table if not exists hive_test(name string,id int) partitioned by (datetime string)";
            st.execute(sql);

            // show tables
            sql = "show tables";
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }

            // describe table
            sql = "desc hive_test";
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }

            // load HDFS data into the table; drop the partition first so a
            // re-run replaces rather than duplicates it
            sql = "alter table hive_test drop if exists partition (datetime='16-02-17-13')";
            st.execute(sql);
            sql = "load data inpath '/user/hadoop/flume/16-02-17-13/*' overwrite into table hive_test partition (datetime='16-02-17-13')";
            st.execute(sql);

            // load data from the server-local filesystem into another partition
            sql = "load data local inpath '/home/watch.log' overwrite into table hive_test partition (datetime='16-02-17-16')";
            st.execute(sql);

            // select query
            sql = "select * from hive_test";
            try (ResultSet rs = st.executeQuery(sql)) {
                while (rs.next()) {
                    System.out.println(rs.getString(1));
                }
            }

            // drop table
            sql = "drop table if exists hive_test ";
            st.execute(sql);
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
}