# Download Hive 0.10.0 from the Apache mirror.
wget http://labs.mop.com/apache-mirror/hive/hive-0.10.0/hive-0.10.0.tar.gz
# NOTE(review): the original steps referenced hive-0.9.0 here even though
# 0.10.0 was downloaded above — unified to 0.10.0 so every step operates on
# the same release (the rest of the file also uses /usr/local/hive-0.10.0).
mv hive-0.10.0.tar.gz /home/hadoop/file/
# Hand the unpacked tree to the hadoop user and make it world-readable.
chown -R hadoop hive-0.10.0/
chmod 755 -R hive-0.10.0/
# Create the environment script from its template, then edit it to contain
# the two exports below.
cp hive-env.sh.template hive-env.sh
vi hive-env.sh
export HIVE_CONF_DIR=/usr/local/hive-0.10.0/conf
export HADOOP_HOME=/usr/local/hadoop/hadoop-0.23.6
# Switch to a root shell to edit the system-wide profile.
sudo -i
# Add the exports below to /etc/profile so they apply to all users.
vi /etc/profile
export HIVE_HOME=/usr/local/hive-0.10.0
export HADOOP_HOME=/usr/local/hadoop/hadoop-0.23.6
export HIVE_CONF_DIR=$HIVE_HOME/conf
export HIVE_LIB=$HIVE_HOME/lib
# NOTE(review): CLASSPATH appends $HADOOP_HOME itself (a directory), not its
# jar files — confirm this is intentional for this Hadoop 0.23 layout.
export CLASSPATH=$HIVE_HOME/lib:$JAVA_HOME/lib:$JAVA_HOME/jre/lib:$HADOOP_HOME
export PATH=$HIVE_HOME/bin:$HADOOP_HOME/bin:$JAVA_HOME/bin:/sbin/:/bin:$PATH
# Drop back to the hadoop user and load the updated profile into this shell.
su hadoop
source /etc/profile
# Become root to install the JDBC driver and the MySQL packages.
sudo -i
# Place MySQL Connector/J in Hive's lib directory so the metastore can talk
# to MySQL over JDBC.
mv mysql-connector-java-5.1.18-bin.jar /usr/local/hive-0.10.0/lib/
chown hadoop mysql-connector-java-5.1.18-bin.jar
chmod 755 mysql-connector-java-5.1.18-bin.jar
# NOTE(review): the server package name below looks truncated (no version
# suffix or .rpm extension) — confirm the full file name before running.
rpm -ivh MySQL-server-community-5.1
rpm -ivh --replacefiles MySQL-client-5.1.7-0.i386.rpm
# Confirm mysqld is running and listening.
netstat -anp | grep mysql
# Launch the client (binary lives in /usr/bin).
cd /usr/bin
mysql
-- Create the metastore account and grant it full privileges from any host.
-- IMPORTANT: IDENTIFIED BY is repeated on each GRANT because on MySQL 5.1
-- (NO_AUTO_CREATE_USER off by default) GRANT auto-creates a missing account
-- such as 'hive'@'localhost' — without the clause it would be created with
-- an EMPTY password.
mysql> CREATE USER 'hive' IDENTIFIED BY 'hive';
mysql> GRANT ALL PRIVILEGES ON *.* TO 'hive'@'%' IDENTIFIED BY 'hive' WITH GRANT OPTION;
mysql> GRANT ALL PRIVILEGES ON *.* TO 'hive'@'localhost' IDENTIFIED BY 'hive' WITH GRANT OPTION;
mysql> FLUSH PRIVILEGES;
-- Database that will hold the Hive metastore schema.
mysql> CREATE DATABASE hive;
vi hive-site.xml
<!-- Hive metastore configuration: embedded (local) metastore backed by MySQL. -->
<property>
  <name>hive.metastore.local</name>
  <value>true</value>
</property>
<!-- JDBC URL of the MySQL metastore database; createDatabaseIfNotExist lets
     the metastore create the 'hive' database on first start. -->
<property>
  <name>javax.jdo.option.ConnectionURL</name>
  <value>jdbc:mysql://localhost:3306/hive?createDatabaseIfNotExist=true</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionDriverName</name>
  <value>com.mysql.jdbc.Driver</value>
</property>
<property>
  <name>javax.jdo.option.ConnectionUserName</name>
  <value>hive</value>
</property>
<!-- NOTE(review): plaintext credential in config — restrict this file's
     permissions to the hadoop user. -->
<property>
  <name>javax.jdo.option.ConnectionPassword</name>
  <value>hive</value>
</property>
<!-- false lets DataNucleus create/alter the metastore schema automatically. -->
<property>
  <name>datanucleus.fixedDatastore</name>
  <value>false</value>
</property>
<property>
  <name>hive.metastore.warehouse.dir</name>
  <value>/uhive/fsdir/warehouse</value>
  <description>location of default database for the warehouse</description>
</property>
When you enter the command line with `hive`, you may see the following message:
WARNING: org.apache.hadoop.metrics.jvm.EventCounter is deprecated. Please use org.apache.hadoop.log.metrics.EventCounter in all the log4j.properties files
If so, edit hive-log4j.properties and change the value of log4j.appender.EventCounter to org.apache.hadoop.log.metrics.EventCounter.
-- Create the working database idempotently (the bare CREATE DATABASE in the
-- original fails if the database already exists) and switch to it.
CREATE DATABASE IF NOT EXISTS metaStore;
USE metaStore;

-- Small smoke-test table.
CREATE TABLE hwz2 (id INT, name STRING);

-- Staging table: comma-delimited text matching the MapReduce export layout.
-- NOTE(review): monetary columns use DOUBLE — DECIMAL is preferable but is
-- not available in Hive 0.10, and the types are kept for compatibility.
CREATE TABLE yiqifa_cps_test (
    cpsId INT,
    ADVERTISER_ID INT,
    ADVERTISER_NAME STRING,
    CAMPAIGN_ID INT,
    CAMPAIGN_NAME STRING,
    EARNER_ID INT,
    EARNER_NAME STRING,
    WEBSITE_ID INT,
    WEBSITE_NAME STRING,
    ORDER_NO STRING,
    PRODUCT_NAME STRING,
    FEED_BACK_TAG STRING,
    VALID_AMOUNT INT,
    ACTUAL_MONEY DOUBLE,
    CONFIRM_VALID_AMOUNT INT,
    CONFIRM_ACTUAL_MONEY DOUBLE,
    SYS_TOTAL_COMMISION DOUBLE,
    SYS_EQIFA_COMMISION DOUBLE,
    SYS_WEBSITE_COMMISION DOUBLE,
    ACTUAL_TOTAL_COMMISION DOUBLE,
    ACTUAL_EQIFA_COMMISION DOUBLE,
    ACTUAL_WEBSITE_COMMISION DOUBLE,
    START_DATE TIMESTAMP,
    CONFIRM_STATUS STRING
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
STORED AS TEXTFILE;

-- Load the MR output file (LOAD DATA INPATH MOVES the file from its HDFS
-- location into the table directory).
LOAD DATA INPATH '/yiqifa/cps/part-m-00000' INTO TABLE yiqifa_cps_test;

-- Rebuild the final table in compact sequencefile format. IF EXISTS makes
-- the drop idempotent regardless of hive.exec.drop.ignorenonexistent.
DROP TABLE IF EXISTS yiqifa_cps;
CREATE TABLE yiqifa_cps (
    cpsId INT,
    ADVERTISER_ID INT,
    ADVERTISER_NAME STRING,
    CAMPAIGN_ID INT,
    CAMPAIGN_NAME STRING,
    EARNER_ID INT,
    EARNER_NAME STRING,
    WEBSITE_ID INT,
    WEBSITE_NAME STRING,
    ORDER_NO STRING,
    PRODUCT_NAME STRING,
    FEED_BACK_TAG STRING,
    VALID_AMOUNT INT,
    ACTUAL_MONEY DOUBLE,
    CONFIRM_VALID_AMOUNT INT,
    CONFIRM_ACTUAL_MONEY DOUBLE,
    SYS_TOTAL_COMMISION DOUBLE,
    SYS_EQIFA_COMMISION DOUBLE,
    SYS_WEBSITE_COMMISION DOUBLE,
    ACTUAL_TOTAL_COMMISION DOUBLE,
    ACTUAL_EQIFA_COMMISION DOUBLE,
    ACTUAL_WEBSITE_COMMISION DOUBLE,
    START_DATE TIMESTAMP,
    CONFIRM_STATUS STRING
)
STORED AS SEQUENCEFILE;

-- Copy the staged text rows into the sequencefile table.
INSERT OVERWRITE TABLE yiqifa_cps SELECT * FROM yiqifa_cps_test;