Hive Installation and Basics

Tool for editing Linux files from Windows: Notepad++
1. Extract (on node 03)
  tar -zxvf apache-hive-.... -C /master/ 
2. Configure
  cd /master/
  chown -R root:root apache-hive-...
3. Start (cd into the bin directory)
  cd /master/apache-hive-0.13.0-bin/
  ./hive
  select count(*) from student;   -- sample query (runs as a MapReduce job)
  Create a table
  create table student (id int, name string);
  show tables;
  show create table student;
  Load data from a local path
  vi /root/stu.txt
   1   zhangsan
   2   lisi
   3   wangwu
  load data local inpath '/root/stu.txt' into table student;
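  Note: the student table uses Hive's default field delimiter '\001', so each whole line of stu.txt lands in the first column and fails to parse as an int; a quick check should come back all NULL, which is why the next step declares an explicit delimiter:
   select * from student;
   -- expected: NULL  NULL for every row, since the file is not '\001'-delimited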
  Create a table with a custom field delimiter
  create table teacher (id bigint,name string) row format delimited fields terminated by '\t';
  show create table teacher;
  load data local inpath '/root/teacher.txt' into table teacher;
  select * from teacher;
  Order descending
  select * from student order by id desc;
  Select the first two rows
  select * from student limit 2;
  Sum the ids
  select sum(id) from student;
  Create a database
  create database itcast;
  Use the database
  use itcast;
  show tables;
  create table user(id int, name string);
  Multiple concurrent users (the default embedded Derby metastore allows only one open session, so a second hive shell fails; this is what step 4 fixes)
  cd /master/apache-hive-0.13.0-bin/
  bin/hive
4. Install MySQL on Linux (to allow multiple concurrent connections to the metastore)
  rpm -qa
  rpm -qa | grep mysql                  # find the preinstalled packages
  rpm -e mysql-libs-5.1.6... --nodeps   # remove them, ignoring dependencies
  rpm -ivh MySQL-server-5.1.73-1.glibc23.i386.rpm
  If it will not install, force it:
  rpm -ivh MySQL-server-5.1.73-1.glibc23.i386.rpm --nodeps --force
  rpm -ivh MySQL-client-5.1.73-1.glibc23.i386.rpm --nodeps --force
  /usr/bin/mysql_secure_installation
    Enter current password for root:   (press Enter, none is set yet)
    Set root password?                 Y
    New password:                      123
    Re-enter new password:             123
    Remove anonymous users?            Y
    Disallow root login remotely?      n
    Remove test database?              n
    Reload privilege tables now?       y
  mysql -uroot -p123
  show databases;
5. Edit Hive's configuration files
  hadoop fs -rmr /user/hive   # clear the warehouse data left over from the Derby-backed runs
  cd /master/apache-hive-0.13.0-bin/conf/
  1. Adjust memory settings (Hive only reads hive-env.sh, so copy the template first; sketched below)
    cp hive-env.sh.template hive-env.sh
    vi hive-env.sh
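    A minimal sketch of the edit; the template already carries this variable commented out, and 1024 MB is an assumed value, not a recommendation:
      # in hive-env.sh: heap size for the JVM started by the hive shell script
      export HADOOP_HEAPSIZE=1024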
  2. Configure on node 03 (MySQL runs on node 05)
    mv hive-default.xml.template hive-site.xml
    vi hive-site.xml
       <configuration> 
           <!-- JDBC connection string for the MySQL metastore -->
           <property>
              <name>javax.jdo.option.ConnectionURL</name>
              <value>jdbc:mysql://master:3306/hive?createDatabaseIfNotExist=true</value>
              <description>JDBC connect string for a JDBC metastore</description>
           </property>
           <!-- JDBC driver class -->
           <property>
              <name>javax.jdo.option.ConnectionDriverName</name>
              <value>com.mysql.jdbc.Driver</value>
              <description>Driver class name for a JDBC metastore</description>
           </property>
           <!-- connection user name -->
           <property>
              <name>javax.jdo.option.ConnectionUserName</name>
              <value>root</value>
              <description>username to use against metastore</description>
           </property>
           <!-- connection password -->
           <property>
              <name>javax.jdo.option.ConnectionPassword</name>
              <value>123</value>
              <description>password to use against metastore</description>
           </property>
       </configuration>
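   After hive has been started once with this file in place, the result can be checked on the MySQL side; the 'hive' database name comes from the connection URL above, and DBS/TBLS are standard metastore tables:
     mysql -uroot -p123
     show databases;   -- a 'hive' database should now exist (createDatabaseIfNotExist=true)
     use hive;
     show tables;      -- metastore tables such as DBS and TBLS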
6. Add the MySQL JDBC driver jar (hive cannot reach the metastore without it)
   cp mysql-connector-java-5.1.10.jar /master/apache-hive-0.13.0-bin/lib/
   cd /master/apache-hive-0.13.0-bin/bin/
   ./hive
  On node 05:
  mysql -uroot -p123
  To let user myuser connect to the MySQL server from any host with password mypassword:
  GRANT ALL PRIVILEGES ON *.* TO 'myuser'@'%' IDENTIFIED BY 'mypassword' WITH GRANT OPTION;
  FLUSH PRIVILEGES;

  An alternative method (which I have not tried myself) that creates the account explicitly:
  create user 'root'@'localhost' identified by 'your password';
  grant all privileges on *.* to root@'localhost';

  To let user myuser connect to the MySQL server only from the host 192.168.1.6, with password mypassword:
  GRANT ALL PRIVILEGES ON *.* TO 'myuser'@'192.168.1.6' IDENTIFIED BY 'mypassword' WITH GRANT OPTION;
  FLUSH PRIVILEGES;

  To let user myuser connect from 192.168.1.6 to only the dk database, with password mypassword:
  GRANT ALL PRIVILEGES ON dk.* TO 'myuser'@'192.168.1.6' IDENTIFIED BY 'mypassword' WITH GRANT OPTION;
  FLUSH PRIVILEGES;

  I used the first method. At first it did not work; after searching online I found I had skipped FLUSH PRIVILEGES, and once I ran it the change took effect.
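  A quick sanity check of the grants, run in the same mysql shell (a sketch; the exact rows vary by setup):
    SELECT user, host FROM mysql.user;   -- the newly granted accounts should be listed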
  ./hive
  show tables;

  create table people (id int, name string);


Custom functions (UDFs)
Extend org.apache.hadoop.hive.ql.exec.UDF
and add an evaluate method:
package cn.master.hive.udf;   // must match the class name registered below

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.io.Text;

public class NationUDF extends UDF {

  public static Map<String, String> nationMap = new HashMap<String, String>();
  static {
    nationMap.put("china", "中国");
    nationMap.put("japan", "日本");
    nationMap.put("USA", "美国");
  }

  Text t = new Text();

  // just as sum(income) yields e.g. 1000, getNation(nation) maps e.g. "china" -> 中国
  public Text evaluate(Text nation) {
    String nation_e = nation.toString();
    String name = nationMap.get(nation_e);
    if (name == null) {
      name = "火星人";   // fallback for unknown values ("Martian")
    }
    t.set(name);
    return t;
  }
}
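The steps below assume the class is already packaged as /root/NationUDF.jar; a minimal sketch of compiling and packaging it, assuming the source sits in the current directory and Hive 0.13's hive-exec jar (the paths are this setup's, not universal):
  javac -cp $(hadoop classpath):/master/apache-hive-0.13.0-bin/lib/hive-exec-0.13.0.jar -d . NationUDF.java
  jar cf /root/NationUDF.jar cn/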
Calling the UDF (in hive)
 1. Load/register the UDF jar
    add jar /root/NationUDF.jar;
 2. Create a temporary function (the name maps to the implementing class)
    create temporary function getNation as 'cn.master.hive.udf.NationUDF';
 3. Call it
    select id, name, size, getNation(nation) as nation_name from beauties order by size desc;
 4. Save the query results into a table on HDFS (a completed version is sketched below)
    create table result row format delimited
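    A plausible completion of the truncated statement in step 4, assuming the aim is to persist the step-3 output as a tab-delimited table (the column list is an assumption):
      create table result row format delimited fields terminated by '\t'
      as select id, name, getNation(nation) as nation_name from beauties;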


*. Create HDFS files from inside hive
  dfs -ls /;
  dfs -mkdir /data;
  dfs -put /root/stu.txt /data/a.txt;
  dfs -put /root/stu.txt /data/b.txt;
*. External tables
  create external table ext_student (id int, name string) row format delimited fields terminated by '\t' location '/data';
  select * from ext_student;
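  Worth knowing: an external table only borrows its location, so dropping it removes the metadata but leaves the files in HDFS. A quick (destructive) check; recreate the table afterwards if you try it:
    drop table ext_student;
    dfs -ls /data;   -- a.txt and b.txt are still there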
*. Partitioned tables
  create external table beauties (id bigint, name string, size double) partitioned by (nation string) row format delimited fields terminated by '\t' location '/beauty';
*. Load local data into a partition of the Hive table
  load data local inpath '/root/b.a' into table beauties partition (nation='japan');
*. Register data uploaded to HDFS by hand as partition metadata
  alter table beauties add partition (nation='japan') location "/beauty/nation=japan";
*. Query a single partition quickly (only that partition's directory is scanned)
  select * from beauties where nation='china'; 
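  To list the partitions the metastore has registered for the table (including the one added via alter table above):
    show partitions beauties;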
Exercise
 select t.account, u.name, t.income, t.expenses, t.surplus
 from user_info u
 join (select account, sum(income) as income, sum(expenses) as expenses, sum(income - expenses) as surplus
       from trade_detail group by account) t
 on u.account = t.account;
  ./sqoop import --connect jdbc:mysql://192.168.196.2:3306/master --username root --password 123 --table trade_detail --hive-import --hive-overwrite --hive-table trade_detail --fields-terminated-by '\t'
 Add Hive to the PATH
   vi /etc/profile
   append :/master/apache-hive-0.13.0-bin/bin to the PATH line, i.e.
   export PATH=$PATH:/master/apache-hive-0.13.0-bin/bin
   source /etc/profile


  