Hadoop (Day 10) -- Eclipse + Hive, DbVisualizer, UDF

一.Eclipse + Hive:

HiveJdbcClient.java:

package com.bsr.hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.junit.Before;
import org.junit.Test;
 
public class HiveJdbcClient {
    private Connection connection;
    private PreparedStatement ps;
    private ResultSet rs;
    // create the connection
    @Before
    public void getConnection() {
        try {
 
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            connection = DriverManager.getConnection("jdbc:hive2://192.168.16.100:10000/", "root", "root");
            System.out.println(connection);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    // close the connection
    public void close() {
        try {
            if (rs != null) {
                rs.close();
            }
            if (ps != null) {
                ps.close();
            }
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    
//  // create a database
//  @Test
//  public void createBase() {
//      String sql = "create database bsr_java_test";
//      try {
//          ps = connection.prepareStatement(sql);
//          ps.execute();
//          close();
//      } catch (SQLException e) {
//          e.printStackTrace();
//      }
//  }

//    // create a table
//    @Test
//    public void createTable() {
//        String sql = "create table bsr_java_test.goods2(id int,name string) row format delimited fields terminated by ' ' ";
//        try {
//            ps = connection.prepareStatement(sql);
//            ps.execute();
//            close();
//        } catch (SQLException e) {
//            e.printStackTrace();
//        }
//    }
    // drop a table
//    @Test
//    public void dropTable() {
//        String sql = "drop table goods2";
//        try {
//            ps = connection.prepareStatement(sql);
//            ps.execute();
//            close();
//        } catch (SQLException e) {
//            e.printStackTrace();
//        }
//    }
//    // insert data by loading a local file
//    @Test
//    public void insert() throws SQLException{
//        String sql1 = "use bsr_java_test";
//        String sql = "load data local inpath '/root/goods.txt' overwrite into table goods2";
//        // remember to upload goods.txt to the server's local filesystem first
//        ps = connection.prepareStatement(sql1);
//        ps.execute();
//        ps = connection.prepareStatement(sql);
//        ps.execute();
//        close();
//    }
    // query
    @Test
    public void find() throws SQLException {
        String sql = "select * from bsr.bsr";
        ps = connection.prepareStatement(sql);
        rs = ps.executeQuery();
        while (rs.next()) {
            System.out.println(rs.getObject(1) + "---" + rs.getObject(2));
        }
        close();
    }
 
     
}
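
To run this client, HiveServer2 must be listening on port 10000 of the server (192.168.16.100 above), and the hive-jdbc standalone jar plus hadoop-common must be on the Eclipse build path. A minimal sketch of starting and checking the service, assuming $HIVE_HOME/bin is on the PATH:

# on the Hive server: start HiveServer2 in the background
hiveserver2 &
# (older scripts: hive --service hiveserver2 &)
# quick check that port 10000 is listening
netstat -nltp | grep 10000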


二.DbVisualizer


1. Make sure the two required jars are under DbVisualizer\jdbc\hive (typically the hive-jdbc standalone jar and hadoop-common; exact file names vary by version).


2. Create the connection; a sketch of the settings follows.
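Assuming the same HiveServer2 instance as the Java client above (values are taken from that code; adjust host and credentials to your own setup):

Database Driver: Hive (org.apache.hive.jdbc.HiveDriver)
Database URL:    jdbc:hive2://192.168.16.100:10000/
Userid:          root
Password:        root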


* If the connection fails: first check that HiveServer2 is actually running and reachable on port 10000; a second common cause is a Hadoop impersonation error, sketched below.
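If the server log shows something like "User: root is not allowed to impersonate root", the usual fix (assuming you connect as root) is to add proxyuser properties to core-site.xml on the Hadoop cluster and restart it:

<property>
    <name>hadoop.proxyuser.root.hosts</name>
    <value>*</value>
</property>
<property>
    <name>hadoop.proxyuser.root.groups</name>
    <value>*</value>
</property>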



三.UDF

1. Create a class in Eclipse that extends UDF and implements an evaluate method, then export it as a jar.

2. Transfer the jar to the Linux machine.

3. hive> add jar xxx.jar;

4. hive> create temporary function xxx as 'com.bsr.bidata.udf.xxx';

5. hive> select xxx(name) from xxx;

Example:

ToLowerCase.java:

package com.bsr.hive;

import java.util.HashMap;

import org.apache.hadoop.hive.ql.exec.UDF;

public class ToLowerCase extends UDF {
	public static HashMap<String, String> provinceMap = new HashMap<String, String>();
	static {
		provinceMap.put("136", "beijing");
		provinceMap.put("137", "shanghai");
		provinceMap.put("138", "shenzhen");
	}

	// evaluate must be public
	// overloaded method: lower-case a string field
	public String evaluate(String field) {
		String result = field.toLowerCase();
		return result;
	}

	// overloaded method: map the first three digits of a phone number to a province
	// (note: an 11-digit phone number does not fit in an int; ToProvince below takes a String)
	public String evaluate(int phonenbr) {
		String pnb = String.valueOf(phonenbr);
		return provinceMap.get(pnb.substring(0, 3)) == null ? "xxxxxxxx" : provinceMap.get(pnb.substring(0, 3));
	}

}
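Registering and calling it follows the five steps above. A sketch of the hive session, assuming the class was exported to /root/udf.jar:

hive> add jar /root/udf.jar;
hive> create temporary function tolower as 'com.bsr.hive.ToLowerCase';
hive> select tolower(name) from bsr_java_test.goods2;
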
ToProvince.java:

package com.bsr.hive;

import java.util.HashMap;

import org.apache.hadoop.hive.ql.exec.UDF;

public class ToProvince extends UDF {

	// note: the method stub generated by code completion looks like this
/*	private void evaluate() {

	}*/

	static HashMap<String, String> provinceMap = new HashMap<String, String>();

	static {
		provinceMap.put("136", "beijing");
		provinceMap.put("137", "shanghai");
		provinceMap.put("138", "shenzhen");
		provinceMap.put("156", "xxxxxxxx");
	}

	// we overload this method to fit our business logic
	public String evaluate(String phonenbr) {
		String res = provinceMap.get(phonenbr.substring(0, 3));
		return res == null ? "wukong" : res;
	}

	public int evaluate(int x, int y) {
		return x + y;
	}

}
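Hive picks the evaluate overload by argument types. A sketch, again assuming the jar is at /root/udf.jar:

hive> add jar /root/udf.jar;
hive> create temporary function toprovince as 'com.bsr.hive.ToProvince';
hive> select toprovince('13712345678');   -- shanghai (String overload)
hive> select toprovince(1, 2);            -- 3 (int,int overload)
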
If we want to extract field values from JSON:

(1)Java:
JsonParser.java:

package com.bsr.hive;

import org.apache.hadoop.hive.ql.exec.UDF;

// Jackson as shaded inside the parquet bundle that ships in Hive's lib directory
import parquet.org.codehaus.jackson.map.ObjectMapper;

public class JsonParser extends UDF {

	public String evaluate(String jsonLine) {

		ObjectMapper objectMapper = new ObjectMapper();

		try {
			
			MovieRateBean bean = objectMapper.readValue(jsonLine, MovieRateBean.class);
			return bean.toString();
		} catch (Exception e) {
			// malformed line: fall through and return an empty string
		}
		return "";
	}

}

MovieRateBean.java:

package com.bsr.hive;

//{"movie":"1721","rate":"3","timeStamp":"965440048","uid":"5114"}
public class MovieRateBean {

	private String movie;
	private String rate;
	private String timeStamp;
	private String uid;
	public String getMovie() {
		return movie;
	}
	public void setMovie(String movie) {
		this.movie = movie;
	}
	public String getRate() {
		return rate;
	}
	public void setRate(String rate) {
		this.rate = rate;
	}
	public String getTimeStamp() {
		return timeStamp;
	}
	public void setTimeStamp(String timeStamp) {
		this.timeStamp = timeStamp;
	}
	public String getUid() {
		return uid;
	}
	public void setUid(String uid) {
		this.uid = uid;
	}
	@Override
	public String toString() {
		return movie + "\t" + rate + "\t" + timeStamp + "\t" + uid;
	}


}

(2) Shell: a sketch of the hive session follows.
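Assuming the jar was exported to /root/udf.jar and the raw JSON lines sit in /root/rating.json (the table and file names here are illustrative):

hive> create table t_json(line string);
hive> load data local inpath '/root/rating.json' into table t_json;
hive> add jar /root/udf.jar;
hive> create temporary function parsejson as 'com.bsr.hive.JsonParser';
hive> select parsejson(line) from t_json limit 10;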



