Catering Big Data - Hive JDBC Utility Class

I looked through a lot of material and many articles introducing Hive JDBC, summarized them, and wrapped up a Hive JDBC utility class of my own, shown below.
DaoSupport.java

package util;

import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;

/**
 * Utility class for Hive database connections
 * @author 木叶
 * @blog http://blog.csdn.net/muye_csdn
 */
public class DaoSupport {
    private static String driver_class;
    private static String user_name;
    private static String url;
    private static String password;
    protected Connection connection;

    public DaoSupport() {
        load();
    }

    private void load() {
        try {
            // Read the configuration file from the classpath root
            InputStream inputStream = getClass().getResourceAsStream("/db.properties");
            // Create a Properties object and load the stream
            Properties prop = new Properties();
            prop.load(inputStream);
            // Read the configuration properties
            driver_class = prop.getProperty("driver_class");
            user_name = prop.getProperty("user_name");
            password = prop.getProperty("password");
            url = prop.getProperty("url");
            // Close the input stream
            inputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    /**
     * Get a database connection.
     *
     * @return the Connection object
     */
    public Connection getConnection() {
        try {
            Class.forName(driver_class);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
        try {
            connection = DriverManager.getConnection(url, user_name, password);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return connection;
    }

    /**
     * Execute a query.
     *
     * @param sql
     *            the SQL statement
     * @param params
     *            values for the parameter placeholders in the SQL statement
     * @return the result set (the caller is responsible for closing it and the underlying connection)
     */
    public ResultSet executeQuery(String sql, Object... params) {
        ResultSet rs = null;
        connection = getConnection();
        PreparedStatement pstmt = null;
        try {
            pstmt = connection.prepareStatement(sql);
            if (params != null) {
                for (int i = 0; i < params.length; i++) {
                    pstmt.setObject(i + 1, params[i]);
                }
            }
            rs = pstmt.executeQuery();
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return rs;
    }


    /**
     * Load data into a Hive table.
     * @param sql
     *            the SQL statement, e.g.
     *            String sql = "load data local inpath '" + filepath + "' into table " + tableName;
     * @return whether the data was loaded successfully
     */
    public boolean loadData(String sql) {
        // With LOAD DATA LOCAL INPATH the file path is local to the HiveServer2 host
        // (in my case, the home directory of the VM where Hive is installed)
        System.out.println("Running: " + sql);
        connection = getConnection();
        try {
            connection.createStatement().execute(sql);
        } catch (SQLException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }
    /**
     * Create an internal (managed) Hive table.
     *
     * @param sql
     *            the SQL statement, e.g.
     *            String sql = "create table " + tableName + " (key int, value string) row format delimited fields terminated by '\t'";
     * @return whether the table was created successfully
     */
    public boolean createTable(String sql) {
        connection = getConnection();
        try {
            connection.createStatement().execute(sql);
        } catch (SQLException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }
    /**
     * Drop a Hive table.
     *
     * @param tableName
     *            the table name
     * @return whether the table was dropped successfully
     */
    public boolean dropTable(String tableName) {
        String sql = "drop table " + tableName;
        connection = getConnection();
        try {
            connection.createStatement().execute(sql);
        } catch (SQLException e) {
            e.printStackTrace();
            return false;
        }
        return true;
    }

    /**
     * Close a Statement.
     *
     * @param stmt
     */
    protected void close(Statement stmt) {
        try {
            if (stmt != null) {
                stmt.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    /**
     * Close a Connection.
     *
     * @param connection
     */
    protected void close(Connection connection) {
        try {
            if (connection != null) {
                connection.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }
    /**
     * Close a ResultSet.
     *
     * @param rs
     */
    protected void close(ResultSet rs) {
        try {
            if (rs != null) {
                rs.close();
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

}
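
For reference, here is a minimal sketch that strings the three helpers together: create a table, load a local file into it, then drop it. The class name TestLoadData, the table name yz, and the path /home/hadoop/user_log.csv are placeholders for illustration; with LOAD DATA LOCAL INPATH the file has to exist on the machine running HiveServer2 (in my setup, the VM's home directory), not on the JDBC client.

package test;

import util.DaoSupport;

public class TestLoadData {
    public static void main(String[] args) {
        DaoSupport daoSupport = new DaoSupport();
        String tableName = "yz";
        // Placeholder path on the HiveServer2 host
        String filepath = "/home/hadoop/user_log.csv";
        daoSupport.createTable("create table " + tableName
                + " (key int, value string) row format delimited fields terminated by '\t'");
        daoSupport.loadData("load data local inpath '" + filepath + "' into table " + tableName);
        daoSupport.dropTable(tableName);
    }
}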

The connection settings are kept in db.properties, which has to sit on the classpath root because load() reads it with getResourceAsStream("/db.properties"). The url follows the HiveServer2 JDBC format jdbc:hive2://host:port/database. The file looks like this:

#key=value
driver_class=org.apache.hive.jdbc.HiveDriver
user_name=hadoop
password=hadoop
url=jdbc:hive2://localhost:10000/dbtaobao

I also wrote a simple test class for the utility, TestDaoSupport.java, shown below:

package test;

import util.DaoSupport;
import java.sql.ResultSet;
import java.sql.SQLException;

/**
 * Created by hadoop on 9/13/17.
 * @author 木叶
 * @blog http://blog.csdn.net/muye_csdn
 */
public class TestDaoSupport{
    public static void main(String[] args) {
        DaoSupport daoSupport = new DaoSupport();
//        String sql = "SELECT * FROM user_log limit 10";
//        ResultSet rs = daoSupport.executeQuery(sql);
//        try {
//            while (rs.next()) {
//                System.out.print(rs.getInt(1)+"\t");
//                System.out.print(rs.getInt(2)+"\t");
//                System.out.print(rs.getInt(3)+"\t");
//                System.out.println(rs.getInt(4));
//
//            }
//        } catch (SQLException e) {
//            e.printStackTrace();
//        }
        String sql = "create table yz (key int, value string)  row format delimited fields terminated by '\t'";
        daoSupport.createTable(sql);
//        daoSupport.loadData("load data local inpath '/path/to/file' into table yz"); // loadData expects a full LOAD DATA statement
//        System.out.print(daoSupport.dropTable("yz"));
    }
}
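
The query path that is commented out above never releases its JDBC resources. Here is a minimal sketch of the same query that also closes everything when it is done; the user_log table and its four integer columns are taken from the commented-out code, so adjust them to whatever exists in your own database.

package test;

import util.DaoSupport;
import java.sql.ResultSet;
import java.sql.SQLException;

public class TestQueryWithCleanup {
    public static void main(String[] args) {
        DaoSupport daoSupport = new DaoSupport();
        ResultSet rs = daoSupport.executeQuery("SELECT * FROM user_log limit 10");
        try {
            while (rs != null && rs.next()) {
                System.out.print(rs.getInt(1) + "\t");
                System.out.print(rs.getInt(2) + "\t");
                System.out.print(rs.getInt(3) + "\t");
                System.out.println(rs.getInt(4));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            // Closing the Connection also closes the Statement and the ResultSet it produced
            try {
                if (rs != null) {
                    rs.getStatement().getConnection().close();
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }
    }
}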