Hive JDBC

package com.foriseholdings.advlogs.hivetomysql.dao;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;

import com.foriseholdings.advlogs.hivetomysql.bean.LogInfoBean;

public class DBSqlDao {

    // Batch insert
    public static boolean addBatch(String sql, ArrayList<LogInfoBean> list) {
        boolean flag = false;
        Connection conn = null;
        PreparedStatement ps = null;
        String url = "jdbc:mysql://192.168.92.7:3306/dev_advs?useUnicode=true&characterEncoding=UTF-8";

        String username = "root";

        String password = "fuhua_1234";

        try {
            conn = DriverManager.getConnection(url, username, password);
            // conn = C3P0Util.getConnection(); // alternative: take a connection from the pool
            conn.setAutoCommit(false); // disable auto-commit so the whole batch commits as one transaction
            ps = conn.prepareStatement(sql); // prepareStatement already returns a PreparedStatement; no cast needed
            System.out.println(sql); // log the statement once, not per row
            for (int i = 0; i < list.size(); i++) {
                LogInfoBean trans = list.get(i);
                /*
                 * sql =
                 * "insert into ads_hits(material_id,position_id,period,click_count,display_count,create_time) values('"
                 * + trans.getMaterial_id() + "','" + trans.getPosition_id() + "','" +
                 * trans.getPeriod() + "','" + trans.getClick_count() + "','" +
                 * trans.getDisplay_count() + "','" + trans.getCreate_time() + "')";
                 */
                ps.setObject(1, trans.getMaterial_id());
                ps.setObject(2, trans.getPosition_id());
                ps.setObject(3, trans.getPeriod());
                ps.setObject(4, trans.getClick_count());
                ps.setObject(5, trans.getDisplay_count());
                ps.setObject(6, trans.getCreate_time());

                ps.addBatch(); // queue this row in the batch
            }

            ps.executeBatch(); // execute the batch
            conn.commit(); // commit the transaction
            ps.clearBatch();
            flag = true;

        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            try {
                if (ps != null) {
                    ps.close();
                }
                if (conn != null) {
                    conn.close(); // release the connection
                }
            } catch (SQLException e) {
                e.printStackTrace();
            }
        }

        return flag;
    }
}
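
Both DAOs in this post pass around a LogInfoBean that is never shown. A minimal sketch, inferred from the getters and setters used above (all fields kept as String, matching the getString/setObject calls), could look like this:

package com.foriseholdings.advlogs.hivetomysql.bean;

// Hypothetical sketch of the bean used by DBSqlDao and HiveSqlDao; the real
// class is not included in the post.
public class LogInfoBean {
    private String material_id;
    private String position_id;
    private String period;
    private String click_count;
    private String display_count;
    private String create_time;

    public String getMaterial_id() { return material_id; }
    public void setMaterial_id(String material_id) { this.material_id = material_id; }
    public String getPosition_id() { return position_id; }
    public void setPosition_id(String position_id) { this.position_id = position_id; }
    public String getPeriod() { return period; }
    public void setPeriod(String period) { this.period = period; }
    public String getClick_count() { return click_count; }
    public void setClick_count(String click_count) { this.click_count = click_count; }
    public String getDisplay_count() { return display_count; }
    public void setDisplay_count(String display_count) { this.display_count = display_count; }
    public String getCreate_time() { return create_time; }
    public void setCreate_time(String create_time) { this.create_time = create_time; }
}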


package com.foriseholdings.advlogs.hivetomysql.dao;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;

import com.foriseholdings.advlogs.hivetomysql.bean.LogInfoBean;
//import com.foriseholdings.util.HiveJDBC;

public class HiveSqlDao {

    public static void main(String[] args) {
        HiveSqlDao ss = new HiveSqlDao();
        ss.advHiveToMySql();
    }

    public void advHiveToMySql() {

        Connection conn = null;
        try {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            System.out.println(sdf.format(new Date()));
            Class.forName("org.apache.hive.jdbc.HiveDriver");
            // conn = HiveJDBC.getConnection(); // alternative: connect to Hive through the helper
            conn = DriverManager.getConnection("jdbc:hive2://192.168.92.215:10000", "root", "fuhua_1234");

            Statement st = conn.createStatement();
            // run the Hive aggregation query
            String sql1 = "select positionid,materialids,sum(clicktrue),sum(clickfalse),simpledate from adv_logs group by positionid,materialids,simpledate";

            ResultSet rs = st.executeQuery(sql1);

            ArrayList<LogInfoBean> list = new ArrayList<LogInfoBean>();
            // collect the result set into a list
            while (rs.next()) {
                LogInfoBean info = new LogInfoBean();
                info.setPosition_id(rs.getString(1)); // column 1: positionid
                info.setMaterial_id(rs.getString(2)); // column 2: materialids
                info.setClick_count(rs.getString(3));
                info.setDisplay_count(rs.getString(4));
                info.setPeriod(rs.getString(5));
                info.setCreate_time(sdf.format(new Date()));
                list.add(info);
            }
            // bulk-insert the rows into MySQL
            if (DBSqlDao.addBatch(
                    "insert into ads_hits(material_id,position_id,period,click_count,display_count,create_time) values(?,?,?,?,?,?)",
                    list)) {
                System.out.println("success");
            } else {
                System.out.println("error");
            }
            System.out.println(sdf.format(new Date()));
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } finally {
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
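
The remaining DAOs all go through a HiveJDBC helper class that the post never includes. A minimal sketch, assuming the same hive2 endpoint and credentials as HiveSqlDao above, might be:

package com.foriseholdings.util;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

// Hypothetical sketch of the helper used by the DAOs below; the real class is
// not part of the post. The URL and credentials are copied from HiveSqlDao.
public class HiveJDBC {

    private static final String URL = "jdbc:hive2://192.168.92.215:10000";

    public static Connection getConnection() throws SQLException {
        try {
            Class.forName("org.apache.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            throw new SQLException("Hive JDBC driver not on the classpath", e);
        }
        return DriverManager.getConnection(URL, "root", "fuhua_1234");
    }

    public static void closeConnection(Connection conn) {
        if (conn != null) {
            try { conn.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
    }

    public static void closeStatement(Statement stmt) {
        if (stmt != null) {
            try { stmt.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
    }

    public static void closeResultSet(ResultSet rs) {
        if (rs != null) {
            try { rs.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
    }
}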
package com.foriseholdings.userlabel.hivetomysql.dao;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

import com.foriseholdings.util.HiveJDBC;

public class HSqlTableInfo {

    /**
     * @describe Show table information
     * @param tableName
     * @return
     */
    public static String showTable(String tableName) {
        Connection conn = null;
        try {
            conn = HiveJDBC.getConnection();

            Statement stmt = conn.createStatement();
            ResultSet res = null;

            String sql = "show tables '" + tableName + "'";
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            if (res.next()) {
                String name = res.getString(1);
                System.out.println(name);
                return name; // read the column while the cursor is on a row
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            HiveJDBC.closeConnection(conn);
        }
        return "未查询到";
    }

    /**
     * @describe Describe the table schema
     * @param tableName
     * @return
     */
    public static List<String> describeTable(String tableName) {
        Connection conn = null;
        List<String> describeTableList = new ArrayList<String>();
        try {
            String sql = null;
            conn = HiveJDBC.getConnection();
            Statement stmt = conn.createStatement();
            ResultSet res = null;

            sql = "describe " + tableName;
            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);
            while (res.next()) {
                describeTableList.add(res.getString(1) + "\t" + res.getString(2));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            HiveJDBC.closeConnection(conn);
        }
        return describeTableList;
    }

}
package com.foriseholdings.userlabel.hivetomysql.dao;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.foriseholdings.util.HiveJDBC;

public class LabelHsqlSelectDao {

    /**
     * @author qisun
     * @param sql
     *            the query to run
     * @eg: select * from sell_records;
     * @return every column of every row, flattened into a single list
     */
    public List<String> selectFromHSQL(String sql) {
        List<String> resultList = new ArrayList<String>();
        Connection conn = null;
        ResultSet res = null;
        Statement stmt = null;
        try {
            conn = HiveJDBC.getConnection();
            stmt = conn.createStatement();

            System.out.println("Running: " + sql);
            res = stmt.executeQuery(sql);

            int count = res.getMetaData().getColumnCount();
            // flatten every column of every row into the result list
            while (res.next()) {
                for (int i = 1; i <= count; i++) {
                    resultList.add(res.getString(i));
                }
            }

        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            HiveJDBC.closeResultSet(res);
            HiveJDBC.closeStatement(stmt);
            HiveJDBC.closeConnection(conn);
        }
        return resultList;
    }

    /**
     * Sums the score of a user's 'bookorder' rows.
     * 
     * @param tableName
     *            table name
     * @param userId
     *            user id
     * @return
     */
    public String selectCount(String tableName, String userId) {
        Connection conn = null;
        StringBuffer sql = new StringBuffer();
        try {
            conn = HiveJDBC.getConnection();
            Statement stmt = conn.createStatement();
            ResultSet res = null;
            sql.append("SELECT sum(score) FROM ");
            sql.append(tableName);
            sql.append(" WHERE ");
            sql.append(" userid ");
            sql.append(" = ");
            sql.append(userId);
            sql.append("  AND  ");
            sql.append(" behave ");
            sql.append(" = ");
            // tentatively fixed to this behaviour value
            sql.append("'bookorder'");

            // An aggregation such as select count(*) or sum() launches a
            // MapReduce job, so the YARN ResourceManager must be running
            // (start-yarn.sh).
            System.out.println("Running: " + sql.toString());
            res = stmt.executeQuery(sql.toString());

            if (res.next()) {
                String total = res.getString(1);
                System.out.println("Total score: " + total);
                return total; // read the column while the cursor is on the row
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            HiveJDBC.closeConnection(conn);
        }
        return "";
    }

    public Map<String, String> getShopIdTimes(String sql) {
        Connection conn = null;
        Map<String, String> hashMap = new HashMap<String, String>();
        try {
            conn = HiveJDBC.getConnection();
            Statement stmt = conn.createStatement();
            ResultSet res = stmt.executeQuery(sql);

            // map each shop id (column 1) to its times/count (column 2)
            while (res.next()) {
                hashMap.put(res.getString(1), res.getString(2));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            HiveJDBC.closeConnection(conn);
        }
        return hashMap;
    }

}
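
A hypothetical usage sketch for the two query helpers above; "user_behave_log" and the user id are made-up examples, while sell_records is the table named in the Javadoc:

package com.foriseholdings.userlabel.hivetomysql.dao;

import java.util.List;

// Hypothetical demo class; not part of the original post.
public class LabelHsqlSelectDaoDemo {

    public static void main(String[] args) {
        LabelHsqlSelectDao dao = new LabelHsqlSelectDao();

        // selectFromHSQL flattens every column of every row into one list
        List<String> cells = dao.selectFromHSQL("select * from sell_records");
        System.out.println(cells);

        // sums the score of behave = 'bookorder' rows for one user
        System.out.println(dao.selectCount("user_behave_log", "10001"));
    }
}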
package com.foriseholdings.userlabel.hivetomysql.dao;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import com.foriseholdings.util.HiveJDBC;

/**
 * Loads a file that already sits on HDFS into a Hive table; the LOAD statement
 * is issued over JDBC as well.
 * 
 * @author qisun
 * 
 *         Currently unused: the file is placed directly under the Hive
 *         warehouse directory instead.
 */
public class LoadFromFileToHive {

    // public static void main(String[] args) {
    // LoadFromFileToHive lh = new LoadFromFileToHive();
    // lh.loadFileToHive("sell_records");
    // }

    String tableName = null;

    public void loadFileToHive(String tableName) {
        this.tableName = tableName; // keep the requested table name
        Connection conn = null;
        ResultSet res = null;
        Statement stmt = null;
        try {
            conn = HiveJDBC.getConnection();
            stmt = conn.createStatement();
            String sql = null;
            String filepath = "/user/hive/warehouse/t_sell_record/part-r-00001";
            sql = "load data  inpath '" + filepath + "' into table " + tableName;
            System.out.println("Running: " + sql);
            stmt.execute(sql);
        } catch (SQLException e) {
            e.printStackTrace();
        } finally {
            HiveJDBC.closeResultSet(res);
            HiveJDBC.closeStatement(stmt);
            HiveJDBC.closeConnection(conn);
        }

    }

}
package com.foriseholdings.userlabel.hivetomysql.dao;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;

import com.foriseholdings.util.C3P0Util;

public class SelectFromTable {

    /**
     * Fetches product categories and their counts.
     * 
     * @param shopIds
     * @return Map<type_code, count>
     */
    public Map<String, String> getShopType(String shopIds) {
        Connection conn = null;
        Statement st = null;
        ResultSet rs = null;
        try {
            // obtain a connection from the pool
            Map<String, String> typeCodeCount = new HashMap<String, String>();
            conn = C3P0Util.getConnection();

            st = conn.createStatement();
            String sql = "select type_code , count(1) as count from ads_prod_type where shop_id in (" + shopIds
                    + ") group by type_code ";
            System.out.println(sql);
            rs = st.executeQuery(sql);
            // int count = rs.getMetaData().getColumnCount();
            while (rs.next()) {
                typeCodeCount.put(rs.getString(1), rs.getString(2));
            }

            return typeCodeCount;
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            C3P0Util.CloseResource(conn, st, rs);
        }
    }

    // Query the logic table to get product labels and their ratios.
    // dev_personas: look up Unicode by user id and insert into dev_personas.

    // Query the user label store.

    /**
     * Checks ads_personas for each unicode_userId -> labelId entry and inserts
     * the label when no matching record exists.
     * 
     * @param unicodeLabel
     *            map of "unicode_userId" keys to label ids
     */
    public void findFromPersonas(Map<String, String> unicodeLabel) {
        Connection conn = null;
        Statement st = null;
        ResultSet rs = null;
        for (Map.Entry<String, String> entry : unicodeLabel.entrySet()) {
            try {
                // obtain a connection from the pool
                conn = C3P0Util.getConnection();

                st = conn.createStatement();
                StringBuffer sb = new StringBuffer();
                sb.append("select * from ads_personas where ");
                sb.append(" unicode = ");
                sb.append(entry.getKey().split("_")[0]);
                sb.append(" AND ");
                sb.append(" user_id = ");
                sb.append(entry.getKey().split("_")[1]);
                sb.append(" AND ");
                sb.append(" label_id = ");
                sb.append(entry.getValue());

                rs = st.executeQuery(sb.toString());
                // int count = rs.getMetaData().getColumnCount();
                if (!rs.next()) {
                    // no existing record: insert the label
                    insertPersonLabel(entry.getKey().split("_")[0], entry.getKey().split("_")[1], entry.getValue());
                }

            } catch (Exception e) {
                throw new RuntimeException(e);
            } finally {
                C3P0Util.CloseResource(conn, st, rs);
            }
        }
    }

    // Insert into the user label table
    public void insertPersonLabel(String unicode, String userId, String labelId) {
        Connection conn = null;
        // Statement st = null;
        PreparedStatement ps = null;
        ResultSet rs = null;
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        try {
            // obtain a connection from the pool
            conn = C3P0Util.getConnection();

            StringBuffer sb = new StringBuffer();
            sb.append("INSERT INTO ads_personas (unicode,user_id,label_id,update_time) ");
            sb.append(" VALUES ");
            sb.append("(?,?,?,?) ");
            ps = conn.prepareStatement(sb.toString());

            ps.setString(1, unicode);
            ps.setString(2, userId);
            ps.setString(3, labelId);
            ps.setString(4, sdf.format(new Date()));
            ps.executeUpdate(); // execute the insert
            System.out.println("insert succeeded (* ̄︶ ̄)");
        } catch (Exception e) {
            throw new RuntimeException(e);
        } finally {
            C3P0Util.closeResultSet(rs);
            C3P0Util.closePreparedStatement(ps);
            C3P0Util.closeConn(conn);
        }
    }

}
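
SelectFromTable, and the commented-out line in DBSqlDao, rely on a C3P0Util pooling helper that is also missing from the post. A minimal sketch using the c3p0 library, assuming the same MySQL coordinates as DBSqlDao, could be:

package com.foriseholdings.util;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import com.mchange.v2.c3p0.ComboPooledDataSource;

// Hypothetical sketch of the pool helper; the real class is not in the post.
// The JDBC URL and credentials are copied from DBSqlDao above.
public class C3P0Util {

    private static final ComboPooledDataSource DS = new ComboPooledDataSource();

    static {
        try {
            DS.setDriverClass("com.mysql.jdbc.Driver");
            DS.setJdbcUrl("jdbc:mysql://192.168.92.7:3306/dev_advs?useUnicode=true&characterEncoding=UTF-8");
            DS.setUser("root");
            DS.setPassword("fuhua_1234");
        } catch (Exception e) {
            throw new ExceptionInInitializerError(e);
        }
    }

    public static Connection getConnection() throws SQLException {
        return DS.getConnection();
    }

    public static void CloseResource(Connection conn, Statement st, ResultSet rs) {
        closeResultSet(rs);
        if (st != null) {
            try { st.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
        closeConn(conn);
    }

    public static void closeResultSet(ResultSet rs) {
        if (rs != null) {
            try { rs.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
    }

    public static void closePreparedStatement(PreparedStatement ps) {
        if (ps != null) {
            try { ps.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
    }

    public static void closeConn(Connection conn) {
        if (conn != null) {
            try { conn.close(); } catch (SQLException e) { e.printStackTrace(); }
        }
    }
}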