Spark Read and Write Operations for HBase and MySQL

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.{HBaseConfiguration, HConstants, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Put, Result}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.{SparkConf, SparkContext}

object Spark_Hbase {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf()
    sparkConf.setMaster("local[2]").setAppName(this.getClass.getName)
    val sc = new SparkContext(sparkConf)
    sc.setLogLevel("WARN")

    val rdd = sc.parallelize(List((1, "苹果", 11), (2, "香蕉", 12), (3, "梨", 13)))

    // Batch insert: open one connection per partition and buffer the Puts
    rdd.foreachPartition(partition => {
      val conf = HBaseConfiguration.create()
      conf.set("hbase.zookeeper.quorum", "hadoop11,hadoop12,hadoop13")
      val conn = ConnectionFactory.createConnection(conf)
      val table = conn.getTable(TableName.valueOf("yh:spark_fruit"))
      val puts = new java.util.ArrayList[Put]()
      partition.foreach(record => {
        // Turn each tuple into a Put: rowkey = id, columns under family "info"
        val put = new Put(Bytes.toBytes(record._1))
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(record._2))
        put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("price"), Bytes.toBytes(record._3))
        puts.add(put)
      })
      table.put(puts)
      table.close()
      conn.close()
    })

    // Read the data back
    val hadoopConfig = new Configuration()
    hadoopConfig.set(HConstants.ZOOKEEPER_QUORUM, "hadoop11,hadoop12,hadoop13") // HBase connection parameters
    hadoopConfig.set(TableInputFormat.INPUT_TABLE, "yh:spark_fruit") // table to scan
    sc.newAPIHadoopRDD(hadoopConfig, classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
      .map(t => {
        val rowKey = Bytes.toInt(t._1.get())
        val name = Bytes.toString(t._2.getValue("info".getBytes(), "name".getBytes()))
        val price = Bytes.toInt(t._2.getValue("info".getBytes(), "price".getBytes()))
        (rowKey, name, price)
      }).collect()
      .foreach(println)

    sc.stop()
  }
}
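Instead of managing a connection per partition by hand, one common alternative is to let Spark write through HBase's TableOutputFormat via saveAsNewAPIHadoopDataset. Below is a minimal sketch, assuming the same yh:spark_fruit table, quorum, and rdd from the main method above; it is not from the original post.

import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.mapreduce.Job

// Sketch only: write the same RDD through TableOutputFormat instead of a
// manual connection per partition (assumes the table and quorum used above)
val outConf = HBaseConfiguration.create()
outConf.set("hbase.zookeeper.quorum", "hadoop11,hadoop12,hadoop13")
outConf.set(TableOutputFormat.OUTPUT_TABLE, "yh:spark_fruit")
val job = Job.getInstance(outConf)
job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
job.setOutputKeyClass(classOf[ImmutableBytesWritable])
job.setOutputValueClass(classOf[Put])

rdd.map { case (id, name, price) =>
  val put = new Put(Bytes.toBytes(id))
  put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes(name))
  put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("price"), Bytes.toBytes(price))
  (new ImmutableBytesWritable(Bytes.toBytes(id)), put)
}.saveAsNewAPIHadoopDataset(job.getConfiguration)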


Writing to MySQL

import java.sql.DriverManager
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

object Spark_MySQL {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
    conf.setMaster("local[*]").setAppName(this.getClass.getName)
    val sc = new SparkContext(conf)
    sc.setLogLevel("WARN")

    val data: RDD[String] = sc.parallelize(List("zhangsan", "lisi", "wangwu"), 2)
    data.foreachPartition(iter => {
      // One connection and one PreparedStatement per partition, reused for
      // every row instead of being rebuilt inside the loop
      val conn = DriverManager.getConnection(
        "jdbc:mysql://hadoop13:3306/test1?characterEncoding=UTF-8", "root", "119131Zrf0...")
      val ps = conn.prepareStatement("insert into spark_user(name) values (?)")
      iter.foreach(name => {
        ps.setString(1, name)
        ps.executeUpdate()
      })
      ps.close()
      conn.close()
    })

    sc.stop()
  }
}
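Reading the rows back from the same main method can be done with Spark's built-in JdbcRDD. The sketch below assumes spark_user has a numeric id column to drive the partition bounds (the original insert only writes name, so this column is an assumption), and the upper bound of 1000 is arbitrary.

import java.sql.DriverManager
import org.apache.spark.rdd.JdbcRDD

// Sketch only: read the inserted rows back with JdbcRDD. The two ? in the
// query are required by JdbcRDD and are filled with the partition bounds.
// Assumption: spark_user has a numeric id column (not shown in the original).
val rows = new JdbcRDD(
  sc,
  () => DriverManager.getConnection(
    "jdbc:mysql://hadoop13:3306/test1?characterEncoding=UTF-8", "root", "119131Zrf0..."),
  "select id, name from spark_user where id >= ? and id <= ?",
  1, 1000, 2, // lower bound, upper bound, number of partitions
  rs => (rs.getInt(1), rs.getString(2))
)
rows.collect().foreach(println)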

Reading from HBase: scan operation

public void test3() throws IOException {
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "hadoop10");

    Connection conn = ConnectionFactory.createConnection(conf);
    Table table = conn.getTable(TableName.valueOf("testns:t_track_info"));

    // Scan with a row filter: match rowkeys containing the plate "京A11111"
    Scan scan = new Scan();
    RowFilter rowFilter = new RowFilter(CompareFilter.CompareOp.EQUAL, new RegexStringComparator("京A11111"));
    scan.setFilter(rowFilter);
    ResultScanner results = table.getScanner(scan);
    for (Result result : results) {
        byte[] row = result.getRow();
        System.out.println("rowkey = " + Bytes.toString(row));
        System.out.println("plate = " + Bytes.toString(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("car"))));
        System.out.println("checkpoint id = " + Bytes.toString(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("monitorId"))));
        System.out.println("area id = " + Bytes.toString(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("areaId"))));
        System.out.println("road id = " + Bytes.toString(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("roadId"))));
        System.out.println("pass time = " + Bytes.toLong(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("actionTime"))));
        System.out.println("speed = " + Bytes.toDouble(result.getValue(Bytes.toBytes("cf1"), Bytes.toBytes("speed"))));
    }
    results.close();
    table.close();
    conn.close();
}
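Because the rowkeys in this table are built as plate + reversed timestamp (see the write example below), a regex RowFilter still forces a full-table scan. A rowkey range scan is usually cheaper. A minimal sketch, written in Scala to match the Spark examples above (the HBase client calls are identical in Java):

// Sketch only: replace the regex filter with a rowkey range scan.
// All rows for plate 京A11111 sort between "京A11111" and "京A11112"
// because the rowkey is plate + reversed timestamp.
val scan = new Scan()
scan.setStartRow(Bytes.toBytes("京A11111"))
scan.setStopRow(Bytes.toBytes("京A11112")) // exclusive upper bound
val results = table.getScanner(scan)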

Reading from HBase: get operation

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class test_Get3 {
    public static void main(String[] args) throws IOException {
        // 1. Create the configuration object
        Configuration con = HBaseConfiguration.create();
        con.set("hbase.zookeeper.quorum", "hadoop10");
        // 2. Create a database connection
        Connection conn = ConnectionFactory.createConnection(con);
        // 3. Open the table to operate on
        Table table = conn.getTable(TableName.valueOf("testns:t_person"));
        // 4. Look up a single row by rowkey
        Get get = new Get(Bytes.toBytes("1001"));
        Result result = table.get(get);
        // Decode the result: byte[] must go through Bytes.toString(), not
        // Object.toString(), which would only print the array reference
        String rowkey = Bytes.toString(result.getRow());
        String name = Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("name")));
        String age = Bytes.toString(result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("age")));
        System.out.println(rowkey + " " + name + " " + age);
        table.close();
        conn.close();
    }
}
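When the qualifiers are not known in advance, the same Result can be walked generically through Result.rawCells(). A sketch, again in Scala for consistency with the Spark examples, applied to the result from the get above:

// Sketch only: dump every cell of a Result without hard-coding qualifiers
import org.apache.hadoop.hbase.CellUtil
result.rawCells().foreach { cell =>
  val qualifier = Bytes.toString(CellUtil.cloneQualifier(cell))
  val value = Bytes.toString(CellUtil.cloneValue(cell))
  println(s"$qualifier = $value")
}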

Writing to HBase

public void test1() throws IOException {
    // 1. Create an HBase connection
    Configuration conf = HBaseConfiguration.create();
    conf.set("hbase.zookeeper.quorum", "hadoop10");

    Connection conn = ConnectionFactory.createConnection(conf);
    Table table = conn.getTable(TableName.valueOf("testns:t_track_info"));
    String car = "豫A12345";
    Long actionTime = 1649986276000L;
    // Reverse-timestamp rowkey: newer records for the same plate sort first
    String rowkey = car + (Long.MAX_VALUE - actionTime);
    Put put = new Put(Bytes.toBytes(rowkey));

    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("car"), Bytes.toBytes("豫A12345"));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("actionTime"), Bytes.toBytes(1649986276000L));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("monitorId"), Bytes.toBytes("0001"));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("roadId"), Bytes.toBytes("111"));
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("areaId"), Bytes.toBytes("222"));
    // Stored as a double so the scan example's Bytes.toDouble() can decode it
    put.addColumn(Bytes.toBytes("cf1"), Bytes.toBytes("speed"), Bytes.toBytes(60.0));

    table.put(put);
    table.close();
    conn.close();
}
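To make the rowkey scheme concrete, here is the arithmetic worked out in Scala for the values used above:

// Worked example of the reverse-timestamp rowkey from test1:
val actionTime = 1649986276000L
val rowkey = "豫A12345" + (Long.MAX_VALUE - actionTime)
// rowkey == "豫A123459223370386868499807"; a larger actionTime yields a
// smaller suffix, so the newest record for a plate sorts first and a prefix
// scan on the plate returns its track in reverse time order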
