HBase 2.0.2 Scala API example

import java.util
import java.util.{ArrayList, HashMap, List, Map}
import org.apache.hadoop.hbase.{Cell, CompareOperator, HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.filter._
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.SparkConf
import scala.collection.JavaConversions._
object HbaseScala {

  /** Entry point: configures an HBase client connection against a ZooKeeper
    * quorum and builds a batch of sample cells for a "blog_scala" table with
    * two column families ("article", "author").
    *
    * NOTE(review): the source capture is truncated at the end (mid-comment on
    * the last line); the closing braces and the insertSingle/insertMany/query
    * helper definitions are not visible here, so only the visible setup code
    * is documented.
    */
  def main(args: Array[String]): Unit = {
     // Windows-only workaround: point Hadoop at a local winutils install so
     // the client libraries can initialize outside a real Hadoop node.
     System.setProperty("hadoop.home.dir", "D:\\hadoop-common-bin-2.7.x")
     // NOTE(review): sparkConf is created but never used in the visible code.
     val sparkConf = new SparkConf().setAppName("HBaseTest")
     val conf = HBaseConfiguration.create
     val tableName:TableName =TableName.valueOf("blog_scala")
     // ZooKeeper quorum + client port: how the HBase client locates the cluster.
     conf.set("hbase.zookeeper.quorum", "hadoop01,hadoop02,hadoop03")
     conf.set("hbase.zookeeper.property.clientPort", "2181")


      // Connection is the entry point for all HBase operations.
      // NOTE(review): no connection.close() is visible — presumably it is
      // closed in the truncated tail; verify against the full source.
      val connection= ConnectionFactory.createConnection(conf)
      // Create-table test (single column family)
//      createHTable(connection, tableName,"area")

      // Create table (multiple column families)
      val columnFamilys: Array[String] = Array("article", "author")
//      createHTable(connection, tableName,columnFamilys)

      // Build a batch of cells to insert. Each map describes one cell as
      // (rowKey, columnFamily, columnName, columnValue); maps 1-5 populate
      // row "ce_shi1", maps 6-7 populate row "ce_shi2".
      // NOTE(review): java.util collections + JavaConversions are used here;
      // idiomatic Scala would use immutable Scala collections with
      // scala.jdk.CollectionConverters — not changed to keep the truncated
      // sample byte-identical.
      val listMap: List[Map[String, AnyRef]] = new ArrayList[Map[String, AnyRef]]
      val map1: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map1.put("rowKey", "ce_shi1")
      map1.put("columnFamily", "article")
      map1.put("columnName", "title")
      map1.put("columnValue", "Head First HBase")
      listMap.add(map1)
      val map2: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map2.put("rowKey", "ce_shi1")
      map2.put("columnFamily", "article")
      map2.put("columnName", "content")
      map2.put("columnValue", "HBase is the Hadoop database")
      listMap.add(map2)
      val map3: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map3.put("rowKey", "ce_shi1")
      map3.put("columnFamily", "article")
      map3.put("columnName", "tag")
      map3.put("columnValue", "Hadoop,HBase,NoSQL")
      listMap.add(map3)
      val map4: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map4.put("rowKey", "ce_shi1")
      map4.put("columnFamily", "author")
      map4.put("columnName", "name")
      map4.put("columnValue", "nicholas")
      listMap.add(map4)
      val map5: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map5.put("rowKey", "ce_shi1")
      map5.put("columnFamily", "author")
      map5.put("columnName", "nickname")
      map5.put("columnValue", "lee")
      listMap.add(map5)
      val map6: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map6.put("rowKey", "ce_shi2")
      map6.put("columnFamily", "author")
      map6.put("columnName", "name")
      map6.put("columnValue", "spark")
      listMap.add(map6)
      val map7: Map[String, AnyRef] = new HashMap[String, AnyRef]
      map7.put("rowKey", "ce_shi2")
      map7.put("columnFamily", "author")
      map7.put("columnName", "nickname")
      map7.put("columnValue", "hadoop")
      listMap.add(map7)
//      insertMany(connection, tableName,listMap);
//      insertMany(connection,tableName)

      // Insert a single row of data
//      insertSingl
      // NOTE(review): source truncated here — remainder of main (and the
      // helper definitions it references) is missing from this capture.
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包

打赏作者

泪痕残

你的鼓励将是我创作的最大动力

¥1 ¥2 ¥4 ¥6 ¥10 ¥20
扫码支付:¥1
获取中
扫码支付

您的余额不足,请更换扫码支付或充值

打赏作者

实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值