Batch Writing and Reading HBase with Spark

FileAna.scala parses MAVLink messages from a text file on HDFS and writes the mission items into an HBase table with saveAsHadoopDataset; ReadHBase.scala reads the same table back as an RDD with newAPIHadoopRDD.

FileAna.scala

import java.net.URI
import java.util.UUID

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.{Put, Scan}
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.{Base64, Bytes}
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.{SparkConf, SparkContext}

// MAVLinkMessage, msg_mission_item, QuickParser and ByteAndHex come from the
// project's MAVLink dependency; import them according to your own package layout.

object FileAna {

  val hdfsPath = "hdfs://master:9000"
  val hdfs = FileSystem.get(new URI(hdfsPath), new Configuration())

  def main(args: Array[String]): Unit = {
    // Standalone master plus the HBase jars the executors need on their classpath.
    val conf = new SparkConf().setAppName("FileAna").setMaster("spark://master:7077")
      .set("spark.driver.host", "192.168.1.127")
      .setJars(List("/home/pang/woozoomws/spark-service.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-common-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-client-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-protocol-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/htrace-core-3.1.0-incubating.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-server-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/metrics-core-2.2.0.jar"))

    val sc = new SparkContext(conf)

    // Parse each line of the input file into a MAVLink message and turn it into an HBase Put.
    val rdd = sc.textFile("hdfs://master:9000/woozoom/msgfile.txt")
    val rdd2 = rdd.map(x => convertToHbase(anaMavlink(x)))

    // Output configuration: write the Puts into the "MissionItem" table via the old
    // mapred API, which is what saveAsHadoopDataset expects.
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")
    val jobConf = new JobConf(hbaseConf, this.getClass)
    jobConf.setOutputFormat(classOf[TableOutputFormat])
    jobConf.set(TableOutputFormat.OUTPUT_TABLE, "MissionItem")

    rdd2.saveAsHadoopDataset(jobConf)

    sc.stop()
  }

  // TableInputFormat (used by ReadHBase below) expects a Scan serialized as a
  // Base64-encoded protobuf string; unused in this writer.
  def convertScanToString(scan: Scan) = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }

  // Convert a MAVLink mission item into a Put keyed by a random UUID.
  // Note: messages that are not msg_mission_item yield an empty Put, which
  // TableOutputFormat rejects; filter those out if the input mixes message types.
  def convertToHbase(msg: MAVLinkMessage) = {
    val p = new Put(Bytes.toBytes(UUID.randomUUID().toString))
    if (msg.isInstanceOf[msg_mission_item]) {
      val missionItem = msg.asInstanceOf[msg_mission_item]
      p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("x"), Bytes.toBytes(missionItem.x))
      p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("y"), Bytes.toBytes(missionItem.y))
      p.addColumn(Bytes.toBytes("data"), Bytes.toBytes("z"), Bytes.toBytes(missionItem.z))
    }
    (new ImmutableBytesWritable, p)
  }

  // Decode a hex string into raw bytes and unpack it as a MAVLink message.
  val anaMavlink = (str: String) => {
    val bytes = ByteAndHex.hexStringToBytes(str)
    QuickParser.parse(bytes).unpack()
  }
}
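
The write job assumes that the MissionItem table and its data column family already exist. A minimal sketch for creating them with the HBase 1.2 client API follows; the CreateMissionItemTable object name is made up for illustration and is not part of the original project.

import org.apache.hadoop.hbase.{HBaseConfiguration, HColumnDescriptor, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.client.ConnectionFactory

// Hypothetical helper: pre-create the table that FileAna writes into.
object CreateMissionItemTable {
  def main(args: Array[String]): Unit = {
    val conf = HBaseConfiguration.create()
    conf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")
    val connection = ConnectionFactory.createConnection(conf)
    val admin = connection.getAdmin
    val tableName = TableName.valueOf("MissionItem")
    if (!admin.tableExists(tableName)) {
      // "data" is the column family used by convertToHbase above.
      val desc = new HTableDescriptor(tableName)
      desc.addFamily(new HColumnDescriptor("data"))
      admin.createTable(desc)
    }
    admin.close()
    connection.close()
  }
}

The same thing can be done interactively in the HBase shell with create 'MissionItem', 'data'.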

ReadHBase.scala

import java.net.URI

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Scan
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.protobuf.ProtobufUtil
import org.apache.hadoop.hbase.util.Base64
import org.apache.spark.{SparkConf, SparkContext}

object ReadHBase {

  val hdfsPath = "hdfs://master:9000"
  val hdfs = FileSystem.get(new URI(hdfsPath), new Configuration())

  def main(args: Array[String]): Unit = {
    // Same cluster setup and HBase jars as the writer.
    val conf = new SparkConf().setAppName("ReadHBase").setMaster("spark://master:7077")
      .set("spark.driver.host", "192.168.1.127")
      .setJars(List("/home/pang/woozoomws/spark-service.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-common-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-client-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-protocol-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/htrace-core-3.1.0-incubating.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/hbase-server-1.2.2.jar",
        "/home/pang/woozoomws/spark-service/lib/hbase/metrics-core-2.2.0.jar"))

    val sc = new SparkContext(conf)

    // Input configuration: scan the whole "MissionItem" table through the new
    // mapreduce API, which is what newAPIHadoopRDD expects.
    val hbaseConf = HBaseConfiguration.create()
    hbaseConf.addResource("/home/hadoop/software/hbase-1.2.2/conf/hbase-site.xml")
    hbaseConf.set(TableInputFormat.INPUT_TABLE, "MissionItem")

    val scan = new Scan()
    hbaseConf.set(TableInputFormat.SCAN, convertScanToString(scan))

    // Each record is a (row key, Result) pair produced by TableInputFormat.
    val readRDD = sc.newAPIHadoopRDD(hbaseConf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])

    val count = readRDD.count()
    println("Mission Item Count: " + count)

    sc.stop()
  }

  // TableInputFormat expects the Scan serialized as a Base64-encoded protobuf string.
  def convertScanToString(scan: Scan) = {
    val proto = ProtobufUtil.toScan(scan)
    Base64.encodeBytes(proto.toByteArray)
  }
}
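
ReadHBase only counts the rows. To get the coordinates back out of each Result, the RDD can be mapped inside main after readRDD is created; the sketch below assumes x, y and z were stored as floats, which is what Bytes.toBytes produces for the float fields of msg_mission_item in the writer above.

import org.apache.hadoop.hbase.util.Bytes

// Map each (row key, Result) pair back to its (x, y, z) coordinates.
val points = readRDD.map { case (_, result) =>
  val x = Bytes.toFloat(result.getValue(Bytes.toBytes("data"), Bytes.toBytes("x")))
  val y = Bytes.toFloat(result.getValue(Bytes.toBytes("data"), Bytes.toBytes("y")))
  val z = Bytes.toFloat(result.getValue(Bytes.toBytes("data"), Bytes.toBytes("z")))
  (x, y, z)
}
points.take(10).foreach(println)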
