Spark writing to HBase: bulk import into HBase, method 4

This method generates HFiles with HFileOutputFormat and then loads them into the table with LoadIncrementalHFiles (bulk load).

import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.{HBaseConfiguration, KeyValue}
import org.apache.hadoop.hbase.client.HTable
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapred.TableOutputFormat
import org.apache.hadoop.hbase.mapreduce.{HFileOutputFormat, LoadIncrementalHFiles}
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.{SparkConf, SparkContext}

object SparkWriteHBase {

  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("SparkWriteHBase").setMaster("local")
    val sc = new SparkContext(sparkConf)

    // Each line of the input file is expected to be "rowkey,score"
    val readFile = sc.textFile("/input/test.txt").map(x => x.split(","))

    val tableName = "student5"
    val myConf = HBaseConfiguration.create()
    val table = new HTable(myConf, tableName)
    myConf.set(TableOutputFormat.OUTPUT_TABLE, tableName)

    // Configure the job for an incremental (bulk) load into the target table
    val job = Job.getInstance(myConf)
    job.setMapOutputKeyClass(classOf[ImmutableBytesWritable])
    job.setMapOutputValueClass(classOf[KeyValue])
    HFileOutputFormat.configureIncrementalLoad(job, table)

    // HFiles must contain KeyValues in row-key order, so sort the RDD first,
    // then build one KeyValue per line under column family "prop", qualifier "score"
    val rdd = readFile.sortBy(x => x(0)).map(x => {
      val kv: KeyValue = new KeyValue(Bytes.toBytes(x(0)), Bytes.toBytes("prop"), Bytes.toBytes("score"), Bytes.toBytes(x(1)))
      (new ImmutableBytesWritable(kv.getKey), kv)
    })

    // Write the HFiles to HDFS, then bulk load them into the table
    rdd.saveAsNewAPIHadoopFile("/output/iteblog", classOf[ImmutableBytesWritable], classOf[KeyValue], classOf[HFileOutputFormat], myConf)

    val bulkLoader = new LoadIncrementalHFiles(myConf)
    bulkLoader.doBulkLoad(new Path("/output/iteblog"), table)
  }
}
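
A minimal verification sketch follows; it is not part of the original post. It assumes the bulk load above has already run against the table student5 (created beforehand, e.g. with create 'student5','prop' in the HBase shell), and that the input file contained a row with key "1" — that row key is hypothetical sample data. The snippet reads the prop:score cell back through the standard HBase client API to confirm the import.

import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client.{ConnectionFactory, Get}
import org.apache.hadoop.hbase.util.Bytes

object VerifyBulkLoad {
  def main(args: Array[String]): Unit = {
    // Connect to HBase using the configuration from hbase-site.xml on the classpath
    val conf = HBaseConfiguration.create()
    val connection = ConnectionFactory.createConnection(conf)
    val table = connection.getTable(TableName.valueOf("student5"))

    // "1" is a hypothetical row key from an assumed input line such as "1,90"
    val result = table.get(new Get(Bytes.toBytes("1")))
    val score = Bytes.toString(result.getValue(Bytes.toBytes("prop"), Bytes.toBytes("score")))
    println(s"prop:score for row 1 = $score")

    table.close()
    connection.close()
  }
}

ConnectionFactory and Table are used here instead of the older HTable constructor from the job above, since they are the non-deprecated client entry points in more recent HBase versions.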
