[spark] Spark 2.4.6: writing to HBase 1.3.1 with put

Scenario: the data volume is small, so each record is written with put.

1. Create the HBase table

create_namespace 'default'   # not strictly needed: the 'default' namespace already exists in HBase; only required for a custom namespace

create 'default:t_test1', 'DATA'
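
If you prefer to create the table from code rather than the shell, a minimal sketch with the HBase 1.x Admin API might look like the one below. It assumes the same MyLoginCommon.loginHbase() helper that the Spark job in step 3 uses to obtain a logged-in Configuration; everything else is standard HBase client API.

// Sketch: create 'default:t_test1' with column family 'DATA' via the Admin API.
// MyLoginCommon.loginHbase() is assumed from this post; replace with your own login/config code.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import security.MyLoginCommon;

public class CreateTestTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = MyLoginCommon.loginHbase();   // assumed helper from this post
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            TableName name = TableName.valueOf("default:t_test1");
            if (!admin.tableExists(name)) {
                HTableDescriptor desc = new HTableDescriptor(name);     // HBase 1.x descriptor API
                desc.addFamily(new HColumnDescriptor("DATA"));
                admin.createTable(desc);
            }
        }
    }
}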

2. Test data file test1.txt

1595638951700,1,1.1939971,1.4677016,1.4034922
1595638951721,1,1.3716854,1.566847,1.4458307
1595638951723,2,1.3352232,1.4566108,1.5208404
1595638951715,1,1.8877013,1.1247256,1.6103745
1595638951696,2,1.2885377,1.7600425,1.4150856
1595638951707,1,1.8486422,1.1446141,1.5813918
1595638951694,3,1.2366319,1.4496765,1.7620823
1595638951740,1,1.9078307,1.7746134,1.337183
1595638951714,3,1.261858,1.2809255,1.4845717
1595638951697,2,1.5660034,1.0154893,1.6899275
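
Each line is a timestamp followed by an id and three float readings. The job in step 3 splits on ',' and uses field 0 (the timestamp) as the rowkey and field 3 (zero-based) as the value written to column DATA:i1. The demo code below hard-codes a single record; to feed this whole file instead, the input RDD could be built directly from it, for example (the path is only an example, point it at wherever test1.txt actually lives):

// In the step-3 job, replace the hard-coded list/parallelize with a textFile read:
JavaRDD<String> rdd = javaSparkContext.textFile("hdfs:///tmp/test1.txt");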

3. Writing to HBase 1.3.1 with put from Spark 2.4.6

package mySpark;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.spark.JavaHBaseContext;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.sql.SparkSession;

import scala.Tuple2;
import security.MyLoginCommon;

public class MySpark3 implements Serializable {

    public static void main(String[] args) throws Exception {
        // Write into the table created in step 1.
        new MySpark3().writeHbaseWithSpark("default:t_test1");
    }

    public void writeHbaseWithSpark(String tableName) throws Exception {
        SparkSession spark = SparkSession.builder()
                .appName("mytest1")
                .master("local")
                .getOrCreate();
        JavaSparkContext javaSparkContext = new JavaSparkContext(spark.sparkContext());

        // MyLoginCommon.loginHbase() performs the login and returns an HBase Configuration.
        Configuration hbaseConf = MyLoginCommon.loginHbase();
        JavaHBaseContext hbaseContext = new JavaHBaseContext(javaSparkContext, hbaseConf);

        // One sample record: timestamp,id,value1,value2,value3
        String msg = "1595389254875,2,1.9273945,1.0597579,1.1112773";
        List<String> list = new ArrayList<>();
        list.add(msg);
        JavaRDD<String> rdd = javaSparkContext.parallelize(list);

        // foreachPartition hands each partition an HBase Connection managed by JavaHBaseContext.
        hbaseContext.foreachPartition(rdd, new VoidFunction<Tuple2<Iterator<String>, Connection>>() {
            public void call(Tuple2<Iterator<String>, Connection> t) throws Exception {
                Table table = t._2().getTable(TableName.valueOf(tableName));   // only needed for the commented-out read below
                BufferedMutator mutator = t._2().getBufferedMutator(TableName.valueOf(tableName));
                while (t._1().hasNext()) {
                    String a = t._1().next();
                    String[] b = a.split(",");
                    // Result r = table.get(new Get(Bytes.toBytes(b[0])));    // read the row back
                    Put put = new Put(Bytes.toBytes(b[0]));                    // rowkey = timestamp
                    put.addColumn(Bytes.toBytes("DATA"), Bytes.toBytes("i1"), Bytes.toBytes(b[3]));
                    mutator.mutate(put);
                }
                mutator.flush();
                mutator.close();
                table.close();
            }
        });

        spark.close();
    }
}
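
To check that the write landed, a small read-back sketch is shown below. It again assumes the MyLoginCommon.loginHbase() helper used elsewhere in this post; the rowkey is the timestamp of the hard-coded record written in step 3.

// Sketch: read back the row written above to verify the put.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import security.MyLoginCommon;

public class VerifyPut {
    public static void main(String[] args) throws Exception {
        Configuration conf = MyLoginCommon.loginHbase();   // assumed helper from this post
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("default:t_test1"))) {
            // Rowkey is the timestamp of the record written in step 3.
            Result r = table.get(new Get(Bytes.toBytes("1595389254875")));
            System.out.println(Bytes.toString(r.getValue(Bytes.toBytes("DATA"), Bytes.toBytes("i1"))));
        }
    }
}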

 
