HBase export/import code

 

package com.run.hbase.dataImport;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;

/**
 * Extracts the Inner Mongolia data from HBase, stores it in local files,
 * and loads it back into another table.
 *
 * @author Administrator
 */
public class GetNMData {

    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.5.211");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        HTable table = new HTable(conf, "virtualaccount");   // source table
        HTable table1 = new HTable(conf, "virtualaccount1"); // target table

        // Export phase:
        // getRelationship(table);
        // getGroup(table);

        // Import phase:
        putGroup(table1);
        putRelationship(table1);
    }

    /** Scans the relationship1 column family and dumps it to a local TSV file. */
    public static void getRelationship(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/relationship1");
        BufferedWriter bWriter = new BufferedWriter(new FileWriter(path));
        Scan s = new Scan();
        s.addFamily("relationship1".getBytes());
        ResultScanner ss = table.getScanner(s);
        int i = 0;
        for (Result r : ss) {
            for (KeyValue kv : r.list()) {
                // One line per cell: rowkey <TAB> family <TAB> qualifier <TAB> value
                bWriter.append(new String(kv.getRow(), "UTF-8") + "\t" + "relationship1" + "\t"
                        + new String(kv.getQualifier(), "UTF-8") + "\t"
                        + new String(kv.getValue(), "UTF-8") + "\n");
                i++;
                if (i % 5000 == 0) { // flush the write buffer every 5000 cells
                    bWriter.flush();
                }
            }
        }
        bWriter.close();
        ss.close();
        table.close();
    }

    /** Scans the group column family and dumps it to a local TSV file. */
    public static void getGroup(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/group");
        BufferedWriter bWriter = new BufferedWriter(new FileWriter(path));
        Scan s = new Scan();
        s.addFamily("group".getBytes());
        ResultScanner ss = table.getScanner(s);
        int i = 0;
        for (Result r : ss) {
            for (KeyValue kv : r.list()) {
                // Same TSV layout as getRelationship: rowkey, family, qualifier, value
                bWriter.append(new String(kv.getRow(), "UTF-8") + "\t" + "group" + "\t"
                        + new String(kv.getQualifier(), "UTF-8") + "\t"
                        + new String(kv.getValue(), "UTF-8") + "\n");
                i++;
                if (i % 5000 == 0) { // flush the write buffer every 5000 cells
                    bWriter.flush();
                }
            }
        }
        bWriter.close();
        ss.close();
        table.close();
    }

    /** Reads the exported group file and writes each line back as a Put. */
    public static void putGroup(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/group");
        BufferedReader bReader = new BufferedReader(new FileReader(path));
        String tmp = bReader.readLine();
        while (tmp != null) { // loop until end of file
            String[] t = tmp.split("\t", 5);
            if (t.length == 4) {
                Put p = new Put(t[0].getBytes("UTF-8"));
                p.add("group".getBytes(), t[2].getBytes(), t[3].getBytes());
                table.put(p);
            }
            tmp = bReader.readLine();
        }
        table.close();
        bReader.close();
    }

    /** Reads the exported relationship1 file and writes each line back as a Put. */
    public static void putRelationship(HTable table) throws IOException {
        File path = new File("/home/hadoop/tmp/relationship1");
        BufferedReader bReader = new BufferedReader(new FileReader(path));
        String tmp = bReader.readLine();
        while (tmp != null) { // loop until end of file
            String[] t = tmp.split("\t", 5);
            if (t.length == 4) {
                Put p = new Put(t[0].getBytes("UTF-8"));
                p.add("relationship1".getBytes(), t[2].getBytes(), t[3].getBytes());
                table.put(p);
            }
            tmp = bReader.readLine();
        }
        table.close();
        bReader.close();
    }
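
    // Not in the original post: a hedged sketch of a batched import variant.
    // putGroup above issues one RPC per row; HTable.put(List<Put>) sends many
    // rows per call. The batch size of 1000 and the method name are arbitrary
    // choices here; java.util names are fully qualified to avoid extra imports.
    public static void putGroupBatched(HTable table) throws IOException {
        BufferedReader bReader = new BufferedReader(
                new FileReader(new File("/home/hadoop/tmp/group")));
        java.util.List<Put> batch = new java.util.ArrayList<Put>();
        String tmp = bReader.readLine();
        while (tmp != null) {
            String[] t = tmp.split("\t", 5);
            if (t.length == 4) {
                Put p = new Put(t[0].getBytes("UTF-8"));
                p.add("group".getBytes(), t[2].getBytes(), t[3].getBytes());
                batch.add(p);
                if (batch.size() >= 1000) { // send a full batch in one call
                    table.put(batch);
                    batch.clear();
                }
            }
            tmp = bReader.readLine();
        }
        if (!batch.isEmpty()) { // send the trailing partial batch
            table.put(batch);
        }
        table.close();
        bReader.close();
    }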

}
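
The HTable constructor used above belongs to the old 0.9x-era client and was removed in later HBase releases. For reference, here is a minimal sketch of the export step against the newer Connection/Table/Cell client API. It is not part of the original program; it assumes the same quorum address, table name, column family, and output path as the code above.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class GetNMDataModern {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.5.211");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        // Connection, Table, and the writer are all Closeable,
        // so try-with-resources replaces the manual close() calls.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("virtualaccount"));
             BufferedWriter out = new BufferedWriter(new FileWriter("/home/hadoop/tmp/group"))) {
            Scan s = new Scan();
            s.addFamily(Bytes.toBytes("group"));
            try (ResultScanner ss = table.getScanner(s)) {
                for (Result r : ss) {
                    for (Cell cell : r.rawCells()) { // Cell replaces KeyValue
                        out.write(Bytes.toString(CellUtil.cloneRow(cell)) + "\t"
                                + "group" + "\t"
                                + Bytes.toString(CellUtil.cloneQualifier(cell)) + "\t"
                                + Bytes.toString(CellUtil.cloneValue(cell)) + "\n");
                    }
                }
            }
        }
    }
}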

