package translate1;
import java.io.IOException;
import java.util.Scanner;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.*;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;
public class translate1 {
// Shared HBase configuration used by both table creation and the MR job.
static Configuration cfg = HBaseConfiguration.create();

/**
 * Creates an HBase table with a single column family.
 * If the table already exists, prints a message and terminates the JVM
 * (preserving the original command-line behavior).
 *
 * @param tablename    name of the table to create
 * @param columnFamily name of the table's only column family
 * @throws Exception if the HBase admin operations fail
 */
public static void create(String tablename, String columnFamily) throws Exception {
    // try-with-resources: the original leaked the HBaseAdmin connection.
    try (HBaseAdmin admin = new HBaseAdmin(cfg)) {
        if (admin.tableExists(tablename)) { // check existence before creating
            System.out.println("table Exists!");
            System.exit(0);
        }
        @SuppressWarnings("deprecation")
        HTableDescriptor tableDesc = new HTableDescriptor(tablename);
        tableDesc.addFamily(new HColumnDescriptor(columnFamily));
        admin.createTable(tableDesc);
        System.out.println("create table success!");
    }
}

/**
 * Builds the map-only import job: reads text files from args[0] and lets
 * HourlyImporter write Puts directly into the given HBase table.
 *
 * @param conf      Hadoop/HBase configuration
 * @param TableName destination HBase table name
 * @param args      args[0] is the input directory
 * @return the configured, not-yet-submitted Job
 * @throws IOException if job setup fails
 */
public static Job createSubmittableJob(Configuration conf, String TableName, String[] args) throws IOException {
    Path inputDir = new Path(args[0]);
    // Job.getInstance replaces the deprecated Job(Configuration, String) constructor.
    Job job = Job.getInstance(conf, "hac_chapter2_recipe3");
    job.setJarByClass(HourlyImporter.class);
    FileInputFormat.setInputPaths(job, inputDir);
    job.setMapperClass(HourlyImporter.class);
    // Wires the TableOutputFormat and output key/value classes for TableName.
    TableMapReduceUtil.initTableReducerJob(TableName, null, job);
    // Map-only job: the mapper emits Puts straight to the table.
    job.setNumReduceTasks(0);
    TableMapReduceUtil.addDependencyJars(job);
    return job;
}

/**
 * Runs the migration job synchronously and reports success.
 * ("qianyi" is pinyin for "migrate".)
 *
 * @param TableName destination HBase table name
 * @param args      forwarded to createSubmittableJob (args[0] = input dir)
 * @throws Exception if job construction or execution fails
 */
public static void qianyi(String TableName, String[] args) throws Exception {
    Job job = createSubmittableJob(cfg, TableName, args);
    if (job.waitForCompletion(true)) {
        System.out.println("迁移成功!");
    }
}

/**
 * Entry point: prompts for the target table and column family, creates the
 * table, runs the import job, and prints the elapsed time.
 */
public static void main(String[] args) throws Exception {
    // try-with-resources: the original never closed the Scanner.
    try (Scanner reader = new Scanner(System.in)) {
        System.out.println("请输入迁移后Hbase数据库中表名:");
        String table = reader.next();
        System.out.println("请输入迁移后Hbase数据库中表的列族名:");
        String n = reader.next();
        long start = System.currentTimeMillis();
        translate1.create(table, n);
        // Publish the family name so the mapper can pick it up instead of
        // relying solely on its hard-coded default family "n".
        cfg.set("import.family", n);
        translate1.qianyi(table, args);
        long end = System.currentTimeMillis();
        double sumtime = (end - start) / 1000.00;
        System.out.println("迁移所用的总时间为" + sumtime + "s");
    }
}
}
/**
 * Map-only importer invoked by translate1.createSubmittableJob: parses one
 * tab-separated text line per call and emits one HBase Put per line.
 */
class HourlyImporter extends
Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {
// Single timestamp applied to every cell written by this task attempt.
private long ts;
// Column family written to. Defaults to "n" (the original hard-coded value);
// overridable through the "import.family" configuration key set by the driver.
static byte[] family = Bytes.toBytes("n");

@Override
protected void setup(Context context) {
    ts = System.currentTimeMillis();
    // Falls back to "n" when the driver did not publish a family name,
    // preserving the original behavior.
    family = Bytes.toBytes(context.getConfiguration().get("import.family", "n"));
}

/**
 * Pads str with '0' characters up to length n; j == true pads on the left,
 * otherwise on the right. E.g. change("3", 2, true) -> "03", which is used
 * to build column qualifiers such as "v03".
 *
 * @param str string to pad (returned unchanged if null or already long enough)
 * @param n   target length
 * @param j   true for left padding, false for right padding
 * @return the padded string
 */
public static String change(String str, int n, boolean j) {
    if (str == null || str.length() >= n) return str;
    StringBuilder pad = new StringBuilder(); // avoids O(n^2) string concat in the loop
    for (int i = str.length(); i < n; i++) {
        pad.append('0');
    }
    return j ? pad + str : str + pad;
}

/**
 * Splits one tab-separated line into station/month/day (rowkey) plus 24
 * value columns (fields 3..26, written as qualifiers v03..v26) and emits
 * the resulting Put.
 */
@SuppressWarnings("deprecation")
@Override
public void map(LongWritable offset, Text value, Context context)
        throws IOException, InterruptedException {
    // Propagating InterruptedException (allowed by the Mapper.map contract)
    // instead of swallowing it: the original printStackTrace() silently
    // dropped the record.
    String line = value.toString();
    String a[] = line.split("\t");
    // Rowkey fields (0-2) plus value columns 3..26 require 27 fields.
    if (a.length < 27) {
        // Skip malformed lines rather than failing the whole task with an
        // ArrayIndexOutOfBoundsException.
        return;
    }
    String stationID = a[0];
    String month = a[1];
    String day = a[2];
    byte[] bRowKey = Bytes.toBytes(stationID + month + day);
    ImmutableBytesWritable rowKey = new ImmutableBytesWritable(bRowKey);
    Put p = new Put(bRowKey);
    // 24 value columns: input fields 3..26 become qualifiers v03..v26.
    for (int i = 3; i <= 26; i++) {
        String columnI = "v" + change(String.valueOf(i), 2, true);
        p.add(family, Bytes.toBytes(columnI), ts, Bytes.toBytes(a[i]));
    }
    context.write(rowKey, p);
}
}