我知道可以使用一个 Mapper 和一个 Job 写入多张 HBase 表,但似乎没有找到别人这么做的例子,所以我自己写出来试试。
map1:
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.hbase.client.Put;
static class Mapper1 extends Mapper<LongWritable,Text,ImmutableBytesWritable,Put>{
private ImmutableBytesWritable tbl1 = new ImmutableBytesWritable (Bytes.toBytes(表1));
@Override
public void map(LongWritable key ,Text value, Context context) throws IOException,InterruptedException{
if(逻辑){
byte[] rowKey = Bytes.toBytes(主键名);
Put p =new Put(rowKey);
p.addColumn(Bytes