Setting up the environment to run HBase MapReduce jobs on Hadoop

This post walks through the environment setup needed to run HBase MapReduce programs on Hadoop, including adding the HBase jar paths in hadoop-env.sh, and provides a Java example that reads data from HBase, showing how to set up the configuration, initialize the Job, and implement the Mapper and Reducer.

When running an HBase MapReduce program on Hadoop, the runtime environment is missing many jars, so they need to be added in hadoop-env.sh.

First, include the jars under the HBase lib directory whose names start with hbase:

for f in $HBASE_HOME/lib/hbase*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done

In addition, add metrics-core-2.2.0.jar, placing it under $HADOOP_HOME/extlib, and append everything in that directory as well:

for f in $HADOOP_HOME/extlib/*.jar; do
  if [ "$HADOOP_CLASSPATH" ]; then
    export HADOOP_CLASSPATH=$HADOOP_CLASSPATH:$f
  else
    export HADOOP_CLASSPATH=$f
  fi
done
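Note that editing hadoop-env.sh fixes the classpath of the client JVM that submits the job. For the map and reduce tasks themselves, the HBase client can also ship its dependency jars to the cluster through the distributed cache via TableMapReduceUtil.addDependencyJars. A minimal sketch (the class and method names here are just illustrative; as far as I know, the six-argument initTableMapperJob overload used in the source below already does this by default):

import java.io.IOException;

import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.mapreduce.Job;

public class DependencyJarsExample {
    // Call after creating the Job and before submitting it.
    public static void configure(Job job) throws IOException {
        // Adds the HBase jars (and transitive dependencies such as
        // metrics-core) to the job via the distributed cache, so the
        // worker nodes do not need them installed locally.
        TableMapReduceUtil.addDependencyJars(job);
    }
}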

 

 

Then run the job (the fully qualified class name must match the package and class in the source):

./hadoop jar /tmp/idStatics.jar sea.hbase.IdStatics

 

 

Source code (the job looks for duplicate ids: the mapper emits each id value reversed, and the reducer writes out any value seen more than once):

package sea.hbase; // must match the class name passed to "hadoop jar"

import java.io.IOException;
import java.util.List;

import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

 

public class IdStatics extends Configured implements Tool {

    public static final String table = "id_records";
    // "idValue" is used as the column *family* name, so the field is named accordingly
    public static final byte[] family = Bytes.toBytes("idValue");

 

    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "192.168.1.250:2181,192.168.1.250:2182,192.168.1.250:2183");
        conf.set("zookeeper.znode.parent", "/hbase13");

        Job job = Job.getInstance(conf, "read_data_from_hbase");
        job.setJarByClass(IdStatics.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setReducerClass(ReadReducer.class);
        job.setSpeculativeExecution(false);

        // Scan the entire column family
        Scan scan = new Scan();
        scan.addFamily(family);
        scan.setMaxVersions(5); // read up to 5 versions of each cell
        scan.setCaching(10);    // rows fetched per RPC
        scan.setBatch(2);       // at most 2 cells per Result, so a wide row
                                // may arrive split across several map() calls

        TableMapReduceUtil.initTableMapperJob(table,
                scan,
                ReadMapper.class,
                Text.class,
                Text.class,
                job);

        // Remove any previous output so the job can be rerun
        String output = "/result";
        FileSystem.get(job.getConfiguration()).delete(new Path(output), true);
        FileOutputFormat.setOutputPath(job, new Path(output));

        return job.waitForCompletion(true) ? 0 : 1;
    }

 

 

    static class ReadMapper extends TableMapper<Text, Text> {

        @Override
        protected void map(ImmutableBytesWritable key, Result result, Context context)
                throws IOException, InterruptedException {
            if (result == null || result.isEmpty()) return;

            List<Cell> cells = result.listCells();
            if (CollectionUtils.isNotEmpty(cells)) {
                for (Cell cell : cells) {
                    // Emit the reversed id value with a count of "1";
                    // reversing presumably spreads ids that share a common
                    // prefix more evenly across reducers
                    String value = Bytes.toString(CellUtil.cloneValue(cell));
                    context.write(new Text(StringUtils.reverse(value)), new Text("1"));
                }
            }
        }
    }

 

 

    static class ReadReducer extends Reducer<Text, Text, Text, Text> {

        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            int total = 0;
            for (Text each : values) {
                total += Integer.parseInt(each.toString());
            }
            // Only write out ids seen more than once, i.e. duplicates
            if (total > 1) {
                context.write(key, new Text(String.valueOf(total)));
            }
        }
    }

 

 

    public static void main(String[] args) throws Exception {
        System.exit(ToolRunner.run(new IdStatics(), args));
    }
}
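Since the reduce step is a plain sum, shuffle traffic could be cut with a combiner that sums partial counts while leaving the duplicate filter to the reducer. A sketch, assuming a hypothetical CountCombiner registered with job.setCombinerClass(CountCombiner.class) (not part of the original job):

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class CountCombiner extends Reducer<Text, Text, Text, Text> {
    @Override
    protected void reduce(Text key, Iterable<Text> values, Context context)
            throws IOException, InterruptedException {
        int partial = 0;
        for (Text each : values) {
            partial += Integer.parseInt(each.toString());
        }
        // No "> 1" filter here: a combiner only sees a partial count,
        // so the duplicate filter must stay in the reducer.
        context.write(key, new Text(String.valueOf(partial)));
    }
}

One version note: the Scan setters above are the HBase 1.x client API (consistent with the /hbase13 znode parent). If you build against HBase 2.x instead, setMaxVersions is deprecated; a rough equivalent of the scan setup, assuming an HBase 2.x client (class and method names illustrative):

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class Hbase2ScanExample {
    public static Scan buildScan() {
        Scan scan = new Scan();
        scan.addFamily(Bytes.toBytes("idValue"));
        scan.readVersions(5); // HBase 2.x replacement for setMaxVersions(5)
        scan.setCaching(10);
        scan.setBatch(2);
        return scan;
    }
}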
