Hadoop MapReduce: Vertical-to-Horizontal and Horizontal-to-Vertical Table Conversion in Java
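
A vertical ("tall") table stores one key-value pair per line, while a horizontal ("wide") table puts all values that share a key onto a single row. The job below implements the vertical-to-horizontal direction: the mapper reads tab-separated lines of the form `value<TAB>group` and emits (group, value), and the reducer concatenates each group's values into one tab-separated row. With some hypothetical sample data (student names grouped by class), the transformation looks like this:

```
# input (vertical): one "name<TAB>class" pair per line
tom	class1
jack	class1
lucy	class2

# output (horizontal): one row per class
class1	tom	jack
class2	lucy
```

Note that MapReduce does not guarantee the order of values within a reducer group, so the column order of each output row can vary between runs.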

```
package seg;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * @author zhf
 * @email zhf.thu@gmail.com
 * @version Created: 2014-08-24 09:56:45
 */
public class Vertical2Horizontal extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        int exitCode = ToolRunner.run(new Vertical2Horizontal(), args);
        System.exit(exitCode);
    }

    @Override
    public int run(String[] arg0) throws Exception {
        String[] args = new GenericOptionsParser(arg0).getRemainingArgs();
        if (args.length != 2) {
            System.out.println("Usage: seg.Vertical2Horizontal <input> <output>");
            System.exit(1);
        }
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Remove the output directory if it already exists so the job can be rerun.
        if (fs.exists(new Path(args[1])))
            fs.delete(new Path(args[1]), true);
        Job job = new Job(conf);
        job.setJarByClass(getClass());
        job.setMapperClass(HVMapper.class);
        job.setReducerClass(HVReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        return job.waitForCompletion(true) ? 0 : 1;
    }

    /** Mapper: each input line is "value<TAB>group"; emit (group, value) so the shuffle groups rows by the second column. */
    public static class HVMapper extends Mapper<LongWritable, Text, Text, Text> {
        private Text text = new Text();
        private Text clazz = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            String[] params = line.split("\t");
            text.set(params[0]);
            clazz.set(params[1]);
            context.write(clazz, text);
        }
    }

    /** Reducer: concatenate all values of one key into a single tab-separated row. */
    public static class HVReducer extends Reducer<Text, Text, Text, Text> {
        private Text result = new Text();

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            StringBuilder tmp = new StringBuilder();
            for (Text val : values) {
                tmp.append(val).append("\t");
            }
            result.set(tmp.toString().trim());
            context.write(key, result);
        }
    }
}
```
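
The title also mentions the reverse transformation (horizontal to vertical), which the listing above does not include. A minimal sketch, assuming input in the same layout as the output above: a map-only job whose mapper splits each wide row back into one (value, group) line per column. The class name H2VMapper is my own; nest it in the file above so the same imports apply, register it with job.setMapperClass(H2VMapper.class), and call job.setNumReduceTasks(0) so the map output is written directly.

```
    /**
     * Sketch: horizontal-to-vertical mapper, assuming lines of the form
     * "class<TAB>name1<TAB>name2...".
     */
    public static class H2VMapper extends Mapper<LongWritable, Text, Text, Text> {
        private Text name = new Text();
        private Text clazz = new Text();

        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] params = value.toString().split("\t");
            clazz.set(params[0]);
            // Emit one (name, class) line per remaining column, restoring the tall layout.
            for (int i = 1; i < params.length; i++) {
                name.set(params[i]);
                context.write(name, clazz);
            }
        }
    }
```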

Below is sample code that sorts the grades in an HBase table with MapReduce. The original snippet breaks off in the middle of `main`; the driver below is a reconstruction using the standard `TableMapReduceUtil` wiring, and the table names `scores` and `sorted_scores` are placeholders, so treat that part as a sketch.

```
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;

public class HBaseSort {

    public static class HBaseMapper extends TableMapper<Text, IntWritable> {
        public static final byte[] CF = "cf".getBytes();
        public static final byte[] ATTR1 = "grade".getBytes();
        private final IntWritable grade = new IntWritable();
        private final Text row = new Text();

        @Override
        public void map(ImmutableBytesWritable rowKeyBytes, Result value, Context context)
                throws IOException, InterruptedException {
            // Row key
            String rowKey = new String(rowKeyBytes.get());
            row.set(rowKey);
            // Grade, stored as a 4-byte int in cf:grade
            byte[] bGrade = value.getValue(CF, ATTR1);
            int intGrade = Bytes.toInt(bGrade);
            grade.set(intGrade);
            // Emit (row key, grade)
            context.write(row, grade);
        }
    }

    public static class HBaseReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        public static final byte[] CF = "cf".getBytes();
        public static final byte[] ATTR1 = "grade".getBytes();

        @Override
        public void reduce(Text row, Iterable<IntWritable> grades, Context context)
                throws IOException, InterruptedException {
            // Write each grade back to the output HBase table
            for (IntWritable grade : grades) {
                Put put = new Put(Bytes.toBytes(row.toString()));
                put.addColumn(CF, ATTR1, Bytes.toBytes(grade.get()));
                context.write(new ImmutableBytesWritable(Bytes.toBytes(row.toString())), put);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        // Reconstructed driver: the original snippet was truncated here.
        // "scores" (source) and "sorted_scores" (sink) are placeholder table names.
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "HBaseSort");
        job.setJarByClass(HBaseSort.class);

        Scan scan = new Scan();
        scan.setCaching(500);
        scan.setCacheBlocks(false); // recommended for MapReduce scans

        TableMapReduceUtil.initTableMapperJob(
                "scores", scan, HBaseMapper.class,
                Text.class, IntWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(
                "sorted_scores", HBaseReducer.class, job);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```
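One caveat worth noting: MapReduce sorts by the map output key during the shuffle, so this job orders records by row key rather than by grade. To actually rank by grade, the mapper would emit the grade (`IntWritable`) as the key and the row key as the value instead.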
