MapReduce Programming Template

1. Class structure of the MapReduce programming template
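In outline, the template shown in the next section is structured as follows:

		ModuleMapReduce (extends Configured, implements Tool)
		|-- ModuleMapper  extends Mapper<LongWritable, Text, Text, IntWritable>   (setup / map / cleanup)
		|-- ModuleReducer extends Reducer<Text, IntWritable, Text, IntWritable>   (setup / reduce / cleanup)
		|-- run(String[] args)   the Driver: builds, configures and submits the Job
		`-- main(String[] args)  entry point, invokes run() through ToolRunner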

2. MapReduce template code skeleton
		package com.hadoop.senior.mapreduce;
		
		import java.io.IOException;
		
		import org.apache.hadoop.conf.Configuration;
		import org.apache.hadoop.conf.Configured;
		import org.apache.hadoop.fs.Path;
		import org.apache.hadoop.io.IntWritable;
		import org.apache.hadoop.io.LongWritable;
		import org.apache.hadoop.io.Text;
		import org.apache.hadoop.mapreduce.Job;
		import org.apache.hadoop.mapreduce.Mapper;
		import org.apache.hadoop.mapreduce.Partitioner;
		import org.apache.hadoop.mapreduce.Reducer;
		import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
		import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
		import org.apache.hadoop.util.Tool;
		import org.apache.hadoop.util.ToolRunner;
		
		/**
		 * MapReduce programming template
		 * 
		 * @author 
		 * 
		 */
		public class ModuleMapReduce extends Configured implements Tool {
		
			// step 1: Map Class
			/**
			 * 
			 * public class Mapper<KEYIN, VALUEIN, KEYOUT, VALUEOUT>
			 */
			// TODO
			public static class ModuleMapper extends
					Mapper<LongWritable, Text, Text, IntWritable> {
		
				@Override
				protected void setup(Context context) throws IOException,
						InterruptedException {
					// Nothing
				}
		
				@Override
				public void map(LongWritable key, Text value, Context context)
						throws IOException, InterruptedException {
					// TODO
				}
		
				@Override
				protected void cleanup(Context context) throws IOException,
						InterruptedException {
					// Nothing
				}
		
			}
		
			// step 2: Reduce Class
			/**
			 * 
			 * public class Reducer<KEYIN,VALUEIN,KEYOUT,VALUEOUT>
			 */
			// TODO
			public static class ModuleReducer extends
					Reducer<Text, IntWritable, Text, IntWritable> {
		
				@Override
				protected void setup(Context context) throws IOException,
						InterruptedException {
					// Nothing
				}
		
				@Override
				public void reduce(Text key, Iterable<IntWritable> values,
						Context context) throws IOException, InterruptedException {
					// TODO
				}
		
				@Override
				protected void cleanup(Context context) throws IOException,
						InterruptedException {
					// Nothing
				}
		
			}
		
			// step 3: Driver, assembles and submits the job
			public int run(String[] args) throws Exception {
				// 1: get configuration
				Configuration configuration = getConf();
		
				// 2: create Job
				Job job = Job.getInstance(configuration, //
						this.getClass().getSimpleName());
				// run jar
				job.setJarByClass(this.getClass());
		
				// 3: set job
				// input -> map -> reduce -> output
				// 3.1: input
				Path inPath = new Path(args[0]);
				FileInputFormat.addInputPath(job, inPath);
		
				// 3.2: map
				job.setMapperClass(ModuleMapper.class);
				// TODO
				job.setMapOutputKeyClass(Text.class);
				job.setMapOutputValueClass(IntWritable.class);
				
				//****************************Shuffle*********************************
				// 1) partitioner (a custom Partitioner sketch follows the class below)
		//		job.setPartitionerClass(cls);
				// 2) sort
		//		job.setSortComparatorClass(cls);
				// 3) combiner (optional)
		//		job.setCombinerClass(cls);
				// 4) group
		//		job.setGroupingComparatorClass(cls);
				
		//****************************Shuffle*********************************
		
				// 3.3: reduce
				job.setReducerClass(ModuleReducer.class);
				// TODO
				job.setOutputKeyClass(Text.class);
				job.setOutputValueClass(IntWritable.class);
				
				// set the number of reduce tasks (optional; the default is 1)
		//		job.setNumReduceTasks(2);

				// 3.4: output
				Path outPath = new Path(args[1]);
				FileOutputFormat.setOutputPath(job, outPath);
		
				// 4: submit job
				boolean isSuccess = job.waitForCompletion(true);
		
				return isSuccess ? 0 : 1;
			}
			
			// step 4: run program
			public static void main(String[] args) throws Exception {
				// 1: get configuration
				Configuration configuration = new Configuration();
				
				// optionally compress intermediate map output
		//		configuration.set("mapreduce.map.output.compress", "true");
		//		configuration.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
		
				// int status = new WordCountMapReduce().run(args);
				int status = ToolRunner.run(configuration,//
						new ModuleMapReduce(),//
						args);
		
				System.exit(status);
			}
		
		}
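
As an illustration of how the TODO sections are usually filled in, below is a WordCount-style completion of ModuleMapper.map() and ModuleReducer.reduce() (a minimal sketch; the field names and the java.util.StringTokenizer tokenization are illustrative choices, not part of the template):

		// inside ModuleMapper (also add: import java.util.StringTokenizer;)
		private Text mapOutputKey = new Text();
		private final static IntWritable mapOutputValue = new IntWritable(1);

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {
			// split each input line into words and emit (word, 1)
			StringTokenizer tokenizer = new StringTokenizer(value.toString());
			while (tokenizer.hasMoreTokens()) {
				mapOutputKey.set(tokenizer.nextToken());
				context.write(mapOutputKey, mapOutputValue);
			}
		}

		// inside ModuleReducer
		private IntWritable outputValue = new IntWritable();

		@Override
		public void reduce(Text key, Iterable<IntWritable> values,
				Context context) throws IOException, InterruptedException {
			// sum the counts collected for each word
			int sum = 0;
			for (IntWritable value : values) {
				sum += value.get();
			}
			outputValue.set(sum);
			context.write(key, outputValue);
		}

The job is then packaged into a jar and started with the two path arguments that run() expects; the jar name and HDFS paths below are examples, and the output path must not already exist:

		hadoop jar module-mapreduce.jar com.hadoop.senior.mapreduce.ModuleMapReduce /user/hadoop/input /user/hadoop/output

For the shuffle settings that are commented out in run(), the partitioner is the hook most often customized. A minimal sketch (the class name and the first-character routing rule are illustrative only); it can be nested in ModuleMapReduce alongside the mapper and reducer:

		public static class FirstCharPartitioner extends
				Partitioner<Text, IntWritable> {

			@Override
			public int getPartition(Text key, IntWritable value, int numPartitions) {
				// send empty keys to partition 0, otherwise route by the first character of the key
				if (key.getLength() == 0) {
					return 0;
				}
				return (key.charAt(0) & Integer.MAX_VALUE) % numPartitions;
			}
		}

It is wired in at the spot the template already reserves, and it only has a visible effect when more than one reduce task is configured:

		job.setPartitionerClass(FirstCharPartitioner.class);
		job.setNumReduceTasks(2);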


