Finding the Maximum Value and the Top K Values

Source: 吴超 (Wu Chao)'s 7-day video course

1. Finding the top K values

Each mapper keeps an ascending array of the K largest values seen in its own input split and emits them in cleanup(); a single reducer then merges all mappers' candidates into the global top K.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class TopKnum {

	static final String INPUT_PATH = "hdfs://hadoop0:9000/input";
	static final String OUT_PATH = "hdfs://hadoop0:9000/out";
	static final int K = 100;	// how many of the largest values to keep
	
	
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		final FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), conf);
		final Path outPath = new Path(OUT_PATH);
		if(fileSystem.exists(outPath)){
			fileSystem.delete(outPath, true);
		}
		
		final Job job = new Job(conf , TopKnum.class.getSimpleName());
		FileInputFormat.setInputPaths(job, INPUT_PATH);
		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(LongWritable.class);
		job.setMapOutputValueClass(LongWritable.class);
		job.setReducerClass(MyReducer.class);
		// a single reduce task is required so the global top K is merged in one place
		job.setNumReduceTasks(1);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(NullWritable.class);
		FileOutputFormat.setOutputPath(job, outPath);
		job.waitForCompletion(true);
	}
	
	/**
	 * Each mapper keeps an ascending array of the K largest values it has
	 * seen in its own input split and emits them once in cleanup().
	 */
	static class MyMapper extends Mapper<LongWritable, Text, LongWritable, LongWritable>{
		private Long[] top = new Long[K];
		{
			for(int j=0;j<K;j++)
				top[j]=0L;
		}
		@Override
		protected void map(LongWritable k1, Text v1, Context context)
				throws IOException, InterruptedException {
			Long temp=Long.parseLong(v1.toString());
			// top[0] holds the smallest of the current candidates; only larger values qualify
			if(temp>top[0]){
				int i=0;
				// shift smaller candidates down until temp's sorted position is found
				for(;i<K-1&&temp>top[i+1];i++){
					top[i]=top[i+1];
				}
				top[i]=temp;
			}
		}

		@Override
		protected void cleanup(Context context)
				throws IOException, InterruptedException {
			// emit this mapper's local top K candidates (key and value are both the number)
			for(int i=0;i<K;i++){
				context.write(new LongWritable(top[i]),new LongWritable(top[i]));
			}
		}
	}
	
	/**
	 * The single reducer merges the candidates emitted by all mappers,
	 * maintaining the global top K, and writes it out in cleanup().
	 */
	static class MyReducer extends Reducer<LongWritable, LongWritable, LongWritable, NullWritable>{
		private Long[] top = new Long[K];
		{
			for(int j=0;j<K;j++)
				top[j]=0L;
		}
		@Override
		protected void reduce(LongWritable k2, Iterable<LongWritable> v2s, Context context)
				throws IOException, InterruptedException {
			for (LongWritable v2 : v2s) {
				Long temp=v2.get();
				// same ascending insertion as in the mapper
				if(temp>top[0]){
					int i=0;
					for(;i<K-1&&temp>top[i+1];i++){
						top[i]=top[i+1];
					}
					top[i]=temp;
				}
			}
		}

		@Override
		protected void cleanup(Context context)
				throws IOException, InterruptedException {
			// write the global top K, smallest first
			for(int i=0;i<K;i++){
				context.write(new LongWritable(top[i]), NullWritable.get());
			}
		}
	}
}
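
The heart of both the mapper and the reducer is the same in-place insertion into an ascending array of length K: top[0] always holds the smallest current candidate, and a new value that beats it is shifted into its sorted position. A minimal standalone sketch of just that logic, with no Hadoop dependencies (K = 5 and the sample numbers are invented for illustration):

public class TopKInsertDemo {
	public static void main(String[] args) {
		final int K = 5;                  // small K for illustration only
		long[] top = new long[K];         // ascending; top[0] is the current smallest candidate
		long[] data = {7, 3, 42, 19, 8, 100, 1, 55, 23, 61};

		for (long temp : data) {
			if (temp > top[0]) {          // only values beating the current minimum qualify
				int i = 0;
				// shift smaller candidates down until temp's sorted position is found
				for (; i < K - 1 && temp > top[i + 1]; i++) {
					top[i] = top[i + 1];
				}
				top[i] = temp;            // insert temp at its position
			}
		}
		for (long v : top) {
			System.out.print(v + " ");    // prints: 23 42 55 61 100
		}
	}
}

Running it prints 23 42 55 61 100, the five largest sample values in ascending order, which is also the order in which the reducer's cleanup() writes the final result. Note that, like the original job, this only works for non-negative data, because the array starts filled with zeros.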


2. Finding the maximum value

Each mapper tracks the largest value in its own split and emits it in cleanup(); the reducer then takes the maximum over all the mappers' local maxima.

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * Assignment: find the maximum value in the input
 */
public class TopKApp {
	static final String INPUT_PATH = "hdfs://hadoop0:9000/input";
	static final String OUT_PATH = "hdfs://hadoop0:9000/out";
	
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		final FileSystem fileSystem = FileSystem.get(new URI(INPUT_PATH), conf);
		final Path outPath = new Path(OUT_PATH);
		if(fileSystem.exists(outPath)){
			fileSystem.delete(outPath, true);
		}
		
		final Job job = new Job(conf , TopKApp.class.getSimpleName());
		FileInputFormat.setInputPaths(job, INPUT_PATH);
		job.setMapperClass(MyMapper.class);
		job.setReducerClass(MyReducer.class);
		// a single reduce task ensures only one global maximum is written
		job.setNumReduceTasks(1);
		job.setOutputKeyClass(LongWritable.class);
		job.setOutputValueClass(NullWritable.class);
		FileOutputFormat.setOutputPath(job, outPath);
		job.waitForCompletion(true);
	}
	static class MyMapper extends Mapper<LongWritable, Text, LongWritable, NullWritable>{
		long max = Long.MIN_VALUE;

		@Override
		protected void map(LongWritable k1, Text v1, Context context) throws IOException, InterruptedException {
			// track the largest value seen in this mapper's input split
			final long temp = Long.parseLong(v1.toString());
			if(temp>max){
				max = temp;
			}
		}

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// emit this mapper's local maximum once, after all records are processed
			context.write(new LongWritable(max), NullWritable.get());
		}
	}
	
	static class MyReducer extends Reducer<LongWritable, NullWritable, LongWritable, NullWritable>{
		long max = Long.MIN_VALUE;

		@Override
		protected void reduce(LongWritable k2, Iterable<NullWritable> v2s, Context context) throws IOException, InterruptedException {
			// keep the largest key among the mappers' local maxima
			final long temp = k2.get();
			if(temp>max){
				max = temp;
			}
		}

		@Override
		protected void cleanup(Context context) throws IOException, InterruptedException {
			// write the single global maximum
			context.write(new LongWritable(max), NullWritable.get());
		}
	}
}
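
Both jobs read plain-text input with one integer per line, because each map() call parses the whole line with Long.parseLong(v1.toString()). A small helper for producing such a test file locally is sketched below; the file name numbers.txt and the value range are arbitrary choices for illustration, not part of the original assignment. The file can then be uploaded with hadoop fs -put numbers.txt /input before running either job.

import java.io.PrintWriter;
import java.util.Random;

// Hypothetical helper: writes 10,000 random non-negative integers,
// one per line, in the format both mappers expect.
public class GenNumbers {
	public static void main(String[] args) throws Exception {
		Random rnd = new Random();
		try (PrintWriter out = new PrintWriter("numbers.txt")) {
			for (int i = 0; i < 10000; i++) {
				out.println(rnd.nextInt(1000000));
			}
		}
	}
}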


