人大云计算检测系统——1003题

第1003题:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


public class MyMapre {
	
	/**
	 * @param args
	 * @throws IOException 
	 * @throws InterruptedException 
	 * @throws ClassNotFoundException 
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		// TODO Auto-generated method stub
		Configuration conf = new Configuration();
		Job job = new Job(conf,"qw 1003");
		job.setJarByClass(MyMapre.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		
		FileInputFormat.setInputPaths(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		System.exit(job.waitForCompletion(true) ? 0:1);
	}
	
	public static class Map extends Mapper<Object, Text, Text, Text> {
		
		public void map(Object key,Text value,Context context)
				throws IOException,InterruptedException{  
			context.write(value, new Text(""));
		}
	}
	
	public static class Reduce extends Reducer<Text , Text, Text, Text> {
		
		public void reduce(Text key,Iterable<Text> values,Context context)
				throws IOException,InterruptedException{  
			context.write(key, new Text(""));
		}
	}
}


第1004题:(原题需要输出字典序,还没有实现,请教高手如何实现)

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


public class MyMapre {
	
	public static int time = 0;
	/**
	 * @param args
	 * @throws IOException 
	 * @throws InterruptedException 
	 * @throws ClassNotFoundException 
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		// TODO Auto-generated method stub
		Configuration conf = new Configuration();
		Job job = new Job(conf,"qw 1004");
		job.setJarByClass(MyMapre.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		
		FileInputFormat.setInputPaths(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		System.exit(job.waitForCompletion(true) ? 0:1);
	}
	
	public static class Map extends Mapper<Object, Text, Text, Text> {
		
		public void map(Object key,Text value,Context context)
				throws IOException,InterruptedException{  
			String childname = new String();
			String parentname = new String();
			String relationtype = new String();
			String line = value.toString();
			int i = 0;
			while(line.charAt(i)!=' ') {
				i++;
			}
			String[] values = {line.substring(0, i),line.substring(i+1)};
			if(values[0].compareTo("child")!=0) {
				childname = values[0];
				parentname = values[1];
				relationtype = "1";
				context.write(new Text(parentname), new Text(relationtype+"+"+childname+"+"+parentname));
				relationtype="2";
				context.write(new Text(childname), new Text(relationtype+"+"+childname+"+"+parentname));
			}
		}
	}
	
	public static class Reduce extends Reducer<Text , Text, Text, Text> {
		
		public void reduce(Text key,Iterable<Text> values,Context context)
				throws IOException,InterruptedException{  
			if(time==0) {
				context.write(new Text("grandchild"), new Text("grandparent"));
				time++;
			}
			int grandchildnum = 0;
			String[] grandchild = new String[10];
			int grandparentnum = 0;
			String[] grandparent = new String[10]; 
			Iterator iterator = values.iterator();
			while(iterator.hasNext()) {
				String record = iterator.next().toString();
				int len = record.length();
				int i = 2;
				if(len==0) {
					continue;
				}
				char relationType = record.charAt(0);
				String childname = new String();
				String parentname = new String();
				while(record.charAt(i)!='+') {
					childname = childname + record.charAt(i++);
				}
				i=i+1;
				while(i<len) {
					parentname = parentname +record.charAt(i++);
				}
				if(relationType=='1') {
					grandchild[grandchildnum++] = childname;
				}else {
					grandparent[grandparentnum++] = parentname;
				}
			}
			if(grandchildnum!=0 && grandparentnum!=0) {
				for(int i=0;i<grandchildnum;i++) {
					for(int j=0;j<grandparentnum;j++) {
						context.write(new Text(grandchild[i]), new Text(grandparent[j]));
					}
				}
			}
		}
	}
}



1005题:(原题需要输出字典序,还没有实现,请教高手如何实现)

import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;


public class MyMapre {
	
	public static int time = 0;
	/**
	 * @param args
	 * @throws IOException 
	 * @throws InterruptedException 
	 * @throws ClassNotFoundException 
	 */
	public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
		// TODO Auto-generated method stub
		Configuration conf = new Configuration();
		Job job = new Job(conf,"qw 1005");
		job.setJarByClass(MyMapre.class);
		job.setMapperClass(Map.class);
		job.setReducerClass(Reduce.class);
		
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(Text.class);
		
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(Text.class);
		
		FileInputFormat.setInputPaths(job, new Path(args[0]));
		FileOutputFormat.setOutputPath(job, new Path(args[1]));
		System.exit(job.waitForCompletion(true) ? 0:1);
	}
	
	public static class Map extends Mapper<Object, Text, Text, Text> {
		
		public void map(Object key,Text value,Context context)
				throws IOException,InterruptedException{  
			String line = value.toString();
			if(line.contains("factoryname")==true || line.contains("addressID")==true) {
				return;
			}
			int i = 0;
			while(line.charAt(i) >= '9' || line.charAt(i) <= '0') {
				i++;
			}
			if(line.charAt(0) >= '9' || line.charAt(0) <= '0') {
				int j= i-1;
				while(line.charAt(j) == ' ') {
					j--;
				}
				String[] values = {line.substring(0,j),line.substring(i) };
				context.write(new Text(values[1]), new Text("1+"+values[0]));
			}
			else {
				int j = i+1;
				while(line.charAt(j) == ' ') {
					j++;
				}
				String[] values = {line.substring(0,i+1),line.substring(j)};
				context.write(new Text(values[0]), new Text("2+"+values[1]));
			}
		}
	}
	
	public static class Reduce extends Reducer<Text , Text, Text, Text> {
		
		public void reduce(Text key,Iterable<Text> values,Context context)
				throws IOException,InterruptedException{  
			if(time == 0) {
				context.write(new Text("factoryname"), new Text("addressname"));
				time++;
			}
			int factorynum = 0;
			String[] factory = new String[10];
			int addressnum = 0;
			String[] address = new String[10];
			
			Iterator iterator = values.iterator();
			while(iterator.hasNext()) {
				String line = iterator.next().toString();
				char relationType = line.charAt(0);
				if(relationType == '1') {
					factory[factorynum++] = line.substring(2);
				}else {
					address[addressnum++] = line.substring(2);
				}
			}
			if(factorynum!=0 && addressnum!=0) {
				for(int i=0;i<factorynum;i++) {
					for(int j=0;j<addressnum;j++) {
						context.write(new Text(factory[i]), new Text(address[j]));
					}
				}
			}
		}
	}
}



  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 1
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值