Hadoop example: packaging a job jar with EJob and computing per-person average scores with MapReduce


// EJob.java: utility that packages the compiled classes into a temporary jar
// so a MapReduce job can be submitted to a remote cluster from the IDE.
package com.li72.hadoop;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.net.URL;
 import java.net.URLClassLoader;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.jar.JarEntry;
 import java.util.jar.JarOutputStream;
 import java.util.jar.Manifest;
 
 public class EJob {
 
     // Extra classpath entries registered via addClasspath()
     private static List<URL> classPath = new ArrayList<URL>();
 
     // Package the compiled classes under 'root' into a temporary jar and return it.
     public static File createTempJar(String root) throws IOException {
         if (!new File(root).exists()) {
             return null;
         }
         Manifest manifest = new Manifest();
         manifest.getMainAttributes().putValue("Manifest-Version", "1.0");
         final File jarFile = File.createTempFile("EJob-", ".jar", new File(
                 System.getProperty("java.io.tmpdir")));
 
         // Delete the temporary jar when the JVM exits.
         Runtime.getRuntime().addShutdownHook(new Thread() {
             public void run() {
                 jarFile.delete();
             }
         });
 
         JarOutputStream out = new JarOutputStream(
                 new FileOutputStream(jarFile), manifest);
         createTempJarInner(out, new File(root), "");
         out.flush();
         out.close();
         return jarFile;
     }
 
     private static void createTempJarInner(JarOutputStream out, File f,
             String base) throws IOException {
         if (f.isDirectory()) {
             File[] fl = f.listFiles();
             if (base.length() > 0) {
                 base = base + "/";
             }
             for (int i = 0; i < fl.length; i++) {
                 createTempJarInner(out, fl[i], base + fl[i].getName());
             }
         } else {
             out.putNextEntry(new JarEntry(base));
             FileInputStream in = new FileInputStream(f);
             byte[] buffer = new byte[1024];
             int n = in.read(buffer);
             while (n != -1) {
                 out.write(buffer, 0, n);
                 n = in.read(buffer);
             }
             in.close();
         }
     }
 
     public static ClassLoader getClassLoader() {
         ClassLoader parent = Thread.currentThread().getContextClassLoader();
         if (parent == null) {
             parent = EJob.class.getClassLoader();
         }
         if (parent == null) {
             parent = ClassLoader.getSystemClassLoader();
         }
         return new URLClassLoader(classPath.toArray(new URL[0]), parent);
     }
 
     public static void addClasspath(String component) {
 
         if ((component != null) && (component.length() > 0)) {
             try {
                 File f = new File(component);
 
                 if (f.exists()) {
                     URL key = f.getCanonicalFile().toURI().toURL();
                     if (!classPath.contains(key)) {
                         classPath.add(key);
                     }
                 }
             } catch (IOException e) {
                 // Ignore entries whose canonical path cannot be resolved.
             }
         }
     }
 
 }
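
On its own, EJob is meant to be called from the job driver before submission, so that a project compiled in the IDE can be shipped to a remote cluster as a jar. The fragment below is a minimal sketch of that wiring, mirroring the PersonHadoop driver further down; the "bin" output directory and the Hadoop conf path are environment-specific assumptions copied from that driver.

    // Sketch (assumed paths): package the compiled classes into a temporary jar
    // and register it with the job about to be submitted.
    File jarFile = EJob.createTempJar("bin");             // "bin" = compiled-classes directory
    EJob.addClasspath("/home/bigdata/hadoop/conf");       // cluster configuration directory
    Thread.currentThread().setContextClassLoader(EJob.getClassLoader());
    ((JobConf) job.getConfiguration()).setJar(jarFile.toString());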



// PersonHadoop.java: driver, mapper, and reducer for the average-score job.
package com.li72.hadoop;



import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import java.util.StringTokenizer;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;


/**
 * MapReduce driver that computes the average score for each person.
 *
 * @author root
 */
public class PersonHadoop implements Tool {

    Configuration conf = new Configuration();




    public static class PersonMap extends
            Mapper<LongWritable, Text, Text, IntWritable> {
        // Each input line holds "name score" pairs separated by '#'.
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String line = value.toString();
            StringTokenizer tokenizer = new StringTokenizer(line, "#");
            while (tokenizer.hasMoreTokens()) {
                StringTokenizer tokenizerline = new StringTokenizer(
                        tokenizer.nextToken());
                while (tokenizerline.hasMoreTokens()) {
                    String strName = tokenizerline.nextToken();
                    if (tokenizerline.hasMoreTokens()) {
                        String strScore = tokenizerline.nextToken();
                        Text name = new Text(strName);
                        int scoreInt = Integer.parseInt(strScore);
                        context.write(name, new IntWritable(scoreInt));
                    }
                }
            }
        }
    }

    public static class PersonReduce extends
            Reducer<Text, IntWritable, Text, IntWritable> {
        public void reduce(Text key, Iterable<IntWritable> values,
                Context context) throws IOException, InterruptedException {
            int sum = 0;
            int count = 0;
            Iterator<IntWritable> iterator = values.iterator();
            while (iterator.hasNext()) {
                sum += iterator.next().get();
                count++;
            }
            // Integer average of all scores seen for this person.
            int average = sum / count;
            context.write(key, new IntWritable(average));
        }
    }



    public int run(String[] args) throws IOException, InterruptedException,
            ClassNotFoundException {

        String inputPath = "hdfs://192.168.119.128:9000/home/root/personin";
        String outputPath = "hdfs://192.168.119.128:9000/home/root/personout";

        // Package the compiled classes into a temporary jar so the job can be
        // submitted to the remote cluster from the IDE.
        File jarFile = EJob.createTempJar("bin");
        EJob.addClasspath("/home/bigdata/hadoop/conf");
        ClassLoader classLoader = EJob.getClassLoader();
        Thread.currentThread().setContextClassLoader(classLoader);

        conf.set("mapred.job.tracker", "master:9001");
        Job job = new Job(getConf());
        // DistributedCache.addCacheFile(new Path(args[0]).toUri(), conf);
        ((JobConf) job.getConfiguration()).setJar(jarFile.toString());
        job.setJarByClass(PersonHadoop.class);
        job.setJobName("Person");
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        job.setMapperClass(PersonMap.class);
        // No combiner: averaging is not associative, so reusing the reducer as a
        // combiner would distort the result.
        job.setReducerClass(PersonReduce.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);

        // Set the input and output directories.
        FileInputFormat.setInputPaths(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));

        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        int ret = ToolRunner.run(new PersonHadoop(), args);
        System.exit(ret);
    }


    @Override
    public void setConf(Configuration conf) {
        this.conf = conf;
    }

    @Override
    public Configuration getConf() {
        return conf;
    }


}
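
To make the data flow concrete, here is a hypothetical input file and the output the job would produce for it; the names and scores are invented. The mapper expects each line to contain "name score" pairs separated by '#', and the reducer writes the integer average per name (TextOutputFormat separates key and value with a tab).

    personin (assumed input):
        tom 80#jerry 90
        tom 90#jerry 70

    personout/part-r-00000 (resulting output):
        jerry	80
        tom	85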