Problems Encountered Running Hadoop on Windows

http://blog.csdn.net/xubo245/article/details/50587660

Connecting to the cluster from Windows to run Hadoop: java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows.


Environment:

Windows 7, 64-bit

Cluster: Hadoop 2.6.0 on Ubuntu



The error java.lang.UnsatisfiedLinkError: org.apache.hadoop.io.nativeio.NativeIO$Windows can be resolved by following http://blog.csdn.net/congcong68/article/details/42043093.



The main step is modifying the NativeIO file.


Create a new package named org.apache.hadoop.io.nativeio in your project.

Copy NativeIO.java from the Hadoop source into that package. Because the copy in your project sits ahead of the Hadoop jar on the classpath, the patched class is the one that gets loaded at runtime.

Modify line 557 so that the Windows access check is bypassed:

      return true;
//    return access0(path, desiredAccess.accessRight());
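For reference, after the edit the access method in the Windows inner class of the copied NativeIO.java should look roughly like the sketch below (based on the Hadoop 2.6.0 source; the exact line number may differ between releases, so verify against your own copy):

  public static boolean access(String path, AccessRight desiredAccess)
      throws IOException {
    // Bypass the native Windows permission check so the job can run from a
    // Windows client without hadoop.dll/winutils installed. This is the
    // assumed shape of the patch; verify against your copied NativeIO.java.
    return true;
    // return access0(path, desiredAccess.accessRight());
  }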

Then run WordCount:


import java.io.IOException;  
import java.util.StringTokenizer;  
  
import org.apache.hadoop.conf.Configuration;  
import org.apache.hadoop.fs.Path;  
import org.apache.hadoop.io.IntWritable;  
import org.apache.hadoop.io.Text;  
import org.apache.hadoop.mapreduce.Job;  
import org.apache.hadoop.mapreduce.Mapper;  
import org.apache.hadoop.mapreduce.Reducer;  
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;  
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;  
  
public class WordCount {  
  
  public static class TokenizerMapper  
       extends Mapper<Object, Text, Text, IntWritable>{  
  
    private final static IntWritable one = new IntWritable(1);  
    private Text word = new Text();  
  
    public void map(Object key, Text value, Context context  
                    ) throws IOException, InterruptedException {  
      StringTokenizer itr = new StringTokenizer(value.toString());  
      while (itr.hasMoreTokens()) {  
        word.set(itr.nextToken());  
        context.write(word, one);  
      }  
    }  
  }  
  
  public static class IntSumReducer  
       extends Reducer<Text,IntWritable,Text,IntWritable> {  
    private IntWritable result = new IntWritable();  
  
    public void reduce(Text key, Iterable<IntWritable> values,  
                       Context context  
                       ) throws IOException, InterruptedException {  
      int sum = 0;  
      for (IntWritable val : values) {  
        sum += val.get();  
      }  
      result.set(sum);  
      context.write(key, result);  
    }  
  }  
  
  public static void main(String[] args) throws Exception {  
    Configuration conf = new Configuration();  
    Job job = Job.getInstance(conf, "word count");  
    job.setJarByClass(WordCount.class);  
    job.setMapperClass(TokenizerMapper.class);  
    job.setCombinerClass(IntSumReducer.class);  
    job.setReducerClass(IntSumReducer.class);  
    job.setOutputKeyClass(Text.class);  
    job.setOutputValueClass(IntWritable.class);  
    FileInputFormat.addInputPath(job, new Path(args[0]));  
    FileOutputFormat.setOutputPath(job, new Path(args[1]));  
    System.exit(job.waitForCompletion(true) ? 0 : 1);  
  }  
} 

The program arguments are:

hdfs://219.219.220.149:9000/input
hdfs://219.219.220.149:9000/output0126
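Before launching the job, /input must already exist on HDFS and contain the text to count, and /output0126 must not exist yet (FileOutputFormat fails if the output directory is already there). A minimal sketch for checking this from the Windows side, assuming the cluster address taken from the arguments above (the class name CheckJobPaths is only for illustration):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CheckJobPaths {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Connect to the remote NameNode used in the job arguments.
    FileSystem fs = FileSystem.get(new URI("hdfs://219.219.220.149:9000"), conf);
    System.out.println("/input exists:      " + fs.exists(new Path("/input")));
    System.out.println("/output0126 exists: " + fs.exists(new Path("/output0126")));
  }
}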

Source file: (screenshot omitted)
Result file: (screenshot omitted)
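The result can also be printed directly from the Windows client instead of viewing it on the cluster. A minimal sketch, assuming the conventional part-r-00000 output file of a single-reducer job and the same NameNode address as above (the class name PrintWordCountOutput is only for illustration):

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class PrintWordCountOutput {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(new URI("hdfs://219.219.220.149:9000"), conf);
    // part-r-00000 is the usual name of the single reducer's output file.
    Path result = new Path("/output0126/part-r-00000");
    try (FSDataInputStream in = fs.open(result)) {
      // Copy the file contents to stdout; 4096 is the buffer size and
      // false means System.out is left open afterwards.
      IOUtils.copyBytes(in, System.out, 4096, false);
    }
  }
}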

