Eclipse 安装 Hadoop 插件
1、首先下载对应版本的hadoop插件
2、进入Eclipse的目录,找到Plugins文件夹,将刚才下载的插件拷贝进去(注意,Eclipse最好要处于关闭状态)
3、打开,Eclipse,会发现左侧多了一个DFS Location,说明已经安装成功
4、打开Windows–Show View
5、选择Other
6、选择Map/Reduce Location
7、这时,下面应该会出现 Map/Reduce Locations 视图
8、空白部分,右键,New Hadoop Location,然后双击,编辑
如果出现连接错误,
请检查 hdfs 里面是否有input和output目录
新建hdfs input目录:
hdfs dfs -mkdir /user
hdfs dfs -mkdir /user/root
hdfs dfs -mkdir /user/root/input
9、安装到这里,还缺少一个小插件
到网上搜索下载 hadoop-common-2.2.0-bin-master.jar
1、解压jar包
2、将jar包里面的 hadoop.dll 拷贝到系统盘目录 Windows/System32 下
3、将bin目录配置到环境变量中
###10、这些配置完成之后,就可以新建一个项目,然后导入jar包开始写mapreduce程序了。但是要注意的是,这个时候还需要加载一个配置文件:可以拷贝hadoop的配置文件 core-site.xml 到项目中(可以在main下面新建一个resource文件夹,放入即可)。
##附WordCount:
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * Classic Hadoop MapReduce word-count job.
 *
 * <p>Reads text files from the input path, tokenizes each line on whitespace,
 * and writes one (word, total-count) pair per distinct word to the output path.
 * Input/output paths default to {@code /input} and {@code /output} on the
 * configured filesystem, but can be overridden with the first two command-line
 * arguments.
 */
public class TestWordCount {

    /** Mapper: splits each input line into whitespace-delimited tokens and emits (word, 1). */
    public static class WcMap extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Reused across map() calls to avoid per-record allocation, per Hadoop convention.
        private static final IntWritable ONE = new IntWritable(1);
        private final Text word = new Text();

        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            StringTokenizer tokens = new StringTokenizer(value.toString());
            while (tokens.hasMoreTokens()) {
                word.set(tokens.nextToken());
                context.write(word, ONE);
            }
        }
    }

    /** Reducer: sums all counts emitted for a word and writes the total. */
    public static class WcReduce extends Reducer<Text, IntWritable, Text, IntWritable> {
        private final IntWritable total = new IntWritable();

        @Override
        protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable count : values) {
                sum += count.get();
            }
            total.set(sum);
            context.write(key, total);
        }
    }

    /**
     * Configures and submits the job, then exits with 0 on success and 1 on failure.
     *
     * @param args optional: {@code args[0]} = input path (default {@code /input}),
     *             {@code args[1]} = output path (default {@code /output})
     */
    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException {
        // Hard-coded paths generalized to CLI overrides; defaults preserve old behavior.
        String inputPath = args.length > 0 ? args[0] : "/input";
        String outputPath = args.length > 1 ? args[1] : "/output";

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(TestWordCount.class);
        job.setMapperClass(WcMap.class);
        // The reducer doubles as a combiner: integer addition is associative and
        // commutative, so pre-aggregating on the map side cannot change results.
        job.setCombinerClass(WcReduce.class);
        job.setReducerClass(WcReduce.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        FileInputFormat.addInputPath(job, new Path(inputPath));
        FileOutputFormat.setOutputPath(job, new Path(outputPath));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}