Hadoop study notes 3: Hadoop 2.5.2 single-node setup and running from Eclipse

core-site.xml

<property>
    <name>hadoop.tmp.dir</name>
    <value>/home/mouap/hadoop/tmp</value>
</property>
<property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
</property>
<property>
    <name>io.file.buffer.size</name>
    <value>4096</value>
</property>
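
Note that in the actual files, each group of <property> elements must sit inside the root <configuration> element, which is omitted from the snippets in this note. A minimal skeleton:

<?xml version="1.0"?>
<configuration>
    <property>
        <name>...</name>
        <value>...</value>
    </property>
</configuration>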



hdfs-site.xml

<property>
    <name>dfs.nameservices</name>
    <value>hadoop-cluster1</value>
    <description>Cluster alias; multiple names are comma-separated</description>
</property>
<property>
    <name>dfs.namenode.name.dir</name>
    <value>file:///home/mouap/hadoop/dfs/name</value>
</property>
<property>
    <name>dfs.datanode.data.dir</name>
    <value>file:///home/mouap/hadoop/dfs/data</value>
</property>
<property>
    <name>dfs.replication</name>
    <value>1</value>
</property>
<property>
    <name>dfs.namenode.secondary.http-address</name>
    <value>localhost:50090</value>
</property>
<property>
    <name>dfs.webhdfs.enabled</name>
    <value>true</value>
</property>


mapred-site.xml — Hadoop 2.5 does not ship this file. Following the common advice, copy the bundled template, dropping the .template suffix, and then fill in the configuration below.
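
Assuming the default etc/hadoop layout of the 2.5.2 tarball, that copy is:

cp etc/hadoop/mapred-site.xml.template etc/hadoop/mapred-site.xml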

<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
    <description>Run MapReduce on YARN</description>
</property>
<property>
    <name>mapreduce.jobtracker.http.address</name>
    <value>localhost:50030</value>
</property>
<property>
    <name>mapreduce.jobhistory.address</name>
    <value>localhost:10020</value>
</property>
<property>
    <name>mapreduce.jobhistory.webapp.address</name>
    <value>localhost:19888</value>
</property>
 
yarn-site.xml

<property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
</property>
<property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
    <description>Run MapReduce on YARN</description>
</property>

(The mapreduce.framework.name entry duplicates the one already set in mapred-site.xml, so it is redundant here and could be dropped.)


1 Those are the configuration files

2 Then delete the contents of the tmp and dfs directories under /home/mouap/hadoop/

3 Format the NameNode from /home/mouap/hadoop/: bin/hdfs namenode -format (formatting assigns a fresh namespace ID, which is why step 2 clears out the old tmp and dfs contents first)

4 Then put the input files into HDFS; the relevant commands:

/usr/lib/jdk/jdk1.8.0_45/bin/jps   -- check the running Java processes
bin/hdfs namenode -format   -- format the NameNode (run from the mouap/hadoop/ directory)
bin/hadoop dfs -put input /in   -- upload the input files
bin/hadoop dfsadmin -safemode leave   -- run this if the upload fails with a safe-mode error
bin/hadoop dfs -ls /in   -- list the uploaded files
bin/hadoop jar /usr/app/hadoop-eclipse-plugin/build/contrib/eclipse-plugin/lib/hadoop-mapreduce-examples-2.5.2.jar  wordcount /in out   -- run the example jar
bin/hadoop dfs -cat out/*   -- print the results
bin/hadoop fs -rmr out   -- delete the out directory
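
As a side note, the hadoop dfs form is deprecated in Hadoop 2.x; the current equivalents would be, for example:

bin/hdfs dfs -put input /in
bin/hdfs dfs -ls /in
bin/hdfs dfs -cat out/*
bin/hdfs dfs -rm -r out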


5 Then start the daemons from /home/mouap/hadoop/sbin:

./start-dfs.sh  ./start-yarn.sh
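
The matching stop scripts live in the same directory, for shutting everything down later:

./stop-dfs.sh  ./stop-yarn.sh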

After both scripts finish, check the processes with jps:

9778 Jps
7939 NameNode
4825 org.eclipse.equinox.launcher_1.3.0.v20140415-2008.jar
8457 NodeManager
8331 ResourceManager
8060 DataNode


Check that the web UIs load:
http://127.0.1.1:50070/  (NameNode)
http://localhost:8088/  (ResourceManager)


6 Then run the example jar:

bin/hadoop jar /usr/app/hadoop-eclipse-plugin/build/contrib/eclipse-plugin/lib/hadoop-mapreduce-examples-2.5.2.jar  wordcount /in out 


7 Then print the result:

bin/hadoop dfs -cat out/*


8 Output (five distinct words, ten tokens in total, which matches the job counters in the log below):

hadoop 2
hello 3
mouap 2
welcom 1
world 2


The code in Eclipse:

=====================================

package mouapTest;


import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;


import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.StringUtils;

public class WorldCount2_5_2 {

public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {

static enum CountersEnum { INPUT_WORDS }

private final static IntWritable one = new IntWritable(1);
private Text word = new Text();

private boolean caseSensitive;
private Set<String> patternsToSkip = new HashSet<String>();

private Configuration conf;
private BufferedReader fis;

@Override
public void setup(Context context) throws IOException,InterruptedException {
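// NOTE: the skip-pattern and case-sensitivity initialization below is commented
// out, so caseSensitive keeps its boolean default (false), map() lowercases
// every line, and patternsToSkip stays empty.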
//   conf = context.getConfiguration();
//   caseSensitive = conf.getBoolean("wordcount.case.sensitive", true);
//   if (conf.getBoolean("wordcount.skip.patterns", true)) {
//   
//     URI[] patternsURIs = Job.getInstance(conf).getCacheFiles();
//     
//     for (URI patternsURI : patternsURIs) {
//       Path patternsPath = new Path(patternsURI.getPath());
//       String patternsFileName = patternsPath.getName().toString();
//       parseSkipFile(patternsFileName);
//     }
//   }
}

private void parseSkipFile(String fileName) {
  try {
    fis = new BufferedReader(new FileReader(fileName));
    String pattern = null;
    while ((pattern = fis.readLine()) != null) {
      patternsToSkip.add(pattern);
    }
  } catch (IOException ioe) {
    System.err.println("Caught exception while parsing the cached file: " + StringUtils.stringifyException(ioe));
  }
}

@Override
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
  String line = (caseSensitive) ?
      value.toString() : value.toString().toLowerCase();
  for (String pattern : patternsToSkip) {
    line = line.replaceAll(pattern, "");
  }
  
  StringTokenizer itr = new StringTokenizer(line);
  
  while (itr.hasMoreTokens()) {
    word.set(itr.nextToken());
    context.write(word, one);
    Counter counter = context.getCounter(CountersEnum.class.getName(),
        CountersEnum.INPUT_WORDS.toString());
    counter.increment(1);
  }
}
}


public static class IntSumReducer extends Reducer<Text,IntWritable,Text,IntWritable> {
private IntWritable result = new IntWritable();

public void reduce(Text key, Iterable<IntWritable> values,Context context) throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
}

public static void main(String[] args) throws Exception {  

Configuration conf = new Configuration();
 
// System.setProperty("hadoop.home.dir", "c:/home/mouap/hadoop");
 
GenericOptionsParser optionParser = new GenericOptionsParser(conf, args);
 
String[] remainingArgs = optionParser.getRemainingArgs();
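// GenericOptionsParser has already consumed generic options such as -D and -fs,
// so what remains should be <in> <out>, optionally followed by -skip <file>.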
 
System.out.println("test1 = " +  remainingArgs.length );
 
if (remainingArgs.length != 2 && remainingArgs.length != 4) {
 
   System.err.println("Usage: wordcount <in> <out> [-skip skipPatternFile]");
   System.exit(2);
   
}
 
System.out.println("test2 url:" + conf.get("fs.defaultFS"));
 
Job job = Job.getInstance(conf, "word count"); // create the job from a copy of conf
 
System.out.println("test2222 url:" + conf.get("fs.defaultFS"));
 
job.setJarByClass(WorldCount2_5_2.class);
 
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
 
job.setMapperClass(TokenizerMapper.class);
job.setReducerClass(IntSumReducer.class);

job.setInputFormatClass(TextInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
 
job.setCombinerClass(IntSumReducer.class);  
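// Reusing IntSumReducer as the combiner is safe because integer addition is
// associative and commutative, so partial sums on the map side cannot change
// the final per-word totals.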

System.out.println("test3 = " +  remainingArgs.length );
 
List<String> otherArgs = new ArrayList<String>();
for (int i=0; i < remainingArgs.length; ++i) {
  if ("-skip".equals(remainingArgs[i])) {
    job.addCacheFile(new Path(remainingArgs[++i]).toUri());
    job.getConfiguration().setBoolean("wordcount.skip.patterns", true);
  } else {
    otherArgs.add(remainingArgs[i]);
  }
}  
System.out.println("test4 = " +  remainingArgs.length );
 
FileInputFormat.addInputPath(job, new Path(otherArgs.get(0)));
 
System.out.println("test5 = " +  FileInputFormat.getInputPaths(job) );
 
FileOutputFormat.setOutputPath(job, new Path(otherArgs.get(1)));
 
System.out.println("test6 = " +  FileOutputFormat.getOutputPath(job) );
 
System.out.println("otherArgs.get(0) ="+otherArgs.get(0)+"   otherArgs.get(1)"+otherArgs.get(1));
 
boolean success = job.waitForCompletion(true);
 
System.out.println("job result ================ " + (success ? 0 : 1));
 
System.exit(success ? 0 : 1);
}
}

=====================================
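
If the class is packaged into a jar (wc.jar below is only an illustrative name), the same job can be launched from the shell; note, though, that with setup() commented out a -skip file gets cached but never actually applied:

bin/hadoop jar wc.jar mouapTest.WorldCount2_5_2 /in /out
bin/hadoop jar wc.jar mouapTest.WorldCount2_5_2 /in /out -skip patterns.txt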


The DFS Locations setup in Eclipse (screenshot not preserved).


The run-configuration arguments in Eclipse: pay particular attention to the input and output arguments, which must be full HDFS paths (screenshot not preserved).
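
For example, to match the log further down, the two program arguments would be:

hdfs://127.0.0.1:9000/user/root/in hdfs://127.0.0.1:9000/user/root/out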




The result displayed after the run (screenshot not preserved).




Output log:

test1 = 2
test2 url:file:///
2015-05-03 15:19:20,146 WARN  [main] util.NativeCodeLoader (NativeCodeLoader.java:<clinit>(62)) - Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
test2222 url:file:///
test3 = 2
test4 = 2
test5 = [Lorg.apache.hadoop.fs.Path;@10aa41f2
test6 = hdfs://127.0.0.1:9000/user/root/out
otherArgs.get(0) =hdfs://127.0.0.1:9000/user/root/in   otherArgs.get(1)hdfs://127.0.0.1:9000/user/root/out
2015-05-03 15:19:20,663 INFO  [main] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1019)) - session.id is deprecated. Instead, use dfs.metrics.session-id
2015-05-03 15:19:20,664 INFO  [main] jvm.JvmMetrics (JvmMetrics.java:init(76)) - Initializing JVM Metrics with processName=JobTracker, sessionId=
2015-05-03 15:19:20,903 WARN  [main] mapreduce.JobSubmitter (JobSubmitter.java:copyAndConfigureFiles(259)) - No job jar file set.  User classes may not be found. See Job or Job#setJar(String).
2015-05-03 15:19:20,962 INFO  [main] input.FileInputFormat (FileInputFormat.java:listStatus(281)) - Total input paths to process : 2
2015-05-03 15:19:21,005 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:submitJobInternal(396)) - number of splits:2
2015-05-03 15:19:21,129 INFO  [main] mapreduce.JobSubmitter (JobSubmitter.java:printTokens(479)) - Submitting tokens for job: job_local395912683_0001
2015-05-03 15:19:21,166 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-root/mapred/staging/root395912683/.staging/job_local395912683_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval;  Ignoring.
2015-05-03 15:19:21,170 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-root/mapred/staging/root395912683/.staging/job_local395912683_0001/job.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts;  Ignoring.
2015-05-03 15:19:21,264 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-root/mapred/local/localRunner/root/job_local395912683_0001/job_local395912683_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.retry.interval;  Ignoring.
2015-05-03 15:19:21,270 WARN  [main] conf.Configuration (Configuration.java:loadProperty(2368)) - file:/tmp/hadoop-root/mapred/local/localRunner/root/job_local395912683_0001/job_local395912683_0001.xml:an attempt to override final parameter: mapreduce.job.end-notification.max.attempts;  Ignoring.
2015-05-03 15:19:21,277 INFO  [main] mapreduce.Job (Job.java:submit(1289)) - The url to track the job: http://localhost:8080/
2015-05-03 15:19:21,278 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1334)) - Running job: job_local395912683_0001
2015-05-03 15:19:21,279 INFO  [Thread-12] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(471)) - OutputCommitter set in config null
2015-05-03 15:19:21,284 INFO  [Thread-12] mapred.LocalJobRunner (LocalJobRunner.java:createOutputCommitter(489)) - OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
2015-05-03 15:19:21,367 INFO  [Thread-12] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(448)) - Waiting for map tasks
2015-05-03 15:19:21,367 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(224)) - Starting task: attempt_local395912683_0001_m_000000_0
2015-05-03 15:19:21,451 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : [ ]
2015-05-03 15:19:21,458 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:runNewMapper(733)) - Processing split: hdfs://127.0.0.1:9000/user/root/in/test1mou.txt:0+37
2015-05-03 15:19:21,471 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:createSortingCollector(388)) - Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
2015-05-03 15:19:21,502 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:setEquator(1182)) - (EQUATOR) 0 kvi 26214396(104857584)
2015-05-03 15:19:21,502 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(975)) - mapreduce.task.io.sort.mb: 100
2015-05-03 15:19:21,502 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(976)) - soft limit at 83886080
2015-05-03 15:19:21,502 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(977)) - bufstart = 0; bufvoid = 104857600
2015-05-03 15:19:21,503 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(978)) - kvstart = 26214396; length = 6553600
2015-05-03 15:19:21,687 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 
2015-05-03 15:19:21,709 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1437)) - Starting flush of map output
2015-05-03 15:19:21,709 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1455)) - Spilling map output
2015-05-03 15:19:21,709 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1456)) - bufstart = 0; bufend = 61; bufvoid = 104857600
2015-05-03 15:19:21,709 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1458)) - kvstart = 26214396(104857584); kvend = 26214376(104857504); length = 21/6553600
2015-05-03 15:19:21,726 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:sortAndSpill(1641)) - Finished spill 0
2015-05-03 15:19:21,729 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:done(1001)) - Task:attempt_local395912683_0001_m_000000_0 is done. And is in the process of committing
2015-05-03 15:19:21,744 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - map
2015-05-03 15:19:21,744 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local395912683_0001_m_000000_0' done.
2015-05-03 15:19:21,744 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(249)) - Finishing task: attempt_local395912683_0001_m_000000_0
2015-05-03 15:19:21,744 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(224)) - Starting task: attempt_local395912683_0001_m_000001_0
2015-05-03 15:19:21,746 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : [ ]
2015-05-03 15:19:21,748 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:runNewMapper(733)) - Processing split: hdfs://127.0.0.1:9000/user/root/in/test2mou.txt:0+26
2015-05-03 15:19:21,749 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:createSortingCollector(388)) - Map output collector class = org.apache.hadoop.mapred.MapTask$MapOutputBuffer
2015-05-03 15:19:21,772 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:setEquator(1182)) - (EQUATOR) 0 kvi 26214396(104857584)
2015-05-03 15:19:21,772 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(975)) - mapreduce.task.io.sort.mb: 100
2015-05-03 15:19:21,772 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(976)) - soft limit at 83886080
2015-05-03 15:19:21,772 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(977)) - bufstart = 0; bufvoid = 104857600
2015-05-03 15:19:21,773 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:init(978)) - kvstart = 26214396; length = 6553600
2015-05-03 15:19:21,799 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 
2015-05-03 15:19:21,800 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1437)) - Starting flush of map output
2015-05-03 15:19:21,800 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1455)) - Spilling map output
2015-05-03 15:19:21,800 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1456)) - bufstart = 0; bufend = 42; bufvoid = 104857600
2015-05-03 15:19:21,800 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:flush(1458)) - kvstart = 26214396(104857584); kvend = 26214384(104857536); length = 13/6553600
2015-05-03 15:19:21,803 INFO  [LocalJobRunner Map Task Executor #0] mapred.MapTask (MapTask.java:sortAndSpill(1641)) - Finished spill 0
2015-05-03 15:19:21,807 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:done(1001)) - Task:attempt_local395912683_0001_m_000001_0 is done. And is in the process of committing
2015-05-03 15:19:21,813 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - map
2015-05-03 15:19:21,813 INFO  [LocalJobRunner Map Task Executor #0] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local395912683_0001_m_000001_0' done.
2015-05-03 15:19:21,814 INFO  [LocalJobRunner Map Task Executor #0] mapred.LocalJobRunner (LocalJobRunner.java:run(249)) - Finishing task: attempt_local395912683_0001_m_000001_0
2015-05-03 15:19:21,814 INFO  [Thread-12] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(456)) - map task executor complete.
2015-05-03 15:19:21,816 INFO  [Thread-12] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(448)) - Waiting for reduce tasks
2015-05-03 15:19:21,816 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:run(302)) - Starting task: attempt_local395912683_0001_r_000000_0
2015-05-03 15:19:21,824 INFO  [pool-6-thread-1] mapred.Task (Task.java:initialize(587)) -  Using ResourceCalculatorProcessTree : [ ]
2015-05-03 15:19:21,827 INFO  [pool-6-thread-1] mapred.ReduceTask (ReduceTask.java:run(362)) - Using ShuffleConsumerPlugin: org.apache.hadoop.mapreduce.task.reduce.Shuffle@5251c987
2015-05-03 15:19:21,839 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:<init>(193)) - MergerManager: memoryLimit=619865664, maxSingleShuffleLimit=154966416, mergeThreshold=409111360, ioSortFactor=10, memToMemMergeOutputsThreshold=10
2015-05-03 15:19:21,842 INFO  [EventFetcher for fetching Map Completion Events] reduce.EventFetcher (EventFetcher.java:run(61)) - attempt_local395912683_0001_r_000000_0 Thread started: EventFetcher for fetching Map Completion Events
2015-05-03 15:19:21,877 INFO  [localfetcher#1] reduce.LocalFetcher (LocalFetcher.java:copyMapOutput(140)) - localfetcher#1 about to shuffle output of map attempt_local395912683_0001_m_000001_0 decomp: 39 len: 43 to MEMORY
2015-05-03 15:19:21,881 INFO  [localfetcher#1] reduce.InMemoryMapOutput (InMemoryMapOutput.java:shuffle(100)) - Read 39 bytes from map-output for attempt_local395912683_0001_m_000001_0
2015-05-03 15:19:21,884 INFO  [localfetcher#1] reduce.MergeManagerImpl (MergeManagerImpl.java:closeInMemoryFile(307)) - closeInMemoryFile -> map-output of size: 39, inMemoryMapOutputs.size() -> 1, commitMemory -> 0, usedMemory ->39
2015-05-03 15:19:21,886 INFO  [localfetcher#1] reduce.LocalFetcher (LocalFetcher.java:copyMapOutput(140)) - localfetcher#1 about to shuffle output of map attempt_local395912683_0001_m_000000_0 decomp: 51 len: 55 to MEMORY
2015-05-03 15:19:21,887 INFO  [localfetcher#1] reduce.InMemoryMapOutput (InMemoryMapOutput.java:shuffle(100)) - Read 51 bytes from map-output for attempt_local395912683_0001_m_000000_0
2015-05-03 15:19:21,887 INFO  [localfetcher#1] reduce.MergeManagerImpl (MergeManagerImpl.java:closeInMemoryFile(307)) - closeInMemoryFile -> map-output of size: 51, inMemoryMapOutputs.size() -> 2, commitMemory -> 39, usedMemory ->90
2015-05-03 15:19:21,888 INFO  [EventFetcher for fetching Map Completion Events] reduce.EventFetcher (EventFetcher.java:run(76)) - EventFetcher is interrupted.. Returning
2015-05-03 15:19:21,889 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 2 / 2 copied.
2015-05-03 15:19:21,889 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(667)) - finalMerge called with 2 in-memory map-outputs and 0 on-disk map-outputs
2015-05-03 15:19:21,909 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(591)) - Merging 2 sorted segments
2015-05-03 15:19:21,910 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(690)) - Down to the last merge-pass, with 2 segments left of total size: 73 bytes
2015-05-03 15:19:21,914 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(742)) - Merged 2 segments, 90 bytes to disk to satisfy reduce memory limit
2015-05-03 15:19:21,915 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(772)) - Merging 1 files, 92 bytes from disk
2015-05-03 15:19:21,917 INFO  [pool-6-thread-1] reduce.MergeManagerImpl (MergeManagerImpl.java:finalMerge(787)) - Merging 0 segments, 0 bytes from memory into reduce
2015-05-03 15:19:21,918 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(591)) - Merging 1 sorted segments
2015-05-03 15:19:21,918 INFO  [pool-6-thread-1] mapred.Merger (Merger.java:merge(690)) - Down to the last merge-pass, with 1 segments left of total size: 79 bytes
2015-05-03 15:19:21,920 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 2 / 2 copied.
2015-05-03 15:19:21,984 INFO  [pool-6-thread-1] Configuration.deprecation (Configuration.java:warnOnceIfDeprecated(1019)) - mapred.skip.on is deprecated. Instead, use mapreduce.job.skiprecords
2015-05-03 15:19:22,227 INFO  [pool-6-thread-1] mapred.Task (Task.java:done(1001)) - Task:attempt_local395912683_0001_r_000000_0 is done. And is in the process of committing
2015-05-03 15:19:22,232 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - 2 / 2 copied.
2015-05-03 15:19:22,232 INFO  [pool-6-thread-1] mapred.Task (Task.java:commit(1162)) - Task attempt_local395912683_0001_r_000000_0 is allowed to commit now
2015-05-03 15:19:22,273 INFO  [pool-6-thread-1] output.FileOutputCommitter (FileOutputCommitter.java:commitTask(439)) - Saved output of task 'attempt_local395912683_0001_r_000000_0' to hdfs://127.0.0.1:9000/user/root/out/_temporary/0/task_local395912683_0001_r_000000
2015-05-03 15:19:22,275 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:statusUpdate(591)) - reduce > reduce
2015-05-03 15:19:22,275 INFO  [pool-6-thread-1] mapred.Task (Task.java:sendDone(1121)) - Task 'attempt_local395912683_0001_r_000000_0' done.
2015-05-03 15:19:22,275 INFO  [pool-6-thread-1] mapred.LocalJobRunner (LocalJobRunner.java:run(325)) - Finishing task: attempt_local395912683_0001_r_000000_0
2015-05-03 15:19:22,276 INFO  [Thread-12] mapred.LocalJobRunner (LocalJobRunner.java:runTasks(456)) - reduce task executor complete.
2015-05-03 15:19:22,280 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1355)) - Job job_local395912683_0001 running in uber mode : false
2015-05-03 15:19:22,282 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1362)) -  map 100% reduce 100%
2015-05-03 15:19:23,285 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1373)) - Job job_local395912683_0001 completed successfully
2015-05-03 15:19:23,312 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1380)) - Counters: 39

job result ================ 0
2015-05-03 15:19:23,312 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1334)) - Running job: job_local395912683_0001
2015-05-03 15:19:23,313 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1355)) - Job job_local395912683_0001 running in uber mode : false
2015-05-03 15:19:23,313 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1362)) -  map 100% reduce 100%
2015-05-03 15:19:23,313 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1373)) - Job job_local395912683_0001 completed successfully
2015-05-03 15:19:23,320 INFO  [main] mapreduce.Job (Job.java:monitorAndPrintJob(1380)) - Counters: 39
File System Counters
FILE: Number of bytes read=1607
FILE: Number of bytes written=688670
FILE: Number of read operations=0
FILE: Number of large read operations=0
FILE: Number of write operations=0
HDFS: Number of bytes read=163
HDFS: Number of bytes written=42
HDFS: Number of read operations=25
HDFS: Number of large read operations=0
HDFS: Number of write operations=5
Map-Reduce Framework
Map input records=2
Map output records=10
Map output bytes=103
Map output materialized bytes=98
Input split bytes=224
Combine input records=10
Combine output records=7
Reduce input groups=5
Reduce shuffle bytes=98
Reduce input records=7
Reduce output records=5
Spilled Records=14
Shuffled Maps =2
Failed Shuffles=0
Merged Map outputs=2
GC time elapsed (ms)=0
CPU time spent (ms)=0
Physical memory (bytes) snapshot=0
Virtual memory (bytes) snapshot=0
Total committed heap usage (bytes)=824180736
Shuffle Errors
BAD_ID=0
CONNECTION=0
IO_ERROR=0
WRONG_LENGTH=0
WRONG_MAP=0
WRONG_REDUCE=0
mouapTest.WorldCount2_5_2$TokenizerMapper$CountersEnum
INPUT_WORDS=10
File Input Format Counters 
Bytes Read=63
File Output Format Counters 
Bytes Written=42


Note: Eclipse here was launched as root from a terminal; be careful about user permissions.



