步骤:
在eclipse中运行
右键 --> Run As --> Run Configurations
在 Program arguments 中添加:/user/hadoop/output(此处随你自己修改)
代码:
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.examples;
import java.io.IOException;
import java.util.Date;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.ClusterStatus;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.GenericOptionsParser;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
/**
* This program uses map/reduce to just run a distributed job where there is no
* interaction between the tasks and each task write a large unsorted random
* binary sequence file of BytesWritable. In order for this program to generate
* data for terasort with 10-byte keys and 90-byte values, have the following
* config: <xmp> <?xml version="1.0"?> <?xml-stylesheet type="text/xsl"
* href="configuration.xsl"?> <configuration> <property>
* <name>test.randomwrite.min_key</name> <value>10</value> </property>
* <property> <name>test.randomwrite.max_key</name> <value>10</value>
* </property> <property> <name>test.randomwrite.min_value</name>
* <value>90</value> </property> <property>
* <name>test.randomwrite.max_value</name> <value>90</value> </property>
* <property> <name>test.randomwrite.total_bytes</name>
* <value>1099511627776</value> </property> </configuration></xmp>
*
* Equivalently, {@link RandomWriter} also supports all the above options and
* ones supported by {@link GenericOptionsParser} via the command-line.
*/
public class RandomWriter extends Configured implements Tool {
/**
* User counters
*/
/**
 * User-defined job counters reported back to the framework.
 * Nested enums are implicitly static, so the redundant {@code static}
 * modifier has been dropped.
 */
enum Counters {
/** Total number of records written (presumably incremented per emitted
 *  record by the map tasks — the updating code is outside this view). */
RECORDS_WRITTEN,
/** Total number of bytes written (presumably incremented per emitted
 *  record by the map tasks — the updating code is outside this view). */
BYTES_WRITTEN
}
/**
* A custom input format that creates virtual inputs of a single string for
* each map.
*/
static class RandomInputFormat extends Configured implements
InputFormat<Text, Text> {
/**
* Generate the requested number of file splits, with the filename set
* to the filename of the output file.
*/
public InputSplit[] getSplits(JobConf job, int numSplits)
throws IOException {
/** 设置输入分片的个数 在run方法中设在的Map数量只是建议性质的 Map的数量是由InputFormat的getSplits方法放回的数组个数决定的 所以在这里进行设置**/
JobClient client = new JobClient(job);
ClusterStatus cluster = client.getClusterStatus();
/** 如果属性不存在 则返回默认的值 **/
int numMapsPerHost = job.getInt("test.randomwriter.maps_per_host",
10);
long numBytesToWritePerMap = job.getLong(
"test.randomwrite.bytes_per_map", 1 * 1024 * 1024 * 1024);
if (numBytesToWritePerMap == 0) {
System.err
.println("Cannot have test.randomwrite.bytes_per_map set to 0");
}
long totalBytesToWrite = job