Packaging a MapReduce jar and running it against HDFS
Upload the source data file to HDFS
hdfs dfs -mkdir /datas/
hdfs dfs -put data1.txt /datas/
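To confirm the upload, you can list the directory and peek at the first few lines of the file:
hdfs dfs -ls /datas/
hdfs dfs -cat /datas/data1.txt | head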
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>org.example</groupId>
    <artifactId>wdDemo</artifactId>
    <version>1.0-SNAPSHOT</version>
    <!-- Cloudera repository, needed for the CDH builds of Hadoop -->
    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>
    <dependencies>
        <!-- Maven coordinates are case-sensitive: the group is org.apache.hadoop,
             and the artifactIds are all lowercase -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.6.0-mr1-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.6.0-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.6.0-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>2.6.0-cdh5.14.0</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
            <version>RELEASE</version>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
            <!-- The shade plugin builds a fat jar that bundles the runtime dependencies -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.4.3</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <minimizeJar>true</minimizeJar>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
MapReduce source code
Map code
package test;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class test1Map extends Mapper<LongWritable, Text, Text, NullWritable> {
    Text k = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Read one line of input
        String line = value.toString();
        // Skip the header row (it starts with the column title "酒店", "hotel")
        if (line.startsWith("酒店")) {
            return;
        }
        // Drop malformed rows that have fewer than 7 comma-separated fields
        String[] data = line.split(",");
        if (data.length < 7) {
            return;
        }
        // Rebuild the row and emit it as the key with a NullWritable value;
        // using the whole line as the key lets the shuffle group duplicate
        // rows together so the reducer can deduplicate them
        StringBuilder str = new StringBuilder();
        for (int i = 0; i < data.length; i++) {
            str.append(data[i]);
            if (i < data.length - 1) {
                str.append(",");
            }
        }
        k.set(str.toString());
        context.write(k, NullWritable.get());
    }
}
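To see the cleaning rule outside Hadoop, here is a minimal standalone sketch. The sample rows are invented for illustration, since the real column layout of data1.txt is not shown in this post:

// Hypothetical demo of the mapper's filter logic; the rows below are made up.
public class CleanDemo {
    public static void main(String[] args) {
        String[] lines = {
                "酒店名称,城市,评分,评论数,最低价,级别,地区",  // header row: skipped
                "某酒店,北京,4.5,1200,300,舒适型,朝阳区",      // 7 fields: kept
                "坏行,只有,三个字段"                           // fewer than 7 fields: dropped
        };
        for (String line : lines) {
            if (line.startsWith("酒店")) continue;       // same header check as the mapper
            String[] data = line.split(",");
            if (data.length < 7) continue;               // same length check as the mapper
            System.out.println(String.join(",", data));  // the line the mapper would emit as key
        }
    }
}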
Reduce code
package test;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

public class test1Reduce extends Reducer<Text, NullWritable, Text, NullWritable> {
    @Override
    protected void reduce(Text key, Iterable<NullWritable> values, Context context) throws IOException, InterruptedException {
        // Identical lines arrive grouped under one key, so writing the key
        // once per group removes duplicate rows
        context.write(key, NullWritable.get());
    }
}
Driver code
package test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;

public class test1Driver {
    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        // conf.set("hadoop.tmp.dir", "D:\\file\\hdfs_temp");
        Job job = Job.getInstance(conf);
        job.setJarByClass(test1Driver.class);
        job.setMapperClass(test1Map.class);
        job.setReducerClass(test1Reduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(NullWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(NullWritable.class);
        // The output directory must not exist when the job starts, so delete
        // it first if it is left over from a previous run
        Path p = setPath(job);
        p.getFileSystem(conf).delete(p, true);
        boolean result = job.waitForCompletion(true);
        System.out.println(result);
        System.exit(result ? 0 : 1);
    }

    private static Path setPath(Job job) throws IOException {
        // Paths for running locally:
        // Path inpath = new Path("D:\\file\\source\\data1.txt");
        // Path outpath = new Path("D:\\file\\output");

        // Paths for running the packaged jar against HDFS; mind the NameNode
        // IP address and port. You can check the port with:
        //   hdfs getconf -confKey fs.default.name
        Path inpath = new Path("hdfs://192.168.1.66:9000/datas");
        // If the input path is a directory, MapReduce processes every file
        // under it; if it is a single file, only that file is processed.
        // Here the input is a directory, so the job reads the file uploaded
        // earlier.
        Path outpath = new Path("hdfs://192.168.1.66:9000/datas_out");
        FileInputFormat.addInputPath(job, inpath);
        FileOutputFormat.setOutputPath(job, outpath);
        return outpath;
    }
}
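Hardcoding the NameNode address ties the jar to one cluster. A common variant, shown here only as a hypothetical sketch, reads the paths from the command line instead (e.g. hadoop jar test.jar test.test1Driver /datas /datas_out):

// Hypothetical: replace the hardcoded paths in setPath() with main()'s args
Path inpath = new Path(args[0]);
Path outpath = new Path(args[1]);
FileInputFormat.addInputPath(job, inpath);
FileOutputFormat.setOutputPath(job, outpath);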
Build the jar
Packaging produces two jars in the target directory. The one with the short name is larger: it bundles every jar the program needs at runtime. The one with the long name is smaller and contains only the project's own classes, without the runtime dependencies.
Use the larger one: copy it, paste it to the desktop, and rename it test.jar.
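Assuming the artifactId and version from the POM above, packaging is one command, and the shade plugin's usual naming makes the two jars easy to tell apart:
mvn clean package
# target/wdDemo-1.0-SNAPSHOT.jar            <- shaded "fat" jar (dependencies bundled)
# target/original-wdDemo-1.0-SNAPSHOT.jar   <- plain jar (project classes only)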
Upload test.jar to a machine in the cluster. Note that the jar itself does not need to be put into HDFS: hadoop jar reads it from the local filesystem, so only the input data under /datas has to live in HDFS.
Run the jar
From the directory containing test.jar, run the job by passing the fully qualified driver class:
hadoop jar test.jar test.test1Driver
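When the job finishes, the deduplicated rows are written to /datas_out; with the default single reducer the result is in part-r-00000:
hdfs dfs -ls /datas_out
hdfs dfs -cat /datas_out/part-r-00000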