// JobRun.java
package com.zjs.mr2;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
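/**
 * Counts the rows of a MySQL table with MapReduce: DBInputFormat hands each
 * row to the mapper as a TestRecord, every record becomes ("count", 1), and
 * the reducer sums the ones and writes the total to HDFS.
 */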
public class JobRun {

    public static void main(String[] args) {
        JobRun jobRun = new JobRun();
        try {
            System.out.println(jobRun.run() ? "job succeeded" : "job failed");
        } catch (ClassNotFoundException | IOException | InterruptedException e) {
            e.printStackTrace();
        }
    }
    public boolean run() throws IOException, ClassNotFoundException,
            InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://node6:8020");
        conf.set("yarn.resourcemanager.hostname", "node7");
        FileSystem fs = FileSystem.get(conf);
        // JDBC settings must land in conf before Job.getInstance(conf),
        // because the Job takes its own copy of the Configuration.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://localhost:3306/test", "root", "123456");
        Job job = Job.getInstance(conf);
        job.setJarByClass(JobRun.class); // ship the jar containing these classes
        job.setMapperClass(MyMapper.class);
        job.setReducerClass(MyReduce.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Column order must match the indices used in
        // TestRecord.readFields(ResultSet): name first, then id.
        String[] fields = { "name", "id" };
        // Read table "test" with no WHERE condition, ordered by id;
        // setInput also registers DBInputFormat as the job's input format.
        DBInputFormat.setInput(job, TestRecord.class, "test", null, "id", fields);
        Path outPath = new Path("/usr/output/test");
        if (fs.exists(outPath)) {
            fs.delete(outPath, true); // remove output from a previous run
        }
        FileOutputFormat.setOutputPath(job, outPath);
        return job.waitForCompletion(true);
    }
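    // Note: the MySQL JDBC driver must also be on the task classpath. One
    // common approach (an assumption, not shown in the original) is to
    // bundle mysql-connector-java inside the job jar's lib/ directory.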
    static class MyMapper extends
            Mapper<LongWritable, TestRecord, Text, IntWritable> {

        private final IntWritable one = new IntWritable(1);
        private final Text k = new Text("count");

        // Emit ("count", 1) for every row read from the table.
        @Override
        protected void map(LongWritable key, TestRecord tr, Context context)
                throws IOException, InterruptedException {
            context.write(k, one);
        }
    }
    static class MyReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

        private final IntWritable v = new IntWritable();

        // Sum the 1s for the single "count" key to get the row count.
        @Override
        protected void reduce(Text key, Iterable<IntWritable> values,
                Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable i : values) {
                sum += i.get();
            }
            v.set(sum);
            context.write(key, v);
        }
    }
}
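The job above assumes a MySQL table named test with the two columns listed
in fields; the original never shows the schema, so the following is a
minimal sketch with assumed column types (name VARCHAR, id INT) that would
satisfy TestRecord:

// SeedTestTable.java -- hypothetical helper, not part of the original post.
package com.zjs.mr2;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class SeedTestTable {
    public static void main(String[] args) throws Exception {
        try (Connection c = DriverManager.getConnection(
                "jdbc:mysql://localhost:3306/test", "root", "123456");
             Statement s = c.createStatement()) {
            // Schema is an assumption matching TestRecord's fields.
            s.executeUpdate("CREATE TABLE IF NOT EXISTS test ("
                    + "id INT PRIMARY KEY, name VARCHAR(64))");
            s.executeUpdate("INSERT INTO test VALUES (1, 'a'), (2, 'b'), (3, 'c')");
        }
    }
}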
// TestRecord.java
package com.zjs.mr2;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
public class TestRecord implements Writable, DBWritable {

    private String name;
    private int id;
    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }
    // DBWritable: read one row from the ResultSet. Column indices must
    // match the order of the fieldNames passed to DBInputFormat.setInput
    // ({"name", "id"}).
    @Override
    public void readFields(ResultSet in) throws SQLException {
        this.name = in.getString(1);
        this.id = in.getInt(2);
    }

    // DBWritable: bind this record to a PreparedStatement (used by
    // DBOutputFormat; unused in this read-only job).
    @Override
    public void write(PreparedStatement out) throws SQLException {
        out.setString(1, this.name);
        out.setInt(2, this.id);
    }
    // Writable: Hadoop serialization, used when records move between tasks.
    @Override
    public void readFields(DataInput in) throws IOException {
        this.name = Text.readString(in);
        this.id = in.readInt();
    }

    @Override
    public void write(DataOutput out) throws IOException {
        Text.writeString(out, this.name);
        out.writeInt(this.id);
    }
    @Override
    public String toString() {
        return this.name + " " + this.id;
    }
}
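The Writable half can be checked locally without a cluster or MySQL. The
sketch below (not part of the original post) round-trips a record through
Hadoop's DataOutputBuffer/DataInputBuffer helpers:

// RoundTripCheck.java -- hypothetical local test of TestRecord serialization.
package com.zjs.mr2;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class RoundTripCheck {
    public static void main(String[] args) throws Exception {
        TestRecord in = new TestRecord();
        in.setName("alice");
        in.setId(42);

        DataOutputBuffer out = new DataOutputBuffer();
        in.write(out); // Writable.write(DataOutput)

        DataInputBuffer buf = new DataInputBuffer();
        buf.reset(out.getData(), out.getLength());

        TestRecord back = new TestRecord();
        back.readFields(buf); // Writable.readFields(DataInput)
        System.out.println(back); // expected output: "alice 42"
    }
}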