需求见: https://blog.csdn.net/luoyunfan6/article/details/100629006
测试主类 MysqlMR（从 MySQL 读入、按年龄统计、再写回 MySQL）
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.log4j.BasicConfigurator;
import java.io.IOException;
/**
 * MapReduce job that reads rows of the MySQL table "stu" through
 * DBInputFormat, counts students per age, and writes (age, cnt) rows
 * into the MySQL table "stu_res" through DBOutputFormat.
 */
public class MysqlMR {

    /** Emits (age, 1) for every input row. */
    public static class MyMapper extends Mapper<LongWritable, UserInfo, IntWritable, IntWritable> {
        // Reuse writables across map() calls instead of allocating per record
        // (standard Hadoop practice; Context serializes values on write()).
        private static final IntWritable ONE = new IntWritable(1);
        private final IntWritable ageKey = new IntWritable();

        @Override
        protected void map(
                LongWritable key, UserInfo value, Context context)
                throws IOException, InterruptedException {
            ageKey.set(value.getAge());
            context.write(ageKey, ONE);
        }
    }

    /** Sums the counts for one age and emits a UserInfo2(age, cnt) DB record. */
    public static class MyReducer extends Reducer<IntWritable, IntWritable, UserInfo2, Text> {
        // The value half of the output pair is unused by DBOutputFormat;
        // reuse a single empty Text.
        private static final Text EMPTY = new Text("");

        @Override
        protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            int num = 0;
            for (IntWritable a : values) {
                num += a.get();
            }
            // Use the existing convenience constructor instead of setters.
            context.write(new UserInfo2(key.get(), num), EMPTY);
        }
    }

    public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
        BasicConfigurator.configure();
        Configuration conf = new Configuration();
        // MySQL connection settings (driver class, JDBC url, user, password).
        // NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x
        // class name; Connector/J 8+ uses "com.mysql.cj.jdbc.Driver" — confirm
        // which jar is on the cluster classpath.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver", "jdbc:mysql://localhost:3306/xxx", "root", "root");
        Job job = Job.getInstance(conf, "mapreduce/mysql");
        job.setJarByClass(MysqlMR.class);
        job.setMapperClass(MyMapper.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setReducerClass(MyReducer.class);
        job.setOutputKeyClass(UserInfo2.class);
        job.setOutputValueClass(Text.class);
        // Columns selected from "stu" (must match UserInfo's field order) and
        // columns inserted into "stu_res" (must match UserInfo2's field order).
        String[] inputFields = {"id", "student_id", "name", "age", "sex", "birthday"};
        String[] outputFields = {"age", "cnt"};
        // setInput/setOutput also install DBInputFormat/DBOutputFormat as the
        // job's input/output format classes.
        DBInputFormat.setInput(job, UserInfo.class, "stu", null, "id", inputFields);
        DBOutputFormat.setOutput(job, "stu_res", outputFields);
        int success = job.waitForCompletion(true) ? 0 : 1;
        System.exit(success);
    }
}
UserInfo（DBInputFormat 的输入实体类，对应表 stu 的一行）
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Bean mapped to one row of the MySQL table "stu".
 *
 * <p>Implements Hadoop's {@code Writable} (binary serialization between map
 * and reduce) and {@code DBWritable} (JDBC read/write). All four
 * serialization paths use the same field order:
 * id, student_id, name, age, sex, birthday.
 */
public class UserInfo implements DBWritable, Writable {

    // 1-based JDBC column / parameter indexes, in SELECT order.
    private static final int COL_ID = 1;
    private static final int COL_STUDENT_ID = 2;
    private static final int COL_NAME = 3;
    private static final int COL_AGE = 4;
    private static final int COL_SEX = 5;
    private static final int COL_BIRTHDAY = 6;

    private int id;
    private int student_id;
    private String name;
    private int age;
    private String sex;
    private String birthday;

    /** No-arg constructor required by Hadoop's reflection-based instantiation. */
    public UserInfo() {
    }

    public UserInfo(int id, int student_id, String name, int age, String sex, String birthday) {
        this.id = id;
        this.student_id = student_id;
        this.name = name;
        this.age = age;
        this.sex = sex;
        this.birthday = birthday;
    }

    /** Writable: serialize all fields in declaration order. */
    @Override
    public void write(DataOutput out) throws IOException {
        out.writeInt(this.id);
        out.writeInt(this.student_id);
        out.writeUTF(this.name);
        out.writeInt(this.age);
        out.writeUTF(this.sex);
        out.writeUTF(this.birthday);
    }

    /** Writable: deserialize in the exact order written by {@link #write(DataOutput)}. */
    @Override
    public void readFields(DataInput in) throws IOException {
        this.id = in.readInt();
        this.student_id = in.readInt();
        this.name = in.readUTF();
        this.age = in.readInt();
        this.sex = in.readUTF();
        this.birthday = in.readUTF();
    }

    /** DBWritable: bind fields to the INSERT statement's parameters. */
    @Override
    public void write(PreparedStatement statement) throws SQLException {
        statement.setInt(COL_ID, this.id);
        statement.setInt(COL_STUDENT_ID, this.student_id);
        statement.setString(COL_NAME, this.name);
        statement.setInt(COL_AGE, this.age);
        statement.setString(COL_SEX, this.sex);
        statement.setString(COL_BIRTHDAY, this.birthday);
    }

    /** DBWritable: populate fields from one row of the SELECT result. */
    @Override
    public void readFields(ResultSet resultSet) throws SQLException {
        this.id = resultSet.getInt(COL_ID);
        this.student_id = resultSet.getInt(COL_STUDENT_ID);
        this.name = resultSet.getString(COL_NAME);
        this.age = resultSet.getInt(COL_AGE);
        this.sex = resultSet.getString(COL_SEX);
        this.birthday = resultSet.getString(COL_BIRTHDAY);
    }

    public int getId() {
        return this.id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public int getStudent_id() {
        return this.student_id;
    }

    public void setStudent_id(int student_id) {
        this.student_id = student_id;
    }

    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return this.age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public String getSex() {
        return this.sex;
    }

    public void setSex(String sex) {
        this.sex = sex;
    }

    public String getBirthday() {
        return this.birthday;
    }

    public void setBirthday(String birthday) {
        this.birthday = birthday;
    }
}
UserInfo2（DBOutputFormat 的输出实体类，对应表 stu_res 的一行）
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
/**
 * Bean mapped to one row of the MySQL result table "stu_res".
 *
 * <p>Holds an (age, cnt) aggregation pair. Implements {@code Writable} for
 * Hadoop binary serialization and {@code DBWritable} for JDBC; both paths
 * serialize age first, then cnt.
 */
public class UserInfo2 implements DBWritable, Writable {

    // 1-based JDBC parameter / column indexes.
    private static final int COL_AGE = 1;
    private static final int COL_CNT = 2;

    int age;
    int cnt;

    /** No-arg constructor required by Hadoop's reflection-based instantiation. */
    public UserInfo2() {
    }

    public UserInfo2(int age, int cnt) {
        this.age = age;
        this.cnt = cnt;
    }

    public int getAge() {
        return this.age;
    }

    public void setAge(int age) {
        this.age = age;
    }

    public int getCnt() {
        return this.cnt;
    }

    public void setCnt(int cnt) {
        this.cnt = cnt;
    }

    /** Writable: serialize age then cnt. */
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeInt(this.age);
        dataOutput.writeInt(this.cnt);
    }

    /** Writable: deserialize in the order written by {@link #write(DataOutput)}. */
    @Override
    public void readFields(DataInput dataInput) throws IOException {
        this.age = dataInput.readInt();
        this.cnt = dataInput.readInt();
    }

    /** DBWritable: bind (age, cnt) to the INSERT statement's parameters. */
    @Override
    public void write(PreparedStatement preparedStatement) throws SQLException {
        preparedStatement.setInt(COL_AGE, this.age);
        preparedStatement.setInt(COL_CNT, this.cnt);
    }

    /** DBWritable: populate (age, cnt) from one result-set row. */
    @Override
    public void readFields(ResultSet resultSet) throws SQLException {
        this.age = resultSet.getInt(COL_AGE);
        this.cnt = resultSet.getInt(COL_CNT);
    }
}