Raw data (tab-separated: class id and score):
c1 225.0
c2 228.0
c3 228.0
c4 227.0
- Custom bean: Co81Baen
```java
package com.fjh.course.course8.co1;

import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

// Bean used both as the MapReduce key (WritableComparable) and as the
// record written to MySQL through DBOutputFormat (DBWritable).
public class Co81Baen implements WritableComparable<Co81Baen>, DBWritable {

    String classId;
    double fenshu;   // score

    public Co81Baen() {
    }

    public Co81Baen(String classId, double fenshu) {
        this.classId = classId;
        this.fenshu = fenshu;
    }

    @Override
    public String toString() {
        return classId + "\t" + fenshu;
    }

    public String getClassId() {
        return classId;
    }

    public void setClassId(String classId) {
        this.classId = classId;
    }

    public double getFenshu() {
        return fenshu;
    }

    public void setFenshu(double fenshu) {
        this.fenshu = fenshu;
    }

    @Override
    public int compareTo(Co81Baen o) {
        // Compare by class id; returning 0 for every pair would make all keys
        // equal and merge distinct records during the shuffle.
        return this.classId.compareTo(o.classId);
    }

    // Hadoop serialization: field order must match readFields(DataInput).
    @Override
    public void write(DataOutput dataOutput) throws IOException {
        dataOutput.writeUTF(classId);
        dataOutput.writeDouble(fenshu);
    }

    @Override
    public void readFields(DataInput dataInput) throws IOException {
        this.classId = dataInput.readUTF();
        this.fenshu = dataInput.readDouble();
    }

    // JDBC serialization: parameter indexes follow the column list passed to
    // DBOutputFormat.setOutput() in the driver.
    @Override
    public void write(PreparedStatement preparedStatement) throws SQLException {
        preparedStatement.setString(1, classId);
        preparedStatement.setDouble(2, fenshu);
    }

    @Override
    public void readFields(ResultSet resultSet) throws SQLException {
        this.classId = resultSet.getString(1);
        this.fenshu = resultSet.getDouble(2);
    }
}
```
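As a quick sanity check of the Hadoop serialization contract above, the bean can be round-tripped through `DataOutput`/`DataInput` in plain Java. This is a minimal standalone sketch, not part of the original job; the class name `Co81BaenRoundTrip` is illustrative, and it only assumes `Co81Baen` and JDK stream classes on the classpath.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

import com.fjh.course.course8.co1.Co81Baen;

public class Co81BaenRoundTrip {
    public static void main(String[] args) throws Exception {
        Co81Baen in = new Co81Baen("c1", 225.0);

        // Serialize the same way Hadoop calls write(DataOutput).
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        in.write(new DataOutputStream(bytes));

        // Deserialize via readFields(DataInput) and check the fields survive.
        Co81Baen out = new Co81Baen();
        out.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));

        System.out.println(out);   // expected: c1	225.0
    }
}
```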
- Co81Mapper
```java
package com.fjh.course.course8.co1;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

public class Co81Mapper extends Mapper<LongWritable, Text, Co81Baen, NullWritable> {

    Co81Baen co81Baen = new Co81Baen();

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // Each input line is "classId \t score", e.g. "c1\t225.0".
        String[] split = value.toString().split("\t");
        co81Baen.setClassId(split[0]);
        // The score contains a decimal point, so parse it as a double;
        // Long.parseLong("225.0") would throw NumberFormatException.
        co81Baen.setFenshu(Double.parseDouble(split[1]));
        context.write(co81Baen, NullWritable.get());
    }
}
```
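To see what one record looks like before it reaches `context.write`, the same split-and-parse step can be exercised outside Hadoop. A minimal sketch; the class name `Co81MapperParseDemo` is illustrative and the sample line is taken from the raw data above.

```java
import com.fjh.course.course8.co1.Co81Baen;

public class Co81MapperParseDemo {
    public static void main(String[] args) {
        // One line of the raw input, tab-separated as the mapper expects.
        String line = "c1\t225.0";

        String[] split = line.split("\t");
        Co81Baen bean = new Co81Baen(split[0], Double.parseDouble(split[1]));

        System.out.println(bean);   // prints: c1	225.0
    }
}
```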
- Co81Driver
```java
package com.fjh.course.course8.co1;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

import java.io.IOException;

public class Co81Driver {
    public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {
        // JDBC driver class name
        String sqlconn = "com.mysql.jdbc.Driver";

        Configuration configuration = new Configuration();
        // Credentials are passed explicitly, so they are left out of the URL.
        DBConfiguration.configureDB(configuration, sqlconn,
                "jdbc:mysql://localhost:3306/course?useSSL=false&serverTimezone=UTC&useUnicode=true&characterEncoding=UTF8&autoReconnect=true&failOverReadOnly=false",
                "root", "123456");

        Job job = Job.getInstance(configuration);
        job.setJarByClass(Co81Driver.class);
        job.setMapperClass(Co81Mapper.class);
        job.setOutputKeyClass(Co81Baen.class);
        job.setOutputValueClass(NullWritable.class);

        // Input: the per-class results produced by the earlier job
        FileInputFormat.setInputPaths(job, new Path("src/main/resources/output/course52/part-r-00000"));

        // Output goes to the MySQL table course52, columns: calss_id, fraction
        job.setOutputFormatClass(DBOutputFormat.class);
        DBOutputFormat.setOutput(job, "course52", "calss_id", "fraction");

        // Submit the job and wait for it to finish
        if (!job.waitForCompletion(true)) {
            return;
        }
    }
}
```
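DBOutputFormat only issues the INSERT statements; the target table has to exist before the job runs. Below is a hedged sketch of creating it over plain JDBC. The table and column names (course52, calss_id, fraction) come from the driver above, while the column types, the trimmed connection URL, and the reuse of root/123456 are assumptions to adjust to the actual environment.

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class CreateCourse52Table {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://localhost:3306/course?useSSL=false&serverTimezone=UTC";

        try (Connection conn = DriverManager.getConnection(url, "root", "123456");
             Statement stmt = conn.createStatement()) {
            // Column names must match DBOutputFormat.setOutput(job, "course52", "calss_id", "fraction");
            // the types (VARCHAR/DOUBLE) are assumptions based on the bean's fields.
            stmt.execute("CREATE TABLE IF NOT EXISTS course52 ("
                    + "calss_id VARCHAR(32), "
                    + "fraction DOUBLE)");
        }
    }
}
```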