【MapReduce】Mapreduce读取MySQL中的数据统计年龄

本文介绍了如何使用MapReduce技术,通过Java代码从MySQL数据库中读取学生信息,统计每个年龄段的人数,并将结果存储到新的数据库表中。详细步骤包括配置DBInputFormat和DBOutputFormat,以及Mapper和Reducer的实现。
摘要由CSDN通过智能技术生成

导入MySQL的jar包

在这里插入图片描述
在这里插入图片描述
找到想要加入的jar包即可

数据

在这里插入图片描述
数据库中有上述数据,我们要求统计每个年龄的人数,然后将统计结果存入数据库表中
创建一个表专门存储结果数据
在这里插入图片描述

代码实现

实例化类

存储数据类

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class Student implements Writable, DBWritable {
    private int id;
    private int sid;
    private String name;
    private int age;
    private String sex;
    private String birthday;

    public void write(DataOutput out) throws IOException {
        out.writeInt(this.id);
        out.writeInt(this.sid);
        out.writeUTF(this.name);
        out.writeInt(this.age);
        out.writeUTF(this.sex);
        out.writeUTF(this.birthday);
    }


    public void readFields(DataInput in) throws IOException {
        this.id = in.readInt();
        this.sid = in.readInt();
        this.name = in.readUTF();
        this.age = in.readInt();
        this.sex = in.readUTF();
        this.birthday = in.readUTF();
    }


    public void write(PreparedStatement statement) throws SQLException {
        statement.setInt(1, this.id);
        statement.setInt(2, this.sid);
        statement.setString(3, this.name);
        statement.setInt(4, this.age);
        statement.setString(5, this.sex);
        statement.setString(6, this.birthday);
    }


    public void readFields(ResultSet resultSet) throws SQLException {
        this.id = resultSet.getInt(1);
        this.sid = resultSet.getInt(2);
        this.name = resultSet.getString(3);
        this.age = resultSet.getInt(4);
        this.sex = resultSet.getString(5);
        this.birthday = resultSet.getString(6);
    }

    public int getStudent_id() {
        return sid;
    }

    public String getName() {
        return name;
    }

    public int getAge() {
        return age;
    }

    public String getSex() {
        return sex;
    }
}

存储结果类

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class result implements Writable, DBWritable {
    private int age;
    private int count;

    public void write(DataOutput out) throws IOException {
        out.writeInt(age);
        out.writeInt(count);
    }


    public void readFields(DataInput in) throws IOException {
        this.age = in.readInt();
        this.count = in.readInt();
    }

    public void write(PreparedStatement statement) throws SQLException {
        statement.setInt(1, this.age);
        statement.setInt(2, this.count);
    }


    public void readFields(ResultSet resultSet) throws SQLException {
        this.age = resultSet.getInt(1);
        this.count = resultSet.getInt(2);
    }

    public void setAge(int age) {
        this.age = age;
    }

    public void setCount(int count) {
        this.count = count;
    }
}

Mapper

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;

/**
 * Map phase: for each {@code Student} row read from MySQL, emits
 * {@code (age, 1)} so the reducer can total the students per age.
 */
public class MapTest extends Mapper<LongWritable, Student, IntWritable, IntWritable> {
    // Reuse output writables across records — the standard Hadoop idiom that
    // avoids allocating two fresh objects for every input row.
    private final IntWritable outAge = new IntWritable();
    private static final IntWritable ONE = new IntWritable(1);

    @Override
    protected void map(LongWritable key, Student value, Context context)
            throws IOException, InterruptedException {
        outAge.set(value.getAge());
        context.write(outAge, ONE);
    }
}

Reducer

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;

/**
 * Reduce phase: totals the per-record counts for one age and emits a
 * {@code result} row (age, total) that {@code DBOutputFormat} inserts into
 * the MySQL {@code result} table. The NullWritable value is ignored by the
 * output format.
 */
public class RedTest extends Reducer<IntWritable, IntWritable, result, NullWritable> {
    @Override
    protected void reduce(IntWritable key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int total = 0;
        for (IntWritable v : values) {
            total += v.get();
        }

        result row = new result();
        row.setAge(key.get());
        row.setCount(total);
        context.write(row, NullWritable.get());
    }
}

Driver

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.log4j.BasicConfigurator;

/**
 * Job driver: reads the {@code Student} table through DBInputFormat, counts
 * students per age, and writes (age, count) rows into the {@code result}
 * table through DBOutputFormat.
 */
public class Driver {
    public static void main(String[] args) throws Exception {
        // Simple console appender so Hadoop's log4j output shows up locally.
        BasicConfigurator.configure();

        Configuration conf = new Configuration();
        // NOTE(review): JDBC credentials are hard-coded; consider moving them
        // to configuration before using this outside a tutorial setting.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://localhost:3306/data", "root", "123456");

        Job job = Job.getInstance(conf);
        job.setJarByClass(Driver.class);

        // Map phase: one (age, 1) pair per student row.
        job.setMapperClass(MapTest.class);
        job.setMapOutputKeyClass(IntWritable.class);
        job.setMapOutputValueClass(IntWritable.class);

        // Reduce phase: one `result` row per distinct age.
        job.setReducerClass(RedTest.class);
        job.setOutputKeyClass(result.class);
        job.setOutputValueClass(NullWritable.class);

        // Column lists must match the field order in Student / result.
        String[] inputColumns = {"id", "sid", "name", "age", "sex", "birthday"};
        String[] outputColumns = {"age", "count"};
        DBInputFormat.setInput(job, Student.class, "Student", null, "sid", inputColumns);
        DBOutputFormat.setOutput(job, "result", outputColumns);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

运行结果

在这里插入图片描述

  • 0
    点赞
  • 6
    收藏
    觉得还不错? 一键收藏
  • 1
    评论
评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值