// To read data directly from a relational database with MapReduce, the JDBC
// driver jar must be on Hadoop's classpath. Run `hadoop classpath` to list the
// candidate directories and place the jar in any one of them.
package com.bigdata.hadoop.mapred;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
/**
 * MapReduce job that reads rows from a MySQL table ("myuser": id, name)
 * via {@link DBInputFormat} and writes word counts of each row's fields to HDFS.
 *
 * <p>Map-only job (zero reducers): each map output record is written straight
 * to the output path. The MySQL JDBC driver jar is shipped to the cluster via
 * the distributed cache.
 */
public class DBInputFormatApp {
	private static final String OUTPUT_PATH = "hdfs://hadoop1:9000/out";

	public static void main(String[] args) throws Exception {
		Configuration configuration = new Configuration();
		// Configure the JDBC connection BEFORE constructing the Job:
		// the Job constructor copies the Configuration, so any changes made to
		// `configuration` afterwards would never reach the submitted job.
		DBConfiguration.configureDB(configuration, "com.mysql.jdbc.Driver",
				"jdbc:mysql://localhost:3306/test", "root", "hadoop");
		final FileSystem fileSystem = FileSystem.get(new URI(OUTPUT_PATH),
				configuration);
		// Remove any previous output so FileOutputFormat does not fail on an
		// existing directory.
		fileSystem.delete(new Path(OUTPUT_PATH), true);
		// Ship the MySQL driver jar to task classpaths via the distributed
		// cache; must also happen before the Job copies the Configuration.
		DistributedCache.addFileToClassPath(
				new Path("/usr/local/mysql-connector-java-5.1.10.jar"),
				configuration, fileSystem);

		Job job = new Job(configuration, DBInputFormatApp.class.getSimpleName());
		job.setJarByClass(DBInputFormatApp.class);
		job.setInputFormatClass(DBInputFormat.class);
		// setInput(Job job, Class inputClass, String tableName,
		//          String conditions, String orderBy, String... fieldNames)
		DBInputFormat.setInput(job, MyUser.class, "myuser", null, null, "id", "name");

		job.setMapperClass(MyMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(LongWritable.class);
		// Map-only job: skip the reduce phase and write map output directly
		// to HDFS.
		job.setNumReduceTasks(0);
		FileOutputFormat.setOutputPath(job, new Path(OUTPUT_PATH));
		// Propagate job success/failure as the process exit code.
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}

	/**
	 * Emits (field, 1) for every tab-separated field of the row's
	 * {@code toString()} form ("id:name" — NOTE(review): that string contains
	 * no tabs, so in practice the whole "id:name" token is emitted once;
	 * confirm the intended split character).
	 */
	public static class MyMapper extends
			Mapper<LongWritable, MyUser, Text, LongWritable> {
		// Reuse output writables across map() calls to avoid per-record
		// allocation, per standard Hadoop practice.
		private final Text k2 = new Text();
		private final LongWritable v2 = new LongWritable(1);

		@Override
		protected void map(LongWritable key, MyUser value,
				Mapper<LongWritable, MyUser, Text, LongWritable>.Context context)
				throws IOException, InterruptedException {
			final String line = value.toString();
			final String[] splited = line.split("\t");
			for (String string : splited) {
				k2.set(string);
				context.write(k2, v2);
			}
		}
	}

	/**
	 * Bean mapped to the "myuser" table (columns: id INT, name VARCHAR).
	 * Implements both {@link Writable} (Hadoop serialization between tasks)
	 * and {@link DBWritable} (JDBC row mapping); the field order must match
	 * in both representations.
	 */
	public static class MyUser implements Writable, DBWritable {
		int id;
		String name;

		/** Deserializes fields in the same order {@link #write(DataOutput)} wrote them. */
		@Override
		public void readFields(DataInput in) throws IOException {
			this.id = in.readInt();
			this.name = Text.readString(in);
		}

		@Override
		public void write(DataOutput out) throws IOException {
			out.writeInt(id);
			Text.writeString(out, name);
		}

		/** Populates fields from the current row; column order matches setInput's fieldNames. */
		@Override
		public void readFields(ResultSet resultSet) throws SQLException {
			this.id = resultSet.getInt(1);
			this.name = resultSet.getString(2);
		}

		@Override
		public void write(PreparedStatement statement) throws SQLException {
			statement.setInt(1, id);
			statement.setString(2, name);
		}

		@Override
		public String toString() {
			return this.id + ":" + this.name;
		}
	}
}