package com.sun.mysql;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBOutputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;

/**
 * Writes MapReduce output into MySQL.
 * @author asheng
 */
public class WriteDataToMysql {
    /**
     * Custom DBWritable implementation.
     * @author asheng
     * TblsWritable carries one record to be written into MySQL.
     */
    public static class TblsWritable implements Writable, DBWritable {
        String tbl_name;
        String tbl_type;

        public TblsWritable() {
        }

        public TblsWritable(String tbl_name, String tbl_type) {
            this.tbl_name = tbl_name;
            this.tbl_type = tbl_type;
        }

        // Fills the placeholders of the INSERT statement, in column order.
        @Override
        public void write(PreparedStatement statement) throws SQLException {
            statement.setString(1, this.tbl_name);
            statement.setString(2, this.tbl_type);
        }

        // Reads one row back from a query result (unused when only writing).
        @Override
        public void readFields(ResultSet resultSet) throws SQLException {
            this.tbl_name = resultSet.getString(1);
            this.tbl_type = resultSet.getString(2);
        }

        // Writable serialization, used when the record moves between tasks.
        @Override
        public void write(DataOutput out) throws IOException {
            out.writeUTF(this.tbl_name);
            out.writeUTF(this.tbl_type);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            this.tbl_name = in.readUTF();
            this.tbl_type = in.readUTF();
        }

        @Override
        public String toString() {
            return this.tbl_name + " " + this.tbl_type;
        }
    }
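
    // Note: DBOutputFormat constructs the INSERT statement itself from the table
    // and column names handed to DBOutputFormat.setOutput() in main() below,
    // roughly "INSERT INTO lxw_tabls (TBL_NAME, TBL_TYPE) VALUES (?, ?)", and
    // then calls write(PreparedStatement) above once per record to fill the
    // placeholders.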
    public static class ConnMysqlMapper extends Mapper<LongWritable, Text, Text, Text> {
        // Splits each input line into a name and a type and emits them as the
        // map output; the key (the byte offset of the line) carries no meaning
        // for this program and is not used.
        @Override
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            String[] fields = value.toString().split(" ");
            context.write(new Text(fields[0]), new Text(fields[1]));
        }
    }
    public static class ConnMysqlReducer extends Reducer<Text, Text, TblsWritable, TblsWritable> {
        // Each key/value pair arriving here is exactly one record to insert, so
        // reduce combines key and value into a TblsWritable (the DBWritable
        // defined above) and queues it for the database. The first argument to
        // write() already carries the whole record, so the second argument is
        // unused and passed as null.
        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            for (Text value : values) {
                context.write(new TblsWritable(key.toString(), value.toString()), null);
            }
        }
    }
    public static void main(String[] args)
            throws IOException, InterruptedException, ClassNotFoundException {
        Configuration conf = new Configuration();
        // JDBC driver, connection URL, user and password of the target database.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://127.0.0.1:3306/mapreduce_test", "root", "root");
        Job job = new Job(conf, "test mysql connection");
        job.setJarByClass(WriteDataToMysql.class);
        job.setMapperClass(ConnMysqlMapper.class);
        job.setReducerClass(ConnMysqlReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(DBOutputFormat.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        // Target table and the columns the generated INSERT statement will fill.
        DBOutputFormat.setOutput(job, "lxw_tabls", "TBL_NAME", "TBL_TYPE");
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

// Run the job with the input path as its only argument, e.g. /home/asheng/hadoop/in/test3.txt.
// test3.txt contains:
/* abc x
def y
chd z */
// i.e. abc and x are inserted as TBL_NAME and TBL_TYPE respectively.
// Check the output in MySQL with: select * from lxw_tabls;
// Three new rows appear:
/* abc x
def y
chd z */
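
The post does not show the DDL for lxw_tabls, and the MySQL Connector/J jar also has to be on the task classpath (for example bundled in the job jar's lib/ directory). Below is a minimal JDBC sketch for creating the table beforehand and checking the inserted rows afterwards; the class name VerifyLxwTabls and the VARCHAR schema are illustrative assumptions, not taken from the original:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class VerifyLxwTabls {
    public static void main(String[] args) throws Exception {
        // Same driver and connection settings as passed to DBConfiguration.configureDB() above.
        Class.forName("com.mysql.jdbc.Driver");
        try (Connection conn = DriverManager.getConnection(
                "jdbc:mysql://127.0.0.1:3306/mapreduce_test", "root", "root");
             Statement st = conn.createStatement()) {
            // Assumed schema; the original post does not show the actual DDL.
            st.executeUpdate("CREATE TABLE IF NOT EXISTS lxw_tabls ("
                    + "TBL_NAME VARCHAR(255), TBL_TYPE VARCHAR(255))");
            // After the job completes, list what was written.
            try (ResultSet rs = st.executeQuery("SELECT * FROM lxw_tabls")) {
                while (rs.next()) {
                    System.out.println(rs.getString("TBL_NAME") + " " + rs.getString("TBL_TYPE"));
                }
            }
        }
    }
}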