import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * @author
 * @version Created: Jul 24, 2014 2:09:22 AM
 * Class description: reads rows from a MySQL table with DBInputFormat
 * and writes them out to HDFS.
 */
public class AccessData {

    public static class DataAccessMap extends Mapper<LongWritable, YqBean, Text, Text> {
        @Override
        protected void map(LongWritable key, YqBean value, Context context)
                throws IOException, InterruptedException {
            System.out.println(value.toString());
            // Empty key: all rows end up in the same reduce group.
            context.write(new Text(), new Text(value.toString()));
        }
    }
    public static class DataAccessReducer extends Reducer<Text, Text, Text, Text> {
        @Override
        protected void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            // Write every row through unchanged.
            for (Text value : values) {
                context.write(key, value);
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Configure the MySQL JDBC driver and connection. Replace ip, dbname,
        // username, and passwd with real values; note that the URL path is the
        // database name, not a table name.
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://ip:3306/dbname?useUnicode=true&characterEncoding=utf8",
                "username", "passwd");
        Job job = Job.getInstance(conf, "test mysql connection");
        job.setJarByClass(AccessData.class);
        job.setMapperClass(DataAccessMap.class);
        job.setReducerClass(DataAccessReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setInputFormatClass(DBInputFormat.class);
        // Output directory on HDFS; replace ip with the NameNode address.
        FileOutputFormat.setOutputPath(job, new Path("hdfs://ip:9000/hdfsFile"));

        // Column names in the database table (fields of the entity class).
        String[] fields = {"id", "title", "price", "author", "quantity",
                "description", "category_id", "imgUrl"};
        // Args: value class, table name, conditions (an optional WHERE clause,
        // null here), an ORDER BY column, and the columns to select.
        DBInputFormat.setInput(job, YqBean.class, "tablename", null, "title", fields);

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
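
The listing references an entity class, YqBean, that isn't shown. Below is a minimal sketch of what it might look like: a DBInputFormat value class must implement both Writable (Hadoop serialization between tasks) and DBWritable (JDBC row mapping), with one field per column passed to setInput(). The field names come from the fields array above; the column types (long id, double price, and so on) are assumptions, so adjust them to the real schema.

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;

// Hypothetical sketch of the entity class used by the job above.
public class YqBean implements Writable, DBWritable {
    private long id;
    private String title;
    private double price;       // assumed type
    private String author;
    private int quantity;       // assumed type
    private String description;
    private long categoryId;    // maps to the category_id column
    private String imgUrl;

    // DBWritable: read one row from the JDBC ResultSet.
    @Override
    public void readFields(ResultSet rs) throws SQLException {
        id = rs.getLong("id");
        title = rs.getString("title");
        price = rs.getDouble("price");
        author = rs.getString("author");
        quantity = rs.getInt("quantity");
        description = rs.getString("description");
        categoryId = rs.getLong("category_id");
        imgUrl = rs.getString("imgUrl");
    }

    // DBWritable: only exercised when writing back via DBOutputFormat.
    @Override
    public void write(PreparedStatement ps) throws SQLException {
        ps.setLong(1, id);
        ps.setString(2, title);
        ps.setDouble(3, price);
        ps.setString(4, author);
        ps.setInt(5, quantity);
        ps.setString(6, description);
        ps.setLong(7, categoryId);
        ps.setString(8, imgUrl);
    }

    // Writable: Hadoop serialization (assumes non-null string columns,
    // since writeUTF rejects null).
    @Override
    public void readFields(DataInput in) throws IOException {
        id = in.readLong();
        title = in.readUTF();
        price = in.readDouble();
        author = in.readUTF();
        quantity = in.readInt();
        description = in.readUTF();
        categoryId = in.readLong();
        imgUrl = in.readUTF();
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeLong(id);
        out.writeUTF(title);
        out.writeDouble(price);
        out.writeUTF(author);
        out.writeInt(quantity);
        out.writeUTF(description);
        out.writeLong(categoryId);
        out.writeUTF(imgUrl);
    }

    // The mapper writes value.toString(), so this controls the output format.
    @Override
    public String toString() {
        return id + "\t" + title + "\t" + price + "\t" + author + "\t"
                + quantity + "\t" + description + "\t" + categoryId + "\t" + imgUrl;
    }
}

One more practical note: the MySQL driver jar (mysql-connector-java) has to be visible to the map tasks as well as the client, for example via -libjars or bundled in the job jar's lib/ directory; otherwise the tasks typically fail with a ClassNotFoundException for com.mysql.jdbc.Driver.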