Simple Hadoop Program Development

Requirements: set up a Hadoop environment. Functionality: tally the sales volume and revenue of each book in the input file, and partition the results by region. A sample of the input format is shown below.
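
The mapper expects each input line to carry four space-separated fields: region, book title, unit price, and copies sold. A hypothetical sample of the /book input file (titles and figures invented here for illustration; only the region names are the ones the partitioner actually checks):

南京 HadoopGuide 59 10
南京 JavaCore 99 5
南通 HadoopGuide 59 20
无锡 JavaCore 99 8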

Source code:

MyBookBean.java

 

package com.cjlu.lx;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class MyBookBean implements Writable {

    private String location;  // sales region
    private String bookname;  // book title
    private int unit_price;   // price per copy
    private int num;          // copies sold
    private int allprice;     // total sales = unit_price * num

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }

    public String getBookname() {
        return bookname;
    }

    public void setBookname(String bookname) {
        this.bookname = bookname;
    }

    public int getUnit_price() {
        return unit_price;
    }

    public void setUnit_price(int unit_price) {
        this.unit_price = unit_price;
    }

    public int getNum() {
        return num;
    }

    public void setNum(int num) {
        this.num = num;
    }

    public int getAllprice() {
        return allprice;
    }

    public void setAllprice(int allprice) {
        this.allprice = allprice;
    }

    @Override
    public String toString() {
        return "MyBookBean [location=" + location + ", bookname=" + bookname
                + ", unit_price=" + unit_price + ", num=" + num
                + ", allprice=" + allprice + "]";
    }

    // Deserialization must read the fields in exactly the order write() wrote them.
    @Override
    public void readFields(DataInput in) throws IOException {
        this.location = in.readUTF();
        this.bookname = in.readUTF();
        this.unit_price = in.readInt();
        this.num = in.readInt();
        this.allprice = in.readInt();
    }

    @Override
    public void write(DataOutput out) throws IOException {
        out.writeUTF(location);
        out.writeUTF(bookname);
        out.writeInt(unit_price);
        out.writeInt(num);
        out.writeInt(allprice);
    }
}
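Because the framework serializes this bean between the map and reduce stages, write() and readFields() must mirror each other field for field, in the same order. A minimal round-trip check using Hadoop's DataOutputBuffer/DataInputBuffer helpers (this snippet is illustrative and not part of the original program):

package com.cjlu.lx;

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

public class MyBookBeanRoundTrip {
    public static void main(String[] args) throws Exception {
        MyBookBean in = new MyBookBean();
        in.setLocation("南京");
        in.setBookname("HadoopGuide");
        in.setUnit_price(59);
        in.setNum(10);
        in.setAllprice(590);

        // Serialize the bean exactly as the framework would between map and reduce.
        DataOutputBuffer out = new DataOutputBuffer();
        in.write(out);

        // Deserialize into a fresh bean and confirm all fields survive.
        DataInputBuffer buf = new DataInputBuffer();
        buf.reset(out.getData(), out.getLength());
        MyBookBean back = new MyBookBean();
        back.readFields(buf);
        System.out.println(back); // should print the same field values
    }
}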

MyBookDrive.java

 

package com.cjlu.lx;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class MyBookDrive {

    public static void main(String[] args)
            throws IOException, ClassNotFoundException, InterruptedException, URISyntaxException {
        String uri = "hdfs://192.168.133.128:9000";
        String path = "/book";           // input directory
        String result = "/book1/result"; // output directory

        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);

        job.setJarByClass(MyBookDrive.class);
        job.setMapperClass(MybookMapper.class);
        job.setReducerClass(MybookReducer.class);

        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(MyBookBean.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(MyBookBean.class);

        // One reduce task per region, routed by the custom partitioner.
        job.setNumReduceTasks(3);
        job.setPartitionerClass(MyBookPartitioner.class);

        // Remove any previous output so the job does not fail on an existing path.
        FileSystem fs = FileSystem.get(new URI(uri), conf);
        fs.delete(new Path(result), true);

        FileInputFormat.setInputPaths(job, new Path(uri + path));
        FileOutputFormat.setOutputPath(job, new Path(uri + result));

        job.waitForCompletion(true);
    }
}
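
Assuming the project is packaged into a jar (the jar name below is hypothetical) and the cluster matches the hard-coded hdfs://192.168.133.128:9000 address, the job can be launched with the standard hadoop jar command. With three reduce tasks it writes part-r-00000 through part-r-00002 under /book1/result, one file per region:

hadoop jar mybook.jar com.cjlu.lx.MyBookDrive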

 

MybookMapper.java

package com.cjlu.lx;

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class MybookMapper extends Mapper<LongWritable, Text, Text, MyBookBean> {

    @Override
    protected void map(LongWritable key, Text value,
            Mapper<LongWritable, Text, Text, MyBookBean>.Context context)
            throws IOException, InterruptedException {
        // Each line: location bookname unit_price num, separated by single
        // spaces (split("\\s+") would be more tolerant of extra whitespace).
        String line = value.toString();
        String[] data = line.split(" ");

        MyBookBean mb = new MyBookBean();
        mb.setLocation(data[0]);
        mb.setBookname(data[1]);
        mb.setUnit_price(Integer.parseInt(data[2]));
        mb.setNum(Integer.parseInt(data[3]));
        mb.setAllprice(0); // the total is computed in the reducer

        // Key by book name; the partitioner routes records by location.
        context.write(new Text(data[1]), mb);
    }
}

MyBookPartitioner.java

 

package com.cjlu.lx;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class MyBookPartitioner extends Partitioner<Text, MyBookBean> {

    @Override
    public int getPartition(Text key, MyBookBean value, int numPartitions) {
        if ("南京".equals(value.getLocation())) { // Nanjing
            return 0;
        }
        if ("南通".equals(value.getLocation())) { // Nantong
            return 1;
        }
        if ("无锡".equals(value.getLocation())) { // Wuxi
            return 2;
        }
        // Fall back to partition 0: the return value must lie in
        // [0, numPartitions), so the original "return numPartitions"
        // would crash the job on any record from an unknown region.
        return 0;
    }
}
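
The three-way if chain works but grows with every new region. A minimal table-driven sketch of the same idea (the class name is illustrative; the regions are the same three as above):

package com.cjlu.lx;

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Partitioner;

public class MyBookMapPartitioner extends Partitioner<Text, MyBookBean> {

    // Region -> partition lookup; adding a region is one new entry
    // (plus a matching bump of setNumReduceTasks in the driver).
    private static final Map<String, Integer> REGIONS = new HashMap<>();
    static {
        REGIONS.put("南京", 0); // Nanjing
        REGIONS.put("南通", 1); // Nantong
        REGIONS.put("无锡", 2); // Wuxi
    }

    @Override
    public int getPartition(Text key, MyBookBean value, int numPartitions) {
        // Unknown regions fall back to partition 0, keeping the
        // result inside the valid range [0, numPartitions).
        Integer p = REGIONS.get(value.getLocation());
        return p == null ? 0 : p;
    }
}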

 

 

MybookReducer.java

package com.cjlu.lx;

import java.io.IOException;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class MybookReducer extends Reducer<Text, MyBookBean, Text, MyBookBean> {

    @Override
    protected void reduce(Text key, Iterable<MyBookBean> values,
            Reducer<Text, MyBookBean, Text, MyBookBean>.Context context)
            throws IOException, InterruptedException {
        MyBookBean mb = new MyBookBean();
        mb.setBookname(key.toString());
        mb.setAllprice(0);
        for (MyBookBean b : values) {
            mb.setLocation(b.getLocation());
            mb.setUnit_price(b.getUnit_price());
            // Accumulate copies sold and revenue record by record; the
            // original "unit_price * cumulative num" overstated the total
            // whenever a group held more than one record.
            mb.setNum(mb.getNum() + b.getNum());
            mb.setAllprice(mb.getAllprice() + b.getUnit_price() * b.getNum());
        }
        context.write(key, mb);
    }
}
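
With the hypothetical sample input shown at the top and three reduce tasks, each region lands in its own output file. For instance, part-r-00000 (the 南京 partition) would hold the key followed by the bean's toString(), along these lines:

HadoopGuide	MyBookBean [location=南京, bookname=HadoopGuide, unit_price=59, num=10, allprice=590]
JavaCore	MyBookBean [location=南京, bookname=JavaCore, unit_price=99, num=5, allprice=495]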

