[Big Data Touge (EduCoder) Lab 8] Big Data Comprehensive Experiment: Travel Website Data Analysis

Task 1: Compute the average hotel price for each city

package com.processdata;

import java.io.IOException;
import java.util.Scanner;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import com.util.HBaseUtil;

/**
 * Use a MapReduce job to process the data in HBase and write the final result into another table.
 */
public class HBaseMapReduce extends Configured implements Tool {
    public static class MyMapper extends TableMapper<Text, DoubleWritable> {
        public static final byte[] column = Bytes.toBytes("price");
        public static final byte[] family = Bytes.toBytes("hotel_info");
        @Override
        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Read the city id from cityInfo:cityId
            String cityId = Bytes.toString(result.getValue(Bytes.toBytes("cityInfo"), Bytes.toBytes("cityId")));
            // Read the hotel price from hotel_info:price and emit <cityId, price>
            byte[] value = result.getValue(family, column);
            double price = Double.parseDouble(Bytes.toString(value));
            context.write(new Text(cityId), new DoubleWritable(price));
            /********** End *********/
        }
    }
    public static class MyTableReducer extends TableReducer<Text, DoubleWritable, ImmutableBytesWritable> {
        @Override
        public void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
            /********** Begin *********/
            // Sum all prices reported for this city and count them
            double sum = 0;
            int count = 0;
            for (DoubleWritable price : values) {
                count++;
                sum += price.get();
            }
            // Write the average to average_infos:price, keyed by city id
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(Bytes.toBytes("average_infos"), Bytes.toBytes("price"), Bytes.toBytes(String.valueOf(sum / count)));
            context.write(null, put);
            /********** End *********/
        }
    }
    
    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseConfiguration.create(getConf());
        conf.set("hbase.zookeeper.quorum", "127.0.0.1");  // HBase ZooKeeper quorum address
        conf.set("hbase.zookeeper.property.clientPort", "2181"); // ZooKeeper client port
        // Read the source and target table names from standard input
        Scanner sc = new Scanner(System.in);
        String arg1 = sc.next();
        String arg2 = sc.next();
        try {
            HBaseUtil.createTable("average_table", new String[] {"average_infos"});
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf,new String[]{arg1,arg2});
        return job.waitForCompletion(true) ? 0 : 1;
    }
    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false); // never enable block caching for MapReduce scans
        // Initialize the mapper over the source HBase table
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, DoubleWritable.class, job);
        // Initialize the reducer writing to the target table
        TableMapReduceUtil.initTableReducerJob(
                targetTable,          // output table
                MyTableReducer.class, // reducer class
                job);
        job.setNumReduceTasks(1);
        return job;
    }
}
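
HBaseMapReduce implements Tool, but the listing above has no main method; on the Touge (EduCoder) platform the driver is supplied by the grading environment. A minimal driver sketch follows, assuming the job is launched directly via ToolRunner; this driver class is an assumption and not part of the original lab code.

package com.processdata;

import org.apache.hadoop.util.ToolRunner;

// Hypothetical driver class (not part of the lab scaffold)
public class HBaseMapReduceDriver {
    public static void main(String[] args) throws Exception {
        // ToolRunner parses generic Hadoop options (-D, -conf, ...)
        // before delegating to HBaseMapReduce.run()
        int exitCode = ToolRunner.run(new HBaseMapReduce(), args);
        System.exit(exitCode);
    }
}

Once the job finishes, the output can be checked in the HBase shell with scan 'average_table': each row key is a city id, and average_infos:price holds the average price stored as a string.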

Task 2: Count high-frequency words in hotel reviews

package com.processdata;
import java.io.IOException;
import java.util.List;
import java.util.Scanner;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apdplat.word.WordSegmenter;
import org.apdplat.word.segmentation.Word;
import com.util.HBaseUtil;
import com.vdurmont.emoji.EmojiParser;

/**
 * Word frequency count over hotel review text
 */
public class WorldCountMapReduce extends Configured implements Tool {
    

    public static class MyMapper extends TableMapper<Text, IntWritable> {
        private static byte[] family = Bytes.toBytes("comment_info");
        private static byte[] column = Bytes.toBytes("content");
        
        @Override
        protected void map(ImmutableBytesWritable rowKey, Result result, Context context)
                throws IOException, InterruptedException {
            /********** Begin *********/
            // Read the review text from comment_info:content
            byte[] value = result.getValue(family, column);
            String comment = new String(value, "utf-8");
            if (!comment.isEmpty()) {
                // Strip emojis, then segment the review into words
                String filtered = EmojiParser.removeAllEmojis(comment);
                List<Word> words = WordSegmenter.seg(filtered);
                // Emit <word, 1> for every segmented word
                for (Word w : words) {
                    context.write(new Text(w.getText()), new IntWritable(1));
                }
            }
            /********** End *********/
    	}
    }

    public static class MyReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {
        private static byte[] family = Bytes.toBytes("word_info");
        private static byte[] column = Bytes.toBytes("count");
        
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            /********** Begin *********/
            // Sum the 1s emitted for this word
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            // Write the total to word_info:count, keyed by the word;
            // Bytes.toBytes(sum) stores a 4-byte binary int
            Put put = new Put(Bytes.toBytes(key.toString()));
            put.addColumn(family, column, Bytes.toBytes(sum));
            context.write(null, put);
            /********** End *********/
        }

    }
    public int run(String[] args) throws Exception {
        // Configure the job
        Configuration conf = HBaseConfiguration.create(getConf());
        conf.set("hbase.zookeeper.quorum", "127.0.0.1");  // HBase ZooKeeper quorum address
        conf.set("hbase.zookeeper.property.clientPort", "2181"); // ZooKeeper client port
        // Read the source and target table names from standard input
        Scanner sc = new Scanner(System.in);
        String arg1 = sc.next();
        String arg2 = sc.next();
        try {
            HBaseUtil.createTable("comment_word_count", new String[] {"word_info"});
        } catch (Exception e) {
            // Table creation failed (it may already exist)
            e.printStackTrace();
        }
        Job job = configureJob(conf,new String[]{arg1,arg2});
        return job.waitForCompletion(true) ? 0 : 1;
    }

    private Job configureJob(Configuration conf, String[] args) throws IOException {
        String tablename = args[0];
        String targetTable = args[1];
        Job job = Job.getInstance(conf, tablename);
        Scan scan = new Scan();
        scan.setCaching(300);
        scan.setCacheBlocks(false); // never enable block caching for MapReduce scans
        // Initialize the mapper and the reducer
        TableMapReduceUtil.initTableMapperJob(tablename, scan, MyMapper.class, Text.class, IntWritable.class, job);
        TableMapReduceUtil.initTableReducerJob(targetTable, MyReducer.class, job);
        job.setNumReduceTasks(1);
        return job;
    }

}
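
Both tasks call com.util.HBaseUtil.createTable, a helper supplied by the lab environment whose source is not shown here. Below is a minimal sketch of what it plausibly does, written against the HBase 1.x client API; the connection handling and the exists-check are assumptions, not the lab's actual implementation.

package com.util;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

// Hypothetical reconstruction of the lab's helper class
public class HBaseUtil {
    // Create a table with the given column families,
    // skipping creation if the table already exists
    public static void createTable(String tableName, String[] families) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            TableName name = TableName.valueOf(tableName);
            if (admin.tableExists(name)) {
                return; // nothing to do
            }
            HTableDescriptor desc = new HTableDescriptor(name);
            for (String family : families) {
                desc.addFamily(new HColumnDescriptor(family));
            }
            admin.createTable(desc);
        }
    }
}

Results for this task can be inspected with scan 'comment_word_count' in the HBase shell. Note that the reducer stores each count with Bytes.toBytes(sum), a 4-byte binary int, so the values display as escaped bytes rather than readable digits (unlike Task 1, which stores the average as a string).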
