Word Frequency Statistics on Movie Review Content with MapReduce

Problem overview

Implement a MapReduce job in Java that segments the text of movie reviews and counts word frequencies, in order to build a word cloud of the review content. The job runs on a Hadoop cluster and stores its results in HBase.
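
The map step relies on the jieba Java port (com.huaban.analysis.jieba) for Chinese word segmentation. Before the full job, here is a minimal standalone sketch of that segmentation-plus-stop-word step, assuming the jieba-analysis artifact is on the classpath; the tiny in-line stop-word set is hypothetical, while the real job loads one from a file:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import com.huaban.analysis.jieba.JiebaSegmenter;

public class SegmentDemo {
    public static void main(String[] args) {
        JiebaSegmenter segmenter = new JiebaSegmenter();
        // Hypothetical stop words; the real job reads Chinese_English_stopwords.txt.
        Set<String> stopWords = new HashSet<>(Arrays.asList("的", "了", "非常"));
        // sentenceProcess returns the segmented tokens in order.
        List<String> tokens = segmenter.sentenceProcess("这部电影的剧情非常精彩");
        tokens.removeAll(stopWords);
        System.out.println(tokens); // the surviving tokens are what get counted
    }
}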

The full job code follows:

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;

import com.huaban.analysis.jieba.JiebaSegmenter;

public class emotion_analysis_wordcount {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
        if (otherArgs.length < 2) {
            System.err.println("Usage: Emotion_Analysis <in> [<in>...] <out>");
            System.exit(2);
        }
        Job job = Job.getInstance(conf, "Emotion Analysis");
        job.setJarByClass(emotion_analysis_wordcount.class);
        job.setMapperClass(TokenizerMapper.class);
        // No combiner: the reducer writes to HBase and emits DoubleWritable, so it cannot double as one.
        job.setReducerClass(IntSumReducer.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        // Input and output paths are hardcoded for this experiment.
        FileInputFormat.addInputPath(job, new Path("/user/hadoop/movie/test2.csv"));
        Path outputPath = new Path("/user/hadoop/output2");
        FileOutputFormat.setOutputPath(job, outputPath);
        // Delete the output path if it already exists so the job can be rerun.
        FileSystem fileSystem = FileSystem.get(conf);
        if (fileSystem.exists(outputPath)) {
            fileSystem.delete(outputPath, true);
        }
        // Run the job and report its exit status.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
    
    public static class TokenizerMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        // Sentiment-lexicon lists carried over from the sentiment-analysis variant of this job;
        // only stop_words is used for word counting.
        public static List<String> most = new ArrayList<>();
        public static List<String> very = new ArrayList<>();
        public static List<String> more = new ArrayList<>();
        public static List<String> ish = new ArrayList<>();
        public static List<String> insufficiently = new ArrayList<>();
        public static List<String> over = new ArrayList<>();
        public static List<String> negative_words = new ArrayList<>();
        public static List<String> postive_words = new ArrayList<>();
        public static List<String> stop_words = new ArrayList<>();

        // Load the stop-word list from the local filesystem of the worker node.
        public static void read() throws IOException {
            String filepath = "/home/hadoop/Chinese_English_stopwords.txt";
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(new File(filepath)))) {
                String temp;
                while ((temp = bufferedReader.readLine()) != null) {
                    stop_words.add(temp.replace(" ", ""));
                }
            }
        }

        // Segment a comment with jieba, then drop stop words.
        public static List<String> withoutstopwords(String oldstring) throws IOException {
            JiebaSegmenter segmenter = new JiebaSegmenter();
            List<String> termlist = segmenter.sentenceProcess(oldstring);
            termlist.removeAll(stop_words);
            return termlist;
        }

        // True if the string consists only of digits.
        public static boolean isNumeric(String str) {
            return str.matches("[0-9]+");
        }

        // True if every character is a CJK ideograph (roughly U+4E00..U+9FA5).
        public static boolean checkname(String name) {
            for (int i = 0; i < name.length(); i++) {
                int n = name.charAt(i);
                if (!(19968 <= n && n < 40869)) {
                    return false;
                }
            }
            return true;
        }

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            // Load the stop words once per map task; the framework's default run() calls this.
            read();
        }
        @Override
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String[] words = line.split(",");
            // Expect the metadata in fields 0..7 and the comment text starting at field 8.
            if (words.length - 1 <= 8) {
                return;
            }
            String[] pre = Arrays.copyOfRange(words, 0, 8);
            // Rejoin the comment, which may itself contain commas; the final field is
            // excluded, matching the layout of the input CSV.
            String[] comment_lines = Arrays.copyOfRange(words, 8, words.length - 1);
            StringBuilder commentString = new StringBuilder();
            for (String part : comment_lines) {
                commentString.append(part);
            }
            // Skip rows (such as the CSV header) whose first field is not a numeric id.
            if (!isNumeric(pre[0])) {
                return;
            }
            List<String> comment = withoutstopwords(commentString.toString());
            for (String g : comment) {
                // Keep only purely Chinese tokens; skip other tokens rather than abandoning the record.
                if (!checkname(g.replace(" ", ""))) {
                    continue;
                }
                // Key is "<movie>,<word>" (field 1 appears to hold the movie title), so counts are per movie.
                context.write(new Text(pre[1] + "," + g), new IntWritable(1));
            }
        }
    }
    public static class IntSumReducer extends Reducer<Text, IntWritable, Text, DoubleWritable> {
        public static Configuration configuration;
        public static Connection connection;
        public static Admin admin;
        public static Table table;

        // Write one cell to HBase: row = "<movie>,<word>", column = information:number.
        public static void insertData(String rowKey, String colFamily, String col, String val) throws IOException {
            Put put = new Put(Bytes.toBytes(rowKey));
            put.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(col), Bytes.toBytes(val));
            table.put(put);
        }

        @Override
        protected void setup(Context context) throws IOException, InterruptedException {
            configuration = HBaseConfiguration.create();
            configuration.set("hbase.zookeeper.quorum", "slave1");
            connection = ConnectionFactory.createConnection(configuration);
            admin = connection.getAdmin();
            TableName tableName = TableName.valueOf("movie2");
            String[] colFamily = {"information"};
            // Drop and recreate the table so a rerun starts from a clean slate.
            if (admin.tableExists(tableName)) {
                System.out.println("Table exists; deleting it");
                admin.disableTable(tableName);
                admin.deleteTable(tableName);
            }
            TableDescriptorBuilder tableDescriptor = TableDescriptorBuilder.newBuilder(tableName);
            for (String str : colFamily) {
                ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(str)).build();
                tableDescriptor.setColumnFamily(family);
            }
            admin.createTable(tableDescriptor.build());
            table = connection.getTable(tableName);
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            // Release HBase resources when the reduce task finishes.
            if (table != null) table.close();
            if (admin != null) admin.close();
            if (connection != null) connection.close();
        }
        @Override
        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable val : values) {
                sum += val.get();
            }
            // Ignore words that occur fewer than 5 times.
            if (sum < 5) {
                return;
            }
            insertData(key.toString(), "information", "number", "" + sum);
            context.write(key, new DoubleWritable(sum));
        }
    }
}
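
To verify what the reducer wrote, the movie2 table can be scanned from a small client program. The following is a minimal sketch, assuming the same slave1 ZooKeeper quorum used by the reducer:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanMovieCounts {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "slave1"); // same quorum as the reducer
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Table table = connection.getTable(TableName.valueOf("movie2"));
             ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result result : scanner) {
                String rowKey = Bytes.toString(result.getRow()); // "<movie>,<word>"
                byte[] count = result.getValue(Bytes.toBytes("information"), Bytes.toBytes("number"));
                System.out.println(rowKey + " -> " + Bytes.toString(count));
            }
        }
    }
}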

