Hadoop 客户端 MapReduce WordCount 示例

package cn.itning.job.wordcount

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Job
import org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat
import kotlin.system.exitProcess

/**
 * Driver for the WordCount MapReduce job: configures the job (mapper, reducer,
 * combiner, output types, input/output paths) and submits it to the cluster,
 * exiting with status 0 on success and 1 on failure.
 */
object WordCountRunner {
    @JvmStatic
    fun main(args: Array<String>) {
        val configuration = Configuration()
        val job = Job.getInstance(configuration)
        // Register the jar containing this class so the cluster can ship it to task nodes.
        job.setJarByClass(WordCountRunner::class.java)
        // Mapper/reducer classes for this job. Use `::class.java` directly —
        // the original built throwaway instances just to read `.javaClass`.
        job.mapperClass = WordCountMapper::class.java
        job.reducerClass = WordCountReducer::class.java

        /**
         * Combiner notes:
         * 1. Runs locally on each map task and receives the value list for each
         *    key output by that map, so it can pre-aggregate.
         * 2. The local pre-aggregation greatly shrinks the map output, reducing
         *    shuffle network I/O.
         * 3. A combiner is just a reducer component; the only difference from the
         *    real reducer is that it runs locally on the map task.
         * 4. Its input/output KV types must match the mapper's output types and
         *    the reducer's input types.
         * 5. The job's result must not change when the combiner is added —
         *    safe here because word-count summation is associative/commutative.
         */
        job.combinerClass = WordCountReducer::class.java

        // KV types emitted by the mapper.
        job.mapOutputKeyClass = Text::class.java
        job.mapOutputValueClass = LongWritable::class.java

        // KV types emitted by the reducer (final job output).
        job.outputKeyClass = Text::class.java
        job.outputValueClass = LongWritable::class.java

        // TextInputFormat is the default; set explicitly for clarity.
        job.inputFormatClass = TextInputFormat::class.java
        // Alternative: merge small files into combined splits. NOTE: the original
        // snippet had min (4 MB) > max (2 MB), which would fail at runtime —
        // corrected below so min <= max.
        /*job.inputFormatClass = CombineFileInputFormat::class.java
        CombineFileInputFormat.setMinInputSplitSize(job, 2097152)  // 2 MB
        CombineFileInputFormat.setMaxInputSplitSize(job, 4194304)  // 4 MB*/

        // Raw input data location on HDFS.
        FileInputFormat.setInputPaths(job, "hdfs://192.168.84.132:9000/test")
        // Output location; must not already exist or the job fails at submit time.
        FileOutputFormat.setOutputPath(job, Path("hdfs://192.168.84.132:9000/test/output"))

        // Block until completion and propagate the result to the shell —
        // the original discarded the success flag and always exited 0.
        exitProcess(if (job.waitForCompletion(true)) 0 else 1)
    }
}
package cn.itning.job.wordcount

import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Mapper


/**
 * Tokenizes each input line and emits (word, 1) for every whitespace-separated
 * token.
 *
 * Fixes vs. original: splitting on a single space emitted empty-string "words"
 * whenever the line contained consecutive spaces (or tabs); splitting on a
 * whitespace run and skipping empty tokens avoids that. The Hadoop framework
 * never passes null arguments to [map], so the parameters are declared
 * non-null instead of using `!!`.
 */
class WordCountMapper : Mapper<LongWritable, Text, Text, LongWritable>() {

    // Reused output objects: context.write() serializes their contents
    // immediately, so reuse avoids two allocations per token.
    private val outKey = Text()
    private val outValue = LongWritable(1)

    override fun map(key: LongWritable, value: Text, context: Context) {
        for (token in value.toString().split(WHITESPACE)) {
            if (token.isEmpty()) continue  // leading/trailing separators yield empty tokens
            outKey.set(token)
            context.write(outKey, outValue)
        }
    }

    companion object {
        // Hoisted so the regex is compiled once, not per record.
        private val WHITESPACE = Regex("\\s+")
    }
}
package cn.itning.job.wordcount

import org.apache.hadoop.io.LongWritable
import org.apache.hadoop.io.Text
import org.apache.hadoop.mapreduce.Reducer

/**
 * Sums all partial counts for a word and emits (word, total).
 *
 * Also used as the combiner in WordCountRunner, which is safe because
 * summation is associative and commutative. Hadoop never passes null
 * arguments to [reduce], so the parameters are declared non-null instead
 * of relying on `!!`.
 */
class WordCountReducer : Reducer<Text, LongWritable, Text, LongWritable>() {

    // Reused output value: context.write() serializes immediately, so
    // reuse avoids one allocation per key.
    private val outValue = LongWritable()

    override fun reduce(key: Text, values: MutableIterable<LongWritable>, context: Context) {
        // sumOf replaces the manual var-accumulator loop.
        outValue.set(values.sumOf { it.get() })
        context.write(key, outValue)
    }
}
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值