Kafka Streams: GroupByKey Grouping and Reduce Aggregation

package com.hyr.kafka.demo.streams.high.dsl.operator;

import com.hyr.kafka.demo.BaseApi.Producer;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.*;

import java.util.HashMap;
import java.util.Map;

/*******************************************************************************
 * @date 2017-12-28 5:19 PM
 * @author: <a href=mailto:>黄跃然</a>
 * @Description: GroupByKey and Reduce - grouping and aggregation
 ******************************************************************************/
public class GroupByKeyAndReduceStreams {

    public static void main(String[] args) {
        Map<String, Object> props = new HashMap<String, Object>();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "my-stream-processing-application");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.0.133:9092");
        // Specify the key/value serde formats
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass()); // Serdes: data types and serialization
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass()); // Serdes: data types and serialization
        StreamsConfig config = new StreamsConfig(props);

        KStreamBuilder builder = new KStreamBuilder();

        KStream<String, String> kStream = builder.stream("my-input-topic");

        // Group the records by key
        KGroupedStream<String, String> groupByKey = kStream.groupByKey();

        // The Reducer combines the current aggregate with the newly arrived value for the same key;
        // both arguments are values, not a key/value pair. Here we simply keep the latest value.
        Reducer<String> reducer = new Reducer<String>() {
            @Override
            public String apply(String aggValue, String newValue) {
                System.out.println(aggValue + "===" + newValue);
                return newValue;
            }
        };

        // Aggregate each group: for every key the KTable holds the reducer's result,
        // i.e. the most recent value seen for that key
        KTable<String, String> table = groupByKey.reduce(reducer);

        table.toStream().to("my-output-topic");

        KafkaStreams kafkaStreams = new KafkaStreams(builder, config);
        kafkaStreams.start();

        // Send test data to the input topic using the project's Producer helper
        Producer producer = new Producer();
        producer.PRODUCERTOPIC1 = "my-input-topic";
        producer.PRODUCERTOPIC2 = "my-input-topic";
        producer.runProducer();
    }

}
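
The listing above uses the KStreamBuilder API, which is deprecated in newer Kafka releases. As a rough sketch (not part of the original project), the same GroupByKey-plus-Reduce topology on the newer StreamsBuilder API could look like this; the topic names and broker address are carried over from the example above:

package com.hyr.kafka.demo.streams.high.dsl.operator;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.kstream.KStream;
import org.apache.kafka.streams.kstream.KTable;

import java.util.Properties;

public class GroupByKeyAndReduceStreamsNewApi {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "my-stream-processing-application");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.0.133:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        StreamsBuilder builder = new StreamsBuilder();

        KStream<String, String> kStream = builder.stream("my-input-topic");

        // Group by key, then keep the latest value seen for each key
        KTable<String, String> table = kStream
                .groupByKey()
                .reduce((aggValue, newValue) -> newValue);

        table.toStream().to("my-output-topic");

        KafkaStreams kafkaStreams = new KafkaStreams(builder.build(), props);
        kafkaStreams.start();
    }
}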

The complete KafkaDemo project code has been pushed to GitHub:

https://github.com/huangyueranbbc/KafkaDemo 
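
The Producer helper used above comes from the KafkaDemo project itself. If you only want to feed the input topic without pulling in that class, a minimal standalone keyed producer could look like the following sketch (the keys and values are made up for illustration; the broker address matches the example):

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

public class SimpleKeyedProducer {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.0.133:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Records with the same key are aggregated together by the streams application
            producer.send(new ProducerRecord<>("my-input-topic", "user-1", "value-a"));
            producer.send(new ProducerRecord<>("my-input-topic", "user-1", "value-b"));
            producer.send(new ProducerRecord<>("my-input-topic", "user-2", "value-c"));
            producer.flush();
        }
    }
}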

 
