【Kafka】(十七)Kafka Streams 示例 TemperatureDemo

最近想统计一些消息数据,原计划接收Kafka消息后自行统计然后存入数据库(统计相对比较简单,所以没有考虑使用Apache Storm), 突然想起来Kafka已经提供Kafka Streams功能,于是开始看Kafka Streams。 下面的例子非常简单,只是在Kafka提供的例子上做了一点修改。

添加依赖

因为我们使用的Kafka Stream所以添加的依赖是kafka-streams, 不是以前经常使用的kafka-clients.
我的kafka安装在Windows 10上面(为了方便测试,平时在公司时可以直接连接到Kafka集群,开发时先在本地运行,于是在Windows10上安装了Kafka)。 版本kafka_2.12-1.0.0

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-streams</artifactId>
            <version>1.0.2</version>
        </dependency>

主要代码

官方示例的代码在 Kafka 源码仓库的 streams/examples 目录中(TemperatureDemo.java)。

官方示例中向topic直接发送了温度数据。 我修改一下。 向topic发送JSON格式的数据,里面包含了温度和湿度。例如 {"temp":19, "humidity": 25}(注意必须使用英文半角引号,否则无法被解析为JSON)。

注意:该代码只在官方示例的基础上修改了数据格式,其他部分和官方示例一样。启动程序后直接向topic iot-temperature发送格式为{"temp":19, "humidity": 25}的消息即可看到运行效果。

public class TemperatureDemo {

    // threshold used for filtering max temperature values
    private static final int TEMPERATURE_THRESHOLD = 20;
    // window size in seconds within which the filtering is applied
    private static final int TEMPERATURE_WINDOW_SIZE = 5;

    /**
     * Reads JSON sensor readings such as {"temp":19,"humidity":25} from topic
     * "iot-temperature", keeps the reading with the maximum temperature per
     * 5-second tumbling window, and forwards readings whose temperature
     * exceeds TEMPERATURE_THRESHOLD to topic "iot-temperature-max".
     */
    public static void main(String[] args) throws Exception {

        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "streams-temperature");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
        props.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass());

        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // disable record caching so every windowed update is forwarded immediately (demo only)
        props.put(StreamsConfig.CACHE_MAX_BYTES_BUFFERING_CONFIG, 0);

        StreamsBuilder builder = new StreamsBuilder();

        // sensor readings arrive on topic iot-temperature
        KStream<String, String> source = builder.stream("iot-temperature");

        KStream<Windowed<String>, String> max = source
                // temperature values are sent without a key (null), so in order
                // to group and reduce them, a constant key is needed ("temp")
                .selectKey(new KeyValueMapper<String, String, String>() {
                    @Override
                    public String apply(String key, String value) {
                        return "temp";
                    }
                })
                .groupByKey()
                .windowedBy(TimeWindows.of(TimeUnit.SECONDS.toMillis(TEMPERATURE_WINDOW_SIZE)))
                .reduce(new Reducer<String>() {
                    @Override
                    public String apply(String value1, String value2) {
                        // BUGFIX: both arguments are JSON strings. The original
                        // code parsed value1 as JSON but value2 with
                        // Integer.parseInt, which throws NumberFormatException
                        // on the first reduction and also switched the value
                        // format to a bare number mid-stream, breaking the
                        // downstream JSON filter. Instead, keep the whole JSON
                        // record that carries the higher temperature.
                        System.out.println("value1=" + value1 + ", value2=" + value2);
                        int temp1 = JSON.parseObject(value1).getIntValue("temp");
                        int temp2 = JSON.parseObject(value2).getIntValue("temp");
                        return temp1 > temp2 ? value1 : value2;
                    }
                })
                .toStream()
                // keep only windows whose max temperature exceeds the threshold (20)
                .filter(new Predicate<Windowed<String>, String>() {
                    @Override
                    public boolean test(Windowed<String> key, String value) {
                        System.out.println("key=" + key + ", value=" + value);
                        JSONObject json = JSON.parseObject(value);
                        Integer temperature = json.getInteger("temp");
                        // guard against records missing the "temp" field
                        return temperature != null && temperature > TEMPERATURE_THRESHOLD;
                    }
                });

        WindowedSerializer<String> windowedSerializer = new WindowedSerializer<>(Serdes.String().serializer());
        // BUGFIX: WindowedDeserializer expects the window size in milliseconds,
        // matching TimeWindows.of() above — the original passed seconds (5)
        WindowedDeserializer<String> windowedDeserializer =
                new WindowedDeserializer<>(Serdes.String().deserializer(),
                        TimeUnit.SECONDS.toMillis(TEMPERATURE_WINDOW_SIZE));
        Serde<Windowed<String>> windowedSerde = Serdes.serdeFrom(windowedSerializer, windowedDeserializer);

        // need to override key serde to Windowed<String> type
        max.to("iot-temperature-max", Produced.with(windowedSerde, Serdes.String()));

        final KafkaStreams streams = new KafkaStreams(builder.build(), props);
        final CountDownLatch latch = new CountDownLatch(1);

        // attach shutdown handler to catch control-c
        Runtime.getRuntime().addShutdownHook(new Thread("streams-temperature-shutdown-hook") {
            @Override
            public void run() {
                streams.close();
                latch.countDown();
            }
        });

        try {
            streams.start();
            latch.await();
        } catch (Throwable e) {
            // log the failure before exiting — the original swallowed it silently
            e.printStackTrace();
            System.exit(1);
        }
        System.exit(0);
    }
}

运行结果

在这里插入图片描述

Streams的运行结果存放在topic iot-temperature-max中,我们查看该topic的数据。只有温度大于TEMPERATURE_THRESHOLD(20)的记录才会被存入该topic。

在这里插入图片描述

  • 1
    点赞
  • 3
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
以下是一个使用 Kafka Streams 处理实时行情的示例代码,其中我们使用了 Kafka 的 C++ 客户端库 RdKafka: ```c++ #include <iostream> #include <string> #include <vector> #include <librdkafka/rdkafkacpp.h> using std::string; using std::vector; using std::cout; using std::endl; class ExampleDeliveryReportCb : public RdKafka::DeliveryReportCb { public: void dr_cb(RdKafka::Message& message) { if (message.err()) { std::cerr << "Failed to deliver message: " << message.errstr() << std::endl; } else { std::cout << "Message delivered to " << message.topic_name() << " [" << message.partition() << "]" << std::endl; } } }; class ExampleEventCb : public RdKafka::EventCb { public: void event_cb(RdKafka::Event& event) { switch (event.type()) { case RdKafka::Event::EVENT_ERROR: std::cerr << "ERROR (" << RdKafka::err2str(event.err()) << "): " << event.str() << std::endl; if (event.err() == RdKafka::ERR__ALL_BROKERS_DOWN) { exit(1); } break; case RdKafka::Event::EVENT_STATS: std::cerr << "\"STATS\": " << event.str() << std::endl; break; case RdKafka::Event::EVENT_LOG: std::cerr << "LOG-" << event.severity() << "-" << event.fac().c_str() << ": " << event.str().c_str() << std::endl; break; default: std::cerr << "EVENT " << event.type() << " (" << RdKafka::err2str(event.err()) << "): " << event.str() << std::endl; break; } } }; int main(int argc, char* argv[]) { if (argc != 4) { std::cerr << "Usage: " << argv[0] << " <broker> <input-topic> <output-topic>" << std::endl; return 1; } string brokers = argv[1]; string input_topic = argv[2]; string output_topic = argv[3]; RdKafka::Conf *conf = RdKafka::Conf::create(RdKafka::Conf::CONF_GLOBAL); conf->set("metadata.broker.list", brokers, errstr); conf->set("event_cb", &example_event_cb, errstr); ExampleDeliveryReportCb ex_dr_cb; conf->set("dr_cb", &ex_dr_cb, errstr); RdKafka::Producer *producer = RdKafka::Producer::create(conf, errstr); if (!producer) { std::cerr << "Failed to create producer: " << errstr << std::endl; return 1; } RdKafka::Conf *conf_consumer = 
RdKafka::Conf::create(RdKafka::Conf::CONF_GLOBAL); conf_consumer->set("metadata.broker.list", brokers, errstr); conf_consumer->set("event_cb", &example_event_cb, errstr); RdKafka::Consumer *consumer = RdKafka::Consumer::create(conf_consumer, errstr); if (!consumer) { std::cerr << "Failed to create consumer: " << errstr << std::endl; return 1; } vector<string> topics; topics.push_back(input_topic); RdKafka::ErrorCode err = consumer->subscribe(topics); if (err) { std::cerr << "Failed to subscribe to " << input_topic << ": " << RdKafka::err2str(err) << std::endl; return 1; } while (true) { RdKafka::Message *msg = consumer->consume(1000); if (msg->err() == RdKafka::ERR_NO_ERROR) { string data((const char*)msg->payload(), msg->len()); // TODO: process data RdKafka::ErrorCode resp = producer->produce(output_topic, -1, RdKafka::Producer::RK_MSG_COPY, const_cast<char *>(data.c_str()), data.size(), NULL, NULL); if (resp != RdKafka::ERR_NO_ERROR) { std::cerr << "Failed to produce message: " << RdKafka::err2str(resp) << std::endl; } producer->poll(0); } else if (msg->err() != RdKafka::ERR__TIMED_OUT) { std::cerr << "Failed to consume message: " << RdKafka::err2str(msg->err()) << std::endl; } delete msg; } delete consumer; delete producer; return 0; } ``` 在上面的示例代码中,我们创建了一个生产者和一个消费者,分别用于从输入主题中消费数据并将处理后的数据写入输出主题中。你可以根据实际需求修改上面的示例代码,并添加各种流式处理操作符来实现实时行情处理。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值