kafka的生产者和消费者读取股票数据

1. 生产者


import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Properties;

public class product {
    /**
     * Reads a GBK-encoded stock CSV file ("a.csv") line by line and publishes
     * each line as a String message to the Kafka topic "test", then prints the
     * total number of messages sent.
     */
    public static void main(String[] args) {
        // 1. Producer configuration: broker address plus String serializers
        //    for both key and value (serializers are mandatory).
        Properties properties = new Properties();
        properties.put("bootstrap.servers", "172.20.192.1:9095");
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // 2. Path of the stock-data CSV to publish.
        String filePath = "a.csv";

        int messageCount = 0;

        // 3. try-with-resources guarantees the producer is closed (which also
        //    flushes buffered records) and the reader is released even when an
        //    unexpected RuntimeException escapes the loop — the original code
        //    only closed the producer on the normal exit path.
        //    The file is decoded as GBK because the source data is
        //    Chinese-encoded CSV.
        try (KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(new FileInputStream(filePath), "GBK"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                ProducerRecord<String, String> record = new ProducerRecord<>("test", line);
                // Asynchronous send with a callback so delivery failures are
                // reported instead of being silently dropped.
                kafkaProducer.send(record, (metadata, exception) -> {
                    if (exception != null) {
                        exception.printStackTrace();
                    }
                });
                messageCount++;
            }
        } catch (IOException e) {
            e.printStackTrace();
        }

        // 4. Report how many lines were handed to the producer.
        System.out.println("Total messages sent: " + messageCount);
    }
}

2. 消费者


import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;

public class customer {
    /**
     * Consumes stock-trade messages from the Kafka topic "test" and keeps
     * running totals of buy/sell transaction counts and share quantities,
     * printing the totals after every non-empty batch. Offsets are committed
     * manually once a batch has been processed.
     */
    public static void main(String[] args) {
        // 1. Consumer configuration.
        Properties properties = new Properties();
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "172.20.192.1:9095");
        // Deserializers are mandatory.
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Consumer group id (any name) is mandatory.
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test");
        // Auto-commit disabled: offsets are committed explicitly below, after
        // a batch has been fully processed (at-least-once semantics).
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");

        // Create the consumer and subscribe to the topic(s).
        KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties);
        ArrayList<String> topics = new ArrayList<>();
        topics.add("test");
        kafkaConsumer.subscribe(topics);

        // Running totals: number of buy/sell trades and the summed share
        // quantities for each side.
        int buyCount = 0;
        int sellCount = 0;
        int buyTransactionAmount = 0;
        int sellTransactionAmount = 0;

        // Poll loop: fetch a batch every 100 ms and aggregate it.
        while (true) {
            ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
                System.out.println(consumerRecord);

                // Each message is one CSV row; only 9-field rows are valid.
                // Expected layout (by index): 4 = quantity, 5 = trade side
                // ("买入" = buy, "卖出" = sell). TODO confirm against the
                // producer's source file schema.
                String[] fields = consumerRecord.value().split(",");
                if (fields.length == 9) {
                    int quantity;
                    try {
                        quantity = Integer.parseInt(fields[4].trim());
                    } catch (NumberFormatException e) {
                        // Skip rows whose quantity is not numeric (e.g. a CSV
                        // header line) instead of crashing the whole consumer,
                        // which is what the unguarded parseInt used to do.
                        System.err.println("Skipping malformed record: " + consumerRecord.value());
                        continue;
                    }
                    String transactionType = fields[5];

                    // Update the per-side counters.
                    if (transactionType.equalsIgnoreCase("买入")) {
                        buyCount++;
                        buyTransactionAmount += quantity;
                    } else if (transactionType.equalsIgnoreCase("卖出")) {
                        sellCount++;
                        sellTransactionAmount += quantity;
                    }
                }
            }

            // Only report and commit when this poll actually delivered data;
            // the original printed the stats and committed every 100 ms even
            // when the batch was empty.
            if (!consumerRecords.isEmpty()) {
                System.out.println("\n买入交易笔数:" + buyCount);
                System.out.println("卖出交易笔数:" + sellCount);
                System.out.println("买入交易总量:" + buyTransactionAmount);
                System.out.println("卖出交易总量:" + sellTransactionAmount);

                // Manually commit the consumed offsets for this batch.
                kafkaConsumer.commitSync();
            }
        }
    }
}

评论 1
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值