Kafka Producer/Consumer Exercise

Requirement: write a producer that continuously generates user-behavior events and writes them to a Kafka topic.
Format of the generated data (the trailing isNew values are what requirement 2 below should produce):
{"guid":1,"eventId":"pageview","timestamp":1637868346789}   isNew = 1
{"guid":1,"eventId":"addcard","timestamp":1637868347625}    isNew = 0
{"guid":2,"eventId":"collect","timestamp":16378683463219}
{"guid":3,"eventId":"paid","timestamp":16378683467829}

Then write a consumer that continuously reads these user-behavior events from Kafka and computes:
1. Every 5 seconds, output the number of distinct users seen so far (UV).
2. Add a field to each record marking whether the user id appears for the first time: 1 if yes, 0 otherwise.

First, add the dependencies:

<dependencies>
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>${kafka.version}</version>
    </dependency>

    <dependency>
        <groupId>com.alibaba</groupId>
        <artifactId>fastjson</artifactId>
        <version>1.2.75</version>
    </dependency>

    <dependency>
        <groupId>org.apache.commons</groupId>
        <artifactId>commons-lang3</artifactId>
        <version>3.8.1</version>
    </dependency>


    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.18.22</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/org.roaringbitmap/RoaringBitmap -->
    <dependency>
        <groupId>org.roaringbitmap</groupId>
        <artifactId>RoaringBitmap</artifactId>
        <version>0.9.0</version>
    </dependency>

    <!-- https://mvnrepository.com/artifact/com.google.guava/guava -->
    <dependency>
        <groupId>com.google.guava</groupId>
        <artifactId>guava</artifactId>
        <version>31.1-jre</version>
    </dependency>

    <dependency>
        <groupId>mysql</groupId>
        <artifactId>mysql-connector-java</artifactId>
        <version>8.0.28</version>
    </dependency>
</dependencies>

Producer example:

package com.doitedu;

import com.alibaba.fastjson.JSON;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.apache.commons.lang3.RandomStringUtils;
import org.apache.commons.lang3.RandomUtils;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Properties;

/**
 * To verify the data:
 * create the topic
 * kafka-topics.sh --create --topic event-log --zookeeper linux01:2181 --partitions 3 --replication-factor 3
 * then watch it with a console consumer
 * kafka-console-consumer.sh  --bootstrap-server linux01:9092 --topic event-log
 * {"eventId":"zTUAbXcWbn","guid":7170,"timeStamp":1659944455262}
 * {"eventId":"KSzaaNmczb","guid":9743,"timeStamp":1659944455823}
 * {"eventId":"FNUERLlCNu","guid":7922,"timeStamp":1659944456295}
 * {"eventId":"VmXVJHlpOF","guid":2505,"timeStamp":1659944458267}
 * {"eventId":"pMIHwLzSIE","guid":7668,"timeStamp":1659944460088}
 * {"eventId":"ZvGYIvmKTx","guid":3636,"timeStamp":1659944460461}
 * {"eventId":"jBanTDSlCO","guid":3468,"timeStamp":1659944460787}
 * {"eventId":"vXregpYeHu","guid":1107,"timeStamp":1659944462525}
 * {"eventId":"PComosCafr","guid":7765,"timeStamp":1659944463640}
 * {"eventId":"xCHFOYIJlb","guid":3443,"timeStamp":1659944464697}
 * {"eventId":"xDToApWwFo","guid":5034,"timeStamp":1659944465953}
 */
public class Exercise_kafka编程练习 {
    public static void main(String[] args) throws InterruptedException {
        MyData myData = new MyData();
        myData.genData();
    }
}

class MyData{
    KafkaProducer<String, String> producer = null;
    public MyData(){
        Properties props = new Properties();
        props.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,"linux01:9092,linux02:9092,linux03:9092");
        props.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producer = new KafkaProducer<String, String>(props);
    }
    
    public void genData() throws InterruptedException {
        UserEvent userEvent = new UserEvent();
        while (true){
            // generate a random event
            userEvent.setGuid(RandomUtils.nextInt(0,10000));
            userEvent.setEventId(RandomStringUtils.randomAlphabetic(10));
            userEvent.setTimeStamp(System.currentTimeMillis());
            String json = JSON.toJSONString(userEvent);
            // write the event to Kafka
            ProducerRecord<String, String> record = new ProducerRecord<>("event-log", json);
            Thread.sleep(RandomUtils.nextInt(200,1000));
            producer.send(record);
        }
    }
}
/*
{"guid":1,"eventId":"pageview","timestamp":1637868346789}
{"guid":1,"eventId":"addcard","timestamp":1637868347625}
{"guid":2,"eventId":"collect","timestamp":16378683463219}
 */
@NoArgsConstructor
@AllArgsConstructor
@Getter
@Setter
class UserEvent{
    private Integer guid;
    private String eventId;
    private long timeStamp;
    private Integer isNew;   // set by the consumer in requirement 2 (1 = first time seen, 0 = seen before)
}
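One thing the producer demo glosses over: KafkaProducer batches records in a background buffer, so events sent just before the process is killed can be lost. A minimal sketch of a graceful shutdown, as a hypothetical addition to the MyData constructor (not in the original code):

    // flush buffered records and release resources when the JVM exits (e.g. on Ctrl+C)
    Runtime.getRuntime().addShutdownHook(new Thread(() -> {
        producer.flush();
        producer.close();
    }));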

Consumer example, first implemented with a HashSet:

package com.doitedu;

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.*;

/**
 * Two parts:
 * Part 1: a consumer thread keeps pulling data from Kafka.
 * Part 2: a timer fires every 5 seconds and prints the distinct-user count.
 */
public class Exercise_consumerDemo {
    public static void main(String[] args) {
        // HashSet is not thread-safe, and it is shared between the consumer
        // thread and the timer thread, so wrap it in a synchronized view
        Set<Integer> set = Collections.synchronizedSet(new HashSet<>());
        new Thread(new ConsumerThread(set)).start();
        // schedule the periodic count:
        // arg 1: the task to run, arg 2: initial delay (ms), arg 3: repeat interval (ms)
        Timer timer = new Timer();
        timer.schedule(new ConsumerTask(set), 5000, 5000);
    }
}

class ConsumerThread implements Runnable {
    Set<Integer> set;
    KafkaConsumer<String, String> consumer;

    public ConsumerThread(Set<Integer> set) {
        this.set = set;
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux01:9092");
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test001");

        consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("event-log"));
    }
    /**
     * What does run() need to do?
     * Consume records, extract the user id from each one,
     * and add the id to the shared set.
     */
    @Override
    public void run() {
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Integer.MAX_VALUE));
            for (ConsumerRecord<String, String> record : records) {
                String json = record.value();
                UserEvent userEvent = JSON.parseObject(json, UserEvent.class);
                Integer guid = userEvent.getGuid();
                set.add(guid);
            }
        }
    }
}

class ConsumerTask extends TimerTask {
    Set<Integer> set;

    public ConsumerTask(Set<Integer> set) {
        this.set = set;
    }
    /**
     * Prints the distinct-user count accumulated so far.
     */
    @Override
    public void run() {
        int userCount = set.size();
        System.out.println(System.currentTimeMillis() + ", distinct users so far: " + userCount);
    }
}

A HashSet will clearly become a problem: as the data volume keeps growing, memory use grows without bound and the JVM eventually OOMs, hogging more and more resources along the way.
Plan 2: replace the HashSet with a bitmap. The overall logic stays the same; the only change is swapping the HashSet for a RoaringBitmap, which stores the same set of ids in compressed form.
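To see why the bitmap avoids the memory blow-up, here is a minimal standalone sketch (illustration only, not part of the exercise) that stores ten million ids in a RoaringBitmap and prints the bitmap's size:

import org.roaringbitmap.RoaringBitmap;

public class BitmapFootprintDemo {
    public static void main(String[] args) {
        RoaringBitmap bm = new RoaringBitmap();
        // add ten million distinct ids
        for (int i = 0; i < 10_000_000; i++) {
            bm.add(i);
        }
        bm.runOptimize(); // run-length encode dense ranges
        System.out.println("cardinality = " + bm.getCardinality());
        System.out.println("bytes       = " + bm.getSizeInBytes());
    }
}

A HashSet<Integer> holding the same ids costs dozens of bytes per entry (a boxed Integer plus a hash-table node), while the run-optimized bitmap compresses a dense id range down to a few kilobytes.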

package com.doitedu;

import com.alibaba.fastjson.JSON;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.roaringbitmap.RoaringBitmap;

import java.time.Duration;
import java.util.*;

/**
 * Two parts:
 * Part 1: a consumer thread keeps pulling data from Kafka.
 * Part 2: a timer periodically prints the distinct-user count.
 */
public class BitMap_consumerDemo {
    public static void main(String[] args) {
        // plan 1's HashSet is replaced with a RoaringBitmap
        RoaringBitmap bitMap = RoaringBitmap.bitmapOf();

        new Thread(new BitMapConsumerThread(bitMap)).start();
        // schedule the periodic count
        Timer timer = new Timer();
        timer.schedule(new BitMapConsumerTask(bitMap), 1000, 5000);

    }
}

class BitMapConsumerThread implements Runnable {
    RoaringBitmap bitMap = null;
    KafkaConsumer<String, String> consumer = null;

    public BitMapConsumerThread(RoaringBitmap bitMap) {
        this.bitMap = bitMap;
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux01:9092");
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test001");

        consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("event-log"));
    }
    /**
     * Same consumption loop as before, except the user ids
     * go into the RoaringBitmap instead of a HashSet.
     */
    @Override
    public void run() {
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Integer.MAX_VALUE));
            for (ConsumerRecord<String, String> record : records) {
                String json = record.value();
                UserEvent userEvent = JSON.parseObject(json, UserEvent.class);
                Integer guid = userEvent.getGuid();
                // RoaringBitmap is not thread-safe; guard against the timer thread
                synchronized (bitMap) {
                    bitMap.add(guid);
                }
            }
        }
    }
}


class BitMapConsumerTask extends TimerTask {
    RoaringBitmap bitMap = null;

    public BitMapConsumerTask(RoaringBitmap bitMap) {
        this.bitMap = bitMap;
    }
    /**
     * Prints the distinct-user count accumulated so far.
     */
    @Override
    public void run() {
        int userCount;
        // synchronize against the consumer thread
        synchronized (bitMap) {
            userCount = bitMap.getCardinality();
        }
        System.out.println(System.currentTimeMillis() + ", distinct users so far: " + userCount);
    }
}
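One caveat: RoaringBitmap indexes 32-bit integers, which matches the demo's Integer guid. If your guids are 64-bit longs, the same library ships a 64-bit variant; a minimal sketch (assumption: same dedup logic, just a wider key type):

import org.roaringbitmap.longlong.Roaring64NavigableMap;

// 64-bit variant for long guids
Roaring64NavigableMap bitmap64 = new Roaring64NavigableMap();
bitmap64.addLong(9_999_999_999L);                   // ids beyond Integer.MAX_VALUE are fine
System.out.println(bitmap64.getLongCardinality()); // prints 1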

Requirement 2: flagging whether a user has been seen before can be done with a bitmap, and also with a Bloom filter. Note that a Bloom filter is probabilistic: mightContain can return a false positive, so a small fraction of genuinely new users may be marked as returning. An exact bitmap-based alternative is sketched after the demo below.

package com.doitedu;

import com.alibaba.fastjson.JSON;
import com.google.common.hash.BloomFilter;
import com.google.common.hash.Funnels;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
import java.util.Timer;
import java.util.TimerTask;

/**
 * Use a Bloom filter to decide whether a guid has been seen before;
 * a bitmap would work for this as well (see the sketch below).
 */
public class BloomFilter_consumerDemo {
    public static void main(String[] args) {
        // sized for up to 100,000 insertions; Guava's default false-positive
        // rate is 3% (an overload takes an explicit fpp argument)
        BloomFilter<Long> longBloomFilter = BloomFilter.create(Funnels.longFunnel(), 100000);

        new Thread(new BloomFilterConsumerThread(longBloomFilter)).start();
    }
}

class BloomFilterConsumerThread implements Runnable {
    BloomFilter<Long> longBloomFilter = null;
    KafkaConsumer<String, String> consumer = null;

    public BloomFilterConsumerThread(BloomFilter<Long> longBloomFilter) {
        this.longBloomFilter = longBloomFilter;
        Properties props = new Properties();
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "linux01:9092");
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test001");
        consumer = new KafkaConsumer<String, String>(props);
        consumer.subscribe(Arrays.asList("event-log"));
    }

    /**
     * What does run() need to do?
     * Consume records, extract the guid, test it against the Bloom filter
     * to set isNew, then record the guid in the filter.
     */
    @Override
    public void run() {
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Integer.MAX_VALUE));
            for (ConsumerRecord<String, String> record : records) {
                String json = record.value();
                UserEvent userEvent = JSON.parseObject(json, UserEvent.class);
                Integer guid = userEvent.getGuid();
                boolean mightContain = longBloomFilter.mightContain((long) guid);
                if (mightContain) {
                    userEvent.setIsNew(0);   // seen before (possibly a false positive)
                } else {
                    userEvent.setIsNew(1);   // definitely the first time
                }
                // after the check, record this guid in the filter
                longBloomFilter.put((long) guid);
                System.out.println(JSON.toJSONString(userEvent));
            }
        }
    }
}
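As promised above, the exact bitmap alternative: RoaringBitmap.checkedAdd adds a value and returns true only if it was not already present, which is exactly the first-time test, with no false positives. A minimal sketch of the loop body, assuming the same consumer setup as in the demos above and a shared RoaringBitmap named bitMap:

                // inside the poll loop, replacing the Bloom filter check
                UserEvent userEvent = JSON.parseObject(record.value(), UserEvent.class);
                // true only if this guid was not already in the bitmap
                boolean firstTime = bitMap.checkedAdd(userEvent.getGuid());
                userEvent.setIsNew(firstTime ? 1 : 0);
                System.out.println(JSON.toJSONString(userEvent));

Unlike the Bloom filter this is exact, at the cost of the bitmap's (compressed) memory growing with the number of distinct guids.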