Kafka producer/consumer utility classes

/*
 * Utility class for building a Kafka consumer.
 */
public class KafkaConsumerUtil {

    /**
     * Creates a {@code KafkaConsumer<String, String>} bound to consumer group
     * "consumer02" on broker node01:9092, with auto-commit enabled.
     *
     * @return a new, not-yet-subscribed consumer; caller owns it and must close it
     */
    public KafkaConsumer<String, String> createConsumer() {

        String groupID = "consumer02";

        Properties prop = new Properties();
        // Broker address/port used to bootstrap the initial socket connection.
        prop.put("bootstrap.servers", "node01:9092");
        // Consumer group id.
        prop.put("group.id", groupID);
        // Commit offsets automatically...
        prop.put("enable.auto.commit", "true");
        // ...every 1000 ms.
        prop.put("auto.commit.interval.ms", "1000");
        // Key DEserializer (the original comment said "serializer"; these decode).
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Value DEserializer.
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Heartbeat interval; should stay well below session.timeout.ms (typically 1/3).
        prop.put("heartbeat.interval.ms", 3000);
        // Window in which the coordinator must see a heartbeat before evicting the consumer.
        prop.put("session.timeout.ms", 10 * 1000);
        // Max time allowed between poll() calls before a rebalance is triggered.
        // NOTE(review): 5s is far below the 5-minute default and even below
        // session.timeout.ms — any processing pause > 5s forces a rebalance.
        // Confirm this is intentional before deploying.
        prop.put("max.poll.interval.ms", 5 * 1000);
        // Max bytes returned by a single fetch request.
        prop.put("fetch.max.bytes", 10 * 1024 * 1024);
        // Max records returned by a single poll().
        prop.put("max.poll.records", 1000);
        // FIX: the real property name is "connections.max.idle.ms" (plural);
        // the original "connection.max.idle.ms" was silently ignored.
        // -1 disables idle-connection closing.
        prop.put("connections.max.idle.ms", -1);
        // Policy when there is no committed offset or the offset is out of range:
        // earliest (start of log) / latest (end of log) / none (throw).
        prop.put("auto.offset.reset", "latest");

        return new KafkaConsumer<>(prop);
    }
}

 

============================

/*
 * Utility class for creating a Kafka producer.
 */
public class KafkaProducerUtil {

    /**
     * Creates a {@code KafkaProducer<String, String>} targeting broker node01:9092,
     * with acks=all, idempotence enabled, and batching/linger tuned for throughput.
     *
     * @return a new producer; caller owns it and must close it
     */
    public KafkaProducer<String, String> createKafkaProducer() {
        // 1. Producer configuration.
        Properties properties = new Properties();
        // 2. Bootstrap broker; cluster metadata is fetched from it automatically.
        properties.put("bootstrap.servers", "node01:9092");
        // 3. Ack policy: wait for all in-sync replicas (strongest durability).
        properties.put("acks", "all");
        // 4. Retry count and retry interval.
        properties.put("retries", "3");
        // FIX: "retry" is not a valid producer config and was silently ignored;
        // the retry interval property is "retry.backoff.ms".
        properties.put("retry.backoff.ms", "200");
        // 5. Batch size in bytes (16 KB).
        properties.put("batch.size", "16384");
        // 6. Linger time: send a partially filled batch after this many ms.
        properties.put("linger.ms", "100");
        // 7. RecordAccumulator buffer size (32 MB).
        properties.put("buffer.memory", "33554432");
        // 8. Serializers for record keys and values.
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // 9. Enable idempotent delivery (no duplicates on retry).
        properties.put("enable.idempotence", "true");

        // Max size of a single request (512 KB).
        properties.put("max.request.size", "524288");
        // Compression applied before hitting the OS page cache (disabled for now).
        //properties.put("compression.type", "snappy");
        properties.put("request.timeout.ms", "180000");
        properties.put("max.block.ms", "180000");

        return new KafkaProducer<>(properties);
    }
}

 

=======

public class MyProducer {
    /**
     * Sends 20 JSON-serialized TestOrder records to topic "rjht03" and closes
     * the producer (close() flushes any records still buffered).
     */
    public static void main(String[] args) {
        KafkaProducerUtil kafkaProducerUtil = new KafkaProducerUtil();
        KafkaProducer<String, String> kafkaProducer = kafkaProducerUtil.createKafkaProducer();

        // 10. Send data.
        for (int i = 0; i < 20; i++) {
            // With an explicit key, records hash to a partition by key (same key ->
            // same partition); without a key, records are spread round-robin.

            // FIX: the original computed `name`/`age` into locals that were never
            // written back, so every record serialized as {"age":0} with a null name
            // (and getName() returned null, making "null" + "kafka_" + i).
            TestOrder testOrder = new TestOrder();
            testOrder.setName("kafka_" + i);
            testOrder.setAge(i);

            String data = JSON.toJSONString(testOrder);

            System.out.println(data);

            kafkaProducer.send(new ProducerRecord<String, String>("rjht03", data), new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception exception) {
                    if (exception == null) {
                        System.out.println("消息发送成功");
                    } else {
                        // Include the failure cause instead of discarding it.
                        System.out.println("消息发送失败: " + exception);
                    }
                }
            });
        }

        // 11. Release resources (also flushes pending records).
        kafkaProducer.close();
    }
}

=====

public class MyConsumer {
    /**
     * Polls topic "rjht03" forever, handing each record to a fixed 20-thread pool
     * for asynchronous processing.
     *
     * NOTE(review): auto-commit + async processing means offsets can be committed
     * before a task finishes — records may be lost on a crash (at-most-once).
     * Confirm that is acceptable for this workload.
     */
    public static void main(String[] args) {
        // Worker pool for record processing.
        ExecutorService threadPool = Executors.newFixedThreadPool(20);

        KafkaConsumerUtil kafkaConsumerUtil = new KafkaConsumerUtil();
        KafkaConsumer<String, String> consumer = kafkaConsumerUtil.createConsumer();

        consumer.subscribe(Arrays.asList("rjht03"));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100);
            for (ConsumerRecord<String, String> record : records) {
                // FIX: a single malformed payload used to throw out of main() and
                // kill the whole consumer; skip bad records instead.
                try {
                    JSONObject test = JSONObject.parseObject(record.value());
                    threadPool.submit(new TestTask(test));
                } catch (Exception e) {
                    System.out.println("跳过无法解析的消息: " + e);
                }
            }
        }
    }

    /** Runnable that turns one parsed JSON message back into a TestOrder. */
    public static class TestTask implements Runnable {

        // Parsed message payload handed over from the poll loop.
        private JSONObject test;

        public TestTask(JSONObject test) {
            this.test = test;
        }

        @Override
        public void run() {
            // Business-processing placeholder.
            System.out.println("消费者" + "获取数据 开始业务处理");
            TestOrder order = JSONObject.toJavaObject(test, TestOrder.class);
            System.out.println(order.getAge() + "_" + order.getName().concat("_哈哈"));
        }
    }
}

===

/**
 * Simple mutable order bean: a name and an age, with standard accessors.
 * Used as the JSON payload exchanged between MyProducer and MyConsumer.
 */
public class TestOrder {
    // Order holder's name; null until set.
    private String name;
    // Order holder's age; 0 until set.
    private int age;

    /** @return the current name (may be null) */
    public String getName() {
        return name;
    }

    /** @param name the name to store */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the current age */
    public int getAge() {
        return age;
    }

    /** @param age the age to store */
    public void setAge(int age) {
        this.age = age;
    }

    /** Renders as {@code TestOrder{name='<name>', age=<age>}}. */
    @Override
    public String toString() {
        StringBuilder repr = new StringBuilder("TestOrder{");
        repr.append("name='").append(name).append('\'');
        repr.append(", age=").append(age);
        repr.append('}');
        return repr.toString();
    }
}
  • 0
    点赞
  • 2
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值