Accessing Kafka with the Java Client

Environment used in this article:
OS: CentOS 6, 64-bit
JDK: 1.8.0_171, 64-bit
Kafka: kafka_2.12-2.1.1

1. Maven dependency

<dependency>
  <groupId>org.apache.kafka</groupId>
  <artifactId>kafka_2.12</artifactId>
  <version>2.1.1</version>
</dependency>
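
Note: the kafka_2.12 artifact is the full broker/Scala module. If only the Java producer and consumer API shown below is used, depending on the lighter kafka-clients artifact should be enough; a minimal alternative, assuming the same 2.1.1 version:

<dependency>
  <groupId>org.apache.kafka</groupId>
  <artifactId>kafka-clients</artifactId>
  <version>2.1.1</version>
</dependency>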

 

2. Producer code

 

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * Created by yang on 2019/2/22.
 */
public class ProducerTest {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.17.21:9092");
        // Serializers for the record key and value.
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // acks=-1 (equivalent to "all"): wait for all in-sync replicas to acknowledge each record.
        props.put("acks", "-1");
        // Retry a failed send up to 3 times.
        props.put("retries", 3);
        // Batching and buffering settings.
        props.put("batch.size", 323840);
        props.put("linger.ms", 10);
        props.put("buffer.memory", 33554432);
        // Maximum time send() may block, e.g. while the buffer is full.
        props.put("max.block.ms", 3000);

        Producer<String, String> producer = new KafkaProducer<>(props);
        for (int i = 0; i < 100; i++) {
            // send() is asynchronous: records are buffered and transmitted in the background.
            producer.send(new ProducerRecord<>("shuaige", Integer.toString(i), Integer.toString(i)));
            System.out.println("sent " + i);
        }
        producer.close();
    }
}
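
The producer above fires and forgets: send() returns immediately and the loop never checks whether the broker actually accepted a record. Below is a minimal sketch of a variant that verifies delivery, either by blocking on the Future that send() returns or by passing a callback; the class name ProducerSyncTest is only for illustration, and the broker address and topic are the same assumptions as above.

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;

public class ProducerSyncTest {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.17.21:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("acks", "-1");

        // try-with-resources closes (and flushes) the producer automatically.
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 100; i++) {
                // Synchronous send: get() blocks until the broker acknowledges the record,
                // or throws if delivery ultimately failed after all retries.
                RecordMetadata meta = producer.send(
                        new ProducerRecord<>("shuaige", Integer.toString(i), Integer.toString(i))).get();
                System.out.println("acked: partition=" + meta.partition() + ", offset=" + meta.offset());
            }

            // Asynchronous alternative: pass a callback; any error surfaces as the exception argument.
            producer.send(new ProducerRecord<>("shuaige", "key", "value"), (metadata, exception) -> {
                if (exception != null) {
                    exception.printStackTrace();
                } else {
                    System.out.println("callback acked at offset " + metadata.offset());
                }
            });
        }
    }
}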

3. Consumer code

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Created by yang on 2019/2/25.
 */
public class ConsumerTest {

    public static void main(String[] args) {

        String topicName = "shuaige";
        String groupID = "test-group";
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.17.21:9092");
        // Consumers with the same group.id share the partitions of the topic.
        props.put("group.id", groupID);
        // Commit offsets automatically every second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        // Start from the earliest offset when the group has no committed offset yet.
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList(topicName));
        try {
            while (true) {
                // poll(long) is deprecated since Kafka 2.0; use the Duration overload.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
            }
        } finally {
            consumer.close();
        }
    }
}
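
With enable.auto.commit=true, offsets are committed on a timer, so records that were polled but not yet fully processed can already be marked as consumed and get skipped after a crash. Below is a minimal sketch of a manual-commit variant, assuming the same broker, topic, and consumer group; the class name ManualCommitConsumerTest is only for illustration.

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class ManualCommitConsumerTest {

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.17.21:9092");
        props.put("group.id", "test-group");
        // Turn off auto commit; offsets are committed explicitly below.
        props.put("enable.auto.commit", "false");
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList("shuaige"));
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
                // Commit only after the whole batch has been processed, so a crash before
                // this point re-delivers the records instead of silently dropping them.
                if (!records.isEmpty()) {
                    consumer.commitSync();
                }
            }
        } finally {
            consumer.close();
        }
    }
}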

 

4. Run the programs

1. Run the producer (ProducerTest);

2. Run the consumer (ConsumerTest). Because auto.offset.reset is set to earliest, the consumer reads the topic from the beginning even though it starts after the producer, printing one "offset = ..., key = ..., value = ..." line for each record the producer sent.
