为了更方便引入依赖包,所以使用maven工程。这里使用的是Eclipse新建maven工程。更多内容可以参考Kafka官方文档。
- 新建maven工程
- 在pom.xml中引入依赖包
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.walter</groupId> <artifactId>kafkaDemo</artifactId> <version>0.0.1-SNAPSHOT</version> <dependencies> <dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka-clients</artifactId> <version>2.3.0</version> </dependency> <dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka_2.12</artifactId> <version>2.3.0</version> </dependency> </dependencies> </project>
- 生产者代码
package com.walter.demo; import java.util.Properties; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerRecord; public class ProducerTest { public static void main(String[] args) { // 创建配置对象 Properties prop = new Properties(); // kafka集群 prop.setProperty("bootstrap.servers", "walter101:9092"); // key, value 序列化 prop.setProperty("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); prop.setProperty("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); // 应答机制 prop.setProperty("acks", "1"); // 创建生产者 Producer<String, String> producer = new KafkaProducer<String, String>(prop); // 准备数据 String topic = "team"; String value = "developer"; ProducerRecord<String, String> record = new ProducerRecord<String, String>(topic, value); // 生产(发送)数据 producer.send(record); // 关闭生产者 producer.close(); System.out.println("生产完成"); } }
- 消费者代码
package com.walter.demo; import java.time.Duration; import java.util.Arrays; import java.util.Properties; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; public class ConsumerTest { public static void main(String[] args) { // 创建配置对象 Properties props = new Properties(); props.setProperty("bootstrap.servers", "walter101:9092"); props.setProperty("group.id", "test"); props.setProperty("enable.auto.commit", "true"); props.setProperty("auto.commit.interval.ms", "1000"); props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer"); // 创建消费者对象 KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(props); // 订阅主题 consumer.subscribe(Arrays.asList("team")); try { while (true) { ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100)); for (ConsumerRecord<String, String> record : records) System.out.println(record); } } finally { consumer.close(); } } }
- 测试:先运行生产者,再运行消费者,即可看到消费者控制台打印出新的消息。
我这里会有关于slf4j的提示,但是目前并不影响测试结果。SLF4J: Failed to load class "org.slf4j.impl.StaticLoggerBinder". SLF4J: Defaulting to no-operation (NOP) logger implementation SLF4J: See http://www.slf4j.org/codes.html#StaticLoggerBinder for further details. ConsumerRecord(topic = team, partition = 0, leaderEpoch = 0, offset = 4, CreateTime = 1568211003035, serialized key size = -1, serialized value size = 9, headers = RecordHeaders(headers = [], isReadOnly = false), key = null, value = developer)