Before running the steps below, a Kafka environment must already be deployed; see the Kafka environment deployment guide.
1. Add the Maven dependency
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.10.2.0</version>
</dependency>
2. Producer code
import java.util.Properties;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.utils.Utils;

/**
 * Producer
 */
public class SimpleProducer {
    public static void main(String[] args) throws Exception {
        String topic = "test";
        // String topic2 = "test2";
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.0.181:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Producer<String, String> producer = new KafkaProducer<>(props);
        for (int i = 1000000; i < 2000000; i++) {
            Utils.sleep(100); // pause 100 ms between sends
            String msg = topic + "-" + i; // message content to send
            producer.send(new ProducerRecord<String, String>(topic, msg), new Callback() {
                @Override
                public void onCompletion(RecordMetadata metadata, Exception e) {
                    if (e != null) {
                        e.printStackTrace();
                    } else {
                        System.out.printf("topic = %s, offset = %d%n", metadata.topic(),
                                metadata.offset());
                    }
                }
            });
            // producer.send(new ProducerRecord<String, String>(topic2, msg));
        }
        producer.close();
    }
}
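The send call above is asynchronous: the Callback fires once the broker acknowledges the record. If you would rather block until each record is acknowledged, you can wait on the Future that send() returns. Below is a minimal sketch of that variant, reusing the broker address and topic from the example above; the class name SyncSendSketch is just an illustrative placeholder.

import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/**
 * Synchronous-send variant: block on the Future returned by send().
 */
public class SyncSendSketch {
    public static void main(String[] args) throws InterruptedException, ExecutionException {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.0.181:9092"); // same broker as above
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        Producer<String, String> producer = new KafkaProducer<>(props);
        // send() returns a Future<RecordMetadata>; get() blocks until the broker acknowledges
        RecordMetadata metadata = producer.send(new ProducerRecord<>("test", "hello")).get();
        System.out.printf("topic = %s, partition = %d, offset = %d%n",
                metadata.topic(), metadata.partition(), metadata.offset());
        producer.close();
    }
}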
3. Consumer code
import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

/**
 * Consumer
 */
public class SimpleConsumer {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.0.181:9092");
        props.put("group.id", "test");
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("test", "test2")); // can subscribe to several topics at once
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(100); // wait up to 100 ms for new records
            for (ConsumerRecord<String, String> record : records)
                System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(),
                        record.key(), record.value());
        }
    }
}
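The consumer above relies on automatic offset commits (enable.auto.commit=true). If offsets should only be committed after the records have actually been processed, auto commit can be turned off and commitSync() called explicitly. Here is a minimal sketch of that variant; the group id test-manual and class name ManualCommitConsumerSketch are placeholders, and the broker address and topic are carried over from the example above.

import java.util.Arrays;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

/**
 * Manual-commit variant: commit offsets only after processing each batch.
 */
public class ManualCommitConsumerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.0.181:9092");
        props.put("group.id", "test-manual"); // placeholder group id for this sketch
        props.put("enable.auto.commit", "false"); // disable automatic offset commits
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Arrays.asList("test"));
        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(100); // wait up to 100 ms
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
                if (!records.isEmpty()) {
                    consumer.commitSync(); // commit only after the batch has been processed
                }
            }
        } finally {
            consumer.close();
        }
    }
}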
Run the producer code first, then the consumer code, and check that messages are received. Note that with the consumer's default auto.offset.reset of latest, it only sees messages produced after it joins the group, so start the consumer while the producer is still running (or set auto.offset.reset to earliest) when testing.