kafka使用案例
单生产单消费案例
生产者
import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
 * Single-producer example: sends 10 keyed string messages to "test-topic".
 */
public class KafkaProducerExample {
    public static void main(String[] args) {
        String topicName = "test-topic";

        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources guarantees the producer is closed (flushing any
        // buffered records) even if send() throws; the original leaked the
        // producer on an exception because close() was not in a finally block.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10; i++) {
                String key = "key-" + i;
                String value = "value-" + i;
                // send() is asynchronous; close() waits for in-flight requests.
                producer.send(new ProducerRecord<>(topicName, key, value));
            }
        }
    }
}
消费者
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

/**
 * Single-consumer example: subscribes to "test-topic" and prints every record.
 */
public class KafkaConsumerExample {
    public static void main(String[] args) {
        String topicName = "test-topic";

        Properties props = new Properties();
        // Use the ConsumerConfig constants (the import was previously unused).
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Start from the earliest offset when the group has no committed offset yet.
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");

        // try-with-resources closes the consumer (leaving the group cleanly)
        // if the poll loop ever exits via an exception.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList(topicName));
            while (true) {
                // poll(long) is deprecated and removed in recent clients;
                // the Duration overload is the supported API.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                records.forEach(record ->
                    System.out.println("Received message: (" + record.key() + ", " + record.value()
                        + ") at offset " + record.offset()));
            }
        }
    }
}
说明
在这个例子中,我们使用了Kafka的Java客户端API来创建一个生产者和一个消费者。生产者将10条消息发送到名为“test-topic”的主题中,而消费者订阅了该主题并从中读取消息。注意,我们在消费者中设置了“auto.offset.reset”属性为“earliest”,这意味着消费者将从最早的可用偏移量开始读取消息。
多生产多消费
生产者
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Multi-producer example: three producers round-robin 100 messages to "test-topic".
 */
public class MultiProducerExample {
    public static void main(String[] args) {
        String topicName = "test-topic";
        int numProducers = 3;

        // Producer configuration, shared by all producer instances.
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // A typed List avoids the unchecked-conversion warning that
        // "new Producer[numProducers]" raises (generic arrays cannot be created).
        List<Producer<String, String>> producers = new ArrayList<>(numProducers);
        for (int i = 0; i < numProducers; i++) {
            producers.add(new KafkaProducer<>(props));
        }
        try {
            // Distribute 100 messages across the producers round-robin.
            for (int i = 0; i < 100; i++) {
                int producerIndex = i % numProducers;
                String message = "Message " + i + " from producer " + producerIndex;
                // Key-less record: the partitioner chooses the partition.
                producers.get(producerIndex).send(new ProducerRecord<>(topicName, message));
            }
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Close every producer so buffered records are flushed.
            for (Producer<String, String> producer : producers) {
                producer.close();
            }
        }
    }
}
消费者
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Multi-consumer example: two consumers in the same group poll "my-topic"
 * alternately from one thread and print the values they receive. Because they
 * share a group id, each consumer is assigned its own subset of partitions.
 */
public class KafkaMultiConsumerExample {
    public static void main(String[] args) {
        final String topicName = "my-topic";
        final String groupId = "my-group";
        final String bootstrapServers = "localhost:9092";

        // Shared configuration for both consumer instances.
        Properties config = new Properties();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        config.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        KafkaConsumer<String, String> firstConsumer = new KafkaConsumer<>(config);
        KafkaConsumer<String, String> secondConsumer = new KafkaConsumer<>(config);
        firstConsumer.subscribe(Arrays.asList(topicName));
        secondConsumer.subscribe(Arrays.asList(topicName));

        final Duration pollTimeout = Duration.ofMillis(100);
        // Poll each consumer in turn, forever.
        for (;;) {
            ConsumerRecords<String, String> firstBatch = firstConsumer.poll(pollTimeout);
            for (ConsumerRecord<String, String> record : firstBatch) {
                System.out.println("Consumer 1: " + record.value());
            }
            ConsumerRecords<String, String> secondBatch = secondConsumer.poll(pollTimeout);
            for (ConsumerRecord<String, String> record : secondBatch) {
                System.out.println("Consumer 2: " + record.value());
            }
        }
    }
}
避免重复消费案例
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.util.Properties;
public class MultiProducerExample {
public static void main(String[] args) {
String topicName = "test-topic";
int numProducers = 3;
// Set up producer properties
Properties props = new Properties();
props.put("bootstrap.servers", "localhost:9092");
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
// Create multiple producers
Producer<String, String>[] producers = new Producer[numProducers];
for (int i = 0; i < numProducers; i++) {
producers[i] = new KafkaProducer<>(props);
}
try {
// Send messages using multiple producers
for (int i = 0; i < 100; i++) {
int producerIndex = i % numProducers;
String message = "Message " + i + " from producer " + producerIndex;
ProducerRecord<String, String> record = new ProducerRecord<>(topicName, message);
producers[producerIndex].send(record);
}
} catch (Exception e) {
e.printStackTrace();
} finally {
// Close all producers
for (Producer<String, String> producer : producers) {
producer.close();
}
}
}
}
说明
在这个示例中,我们使用了一个变量 lastOffset 来跟踪上一次消费的偏移量。在每次消费时,我们检查当前记录的偏移量是否小于或等于上一次消费的偏移量:如果是,就跳过这条记录,因为它已经被消费过了;否则,打印出记录的内容,并将 lastOffset 更新为当前偏移量。
注意,我们在每批消息处理完成后调用了 commitSync() 方法来同步提交偏移量。这样即使消费者在处理消息时崩溃,重启后它也可以从上次提交的偏移量继续消费,而不会重复处理已提交的消息。