需要导入的依赖
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.2.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/log4j/log4j -->
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>1.2.17</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-api -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.25</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.slf4j/slf4j-log4j12 -->
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
<version>1.7.25</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
<version>3.8.1</version>
</dependency>
生产者向topic02中写入数据
package com.baizhi.jsy.transaction;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
import java.util.UUID;
/**
 * Transactional producer demo: writes five records to {@code topic02}
 * inside a single Kafka transaction, then commits.
 */
public class ProductKafkaTransactionnOnly {
public static void main(String[] args) {
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "Centos:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 1024 * 1024);
properties.put(ProducerConfig.LINGER_MS_CONFIG, 500);
// acks=all is required (and implied) when idempotence is enabled.
properties.put(ProducerConfig.ACKS_CONFIG,"-1");
properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG,5000);
properties.put(ProducerConfig.RETRIES_CONFIG,3);
properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG,true);
// A transactional.id is mandatory for transactions; the random suffix
// keeps repeated demo runs from fencing each other.
properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG,"transaction-id"+ UUID.randomUUID());
KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);
kafkaProducer.initTransactions();
try {
kafkaProducer.beginTransaction();
for (int i = 0; i < 5; i++) {
ProducerRecord<String, String> record = new ProducerRecord<>(
"topic02",
"Transaction",
"Test committed Transaction1");
kafkaProducer.send(record);
}
// Flush once after the loop; flushing per record defeats batching.
kafkaProducer.flush();
kafkaProducer.commitTransaction();
} catch (ProducerFencedException e) {
// Another producer with the same transactional.id took over; per the
// Kafka API contract a fenced producer cannot abort — only close.
e.printStackTrace();
} catch (KafkaException e) {
// Recoverable send/commit failure: abort so read_committed consumers
// never see this batch.
kafkaProducer.abortTransaction();
e.printStackTrace();
} finally {
// Always release network/buffer resources, even on failure.
kafkaProducer.close();
}
}
}
中间件：从 topic02 中读取数据，然后将数据写入 topic01
package com.baizhi.jsy.transaction;
import org.apache.kafka.clients.consumer.*;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.ProducerFencedException;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import java.time.Duration;
import java.util.*;
/**
 * Consume-transform-produce demo: reads from {@code topic02}, appends a
 * suffix to each value, and writes to {@code topic01}. The produced records
 * and the consumed offsets are committed atomically in one Kafka transaction.
 */
public class ProductConsumerKafkaTransactionn {
public static void main(String[] args) {
String groupId = "g1";
KafkaProducer<String, String> kafkaProducer = buildKafkaProducer();
KafkaConsumer<String, String> kafkaConsumer = buildKafkaConsummer(groupId);
kafkaConsumer.subscribe(Arrays.asList("topic02"));
kafkaProducer.initTransactions();
try {
while (true) {
ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
if (consumerRecords.isEmpty()) {
continue;
}
kafkaProducer.beginTransaction();
try {
Map<TopicPartition, OffsetAndMetadata> offset = new HashMap<>();
for (ConsumerRecord<String, String> record : consumerRecords) {
// Commit offset+1: the offset of the NEXT record to read.
offset.put(new TopicPartition(record.topic(), record.partition()),
new OffsetAndMetadata(record.offset() + 1));
kafkaProducer.send(new ProducerRecord<>("topic01", record.key(), record.value() + "jiangsiyu"));
}
// Offsets ride in the same transaction as the produced records,
// giving exactly-once semantics for this consume-produce hop.
kafkaProducer.sendOffsetsToTransaction(offset, groupId);
// commitTransaction() flushes pending sends itself; no flush() needed.
kafkaProducer.commitTransaction();
} catch (ProducerFencedException e) {
// Fenced producers must not abort — rethrow so we close and exit.
throw e;
} catch (Exception e) {
// Abort only here, where a transaction is guaranteed to be open;
// aborting without one would throw IllegalStateException.
kafkaProducer.abortTransaction();
e.printStackTrace();
}
}
} finally {
// Close both clients even when the loop exits via an exception.
kafkaProducer.close();
kafkaConsumer.close();
}
}
/** Builds an idempotent, transactional producer (random transactional.id per run). */
public static KafkaProducer<String,String> buildKafkaProducer(){
Properties properties = new Properties();
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "Centos:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
properties.put(ProducerConfig.BATCH_SIZE_CONFIG, 1024 * 1024);
properties.put(ProducerConfig.LINGER_MS_CONFIG, 500);
properties.put(ProducerConfig.ACKS_CONFIG,"-1");
properties.put(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG,5000);
properties.put(ProducerConfig.RETRIES_CONFIG,3);
properties.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG,true);
properties.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG,"transaction-id"+ UUID.randomUUID());
return new KafkaProducer<>(properties);
}
/**
 * Builds a manual-commit, read_committed consumer so uncommitted/aborted
 * transactional records are never observed.
 */
public static KafkaConsumer<String,String> buildKafkaConsummer(String groupId){
Properties properties = new Properties();
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"Centos:9092");
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,StringDeserializer.class.getName());
properties.put(ConsumerConfig.GROUP_ID_CONFIG,groupId);
// Offsets are committed via sendOffsetsToTransaction, never auto-committed.
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,false);
properties.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG,"read_committed");
return new KafkaConsumer<>(properties);
}
}
消费者从topic01中读取数据
package com.baizhi.jsy.transaction;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Properties;
/**
 * Read-committed consumer demo: polls {@code topic01} forever and prints
 * each record's metadata, seeing only committed transactional data.
 */
public class ConsumerKafkaReadCommitted {
public static void main(String[] args) {
Properties config = new Properties();
config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,"Centos:9092");
config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,StringDeserializer.class.getName());
config.put(ConsumerConfig.GROUP_ID_CONFIG,"g1");
// Hide records from open or aborted transactions.
config.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG,"read_committed");
KafkaConsumer<String, String> consumer = new KafkaConsumer<>(config);
consumer.subscribe(Arrays.asList("topic01"));
try {
while (true){
ConsumerRecords<String, String> batch = consumer.poll(Duration.ofSeconds(1));
if (batch.isEmpty()) {
continue;
}
for (ConsumerRecord<String, String> rec : batch) {
String topic = rec.topic();
String key = rec.key();
String value = rec.value();
long offset = rec.offset();
int partition = rec.partition();
long timestamp = rec.timestamp();
System.out.println("key = " + key+"\t"+"offset = " + offset+"\t"+"value = " + value+"\t"+"partition = " + partition+"\t"+"timestamp = " + timestamp+"\t"+"topic = " + topic);
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
// Leave the group cleanly and release sockets.
consumer.close();
}
}
}