消费者（Consumer — Kafka 消费者示例）
package kafka;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class Customer {
    /**
     * Kafka consumer example: subscribes to the "productscanlog" topic and
     * prints every record's key and value in an endless poll loop.
     */
    public static void main(String[] args) {
        // Connection and group configuration.
        Properties props = new Properties();
        // Kafka broker address
        props.put("bootstrap.servers", "surfing1:9092");
        // A consumer group id is mandatory when using subscribe()
        props.put("group.id", "productscanlog");
        // Deserializers for record keys and values (class names, consistent
        // with how the admin example configures serializers)
        props.put("key.deserializer", StringDeserializer.class.getName());
        props.put("value.deserializer", StringDeserializer.class.getName());

        // try-with-resources guarantees the consumer is closed if the loop is
        // ever left abnormally (e.g. WakeupException), releasing its group
        // membership promptly instead of waiting for the session timeout.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the topic
            consumer.subscribe(Arrays.asList("productscanlog"));
            // Poll the broker forever, printing each record received.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    // (dropped a redundant `+ ""` from the original concatenation;
                    // printed output is unchanged)
                    System.out.println("key:" + record.key() + ",value:" + record.value());
                }
            }
        }
    }
}
生产者（Producer — Kafka 生产者示例）
package kafka;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
public class Producer {
    /**
     * Kafka producer example: sends a single key/value record
     * ("userName" -> "lc") to the "productscanlog" topic and exits.
     */
    public static void main(String[] args) {
        // Connection configuration.
        Properties props = new Properties();
        // Kafka broker address
        props.put("bootstrap.servers", "surfing1:9092");
        // Serializers for record keys and values
        props.put("key.serializer", StringSerializer.class);
        props.put("value.serializer", StringSerializer.class);

        // try-with-resources closes the producer even if send() throws;
        // close() also flushes any buffered records before returning.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // Fully parameterized record type — the original used a raw
            // ProducerRecord, which triggers an unchecked-assignment warning.
            ProducerRecord<String, String> record =
                    new ProducerRecord<>("productscanlog", "userName", "lc");
            // send() is asynchronous; the implicit close() flushes it out.
            producer.send(record);
        }
    }
}
创建主题（Create topic — 使用 AdminClient 创建主题示例）
package kafka;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.KafkaAdminClient;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Arrays;
import java.util.Properties;
public class Client {
    /**
     * Kafka admin example: creates the topic "test1" with 2 partitions and a
     * replication factor of 1.
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        // Kafka broker address — the only configuration AdminClient needs here.
        // (The original also set producer-only options — acks, retries,
        // batch.size, linger.ms, key/value serializers — which AdminClient
        // never reads and which only trigger "unused config" warnings.)
        props.put("bootstrap.servers", "surfing1:9092");

        // try-with-resources closes the client; AdminClient.close() waits for
        // in-flight requests such as the topic creation below to finish.
        try (AdminClient client = KafkaAdminClient.create(props)) {
            // Topic "test1": 2 partitions, replication factor 1.
            // (The original comment claimed 5 partitions; the code creates 2.)
            NewTopic topic = new NewTopic("test1", 2, (short) 1);
            client.createTopics(Arrays.asList(topic));
        }
    }
}
消费特定偏移量（手动提交特定偏移量的消费片段）
// Tracks, per partition, the offset of the last record seen, so offsets can be
// committed manually instead of relying on auto-commit.
// NOTE(review): this is a fragment — `consumer` is declared outside this snippet.
Map<TopicPartition,OffsetAndMetadata> currentOffset = new HashMap<>();
while (true) {
// NOTE(review): poll(long) is deprecated since Kafka 2.0 — prefer
// poll(Duration.ofMillis(100)) as used in the Customer example above.
ConsumerRecords<String, String> records = consumer.poll(100);
for (ConsumerRecord<String, String> record : records) {
System.out.printf("offset = %d, key = %s, value = %s%n", record.offset(), record.key(), record.value());
// Remember this partition's offset for a later manual commit.
// NOTE(review): the committed offset is conventionally record.offset() + 1
// (the NEXT offset to consume); committing record.offset() causes this record
// to be re-delivered after a restart — confirm this is intended.
currentOffset.put(new TopicPartition(record.topic(),record.partition()),new OffsetAndMetadata(record.offset(),"metadata"));
try {
// Placeholder for real message processing (the printed Chinese text means
// "simulate the case where message processing fails").
System.out.println("模拟消息处理失败的情况");
} catch (Exception e) {
// Asynchronous commit with no completion callback (fire-and-forget).
// NOTE(review): committing only inside the catch block looks inverted —
// offsets are normally committed after SUCCESSFUL processing; verify intent.
consumer.commitAsync(currentOffset,null);
}
}
}