生产者 (Kafka Producer)
package nj.zb.kb22.kafka;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import java.util.Properties;
import java.util.Scanner;
/**
 * Interactive Kafka producer: reads whitespace-delimited tokens from stdin
 * and publishes each one as a value-only record to the "kb21" topic.
 *
 * <p>Runs forever; terminate the process to stop. Uses acks=0
 * (fire-and-forget), so sends are not acknowledged by the broker and
 * messages may be lost — acceptable here for a console demo only.
 */
public class Myproducer {
public static void main(String[] args) {
Properties properties = new Properties();
// Broker address of the single-node test cluster.
properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.153.141:9092");
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
// acks=0: do not wait for broker acknowledgement (lowest latency, no delivery guarantee).
properties.put(ProducerConfig.ACKS_CONFIG, "0");
KafkaProducer<String, String> producer = new KafkaProducer<String, String>(properties);
// Create the Scanner once, outside the loop — the original allocated a new
// Scanner on every iteration, which is wasteful and can lose buffered input.
Scanner scanner = new Scanner(System.in);
while (true) {
System.out.println("输入生产者内容:");
String msg = scanner.next();
// No key is supplied, so the partitioner distributes records across partitions.
ProducerRecord<String, String> record = new ProducerRecord<String, String>("kb21", msg);
producer.send(record);
}
}
}
消费者 (Kafka Consumer)
package nj.zb.kb22.kafka;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.StringDeserializer;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.Set;
/**
 * Kafka consumer for the "kb21" topic in group "GROUP1": polls forever and
 * prints each record's topic, partition, offset, key, value, and timestamp.
 *
 * <p>Auto-commit is disabled, so offsets are committed manually (asynchronously)
 * after each processed batch; without that commit the group's position would
 * never advance and every restart would re-read from "earliest".
 */
public class Myconsumer {
public static void main(String[] args) {
Properties properties = new Properties();
// Broker address of the single-node test cluster.
properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.153.141:9092");
properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
// With no committed offset, start from the beginning of the topic.
properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
// Offsets are committed manually below, after records are processed.
properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
properties.put(ConsumerConfig.GROUP_ID_CONFIG, "GROUP1");
KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(properties);
consumer.subscribe(Collections.singleton("kb21"));
// Poll timeout is loop-invariant; create it once.
Duration duration = Duration.ofMillis(100);
while (true) {
ConsumerRecords<String, String> records = consumer.poll(duration);
for (ConsumerRecord<String, String> cr : records) {
String topic = cr.topic();
int partition = cr.partition();
long offset = cr.offset();
String key = cr.key();
String value = cr.value();
long timestamp = cr.timestamp();
String name = Thread.currentThread().getName();
System.out.println("threadname:"+name+" topic:"+topic+" partition:"+partition+" offset:"+offset+" key:"+key
+" value:"+value+" timestamp:"+timestamp);
}
// BUG FIX: auto-commit is disabled, but the original never committed, so
// consumed offsets were lost on restart. Commit asynchronously after each
// batch (at-least-once delivery without blocking the poll loop).
if (!records.isEmpty()) {
consumer.commitAsync();
}
}
}
}