import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import java.io.IOException;
import java.util.Properties;
/*
 * Producer: loads broker/serializer settings from producer.properties
 * on the classpath and sends 20 messages to the "hadoop" topic.
 * */
public class Demo5 {
    /**
     * Entry point: loads producer configuration from the classpath resource
     * "producer.properties", then sends the same record 20 times to the
     * "hadoop" topic and closes the producer.
     *
     * @param args unused command-line arguments
     * @throws IOException if the properties resource cannot be read
     */
    public static void main(String[] args) throws IOException {
        Properties properties = new Properties();
        // NOTE(review): getResourceAsStream returns null when the resource is
        // missing, which makes load(...) throw NPE — confirm the file ships
        // on the classpath.
        properties.load(Demo5.class.getClassLoader().getResourceAsStream("producer.properties"));
        // try-with-resources flushes and closes the producer even if send()
        // throws (the original raw KafkaProducer type is now parameterized,
        // eliminating the unchecked-conversion warning).
        try (KafkaProducer<Object, String> producer = new KafkaProducer<>(properties)) {
            // The record to produce: topic "hadoop", value "nihao".
            ProducerRecord<Object, String> record = new ProducerRecord<>("hadoop", "nihao");
            // Send the record 20 times.
            for (int i = 0; i < 20; i++) {
                producer.send(record);
            }
        }
    }
}
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.io.IOException;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
/*
 * Consumer: loads settings from consumer.properties on the classpath,
 * subscribes to the "hadoop" topic, and prints every record it polls.
 * */
public class Demo6 {
    /**
     * Entry point: loads consumer configuration from the classpath resource
     * "consumer.properties", subscribes to the "hadoop" topic, and polls
     * forever, printing each record's offset, key, and value.
     *
     * @param args unused command-line arguments
     * @throws IOException if the properties resource cannot be read
     */
    public static void main(String[] args) throws IOException {
        Properties properties = new Properties();
        // NOTE(review): getResourceAsStream returns null when the resource is
        // missing, which makes load(...) throw NPE — confirm the file ships
        // on the classpath.
        properties.load(Demo6.class.getClassLoader().getResourceAsStream("consumer.properties"));
        // try-with-resources closes the consumer (leaving its group cleanly)
        // if polling ever throws; the raw KafkaConsumer type is now
        // parameterized to match the ConsumerRecords<String, String> below.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(properties)) {
            // A consumer must subscribe to a topic before it can receive data.
            consumer.subscribe(Arrays.asList("hadoop"));
            while (true) {
                // poll(Duration) replaces the deprecated poll(long); waits up
                // to 1 second for a batch of records.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                // A poll returns a batch, so iterate over every record in it.
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d, key = %s, value = %s%n",
                            record.offset(), record.key(), record.value());
                }
            }
        }
    }
}