public class ProducerDemo {
    // Topic to publish demo messages to; must match the consumer's topic.
    private static final String topic = "kafkaTopic";

    /**
     * Publishes one "key:i" / "value:i" record per second to {@code kafkaTopic}, forever.
     *
     * @param args unused
     * @throws Exception if interrupted while sleeping between sends
     */
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Single broker; for a multi-node cluster use a comma-separated host:port list.
        props.put("bootstrap.servers", "192.168.174.127:9092");
        // acks=0 is fire-and-forget: the broker sends no acknowledgement, so messages
        // may be lost silently and "retries" has no effect. Use "1" or "all" if
        // delivery guarantees matter.
        props.put("acks", "0");
        // NOTE: "group.id" removed — it is a consumer-only config; the producer
        // ignores it and logs an unused-config warning.
        props.put("retries", "0");
        // Serialize both key and value as strings.
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // try-with-resources: KafkaProducer is AutoCloseable, so it is flushed and
        // closed (releasing sockets and buffers) if the loop ever exits via an exception.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            int i = 1;
            // Business messages could equally come from a file, an in-memory DB,
            // or a socket instead of this synthetic counter.
            while (true) {
                Thread.sleep(1000);
                producer.send(new ProducerRecord<>(topic, "key:" + i, "value:" + i));
                System.out.println("key:" + i + " " + "value:" + i);
                i++;
            }
        }
    }
}
public class ConsumerDemo {
    private static final Logger logger = LoggerFactory.getLogger(ConsumerDemo.class);
    // Topic to subscribe to; must match the producer's topic.
    private static final String topic = "kafkaTopic";

    /**
     * Polls {@code kafkaTopic} forever and logs each record's offset, key and value.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Properties props = new Properties();
        // Single broker; for a multi-node cluster use a comma-separated host:port list.
        props.put("bootstrap.servers", "192.168.174.127:9092");
        // Consumer group id — partitions are balanced across members of this group.
        props.put("group.id", "1111");
        // Auto-commit offsets once per second.
        props.put("enable.auto.commit", "true");
        props.put("auto.commit.interval.ms", "1000");
        // Start from the earliest offset when this group has no committed offset yet.
        props.put("auto.offset.reset", "earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // try-with-resources: KafkaConsumer is AutoCloseable, so it leaves the group
        // cleanly (committing per config, releasing sockets) if the loop ever exits
        // via an exception.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the topic.
            consumer.subscribe(Arrays.asList(topic));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                for (ConsumerRecord<String, String> record : records) {
                    // Parameterized SLF4J logging — the class already declares a
                    // logger, so use it instead of System.out.
                    logger.info("offset = {}, key = {}, value = {}",
                            record.offset(), record.key(), record.value());
                }
            }
        }
    }
}
kafka服务器版本:kafka_2.12-2.3.0.tgz
java API kafka客户端版本:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.3.0</version>
</dependency>
大神们帮忙看看!