maven依赖
<properties>
<kafka.version>2.3.0</kafka.version>
</properties>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka.version}</version>
</dependency>
配置修改:
broker1的server1.properties
zookeeper.connect=192.168.**:2181
listeners=PLAINTEXT://192.168.**:9092
broker.id=0
注:broker2的server2.properties由broker1的server1.properties复制而来,仅修改以下两项:
broker2的server2.properties
listeners=PLAINTEXT://192.168.**:9093
broker.id=1
注:broker3的server3.properties同样由broker1的server1.properties复制而来,仅修改以下两项:
broker3的server3.properties
listeners=PLAINTEXT://192.168.**:9094
broker.id=2
启动命令
nohup bash kafka-server-start.sh ../config/server1.properties &
nohup bash kafka-server-start.sh ../config/server2.properties &
nohup bash kafka-server-start.sh ../config/server3.properties &
检查是否启动
jps -m
会出现3个Kafka进程
Kafka生产者
package kafkaDemo;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.log4j.Logger;
import java.util.Properties;
public class KafkaProducerDemo {
    /**
     * Demo producer: sends 10 string records (key=i, value=i+1) to the topic
     * named by the first CLI argument, then flushes and closes.
     *
     * @param args args[0] is the target topic name (required)
     */
    public static void main(String[] args) {
        // The topic is mandatory — exit early with a usage hint if missing.
        if (args.length == 0) {
            System.out.println("YOU HAVE TO INPUT YOUR TOPIC");
            System.exit(1);
        }
        String topic = args[0]; // was hard-coded to "MyTopicTwo"; use the argument we demanded
        Properties conf = new Properties();
        conf.put("bootstrap.servers", "192.168.**:9092");
        // acks=all: wait for the full in-sync replica set to acknowledge each record.
        conf.put("acks", "all");
        conf.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        conf.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // try-with-resources guarantees close() (which flushes buffered records)
        // even if send() throws; also fixes the raw-type declaration.
        try (KafkaProducer<String, String> producer = new KafkaProducer<>(conf)) {
            for (int i = 0; i < 10; i++) {
                // send() is asynchronous; records are flushed by close() below.
                producer.send(new ProducerRecord<>(topic, Integer.toString(i), Integer.toString(i + 1)));
                System.out.println("HAVE SENT ONE MESSAGE");
            }
        }
    }
}
kafka消费者
package kafkaDemo;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;
public class KafkaConsumerDemo {
    /**
     * Demo consumer: subscribes to "MyTopicTwo" in group "test" and prints
     * the offset/key/value of every record, polling forever.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        Properties conf = new Properties();
        conf.put("bootstrap.servers", "192.168.**:9092");
        conf.put("group.id", "test");
        // Offsets are committed automatically on the default 5s interval.
        conf.put("enable.auto.commit", "true");
        conf.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        conf.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // try-with-resources fixes the raw-type declaration and guarantees
        // close() (graceful group leave) if poll() ever throws.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(conf)) {
            consumer.subscribe(Arrays.asList("MyTopicTwo"));
            // Removed unused local `int i = 0` from the original.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset = %d,key = %s,value=%s\n", record.offset(), record.key(), record.value());
                }
            }
        }
    }
}