1. Create a Maven project
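If you need a starting point, a minimal pom.xml skeleton could look like the sketch below (the groupId/artifactId coordinates are placeholders, not from the original project); the dependencies from step 2 go inside the <dependencies> element:

<project xmlns="http://maven.apache.org/POM/4.0.0">
    <modelVersion>4.0.0</modelVersion>
    <!-- placeholder coordinates; use your own -->
    <groupId>com.zpark</groupId>
    <artifactId>kafkatest</artifactId>
    <version>1.0-SNAPSHOT</version>
    <dependencies>
        <!-- the Kafka dependencies from step 2 go here -->
    </dependencies>
</project>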
2. Add the Kafka dependencies
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.12</artifactId>
    <version>2.2.0</version>
</dependency>
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>2.2.0</version>
</dependency>
Note: when I added the dependencies, kafka_2.12 kept being flagged in red. Fix: press Alt+Enter, pick Edit intention settings under the first item (Convert tag to attribute) and change the severity from error to warning. I ignored the red underline in the Maven repository view; it does not affect running the project.
3. Producer: as the name suggests, a producer produces data, and that data has to be sent on to consumers. So think of the steps: create a KafkaProducer object, wrap the message in a container (a ProducerRecord), send it, and don't forget to close the producer when done so resources are not wasted.
package com.zpark.kafkatest.one;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.util.Properties;
import java.util.concurrent.Future;

public class ProducerDemo {
    public static void main(String[] args) {
        send();
    }

    // Producer configuration; the values are hard-coded here for simplicity
    public static void send() {
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "hdp-2:9092");
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(prop);
        // Message container: topic "test", value "haha"
        ProducerRecord<String, String> record = new ProducerRecord<String, String>("test", "haha");
        Future<RecordMetadata> send = producer.send(record);
        producer.close();
    }
}
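The send() above returns a Future<RecordMetadata> but never looks at it. If you want to confirm the broker actually accepted the record, a minimal sketch is below; sendAndConfirm is a hypothetical variant of send() (not part of the original code), while producer.send(record).get() and the RecordMetadata partition()/offset() accessors are standard kafka-clients API:

    // Hypothetical variant of send() that blocks until the broker acknowledges the record.
    public static void sendAndConfirm() throws Exception {
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "hdp-2:9092");
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        KafkaProducer<String, String> producer = new KafkaProducer<>(prop);
        ProducerRecord<String, String> record = new ProducerRecord<>("test", "haha");
        // get() waits for the acknowledgement and tells us where the record landed
        RecordMetadata meta = producer.send(record).get();
        System.out.println("partition=" + meta.partition() + ", offset=" + meta.offset());
        producer.close();
    }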
Note: I made a mistake here (facepalm) that probably few people make. Laziness struck: instead of writing the producer config from scratch, I copied it straight from the consumer, which fails with:
Missing required configuration "key.deserializer" which has no default value.
Mind the details: these lines are not the same as the corresponding code in the consumer:
prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
4. Create the consumer: the consumer receives the data. Create a KafkaConsumer and subscribe to the topic; since the producer does not send just once, the consumer must keep polling for new data, which calls for an infinite loop. Define a container for the received records, format and write them out, and close the consumer at the end.
package com.zpark.kafkatest.one;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class ConsumerDemo {
    public static void main(String[] args) {
        receive();
    }

    public static void receive() {
        Properties prop = new Properties();
        prop.put("bootstrap.servers", "hdp-3:9092");
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("group.id", "Jazyn");
        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(prop);
        consumer.subscribe(Collections.singleton("test"));
        try {
            FileOutputStream fos = new FileOutputStream("f:/cc.txt");
            OutputStreamWriter osw = new OutputStreamWriter(fos);
            BufferedWriter bw = new BufferedWriter(osw);
            while (true) {
                // Poll the broker for new records
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    // Write each record's msg to the file
                    String msg = "key:" + record.key() + ",value:" + record.value()
                            + ",offset:" + record.offset() + ",topic:" + record.topic() + "\r\n";
                    System.out.printf("topic = %s, offset = %s, key = %s, value = %s%n",
                            record.topic(), record.offset(), record.key(), record.value());
                    bw.write(msg);
                    bw.flush();
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            consumer.close();
        }
    }
}
Check the F drive: the file has been created and contains the msg content.
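As a quick sanity check, the sketch below (CheckOutput is a hypothetical helper, not part of the original code) prints the file back to the console; the path f:/cc.txt is the one the consumer writes to:

package com.zpark.kafkatest.one;

import java.nio.file.Files;
import java.nio.file.Paths;

// Hypothetical helper: print f:/cc.txt back out to verify the consumer wrote the messages.
public class CheckOutput {
    public static void main(String[] args) throws Exception {
        Files.lines(Paths.get("f:/cc.txt")).forEach(System.out::println);
    }
}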