1 springboot中文件配置
#kafka服务所在IP 可以是多个,用逗号隔开
spring.kafka.bootstrap-servers=192.168.1.26:9092
#消费者分组ID(可为任意字符串)。注意:下文的hosts映射并非因group-id而需要,而是因为Kafka broker以主机名kafka-1对外广播地址,客户端必须能解析该主机名
spring.kafka.consumer.group-id=kafka-1
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.bootstrap-servers=192.168.1.26:9092
2 修改本机电脑host文件
192.168.1.26 kafka-1
3 maven中增加依赖(注意:@KafkaListener、KafkaTemplate 等注解和类由 spring-kafka 提供,必须引入 spring-kafka 依赖;kafka_2.11 仅是原生客户端/服务端包)
<!-- https://mvnrepository.com/artifact/org.springframework.kafka/spring-kafka -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka_2.11 -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka_2.11</artifactId>
<version>0.11.0.1</version>
</dependency>
4.测试类
package org.atm.dc.app.web;
import com.alibaba.fastjson.JSONObject;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.atm.dc.app.constant.Constant;
import org.atm.dc.app.service.ScheduledService;
import org.atm.dc.app.util.Mongodb;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.Map;
/**
* Created by hh on 2019/4/19.14:00
*/
@RequestMapping("/test")
@RestController
public class KafkaTestController {

    // Fix: original used LoggerFactory.getLogger(Test.class) — `Test` is not imported
    // (compile error) and would mislabel log output; the logger must name this class.
    private static final Logger logger = LoggerFactory.getLogger(KafkaTestController.class);

    @Autowired
    private KafkaTemplate<String, Object> kafkaTemplate;

    @Autowired
    private ScheduledService scheduledService;

    /**
     * Test receiver: consumes records from the {@code EFFECT_DATA} topic and logs
     * topic, key and value.
     *
     * @param consumer the record delivered by the listener container
     * @return always {@code null}; the listener return value is unused
     */
    @KafkaListener(topics = "EFFECT_DATA")
    public Object testkafka(ConsumerRecord<?, ?> consumer) {
        logger.info("{} - {}:{}", consumer.topic(), consumer.key(), consumer.value());
        return null;
    }

    /**
     * Test sender: pushes effect data to Kafka, one message per table —
     * key = table name, value = string form of that table's JSON-object list.
     *
     * @return always {@code null}
     */
    @RequestMapping("/push")
    public Object pushTest() {
        Map<String, List<JSONObject>> listMap = scheduledService.pushEffectData();
        for (Map.Entry<String, List<JSONObject>> entry : listMap.entrySet()) {
            List<JSONObject> rows = entry.getValue();
            // Skip tables with no pending rows.
            if (rows != null && !rows.isEmpty()) {
                // NOTE(review): List.toString() yields "[a, b]" which is not strict JSON;
                // consumers must parse it accordingly — consider JSON.toJSONString(rows)
                // instead, but verify existing consumers first.
                // NOTE(review): constant name EFFECT_TPOIC looks like a typo for
                // EFFECT_TOPIC — confirm in Constant before renaming (callers depend on it).
                kafkaTemplate.send(Constant.EFFECT_TPOIC, entry.getKey(), rows.toString());
            }
        }
        return null;
    }
}
这里使用kafkaTemplate进行消息的发送,发送的key为表名,value为一个list集合的字符串形式(集合中放入了很多json对象,注意这是List.toString()的结果而非严格的JSON),消费者需要对生产者提供的数据反序列化转换为list集合才可使用。
5. 控制台打印log