1、环境
Windows10、jdk1.8、idea、zookeeper3.4.12、kafka_2.12-1.0.0
2、zookeeper 集群搭建
https://www.cnblogs.com/xuxiuxiu/p/5868481.html
3、kafka集群搭建
https://www.cnblogs.com/lentoo/p/7785004.html
4、集成使用
springboot的配置:
# ---- Kafka connection ----
# Broker list; separate cluster addresses with commas.
spring.kafka.bootstrap-servers=127.0.0.1:9091,127.0.0.1:9092,127.0.0.1:9093

# ---- Producer (defaults are fine for most settings; the important ones are listed) ----
# NOTE(review): batch-size is measured in bytes, so 16 effectively disables batching — confirm this is intended.
spring.kafka.producer.batch-size=16
# Number of retries after a failed send.
spring.kafka.producer.retries=2
# Producer buffer memory: 32 MB.
spring.kafka.producer.buffer-memory=33554432
# Key/value serializers.
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer

# ---- Consumer ----
# Where to start when there is no committed offset (or the offset no longer exists):
# one of latest | earliest | none.
spring.kafka.consumer.auto-offset-reset=latest
# Commit offsets automatically.
spring.kafka.consumer.enable-auto-commit=true
# Interval between automatic offset commits.
spring.kafka.consumer.auto-commit-interval=100
# Key/value deserializers.
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
# Must match a group configured in kafka/config/consumer.properties.
spring.kafka.consumer.group-id=test-consumer-group
kafka消息topic 自定义:
package com.zh.service.kafka;

/**
 * Kafka topic name constants shared by the producer and consumers.
 *
 * @author zhangH
 * @date 2018/10/29
 */
public final class KFKTopic {

    /** Default/fallback topic. */
    public static final String K_DEFAULT = "k_default";

    /** User base-profile messages. */
    public static final String USER_BASE = "USER_BASE";

    /** User address messages. */
    public static final String USER_ADDRESS = "USER_ADDRESS";

    /** Constants holder — not instantiable. */
    private KFKTopic() {
        throw new AssertionError("No instances");
    }
}
kafka消息生产者
package com.zh.service.kafka; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.stereotype.Component; import javax.annotation.Resource; /** * @author zhangH * @date 2018/10/29 */ @Component public class KFKMessageProduce { @Resource private KafkaTemplate kafkaTemplate; public void sendMsg(String topic, Object context) { try { kafkaTemplate.send(topic, context); } catch (Exception e) { e.printStackTrace(); } } }
kafka消息消费者:
package com.zh.service.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * Kafka listeners for the demo topics.
 *
 * @author zhangH
 * @date 2018/10/29
 */
@Component
public class KFKMessageConsumer {

    // Cache the logger once instead of calling LoggerFactory.getLogger() per message.
    private static final Logger log = LoggerFactory.getLogger("kafka");

    /** Consumes the default topic. */
    @KafkaListener(topics = {KFKTopic.K_DEFAULT})
    public void listenT(ConsumerRecord<?, ?> record) {
        // record.topic() reports the actual source topic of this record.
        log.info("当前监听主题:{}, msg : {}", record.topic(), record.value());
    }

    /**
     * Consumes both user topics. Logs {@code record.topic()} instead of a hard-coded
     * constant: the original always printed USER_BASE even for USER_ADDRESS records.
     */
    @KafkaListener(topics = {KFKTopic.USER_BASE, KFKTopic.USER_ADDRESS})
    public void listenT2(ConsumerRecord<?, ?> record) {
        log.info("当前监听主题:{}, msg : {}", record.topic(), record.value());
    }
}
测试类:
package com.zh.kafkas;

import com.zh.common.BaseTest;
import com.zh.service.kafka.DataBean;
import com.zh.service.kafka.KFKMessageProduce;
import com.zh.service.kafka.KFKTopic;
import org.junit.Test;

import javax.annotation.Resource;

/**
 * Round-robins ten messages across the three demo topics.
 *
 * @author zhangH
 * @date 2018/10/29
 */
public class KaFKaTest extends BaseTest {

    @Resource
    private KFKMessageProduce kfkMessageProduce;

    @Test
    public void test() {
        for (int i = 0; i < 10; i++) {
            switch (i % 3) {
                case 1:
                    kfkMessageProduce.sendMsg(KFKTopic.USER_BASE, new DataBean("test1", i).toString());
                    break;
                case 2:
                    kfkMessageProduce.sendMsg(KFKTopic.USER_ADDRESS, new DataBean("test2", i).toString());
                    break;
                default:
                    kfkMessageProduce.sendMsg(KFKTopic.K_DEFAULT, new DataBean("test", i).toString());
                    break;
            }
        }
    }
}
基础测试类BaseTest :
package com.zh.common;

import com.zh.SpringBootDemoApplication;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;

/**
 * Shared base class for integration tests: boots the Spring application context
 * so subclasses can use dependency injection, annotations, etc.
 *
 * @author zhangH
 * @date 2018/10/29
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = SpringBootDemoApplication.class)
@WebAppConfiguration
public class BaseTest {
}
总结:
kafka跟activemq的使用大同小异,建议针对性学习,一通百通,同类型的针对一个学习,其余的对比学习,见效快。
一天一点积累,终会得到想要的结果。我为我代言。
!!!