Java 实现 Kafka 基本的生产者和消费者 Demo

第一、环境准备

1、JDK

2、Zookeeper

3、Kafka

第二、实现效果

在 Postman 或 Chrome 中,调用如下接口:http://localhost:9526/message/create

36b47e8fa407

生产者Producer  Console显示miniooc send message:

{

"uuid": "d3660a0d-ca78-44fa-9d78-b41f5101bc25",

"date": "2019-07-10 09:09:16"

}

消费者Consumer  Console显示miniooc receive message:

{

"uuid": "d3660a0d-ca78-44fa-9d78-b41f5101bc25",

"date": "2019-07-10 09:09:16"

}

第三、启动Zookeeper和Kafka

启动Zookeeper

36b47e8fa407

启动Kafka

36b47e8fa407

第四、生产者源码

1、目录结构

36b47e8fa407

2、POM

<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka</artifactId>
    </dependency>
    <dependency>
        <groupId>com.google.code.gson</groupId>
        <artifactId>gson</artifactId>
        <version>2.8.5</version>
    </dependency>
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.16.22</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka-test</artifactId>
        <scope>test</scope>
    </dependency>
</dependencies>

3、配置文件application.properties

server.port=9526

spring.application.name=kafka-producer

kafka.bootstrap.servers=127.0.0.1:9092

kafka.topic.order=topic-order

kafka.group.id=group-order

4、实体类MessageBean

package com.example.producer.entity;

import lombok.Data;

import java.io.Serializable;
import java.util.Date;

/**
 * Message entity exchanged between the Kafka producer and consumer.
 * Serialized to/from JSON with Gson; the field names ("uuid", "date") are
 * part of the wire format — do not rename them.
 */
@Data
public class MessageBean implements Serializable {

    /** Explicit version id for the Serializable contract. */
    private static final long serialVersionUID = 1L;

    /** Unique id of the message (a random UUID string). */
    private String uuid;

    /** Creation time of the message. */
    private Date date;

    /** No-arg constructor required by deserialization frameworks. */
    public MessageBean() {
    }

    public MessageBean(String uuid, Date date) {
        this.uuid = uuid;
        this.date = date;
    }

    // NOTE(review): @Data already generates accessors; the explicit ones below
    // are kept so behavior is unchanged for any caller relying on them.
    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

    @Override
    public String toString() {
        return "MessageBean{" +
                "uuid='" + uuid + '\'' +
                ", date=" + date +
                '}';
    }
}

5、Kafka生产者配置类和生产者类

package com.example.producer.produce;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka producer configuration: exposes a String/String KafkaTemplate bean.
 */
@EnableKafka
@Configuration
public class KafkaProducerConfig {

    /** Broker list, e.g. 127.0.0.1:9092 (property kafka.bootstrap.servers). */
    @Value("${kafka.bootstrap.servers}")
    private String bootstrapServers;

    /** Template used by application code to publish String messages. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    /** Builds the producer factory with explicit, String-typed serialization. */
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        // No retries: a failed send is dropped (demo setting).
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 4096);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 40960);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }
}

package com.example.producer.produce;

import com.example.producer.entity.MessageBean;

import com.google.gson.Gson;

import com.google.gson.GsonBuilder;

import lombok.extern.java.Log;

import org.slf4j.Logger;

import org.slf4j.LoggerFactory;

import org.springframework.beans.factory.annotation.Value;

import org.springframework.kafka.core.KafkaTemplate;

import org.springframework.stereotype.Component;

import javax.annotation.Resource;

/**

* Kafka消息生产类

*/

@Log

@Component

public class KafkaProducer {

Loggerlog = LoggerFactory.getLogger(KafkaProducer.class);

@Resource

private KafkaTemplatekafkaTemplate;

@Value("${kafka.topic.order}")

private StringtopicOrder;

/**

* 发送消息

*

* @param messageBean 消息实例

*/

public void sendMessage(MessageBean messageBean){

GsonBuilder builder =new GsonBuilder();

builder.setPrettyPrinting();

builder.setDateFormat("yyyy-MM-dd HH:mm:ss");

Gson gson = builder.create();

// 将消息实例序列化为json格式的字符串

String message = gson.toJson(messageBean);

// 发送消息

kafkaTemplate.send(topicOrder,message);

// 打印消息

log.info("\nminiooc send message:\n" + message);

}

}

6、Kafka Controller调用类

package com.example.producer.controller;

import com.example.producer.entity.MessageBean;
import com.example.producer.produce.KafkaProducer;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.ResponseBody;

import javax.annotation.Resource;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Message controller: GET /message/create builds a MessageBean, publishes it
 * to Kafka, and returns the result as JSON.
 */
@Controller
@RequestMapping("/message")
public class MessageController {

    @Resource
    private KafkaProducer kafkaProducer;

    /**
     * Creates a message with a random UUID and the current time and sends it
     * to Kafka via {@link KafkaProducer#sendMessage}.
     *
     * @return result map: resultCode (1 = success), resultMsg, messageBean
     */
    @RequestMapping("/create")
    @ResponseBody
    public Map<String, Object> create() {
        // Build the message.
        MessageBean messageBean = new MessageBean();
        String uuid = UUID.randomUUID().toString();
        messageBean.setUuid(uuid);
        messageBean.setDate(new Date());
        // Send the message to Kafka.
        kafkaProducer.sendMessage(messageBean);
        Map<String, Object> model = new HashMap<>();
        // Return the success payload.
        model.put("resultCode", 1);
        model.put("resultMsg", "success");
        model.put("messageBean", messageBean);
        return model;
    }
}

第五、消费者源码

1、目录结构

36b47e8fa407

2、POM

<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka</artifactId>
    </dependency>
    <dependency>
        <groupId>com.google.code.gson</groupId>
        <artifactId>gson</artifactId>
        <version>2.8.5</version>
    </dependency>
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
        <version>1.16.22</version>
        <scope>provided</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka-test</artifactId>
        <scope>test</scope>
    </dependency>
</dependencies>

3、配置文件application.properties

server.port=9527

spring.application.name=kafka-consumer

kafka.bootstrap.servers=127.0.0.1:9092

kafka.topic.order=topic-order

kafka.group.id=group-order

4、实体类MessageBean

package com.example.consumer.controller.entity;

import lombok.Data;

import java.io.Serializable;
import java.util.Date;

/**
 * Message entity exchanged between the Kafka producer and consumer
 * (consumer-side copy). Serialized to/from JSON with Gson; the field names
 * ("uuid", "date") are part of the wire format — do not rename them.
 */
@Data
public class MessageBean implements Serializable {

    /** Explicit version id for the Serializable contract. */
    private static final long serialVersionUID = 1L;

    /** Unique id of the message (a random UUID string). */
    private String uuid;

    /** Creation time of the message. */
    private Date date;

    /** No-arg constructor required by deserialization frameworks. */
    public MessageBean() {
    }

    public MessageBean(String uuid, Date date) {
        this.uuid = uuid;
        this.date = date;
    }

    // NOTE(review): @Data already generates accessors; the explicit ones below
    // are kept so behavior is unchanged for any caller relying on them.
    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

    @Override
    public String toString() {
        return "MessageBean{" +
                "uuid='" + uuid + '\'' +
                ", date=" + date +
                '}';
    }
}

5、Kafka消费者配置和消费监听类

package com.example.consumer.controller.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka consumer configuration: a listener container factory with 10
 * concurrent consumers and auto-committed offsets.
 */
@EnableKafka
@Configuration
public class KafkaConsumerConfig {

    /** Broker list, e.g. 127.0.0.1:9092 (property kafka.bootstrap.servers). */
    @Value("${kafka.bootstrap.servers}")
    private String bootstrapServers;

    /** Consumer group id (property kafka.group.id). */
    @Value("${kafka.group.id}")
    private String groupId;

    /** Container factory referenced by name from @KafkaListener. */
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(cnsumerFactory());
        // 10 concurrent consumer threads per listener.
        factory.setConcurrency(10);
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

    // NOTE(review): method name keeps the original's typo ("cnsumer") so any
    // external caller is unaffected; consider renaming to consumerFactory.
    public ConsumerFactory<String, String> cnsumerFactory() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true); // auto-commit offsets
        // One record per poll (demo setting; hurts throughput in production).
        propsMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 1);
        propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
        propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        // Start from the earliest offset when no committed offset exists.
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        return new DefaultKafkaConsumerFactory<>(propsMap);
    }
}

package com.example.consumer.controller.consumer;

import com.example.consumer.controller.entity.MessageBean;

import com.google.gson.Gson;

import com.google.gson.GsonBuilder;

import com.google.gson.reflect.TypeToken;

import lombok.extern.java.Log;

import org.slf4j.Logger;

import org.slf4j.LoggerFactory;

import org.springframework.kafka.annotation.KafkaListener;

import org.springframework.messaging.handler.annotation.Payload;

import org.springframework.stereotype.Component;

/**

* Kafka消息消费类

*/

@Log

@Component

public class KafkaConsumer {

Loggerlog = LoggerFactory.getLogger(KafkaConsumer.class);

@KafkaListener(topics ="${kafka.topic.order}",containerFactory ="kafkaListenerContainerFactory")

public void consume(@Payload String message){

GsonBuilder builder =new GsonBuilder();

builder.setPrettyPrinting();

builder.setDateFormat("yyyy-MM-dd HH:mm:ss");

Gson gson = builder.create();

// 将接收到的消息反序列化消息实例

MessageBean messageBean = gson.fromJson(message,new MessageBean().getClass());

// 将消息实例序列化为json格式的字符串

String json = gson.toJson(messageBean);

// 打印消息

log.info("\nminiooc receive message:\n" + json);

}

}

  • 0
    点赞
  • 0
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值