Code repository: https://github.com/itwjl/kafka-demo.git
1. Install ZooKeeper with Docker
docker run --privileged=true \
--name zookeeper \
-p 2181:2181 \
-d wurstmeister/zookeeper
2. Install Kafka with Docker
Kafka config link: kafka-config
Note: replace 192.168.100.246 in the command below with your host's real IP address.
docker run \
-p 9092:9092 \
--name kafka \
--privileged=true \
-e KAFKA_BROKER_ID=0 \
-e KAFKA_ZOOKEEPER_CONNECT=192.168.100.246:2181 \
-e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://192.168.100.246:9092 \
-e KAFKA_LISTENERS=PLAINTEXT://0.0.0.0:9092 \
-v /Users/envo/kafka/config:/opt/kafka/config \
-v /Users/envo/kafka/logs:/opt/kafka/logs \
-d wurstmeister/kafka:latest
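Before wiring up the Spring Boot demo, it can help to confirm the broker is reachable from your machine. The snippet below is only a minimal sketch, not part of the demo repository; it assumes the kafka-clients library is on the classpath and that 192.168.100.246:9092 is the advertised listener configured above.
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.AdminClientConfig;

import java.util.Properties;

public class BrokerCheck {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // Use the advertised listener address configured in the docker run command
        props.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.100.246:9092");
        try (AdminClient admin = AdminClient.create(props)) {
            // Listing topics succeeds only if the broker is reachable
            System.out.println("Topics: " + admin.listTopics().names().get());
        }
    }
}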
3. Create a Spring Boot project with IntelliJ IDEA
pom.xml
<dependencies>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter</artifactId>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-test</artifactId>
        <scope>test</scope>
    </dependency>
    <dependency>
        <groupId>org.springframework.kafka</groupId>
        <artifactId>spring-kafka</artifactId>
    </dependency>
    <dependency>
        <groupId>org.codehaus.jackson</groupId>
        <artifactId>jackson-mapper-asl</artifactId>
        <version>1.9.11</version>
    </dependency>
    <!-- Lombok -->
    <dependency>
        <groupId>org.projectlombok</groupId>
        <artifactId>lombok</artifactId>
    </dependency>
    <dependency>
        <groupId>com.google.guava</groupId>
        <artifactId>guava</artifactId>
        <version>27.0.1-jre</version>
    </dependency>
    <dependency>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-web</artifactId>
    </dependency>
</dependencies>
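The project also needs the usual Spring Boot entry class. The class name below is an assumption; use whatever IDEA generated for your project.
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class KafkaDemoApplication {
    public static void main(String[] args) {
        // Boots the application and the embedded web server used by the /kafka/demo endpoints
        SpringApplication.run(KafkaDemoApplication.class, args);
    }
}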
4. Configure Kafka in the Spring Boot demo
4.1 Add the following to the application.yml file
spring:
  kafka:
    # Kafka broker address and port
    bootstrap-servers: 127.0.0.1:9092
    consumer:
      auto-offset-reset: latest
      group-id: juewei
      enable-auto-commit: true
      max-poll-records: 500 # maximum number of records fetched in one batch poll
      fetch-min-size: 20480
      concurrency: 20
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    listener:
      concurrency: 5
4.2 Write the Kafka consumer config
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaConsumerConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String servers;

    @Value("${spring.kafka.consumer.enable-auto-commit}")
    private boolean enableAutoCommit;

    @Value("${spring.kafka.consumer.group-id}")
    private String groupId;

    @Value("${spring.kafka.consumer.auto-offset-reset}")
    private String autoOffsetReset;

    @Value("${spring.kafka.consumer.concurrency}")
    private int concurrency;

    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(concurrency);
        // Deliver records to the listener in batches (the listener method receives a List)
        factory.setBatchListener(true);
        factory.getContainerProperties().setPollTimeout(1500);
        return factory;
    }

    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        // Maximum number of records returned by a single poll for this listener
        propsMap.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 100);
        return propsMap;
    }
}
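The producer side needs no explicit configuration class: Spring Boot auto-configures the KafkaTemplate from the producer serializers in application.yml. If you prefer an explicit producer config, a sketch could look like the following (this class is not part of the demo repository; it only mirrors the yml settings above):
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
public class KafkaProducerConfig {

    @Value("${spring.kafka.bootstrap-servers}")
    private String servers;

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        // Same broker address and String serializers as declared in application.yml
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}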
4.3 Write the Kafka producer
import com.google.common.collect.Lists;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import java.util.List;

/**
 * Kafka producer
 *
 * @author yangxunwu
 */
@Slf4j
@Component
public class KfProducer {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    public void submitList(List<String> msg) {
        log.info("kafka: {} messages waiting to be pushed", msg.size());
        // Split the payload into batches of at most 10 entries and send each batch as one message
        List<List<String>> partition = Lists.partition(msg, 10);
        for (List<String> item : partition) {
            send("temp", item.toString());
        }
    }

    private void send(String topic, String msg) {
        log.info("kafka: sending to topic " + topic + ", message length " + msg.length());
        kafkaTemplate.send(topic, msg);
    }
}
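The curl request in step 5.2.2 posts a JSON array to /kafka/demo/produce, so a REST controller has to sit in front of KfProducer. As a reference, a minimal sketch could look like the one below; only the request path and payload shape come from the curl command, while the class and method names are assumptions (check the repository for the actual controller).
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.util.List;

@RestController
@RequestMapping("/kafka/demo")
public class KfController {

    @Autowired
    private KfProducer kfProducer;

    // Accepts a JSON array of strings, e.g. ["hello", "exec"], and forwards it to Kafka
    @PostMapping("/produce")
    public String produce(@RequestBody List<String> msg) {
        kfProducer.submitList(msg);
        return "ok";
    }
}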
4.4 Write the Kafka consumer
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.List;

@Slf4j
@Component
public class KfConsumer {

    /**
     * @description Receive Kafka messages (batch listener, so it gets a list of records per poll)
     * @author wjl
     * @dateTime 2022/11/28 11:56
     **/
    @KafkaListener(topics = "temp", containerFactory = "kafkaListenerContainerFactory")
    public void receive(List<ConsumerRecord<String, String>> recordList) {
        recordList.forEach(record -> {
            String value = record.value();
            System.out.println("value = " + value);
        });
    }
}
5. Test the Kafka demo application
5.1 Run the Spring Boot project of the Kafka demo
5.2 Test the Kafka producer
5.2.1 In terminal 1, run these three commands to enter the Kafka container and start a console consumer
docker exec -it kafka /bin/bash
cd /opt/kafka_2.13-2.8.1/bin
kafka-console-consumer.sh --bootstrap-server localhost:9092 --topic temp --from-beginning
5.2.2 Open a new terminal 2 and send this request to the demo's REST endpoint (the curl below assumes the application listens on port 8081; set server.port in application.yml if yours differs)
curl --location --request POST 'http://localhost:8081/kafka/demo/produce' \
--header 'Content-Type: application/json' \
--data-raw '[
"hello", "exec"
]'
5.2.3 Observe terminal 1: the console consumer should print the batched payload sent by KfProducer, e.g. [hello, exec]
5.3 Test the Kafka consumer
5.3.1 In a new terminal 3, run the first two commands from 5.2.1 to enter the Kafka container, then start a console producer with this command
./kafka-console-producer.sh --broker-list localhost:9092 --topic temp
5.3.2 Type test data in the third terminal and watch the messages appear in the IntelliJ IDEA console