Here is a simple demo of using Kafka; for anything more complex, please refer to the API docs~
First, add the Kafka dependency to the pom file:
<!-- https://mvnrepository.com/artifact/org.springframework.cloud/spring-cloud-starter-bus-kafka -->
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-bus-kafka</artifactId>
<version>2.2.0.RELEASE</version>
</dependency>
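A quick note: spring-cloud-starter-bus-kafka is the Spring Cloud Bus starter, and this demo only uses the spring-kafka and kafka-clients libraries it pulls in transitively. If you don't need the bus, depending on spring-kafka directly should be enough (a minimal sketch; the version is left to your Spring Boot dependency management):

<!-- https://mvnrepository.com/artifact/org.springframework.kafka/spring-kafka -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>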
Then add the Kafka properties to the configuration file, bootstrap.yml or application.yml (the snippet below is in YAML form):
# Kafka configuration
kafka:
  producer:
    servers: 10.1.128.92:9092,10.1.128.93:9092,10.1.128.104:9092 # Kafka broker addresses
    retries: 0 # number of retries after a failed send
    batch:
      size: 16384 # batch size in bytes
    linger: 1 # send delay in ms
    buffer:
      memory: 33554432 # buffer memory in bytes
  listener:
    topicName: KAFKA_EVENT_ABP_SIT # topic name
  consumer:
    group:
      id: tzbank-event-group
    servers: 10.1.128.92:9092,10.1.128.93:9092,10.1.128.104:9092 # Kafka broker cluster
    enable:
      auto:
        commit: true
    auto:
      commit:
        interval: 1000
      offset:
        reset: latest
    session:
      timeout: 30000
    concurrency: 10
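Note that these are custom keys under the kafka.* prefix, read via @Value in the two utility classes below; they are not the spring.kafka.* properties that Spring Boot auto-configuration binds, so nothing gets wired up from them automatically.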
The Kafka producer utility class, KafkaProducerUtil.java:
package com.tzbank.payment.utils;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.util.HashMap;
import java.util.Map;

/**
 * @author JJJ
 * 20200610
 */
@Component
@Slf4j
public class KafkaProducerUtil {

    @Value("${kafka.producer.servers}")
    private String servers;       // Kafka broker addresses

    @Value("${kafka.producer.retries}")
    private int retries;          // number of retries after a failed send

    @Value("${kafka.producer.batch.size}")
    private int batchSize;        // batch size in bytes

    @Value("${kafka.producer.linger}")
    private int linger;           // send delay in ms

    @Value("${kafka.producer.buffer.memory}")
    private int bufferMemory;     // buffer memory in bytes

    public KafkaProducerUtil() {
    }

    @Override
    public String toString() {
        return "KafkaProducerUtil{" +
                "servers='" + servers + '\'' +
                ", retries=" + retries +
                ", batchSize=" + batchSize +
                ", linger=" + linger +
                ", bufferMemory=" + bufferMemory +
                '}';
    }

    // Build the producer configuration from the properties in the yml file.
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.RETRIES_CONFIG, retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    // Create the message producer. For simplicity this demo creates (and closes)
    // a new producer per message; in production you would reuse one instance.
    public KafkaProducer<String, String> getKafkaProducer() {
        log.info("Kafka producer configuration: " + toString());
        return new KafkaProducer<>(producerConfigs());
    }

    // Send a message.
    public void sendMessage(String topic, String value) {
        KafkaProducer<String, String> producer = getKafkaProducer();
        try {
            producer.send(new ProducerRecord<>(topic, value), (recordMetadata, e) -> {
                if (e == null) {
                    log.info("send - message sent successfully, payload: " + value);
                } else {
                    log.error("send - failed to send message, payload: " + value + ", error: " + e.getMessage());
                }
            });
        } catch (Exception e) {
            log.error("智脑 internal event send error", e);
        } finally {
            producer.close(); // close() flushes pending records, so the callback still fires
        }
    }
}
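For reference, a minimal sketch of calling the utility from a service; the EventPublisher class and its payload are hypothetical, and the topic name comes from the configuration above:

import com.alibaba.fastjson.JSONObject;
import org.springframework.stereotype.Service;

@Service
public class EventPublisher {

    private final KafkaProducerUtil kafkaProducerUtil;

    // Constructor injection; Spring wires in the @Component above.
    public EventPublisher(KafkaProducerUtil kafkaProducerUtil) {
        this.kafkaProducerUtil = kafkaProducerUtil;
    }

    public void publish(Object event) {
        // Serialize the event to JSON and send it to the topic from the yml config.
        kafkaProducerUtil.sendMessage("KAFKA_EVENT_ABP_SIT", JSONObject.toJSONString(event));
    }
}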
The Kafka consumer utility class, KafkaConsumerUtil.java:
package com.tzbank.payment.utils;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

/**
 * @author JJJ
 * 20200610
 */
@Component
@Slf4j
public class KafkaConsumerUtil {

    @Value("${kafka.consumer.servers}")
    private String servers;

    @Value("${kafka.consumer.enable.auto.commit}")
    private boolean enableAutoCommit;

    @Value("${kafka.consumer.session.timeout}")
    private String sessionTimeout;

    @Value("${kafka.consumer.auto.commit.interval}")
    private String autoCommitInterval;

    @Value("${kafka.consumer.group.id}")
    private String groupId;

    @Value("${kafka.consumer.auto.offset.reset}")
    private String autoOffsetReset;

    @Value("${kafka.consumer.concurrency}")
    private int concurrency;

    @Value("${kafka.listener.topicName}")
    private String topicName; // the topic to subscribe to

    public KafkaConsumerUtil() {
    }

    @Override
    public String toString() {
        return "KafkaConsumerUtil{" +
                "servers='" + servers + '\'' +
                ", enableAutoCommit=" + enableAutoCommit +
                ", sessionTimeout='" + sessionTimeout + '\'' +
                ", autoCommitInterval='" + autoCommitInterval + '\'' +
                ", groupId='" + groupId + '\'' +
                ", autoOffsetReset='" + autoOffsetReset + '\'' +
                ", concurrency=" + concurrency +
                '}';
    }

    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    // Build the consumer configuration from the properties in the yml file.
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> propsMap = new HashMap<>();
        propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
        propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
        propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        return propsMap;
    }

    // Create the message consumer.
    public KafkaConsumer<String, String> getKafkaConsumer() {
        return new KafkaConsumer<>(consumerConfigs());
    }

    // Poll for messages. This loop never returns, so call it from a dedicated thread.
    public void getMessage() {
        log.info("Kafka consumer configuration: " + toString());
        // Subscribe to the configured topic.
        KafkaConsumer<String, String> consumer = getKafkaConsumer();
        consumer.subscribe(Collections.singletonList(topicName));
        while (true) {
            ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, String> record : consumerRecords) {
                log.info("Received message: {}", record.value());
            }
        }
    }
}
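Since getMessage() blocks in an infinite poll loop, it has to run off the main thread. Below is a minimal sketch of kicking it off at application startup; the KafkaConsumerRunner class is hypothetical:

import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

@Component
public class KafkaConsumerRunner implements CommandLineRunner {

    private final KafkaConsumerUtil kafkaConsumerUtil;

    public KafkaConsumerRunner(KafkaConsumerUtil kafkaConsumerUtil) {
        this.kafkaConsumerUtil = kafkaConsumerUtil;
    }

    @Override
    public void run(String... args) {
        // getMessage() never returns, so give it its own thread.
        new Thread(kafkaConsumerUtil::getMessage, "kafka-consumer").start();
    }
}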