I. Consumer thread class
1. Construction: the consumer-thread constructor
2. Subscribe to the topic
3. Poll for messages
4. Consume messages, executing the concrete business logic
public class ConsumerTask implements Runnable {
private static Logger LOGGER = LoggerFactory.getLogger(ConsumerTask.class);
/**
* 每个线程维护KafkaConsumer实例
*/
private final KafkaConsumer<String, String> consumer;
private Gson gson ;
private OrderService orderService;
public ConsumerTask(String brokerList, String groupId, String topic) {
this.gson = SpringBeanFactory.getBean(Gson.class) ;
this.orderService = SpringBeanFactory.getBean(OrderService.class) ;
Properties props = new Properties();
props.put("bootstrap.servers", brokerList);
props.put("group.id", groupId);
//自动提交位移
props.put("enable.auto.commit", "true");
props.put("auto.commit.interval.ms", "1000");
props.put("session.timeout.ms", "30000");
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
this.consumer = new KafkaConsumer<String, String>(props);
consumer.subscribe(Arrays.asList(topic));
}
@Override
public void run() {
boolean flag = true;
while (flag) {
// 使用200ms作为获取超时时间
ConsumerRecords<String, String> records = consumer.poll(200);
for (ConsumerRecord<String, String> record : records) {
// 简单地打印消息
LOGGER.info("==="+record.value() + " consumed " + record.partition() +
" message with offset: " + record.offset());
dealMessage(record.value()) ;
}
}
}
/**
* @param value
*/
private void dealMessage(String value) {
try {
Stock stock = gson.fromJson(value, Stock.class);
LOGGER.info("consumer stock={}",JSON.toJSONString(stock));
//创建订单
orderService.createOptimisticOrderUseRedisAndKafka(stock);
}catch (RejectedExecutionException e){
LOGGER.error("rejected message = " + value);
}catch (Exception e){
LOGGER.error("unknown exception",e);
}
}
II. Consumer group backed by a thread pool
1. Create the thread pool
2. Create the consumer tasks and register them with the group
3. The pool executes the consumer tasks
/**
 * Consumer group: builds one {@link ConsumerTask} per thread and runs them on
 * a fixed-size thread pool with named worker threads.
 */
public class ConsumerGroup {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConsumerGroup.class);

    /**
     * Fixed-size pool; one worker per consumer task.
     */
    private final ExecutorService threadPool;
    private final List<ConsumerTask> consumers;

    /**
     * @param threadNum  number of consumer threads (should not exceed the topic's partition count)
     * @param groupId    Kafka consumer group id
     * @param topic      topic to consume
     * @param brokerList Kafka bootstrap servers
     */
    public ConsumerGroup(int threadNum, String groupId, String topic, String brokerList) {
        LOGGER.info("kafka parameter={},{},{},{}", threadNum, groupId, topic, brokerList);
        ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
                .setNameFormat("consumer-pool-%d").build();
        threadPool = new ThreadPoolExecutor(threadNum, threadNum,
                0L, TimeUnit.MILLISECONDS,
                new LinkedBlockingQueue<Runnable>(1024), namedThreadFactory,
                new ThreadPoolExecutor.AbortPolicy());
        consumers = new ArrayList<ConsumerTask>(threadNum);
        for (int i = 0; i < threadNum; i++) {
            consumers.add(new ConsumerTask(brokerList, groupId, topic));
        }
    }

    /**
     * Submits every consumer task to the pool.
     */
    public void execute() {
        for (ConsumerTask runnable : consumers) {
            threadPool.submit(runnable);
        }
    }

    /**
     * Stops accepting new tasks and lets in-flight tasks finish.
     * Previously the pool was never shut down, leaking its threads.
     */
    public void shutdown() {
        threadPool.shutdown();
    }
}