上一期写了篇博客:springboot集成kafka,在测高并发的时候入库操作有些问题
这篇用springboot集成的kafka进行操作,验证了2000个并发没发现任何问题
1、添加配置文件:
spring:
  kafka:
    bootstrap-servers: 127.0.0.1:9092
    producer:
      # Serializers for keys and values sent via KafkaTemplate<String, String>.
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: test
      # Offsets are committed automatically every 1000 ms.
      enable-auto-commit: true
      auto-commit-interval: 1000
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
2、Producer生产者直接调用send函数即可:
import io.renren.common.utils.R;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * REST endpoint that publishes the request body to Kafka for load testing.
 *
 * @author jinsq
 * @date 2019/5/22 10:59
 */
@RestController
@RequestMapping("test")
public class KafkaTestController {

    /** Topic every test message is published to. */
    private static final String TOPIC = "result";

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    /**
     * Forwards the raw request body to the {@code result} topic.
     * NOTE(review): despite its name, this method produces (sends) messages.
     *
     * @param body raw request body used as the Kafka message payload
     * @return generic success response
     * @throws IOException declared by the original contract; not thrown here
     */
    @RequestMapping(value = "/producer")
    public R consume(@RequestBody String body) throws IOException {
        kafkaTemplate.send(TOPIC, body);
        return R.ok();
    }
}
3、消费者代码:
/**
 * @author jinsq
 * @date 2019/5/22 17:06
 */
import io.renren.modules.sys.entity.SysConfigEntity;
import io.renren.modules.sys.service.SysConfigService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.io.IOException;
/**
 * Kafka listener that consumes messages from the {@code result} topic and
 * persists each payload to the database as a SysConfigEntity row.
 *
 * @author shangzz
 */
@Component
@Slf4j
public class RawDataListener {

    @Autowired
    private SysConfigService sysConfigService;

    /**
     * Consumes Kafka records in real time (one record is consumed for each
     * record produced to the monitored topic) and writes each one to the DB.
     *
     * @param record the consumed record; its value is expected to be a String
     * @throws IOException declared by the original contract; not thrown here
     */
    @KafkaListener(topics = "result")
    public void listen(ConsumerRecord<?, ?> record) throws IOException {
        String value = (String) record.value();
        String topic = record.topic();
        if ("result".equals(topic)) {
            // SLF4J parameterized logging instead of string concatenation:
            // the message is only built if INFO is enabled.
            log.info("接收到的信息为:{}", value);
            SysConfigEntity sysConfigEntity = new SysConfigEntity();
            sysConfigEntity.setParamKey(value);
            sysConfigEntity.setParamValue(topic);
            sysConfigEntity.setRemark("测试数据");
            sysConfigService.save(sysConfigEntity);
        } else {
            log.info("其他信息!!!!!!!");
        }
    }
}
这里我实现了写数据库的操作,后面模拟2000个并发操作,看数据库是否会报异常信息
4、模拟高并发操作:
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
/**
 * Simulates a burst of 2000 concurrent Kafka sends using a CountDownLatch.
 *
 * @author jinsq
 * @date 2019/5/22 17:27
 */
@RunWith(SpringRunner.class)
@SpringBootTest
public class CountDownLatchTest {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    // Number of simulated concurrent requests.
    private static final int threadNum = 2000;

    // Latch the test method waits on until every sender thread has finished.
    private CountDownLatch cdl = new CountDownLatch(threadNum);

    // Shared message counter. The original `static int i; i++` was a data
    // race across 2000 threads (lost updates, duplicate payloads);
    // AtomicInteger makes the increment atomic.
    private static final AtomicInteger counter = new AtomicInteger(0);

    @Test
    public void test() {
        for (int n = 1; n <= threadNum; n++) {
            MyThread myThread = new MyThread(cdl);
            Thread thread = new Thread(myThread);
            thread.start();
        }
        try {
            cdl.await();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
        }
    }

    /** Sender task: publishes one numbered message, then counts down. */
    class MyThread implements Runnable {
        private final CountDownLatch countDownLatch;

        public MyThread(CountDownLatch countDownLatch) {
            this.countDownLatch = countDownLatch;
        }

        @Override
        public void run() {
            try {
                kafkaTemplate.send("result", String.valueOf(counter.getAndIncrement()));
            } finally {
                // Always count down so a send failure cannot hang await().
                countDownLatch.countDown();
            }
        }
    }
}
上面的代码采用CountDownLatch模拟了2000个并发发送操作
5、最终写数据库如下所示,没有任何问题: