High-concurrency Kafka consumption with storage to MySQL

Flow: consume from Kafka -> buffer the records in Redis -> batch insert into MySQL
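The post only shows the service and the Redis lock utility; the Kafka entry point itself is not included. As a rough sketch of that missing piece (assuming Spring Kafka; the topic name oil-event, the group id filling-oil-group, and the messageType value are made up for illustration), the consumer side could look like this, handing each record to handleData:

package com.navinfo.opentsp.yiqi.kafka.listener;

import com.navinfo.opentsp.yiqi.kafka.service.IFillingOilService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

/**
 * Minimal sketch of the Kafka entry point (not part of the original post).
 * Topic name and group id are hypothetical.
 */
@Component
@Slf4j
public class FillingOilListener {

    @Autowired
    private IFillingOilService fillingOilService;

    // Each consumed record is handed to the service, which buffers it in Redis
    // and periodically flushes the buffer to MySQL in batches.
    @KafkaListener(topics = "oil-event", groupId = "filling-oil-group")
    public void onMessage(String message) {
        fillingOilService.handleData(message, "fillingOil");
    }
}

The actual service implementation from the post follows.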

package com.navinfo.opentsp.yiqi.kafka.service.impl;

import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import com.navinfo.opentsp.yiqi.kafka.mapper.FillingOilMapper;
import com.navinfo.opentsp.yiqi.kafka.pojo.FillingOilPojo;
import com.navinfo.opentsp.yiqi.kafka.service.IFillingOilService;
import com.navinfo.opentsp.yiqi.kafka.util.Cluster;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import java.math.BigDecimal;
import java.util.*;
import java.util.stream.Collectors;


/**
 * @description: refueling event handling
 * @author: mengxiaoming
 * @create: 2020-12-09
 **/
@Service
@Slf4j
public class FillingOilServiceImpl implements IFillingOilService {

    private final static String REDIS_KEY_FILLINGOIL = "fillingOil";

    private final static String REDIS_KEY_FILLINGOIL_LOCK = "fillingOilLock";

    private final static String TABLENAME_FILLINGOIL = "oil_event";



    @Autowired
    private RedisTemplate<String, String> redisTemplate;

    @Autowired
    private FillingOilMapper fillingOilMapper;

    @Value("${fillingOil.redisBatchSize:500}")
    private long redisBatchSize;

    @Value("${fillingOil.mysqlBatchSize:1000}")
    private int mysqlBatchSize;

    @Autowired
    private Cluster cluster;

    @Override
    public void handleData(String message, String messageType) {
        log.info("handleData start!");
        FillingOilPojo pojo = JSON.parseObject(message, FillingOilPojo.class);
        // add the record to the Redis hash buffer
        pojo.setDiffOilLevel(this.sub(pojo.getEndOilLevel(), pojo.getStartOilLevel()));
        String elemKey = pojo.getTid()+"_"+pojo.getEventType()+"_"+pojo.getStartGpsTime();
        for (int i = 0; i < 3; i++) {
            try {
                log.info("attempt:{}", i + 1);
                redisTemplate.opsForHash().put(REDIS_KEY_FILLINGOIL, elemKey, JSON.toJSONString(pojo));
                log.info("hash put:{}", JSON.toJSONString(pojo));
                break;
            } catch (Exception e) {
                // log the failure first, then back off briefly before retrying
                log.error(e.getMessage(), e);
                try {
                    Thread.sleep(100L);
                } catch (InterruptedException e1) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
        }

        // current size of the Redis buffer
        long redisListSize = redisTemplate.opsForHash().size(REDIS_KEY_FILLINGOIL);
        log.info("key:{} totalSize:{}", REDIS_KEY_FILLINGOIL, redisListSize);

        // flush once the buffer reaches the configured threshold
        if (redisListSize >= this.redisBatchSize) {
            log.info("flush to MySQL triggered!");
            if (this.cluster.lock(REDIS_KEY_FILLINGOIL_LOCK)) {
                log.info("acquired redis lock!");
                try {
                    Map<Object, Object> oilMap = redisTemplate.opsForHash().entries(REDIS_KEY_FILLINGOIL);
                    if (oilMap.size() < this.redisBatchSize) {
                        log.info("buffer already flushed by a concurrent thread, return");
                        return;
                    }
                    log.info("entries fetched from redis:{}", oilMap.size());
                    long delCount = redisTemplate.opsForHash().delete(REDIS_KEY_FILLINGOIL, oilMap.keySet().toArray());
                    log.info("entries deleted from redis:{}", delCount);
                    List<String> oilList = oilMap.values().stream().map(e -> String.valueOf(e)).collect(Collectors.toList());
                    log.info("oilList size:{} data:{}", oilList.size(), oilList);
                    if (CollectionUtils.isNotEmpty(oilList)) {
                        // parse each JSON string individually instead of relying on List.toString()
                        List<FillingOilPojo> fillingOilList = oilList.stream()
                                .map(s -> JSON.parseObject(s, FillingOilPojo.class))
                                .collect(Collectors.toList());
                        List<List<FillingOilPojo>> fillingOilListList = Lists.partition(fillingOilList, this.mysqlBatchSize);
                        for (List<FillingOilPojo> fillingOilListItem : fillingOilListList) {
                            log.info("batch insert start, size:{}", fillingOilListItem.size());
                            long sTime = System.currentTimeMillis();
                            fillingOilMapper.batchInsert(fillingOilListItem, TABLENAME_FILLINGOIL);
                            log.info("batch insert end, elapsed ms:{}", System.currentTimeMillis() - sTime);
                        }
                    }
                }catch (Exception e){
                    log.error(e.getMessage(), e);
                }finally {
                    log.info("释放redis锁!");
                    this.cluster.unlock(REDIS_KEY_FILLINGOIL_LOCK);
                }
            }
        }
        log.info("handleData end!");
    }



    /**
     * Subtracts v2 from v1 using BigDecimal to avoid floating-point error.
     * @param v1 minuend
     * @param v2 subtrahend
     * @return v1 - v2
     */
    private double sub(double v1, double v2) {
        BigDecimal b1 = new BigDecimal(Double.toString(v1));
        BigDecimal b2 = new BigDecimal(Double.toString(v2));
        return b1.subtract(b2).doubleValue();
    }
}
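The flush above delegates to fillingOilMapper.batchInsert(list, tableName), which the post does not show. Below is a minimal sketch of what such a mapper could look like, assuming MyBatis with an annotation-based multi-row INSERT; the column names are guesses derived from the pojo fields used above and would need to match the real oil_event schema:

package com.navinfo.opentsp.yiqi.kafka.mapper;

import com.navinfo.opentsp.yiqi.kafka.pojo.FillingOilPojo;
import org.apache.ibatis.annotations.Insert;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;
import java.util.List;

/**
 * Sketch of the batch-insert mapper (assumed MyBatis; column names hypothetical).
 */
@Mapper
public interface FillingOilMapper {

    // One multi-row INSERT per call, so each partition produced by Lists.partition()
    // becomes a single SQL statement.
    @Insert("<script>" +
            "INSERT INTO ${tableName} " +
            "(tid, event_type, start_gps_time, start_oil_level, end_oil_level, diff_oil_level) VALUES " +
            "<foreach collection='list' item='item' separator=','>" +
            "(#{item.tid}, #{item.eventType}, #{item.startGpsTime}, " +
            "#{item.startOilLevel}, #{item.endOilLevel}, #{item.diffOilLevel})" +
            "</foreach>" +
            "</script>")
    int batchInsert(@Param("list") List<FillingOilPojo> list, @Param("tableName") String tableName);
}

A single multi-row INSERT per batch is what makes the mysqlBatchSize partitioning pay off; with plain JDBC batching, setting rewriteBatchedStatements=true on the MySQL driver achieves a similar effect.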
package com.navinfo.opentsp.yiqi.kafka.util;

import com.navinfo.opentsp.yiqi.common.util.DateUtils;
import com.navinfo.opentsp.yiqi.common.util.StringUtil;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.core.RedisCallback;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Service;
import java.util.Objects;


@Service
public class Cluster {
    private final static Logger LOGGER = org.slf4j.LoggerFactory.getLogger(Cluster.class);
    @Autowired
    private RedisTemplate redisTemplate;

    private final long LOCK_EXPIRE = 90000;  // default lock auto-expiry, in milliseconds
    private final int DEFAULT_RETRY = 3;     // number of acquisition attempts
    private final String LOCKED = "LOCK";
    private final String LOCK_PREFIX = "_CLUSTER_KEY_";



    public boolean lock(String lock){
        for(int i = 0 ; i < DEFAULT_RETRY ; i ++){
            if(this.redisLock(lock , LOCK_EXPIRE)){
                return true;
            }
            try {
                Thread.sleep(100);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return false;
            }
        }
        return false;
    }

    /**
     * Tries to take the lock once.
     *
     * @param key lock key
     * @return whether the lock was acquired
     */
    private boolean redisLock(String key , long lockExpire){
        String lock = LOCK_PREFIX + key;
        // executed as a lambda RedisCallback
        return (Boolean) redisTemplate.execute((RedisCallback<Boolean>) connection -> {

            long expireAt = System.currentTimeMillis() + lockExpire + 1;
            Boolean acquire = connection.setNX(lock.getBytes(), String.valueOf(expireAt).getBytes());

            if (Boolean.TRUE.equals(acquire)) {
                connection.pExpire(lock.getBytes(), LOCK_EXPIRE);
                return true;
            } else {
                byte[] value = connection.get(lock.getBytes());
                if (Objects.nonNull(value) && value.length > 0) {
                    long expireTime = Long.parseLong(new String(value));
                    if (expireTime < System.currentTimeMillis()) {
                        // the lock has expired: try to take it over atomically with getSet
                        byte[] oldValue = connection.getSet(lock.getBytes(), String.valueOf(System.currentTimeMillis() + lockExpire + 1).getBytes());
                        // anti-deadlock guard: only the caller whose getSet returned the expired value wins
                        return oldValue == null || Long.parseLong(new String(oldValue)) < System.currentTimeMillis();
                    }
                }
            }
            return false;
        });
    }



    /**
     * Releases (deletes) the lock.
     *
     * @param key lock key
     */
    public void unlock(String key) {
        String lock = LOCK_PREFIX + key;
        RedisConnection redisConnection = redisTemplate.getConnectionFactory().getConnection();
        if(redisConnection != null){
            redisConnection.del(lock.getBytes());
            redisConnection.close();
        }
    }


    private int timeoutMsecs = 1000;
    private int expireMsecs = 10000;

    /**
     * Alternative blocking lock: polls until the lock is acquired or timeoutMsecs elapses.
     * @param lockKey lock key
     * @return whether the lock was acquired
     * @throws InterruptedException if interrupted while sleeping between retries
     */
    public synchronized boolean acquire(String lockKey) throws InterruptedException {
        int timeout = timeoutMsecs;
        RedisConnection redisConnection = this.redisTemplate.getConnectionFactory().getConnection();
        boolean lockStatus = false;
        while (timeout >= 0) {
            long expires = System.currentTimeMillis() + expireMsecs + 1;
            String expiresStr = String.valueOf(expires); // lock expiry time
            LOGGER.info("lock expiry time {}", DateUtils.formatDate(expires));
            if (Boolean.TRUE.equals(redisConnection.setNX(lockKey.getBytes(), expiresStr.getBytes()))) {
                lockStatus = true;
                break;
            }

            byte[] currentValue = redisConnection.get(lockKey.getBytes()); // expiry currently stored in redis
            String currentValueStr = currentValue == null ? null : new String(currentValue);
            LOGGER.info("expiry in redis {} system time {}", currentValueStr, System.currentTimeMillis());
            if (currentValueStr != null && Long.parseLong(currentValueStr) < System.currentTimeMillis()) {
                // the stored expiry has passed; try to take the lock over with getSet
                byte[] oldValue = redisConnection.getSet(lockKey.getBytes(), expiresStr.getBytes());
                String oldValueStr = oldValue == null ? null : new String(oldValue);
                LOGGER.info("oldValueStr {}", oldValueStr);
                // getSet reads the previous expiry and writes the new one atomically,
                // so only one of several concurrent callers sees the old (expired) value
                if (StringUtil.isNotEmpty(oldValueStr) && oldValueStr.equals(currentValueStr)) {
                    // only the caller whose getSet result equals the value read above acquires the lock
                    lockStatus = true;
                    break;
                }
            }
            timeout -= 100;
            Thread.sleep(100);
        }
        if(redisConnection != null){
            redisConnection.close();
        }
        return lockStatus;
    }
}
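The hand-rolled setNX + getSet lock above predates the atomic SET key value NX PX pattern. With Spring Data Redis 2.1+ the same idea can be expressed in one call via setIfAbsent with a timeout, which removes both the window between setNX and pExpire and the expiry-takeover logic. A minimal sketch of that alternative (class and method names are illustrative, not from the post):

package com.navinfo.opentsp.yiqi.kafka.util;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.UUID;

/**
 * Alternative lock sketch using a single atomic SET NX PX
 * (Spring Data Redis setIfAbsent with a timeout).
 */
@Service
public class SimpleRedisLock {

    @Autowired
    private StringRedisTemplate redisTemplate;

    /** Tries to acquire the lock; returns a token to pass to unlock, or null if not acquired. */
    public String tryLock(String key, Duration ttl) {
        String token = UUID.randomUUID().toString();
        Boolean ok = redisTemplate.opsForValue().setIfAbsent(key, token, ttl);
        return Boolean.TRUE.equals(ok) ? token : null;
    }

    /** Releases the lock only if it still holds our token (best-effort, non-atomic check). */
    public void unlock(String key, String token) {
        String current = redisTemplate.opsForValue().get(key);
        if (token != null && token.equals(current)) {
            redisTemplate.delete(key);
        }
    }
}

For a fully atomic unlock, the compare-and-delete is usually pushed into a Lua script, or a ready-made client such as Redisson can be used instead.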

 
