The DFA Sensitive Word Filtering Algorithm

A DFA (Deterministic Finite Automaton) is a finite state machine used for string matching. It searches text for matching patterns and is commonly used for sensitive word filtering, string search, and similar scenarios. Its key property is that matching time is linear in the length of the input text and does not depend on the number or length of the patterns.

The core of the DFA approach is to build a finite state automaton made up of a finite number of states and the transitions between them. During matching, the algorithm starts at the beginning of the string and moves from state to state character by character, until it has matched a complete pattern or can no longer advance. Every transition is deterministic: for a given current state and input character there is exactly one next state.

The basic steps of the DFA algorithm are as follows:

  1. Build the sensitive word trie: insert all sensitive words, character by character, into a trie (prefix tree). Each node represents a character, and the path from the root to a terminal node spells out one sensitive word.

  2. Build the state transition table: starting from the root, record the transition for every character to obtain a state transition table. Each state corresponds to the string prefix matched so far, and each entry gives the state reached after consuming a particular character in the current state.

  3. Matching: read the text character by character from the beginning, following the transition table to the next state. If no transition exists, the current position is not the start of a sensitive word, and matching restarts from the next character. If the current state is a terminal state of a sensitive word, a match has been found and can be recorded or handled as needed. A minimal sketch of these three steps follows the list.
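To make the steps concrete, here is a minimal, self-contained sketch (not the implementation used later in this article) that builds a trie for two hypothetical words and scans a piece of text against it. The words and text are made up for illustration only.

import java.util.HashMap;
import java.util.Map;

public class MiniDfaDemo {
    // A trie node: a children map plus an end-of-word flag
    static class Node {
        Map<Character, Node> children = new HashMap<>();
        boolean isWordEnd;
    }

    public static void main(String[] args) {
        // Step 1: build the trie from the sensitive word set
        Node root = new Node();
        for (String word : new String[]{"bad", "badge", "evil"}) {
            Node node = root;
            for (char c : word.toCharArray()) {
                node = node.children.computeIfAbsent(c, k -> new Node());
            }
            node.isWordEnd = true;
        }

        // Steps 2 and 3: the children maps act as the transition table;
        // scan the text, trying to match a word starting at each position
        String text = "a bad badge";
        for (int i = 0; i < text.length(); i++) {
            Node node = root;
            for (int j = i; j < text.length(); j++) {
                node = node.children.get(text.charAt(j));
                if (node == null) {
                    break;               // no transition: not a sensitive word here
                }
                if (node.isWordEnd) {
                    System.out.println("matched: " + text.substring(i, j + 1));
                }
            }
        }
    }
}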

Compared with other string matching algorithms such as KMP and BM, the DFA approach has the advantage that matching time depends only on the length of the text, not on the size of the pattern set, which makes it very efficient for large sensitive word libraries. Building the automaton is the more expensive part, since the trie and state transition table must be constructed up front, but this is usually done once at system startup and does not affect matching performance.

In short, DFA is an efficient string matching algorithm that is particularly well suited to sensitive word filtering. By building a state transition table, it can scan the text in a single pass, in time proportional to the text length.

The code in this article is based on Spring Boot.

Set<String> keyWordSet = sensitiveWordsService.readSensitiveWords();

You can adapt this to your own setup and load the sensitive words from a database, a configuration center, or a text file.
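For example, here is a minimal sketch of such a service that reads one word per line from a classpath file. The class name and the file name sensitive-words.txt are assumptions for illustration; the real ISensitiveWordsService used in this article also exposes getLastCreatedAt(), which is omitted here.

import org.springframework.core.io.ClassPathResource;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Set;

@Service
public class FileSensitiveWordsService {

    /**
     * Reads the sensitive word list from a classpath text file, one word per line.
     * The file name is a placeholder; swap in a database or config-center query as needed.
     */
    public Set<String> readSensitiveWords() throws IOException {
        Set<String> words = new HashSet<>();
        ClassPathResource resource = new ClassPathResource("sensitive-words.txt");
        try (BufferedReader reader = new BufferedReader(
                new InputStreamReader(resource.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String word = line.trim();
                if (!word.isEmpty()) {
                    words.add(word);
                }
            }
        }
        return words;
    }
}

Next is the trie node used to store the sensitive words.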


import java.util.HashMap;
import java.util.Map;

/**
 * Trie (dictionary tree) node
 * @author ljh
 */
public class TrieNode {
    private Map<Character, TrieNode> children;
    /**
     * End-of-word flag: 0 = not the end of a word, 1 = end of a word whose node
     * still has children, 2 = end of a word whose node is also a leaf.
     */
    private int isEnd;

    public TrieNode() {
        this.children = new HashMap<>();
        this.isEnd = 0;
    }

    public TrieNode getChild(char c) {
        return children.get(c);
    }

    public TrieNode addChild(char c, int isEnd) {
        TrieNode childNode = children.get(c);
        if (childNode == null) {
            childNode = new TrieNode();
            children.put(c, childNode);
        }
        if (isEnd > childNode.isEnd) {
            childNode.isEnd = isEnd;
        }
        return childNode;
    }

    public Integer getEnd() {
        return isEnd;
    }

    public void setEnd(Integer isEnd) {
        this.isEnd = isEnd;
    }

    public boolean isEnd() {
        return isEnd > 0;
    }

    public boolean isLastChar() {
        return isEnd == 2;
    }

    public Map<Character, TrieNode> getChildren() {
        return children;
    }
}
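For reference, a tiny snippet showing how addChild is meant to be called when inserting a word. The word "abc" is a made-up example; this mirrors what buildSensitiveWordTrie does in the initialization component below.

public class TrieNodeDemo {
    public static void main(String[] args) {
        // Insert the hypothetical word "abc" into an empty trie:
        // intermediate characters get isEnd = 0, the last character gets isEnd = 1;
        // setLeafNodeEndFlag (shown below) later promotes leaf word-ends to isEnd = 2.
        TrieNode root = new TrieNode();
        TrieNode node = root;
        String word = "abc";
        for (int i = 0; i < word.length(); i++) {
            boolean isLastChar = (i == word.length() - 1);
            node = node.addChild(word.charAt(i), isLastChar ? 1 : 0);
        }
        System.out.println(node.isEnd());               // true: 'c' is marked as a word end
        System.out.println(root.getChild('a').isEnd()); // false: 'a' is an intermediate node
    }
}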

import game.box.message.service.service.ISensitiveWordsService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.event.ApplicationReadyEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.Map;
import java.util.Objects;
import java.util.Set;

/**
 * Sensitive word initialization.
 * The trie is built character by character. While a character is not the last character of a
 * sensitive word, its node keeps isEnd = 0. When a character is the last character of a word
 * but its node still has children, isEnd is set to 1. When the last character of a word is
 * also a leaf node, isEnd is set to 2.
 */
@Slf4j
@Component
public class SensitiveWordInit implements ApplicationListener<ApplicationReadyEvent> {
    /**
     * Sensitive word trie
     */
    public static TrieNode sensitiveWordTrie;
    /**
     * Creation time of the most recently added sensitive word
     */
    private LocalDateTime lastCreatedAt = null;
    @Resource
    private ISensitiveWordsService sensitiveWordsService;

    @Override
    public void onApplicationEvent(ApplicationReadyEvent event) {
        long now = System.currentTimeMillis();
        initKeyWord();
        log.info("敏感词初始化完成,耗时:{}ms", System.currentTimeMillis() - now);
    }

    /**
     * Refresh the sensitive word library
     */
    public void refresh(boolean isForce) {
        if (isForce) {
            doRefresh();
            return;
        }
        LocalDateTime tempLastCreatedAt = sensitiveWordsService.getLastCreatedAt();
        if (Objects.isNull(tempLastCreatedAt) || Objects.isNull(lastCreatedAt) || tempLastCreatedAt.isAfter(lastCreatedAt)) {
            doRefresh();
        }
    }

    public void doRefresh() {
        try {
            log.info("Refreshing sensitive word library");
            long now = System.currentTimeMillis();
            // Read the sensitive word library
            Set<String> keyWordSet = sensitiveWordsService.readSensitiveWords();
            log.info("Number of sensitive words: {}", keyWordSet.size());
            // Build the sensitive word trie
            sensitiveWordTrie = buildSensitiveWordTrie(keyWordSet);
            // Remember the latest creation time so non-forced refreshes can skip unchanged data
            lastCreatedAt = sensitiveWordsService.getLastCreatedAt();
            log.info("Sensitive word refresh finished, took {} ms", System.currentTimeMillis() - now);
        } catch (Exception e) {
            log.error("Failed to refresh sensitive word library", e);
        }
    }


    /**
     * Initialize the sensitive word library
     */
    void initKeyWord() {
        try {
            // Read the sensitive word library
            Set<String> keyWordSet = sensitiveWordsService.readSensitiveWords();
            log.info("Number of sensitive words: {}", keyWordSet.size());
            // Build the sensitive word trie
            sensitiveWordTrie = buildSensitiveWordTrie(keyWordSet);
            lastCreatedAt = sensitiveWordsService.getLastCreatedAt();
        } catch (Exception e) {
            log.error("Failed to initialize sensitive word library", e);
        }
    }

    /**
     * Build the sensitive word trie
     *
     * @param keyWordSet sensitive word library
     */
    private TrieNode buildSensitiveWordTrie(Set<String> keyWordSet) {
        TrieNode tempSensitiveWordTrie = new TrieNode();
        for (String word : keyWordSet) {
            TrieNode currentNode = tempSensitiveWordTrie;
            for (int i = 0; i < word.length(); i++) {
                char c = word.charAt(i);
                boolean isLastChar = (i == word.length() - 1);
                currentNode = currentNode.addChild(c, isLastChar ? 1 : 0);
            }
        }
        setLeafNodeEndFlag(tempSensitiveWordTrie.getChildren());
        return tempSensitiveWordTrie;
    }

    private void setLeafNodeEndFlag(Map<Character, TrieNode> children) {
        if (children == null) {
            return;
        }
        for (TrieNode childNode : children.values()) {
            setLeafNodeEndFlag(childNode.getChildren());
        }
        for (TrieNode childNode : children.values()) {
            if (childNode.isEnd() && childNode.getChildren().isEmpty()) {
                childNode.setEnd(2);
            }
        }
    }

}
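The refresh method makes it easy to keep the trie in sync with the word source without restarting the application. Below is a minimal sketch of a polling task that calls it periodically; the class name and the 10-minute interval are assumptions, not part of the original code. Note that @Scheduled only takes effect if scheduling is enabled, for example with @EnableScheduling on a configuration class.

import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;

@Slf4j
@Component
public class SensitiveWordRefreshTask {

    @Resource
    private SensitiveWordInit sensitiveWordInit;

    /**
     * Poll for new sensitive words every 10 minutes; the non-forced refresh
     * only rebuilds the trie when the latest creation time has advanced.
     */
    @Scheduled(fixedDelay = 10 * 60 * 1000)
    public void refreshSensitiveWords() {
        sensitiveWordInit.refresh(false);
    }
}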

import game.box.message.service.common.util.sensitive.SensitiveWordInit;
import game.box.message.service.common.util.sensitive.TrieNode;
import lombok.extern.slf4j.Slf4j;

import java.util.HashSet;
import java.util.Map;
import java.util.Set;

/**
 * Sensitive word utility class
 *
 * @author ljh
 */
@Slf4j
public class SensitiveWordsUtil {
    /** Minimum matching: stop at the shortest sensitive word found */
    public static final int MIN_MATCH_TYPE = 1;
    /** Maximum matching: keep extending to the longest sensitive word found */
    public static final int MAX_MATCH_TYPE = 2;

    public static Set<String> getSensitiveWord(String text, int matchType) {
        Set<String> sensitiveWords = new HashSet<>();
        int textLength = text.length();
        for (int i = 0; i < textLength; i++) {
            int length = matchSensitiveWordAtIndex(text, i, matchType);
            if (length > 0) {
                String sensitiveWord = text.substring(i, i + length);
                if (isCompleteSensitiveWord(sensitiveWord, matchType)) {
                    sensitiveWords.add(sensitiveWord);
                }
                i += length - 1;
            }
        }
        return sensitiveWords;
    }

    public static String replaceSensitiveWord(String text, int matchType, String replaceChar) {
        String result = text;
        Set<String> sensitiveWords = getSensitiveWord(text, matchType);
        for (String word : sensitiveWords) {
            String replaceString = getReplacementString(replaceChar, word.length());
            // Use replace rather than replaceAll so regex metacharacters in a word are treated literally
            result = result.replace(word, replaceString);
        }
        return result;
    }

    public static int checkSensitiveWord(String text, int matchType) {
        Set<String> sensitiveWords = getSensitiveWord(text, matchType);
        return sensitiveWords.size();
    }

    private static int matchSensitiveWordAtIndex(String text, int beginIndex, int matchType) {
        int matchFlag = 0;
        int textLength = text.length();
        TrieNode currentNode = SensitiveWordInit.sensitiveWordTrie;
        for (int i = beginIndex; i < textLength; i++) {
            char character = text.charAt(i);
            Map<Character, TrieNode> children = currentNode.getChildren();
            if (children != null && children.containsKey(character)) {
                matchFlag++;
                currentNode = children.get(character);
                if (currentNode.isEnd()) {
                    if (i < textLength - 1 && matchType == MAX_MATCH_TYPE) {
                        char nextChar = text.charAt(i + 1);
                        // Check the children of the node just matched, not of its parent
                        if (currentNode.getChildren().containsKey(nextChar)) {
                            // The next character can extend the sensitive word, so keep matching
                            continue;
                        }
                    }
                    // A sensitive word has been matched
                    break;
                }
            } else {
                // Not a sensitive word
                break;
            }
        }
        if (matchFlag < 2 || (matchType == MAX_MATCH_TYPE && matchFlag < textLength - beginIndex) || !currentNode.isEnd()) {
            // Does not meet the conditions for a sensitive word, so reset the match length
            matchFlag = 0;
        }
        return matchFlag;
    }

    private static boolean isCompleteSensitiveWord(String sensitiveWord, int matchType) {
        TrieNode currentNode = SensitiveWordInit.sensitiveWordTrie;
        int length = sensitiveWord.length();
        for (int i = 0; i < length; i++) {
            char character = sensitiveWord.charAt(i);
            Map<Character, TrieNode> children = currentNode.getChildren();
            if (children == null || !children.containsKey(character)) {
                // The character is missing from the trie, so this is not a complete sensitive word
                return false;
            }
            currentNode = children.get(character);
            if (matchType == MAX_MATCH_TYPE && currentNode.isEnd()) {
                // The node is marked as a word end in the trie, so this is not a complete sensitive word
                return false;
            }
        }
        return true;
    }

    private static String getReplacementString(String replaceChar, int length) {
        StringBuilder replacement = new StringBuilder(replaceChar);
        for (int i = 1; i < length; i++) {
            replacement.append(replaceChar);
        }
        return replacement.toString();
    }

}
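A quick usage sketch of the utility class, assuming SensitiveWordInit has already built the trie at application startup. The class name and the input text are made up for illustration.

import java.util.Set;

public class SensitiveWordsUsageExample {

    /**
     * Demonstrates the three public entry points of SensitiveWordsUtil.
     */
    public static void demo(String text) {
        // Collect the distinct sensitive words found with minimum matching
        Set<String> hits = SensitiveWordsUtil.getSensitiveWord(text, SensitiveWordsUtil.MIN_MATCH_TYPE);

        // Count how many distinct sensitive words were found
        int count = SensitiveWordsUtil.checkSensitiveWord(text, SensitiveWordsUtil.MIN_MATCH_TYPE);

        // Mask every match with '*' repeated to the length of the matched word
        String masked = SensitiveWordsUtil.replaceSensitiveWord(text, SensitiveWordsUtil.MIN_MATCH_TYPE, "*");

        System.out.println(hits + " / " + count + " / " + masked);
    }
}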

import game.box.message.client.api.SensitiveWordsClient;
import game.box.message.client.request.SensitiveWordsReq;
import game.box.message.service.common.util.sensitive.SensitiveWordInit;
import game.box.message.service.common.util.SensitiveWordsUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RestController;

import javax.annotation.Resource;
import java.util.Objects;
import java.util.Set;

/**
 * @Description Sensitive word detection
 * @Date 2023/6/27 14:47
 * @Author ljh
 * @Version 1.0
 */
@Slf4j
@RestController
public class SensitiveWordsController implements SensitiveWordsClient {

    @Resource
    private SensitiveWordInit sensitiveWordInit;

    @Override
    public boolean sensitiveWordsCheck(SensitiveWordsReq req) {
        String words = StringUtils.trimAllWhitespace(req.getWords());
        int matchType = Objects.nonNull(req.getMatchType()) ? req.getMatchType() : SensitiveWordsUtil.MIN_MATCH_TYPE;
        return SensitiveWordsUtil.checkSensitiveWord(words, matchType) > 0;
    }

    @Override
    public Set<String> getSensitiveWords(SensitiveWordsReq req) {
        String words = StringUtils.trimAllWhitespace(req.getWords());
        return SensitiveWordsUtil.getSensitiveWord(words, SensitiveWordsUtil.MIN_MATCH_TYPE);
    }

    @Override
    public String replaceSensitiveWords(SensitiveWordsReq req) {
        int matchType = Objects.nonNull(req.getMatchType()) ? req.getMatchType() : SensitiveWordsUtil.MIN_MATCH_TYPE;
        String replaceChar = StringUtils.isEmpty(req.getReplaceChar()) ? "*" : req.getReplaceChar();
        String words = StringUtils.trimAllWhitespace(req.getWords());
        return SensitiveWordsUtil.replaceSensitiveWord(words, matchType, replaceChar);
    }

    @Override
    public synchronized void refreshSensiviveWords(boolean isForce) {
        sensitiveWordInit.refresh(isForce);
    }

}
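The SensitiveWordsReq request object is not shown in this article. Based on the getters the controller uses, it could look roughly like the Lombok sketch below; the field names are inferred from usage, and everything else is an assumption.

import lombok.Data;

/**
 * Request body for the sensitive word endpoints; a sketch inferred from the
 * controller's calls to getWords(), getMatchType() and getReplaceChar().
 */
@Data
public class SensitiveWordsReq {
    /** Text to be checked, filtered, or masked */
    private String words;
    /** Match type: 1 = minimum match, 2 = maximum match (optional) */
    private Integer matchType;
    /** Replacement character for masking; defaults to "*" when empty (optional) */
    private String replaceChar;
}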
