最近我遇到了一个特殊的需求,写一个敏感词过滤功能。用于过滤一些不和谐,或者敏感的词汇。
最开始我的想法是利用 Flink + 分词器实现这个功能,但这有点小题大做了。后来想起我在 LeetCode 上遇到过类似的问题,于是在网上收集了有限自动机的代码,并做了一点点微不足道的改动,让它更符合代码规范。
唉~想当年劳资意气风发的时候也是打过ACM的,现在看看这些算法不禁老泪流了出来----浪花淘尽英雄啊(虽然我不是)!
想了解算法原理的朋友自行搜索有限自动机(DFA):
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* 敏感词库初始化
*
* @author AlanLee
*
*/
public class SensitiveWordInit
{
/**
* 敏感词库
*/
public HashMap sensitiveWordMap;
/**
* 初始化敏感词
*
* @return
*/
public Map initKeyWord(List<String> sensitiveWords)
{
try
{
// 从敏感词集合对象中取出敏感词并封装到Set集合中
Set<String> keyWordSet = new HashSet<String>();
for (String s : sensitiveWords)
{
keyWordSet.add(s.trim());
}
// 将敏感词库加入到HashMap中
addSensitiveWordToHashMap(keyWordSet);
}
catch (Exception e)
{
e.printStackTrace();
}
return sensitiveWordMap;
}
/**
* 封装敏感词库
*
* @param keyWordSet
*/
private void addSensitiveWordToHashMap(Set<String> keyWordSet)
{
// 初始化HashMap对象并控制容器的大小
sensitiveWordMap = new HashMap(keyWordSet.size());
// 敏感词
String key = null;
// 用来按照相应的格式保存敏感词库数据
Map nowMap = null;
// 用来辅助构建敏感词库
Map<String, String> newWorMap = null;
// 使用一个迭代器来循环敏感词集合
for (String s : keyWordSet) {
key = s;
// 等于敏感词库,HashMap对象在内存中占用的是同一个地址,所以此nowMap对象的变化,sensitiveWordMap对象也会跟着改变
nowMap = sensitiveWordMap;
for (int i = 0; i < key.length(); i++) {
// 截取敏感词当中的字,在敏感词库中字为HashMap对象的Key键值
char keyChar = key.charAt(i);
// 判断这个字是否存在于敏感词库中
if (nowMap.containsKey(keyChar)) {
nowMap = (Map) nowMap.get(keyChar);
} else {
newWorMap = new HashMap<String, String>();
newWorMap.put("isEnd", "0");
nowMap.put(keyChar, newWorMap);
nowMap = newWorMap;
}
// 如果该字是当前敏感词的最后一个字,则标识为结尾字
if (i == key.length() - 1) {
nowMap.put("isEnd", "1");
}
//System.out.println("封装敏感词库过程:"+sensitiveWordMap);
}
//System.out.println("查看敏感词库数据:" + sensitiveWordMap);
}
}
}
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
/**
* 敏感词过滤工具类
*
* @author AlanLee
*
*/
/**
 * Sensitive-word filtering utility backed by the nested-map DFA produced by
 * SensitiveWordInit. Each DFA node maps Character -> child node and carries
 * the String key "isEnd" ("1" marks the end of a complete word).
 *
 * @author AlanLee
 */
public class SensitiveWordEngine
{
    /**
     * DFA root; must be assigned (e.g. from SensitiveWordInit.initKeyWord)
     * before the lookup methods are used. A null map matches nothing.
     */
    public static Map sensitiveWordMap = null;

    /**
     * Match type: stop at the shortest (first-completed) matching word.
     * Field name keeps its original typo for backward compatibility.
     */
    public static int minMatchTYpe = 1;

    /**
     * Match type: keep scanning to find the longest matching word.
     */
    public static int maxMatchType = 2;

    /**
     * Size of the DFA root map, i.e. the number of DISTINCT FIRST characters
     * among the sensitive words — NOT the total number of words.
     *
     * @return root entry count, or 0 when no dictionary is loaded
     */
    public static int getWordSize()
    {
        if (SensitiveWordEngine.sensitiveWordMap == null)
        {
            return 0;
        }
        return SensitiveWordEngine.sensitiveWordMap.size();
    }

    /**
     * Checks whether the text contains at least one sensitive word.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return true if any sensitive word occurs in the text
     */
    public static boolean isContaintSensitiveWord(String txt, int matchType)
    {
        for (int i = 0; i < txt.length(); i++)
        {
            // Return as soon as any match is found; scanning further
            // cannot change the boolean answer.
            if (checkSensitiveWord(txt, i, matchType) > 0)
            {
                return true;
            }
        }
        return false;
    }

    /**
     * Collects every sensitive word occurring in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return the set of distinct matched words (empty when none)
     */
    public static Set<String> getSensitiveWord(String txt, int matchType)
    {
        Set<String> sensitiveWordList = new HashSet<String>();
        for (int i = 0; i < txt.length(); i++)
        {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0)
            {
                sensitiveWordList.add(txt.substring(i, i + length));
                // Skip past the matched word (loop increment adds the +1).
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }

    /**
     * Replaces every sensitive word in the text with a mask of equal length.
     *
     * @param txt         text to filter
     * @param matchType   minMatchTYpe or maxMatchType
     * @param replaceChar mask string repeated once per matched character
     * @return the filtered text
     */
    public static String replaceSensitiveWord(String txt, int matchType, String replaceChar)
    {
        String resultTxt = txt;
        for (String word : getSensitiveWord(txt, matchType))
        {
            String replaceString = getReplaceChars(replaceChar, word.length());
            // String.replace does LITERAL replacement. The previous
            // replaceAll treated the word as a regex, so a word such as
            // "休.杰克曼" (containing '.') masked unintended text.
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }

    /**
     * Builds the mask: replaceChar repeated {@code length} times.
     *
     * @param replaceChar mask unit
     * @param length      repetition count (assumed >= 1, as callers pass a
     *                    matched word's length)
     * @return the mask string
     */
    private static String getReplaceChars(String replaceChar, int length)
    {
        // StringBuilder avoids O(n^2) string concatenation in the loop.
        StringBuilder resultReplace = new StringBuilder(replaceChar);
        for (int i = 1; i < length; i++)
        {
            resultReplace.append(replaceChar);
        }
        return resultReplace.toString();
    }

    /**
     * Length of the sensitive word starting at {@code beginIndex}, or 0 when
     * no complete word starts there.
     *
     * @param txt        text to scan
     * @param beginIndex position where the match attempt starts
     * @param matchType  minMatchTYpe stops at the first complete word;
     *                   maxMatchType keeps extending to the longest one
     * @return length of the matched word (0 = no match)
     */
    public static int checkSensitiveWord(String txt, int beginIndex, int matchType)
    {
        Map nowMap = SensitiveWordEngine.sensitiveWordMap;
        // Guard: no dictionary loaded means nothing can match (previously NPE).
        if (nowMap == null)
        {
            return 0;
        }
        // Length of the longest COMPLETE word seen so far. Returning the raw
        // path length (as the original did) wrongly counted partial prefixes:
        // with words {"ab","abcd"} and text "abc" it returned 3.
        int matchedLength = 0;
        // Characters consumed along the current DFA path.
        int pathLength = 0;
        for (int i = beginIndex; i < txt.length(); i++)
        {
            char word = txt.charAt(i);
            nowMap = (Map) nowMap.get(word);
            if (nowMap == null)
            {
                // No transition for this character: the path is dead.
                break;
            }
            pathLength++;
            if ("1".equals(nowMap.get("isEnd")))
            {
                // A complete word ends here; remember its length.
                matchedLength = pathLength;
                if (SensitiveWordEngine.minMatchTYpe == matchType)
                {
                    break;
                }
            }
        }
        return matchedLength;
    }
}
然后就可以用了
// Build the dictionary and hand it to the engine.
SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
List<String> sensitiveWords = new ArrayList<>();
sensitiveWords.add("日当午");
sensitiveWords.add("坐爱枫");
sensitiveWords.add("休.杰克曼");
// Install the word map into SensitiveWordEngine.
SensitiveWordEngine.sensitiveWordMap = sensitiveWordInit.initKeyWord(sensitiveWords);
// Use the named constant rather than the magic number 2.
Set<String> set = SensitiveWordEngine.getSensitiveWord("锄禾日当午,坐爱枫林晚", SensitiveWordEngine.maxMatchType);
for (String s : set) {
    System.out.println(s);
}
溜了