第一步:通过传入敏感词构建敏感词库
/**
 * 敏感词库初始化 — builds the sensitive-word lexicon.
 *
 * <p>The lexicon is a DFA-style trie encoded as nested maps: each node maps a
 * {@code Character} to its child node, and the {@code String} key {@code "isEnd"}
 * ({@code "1"} / {@code "0"}) marks whether the path walked so far spells a
 * complete sensitive word. Keys therefore mix {@code Character} and
 * {@code String}, which is why raw map types are used throughout.</p>
 *
 * @author AlanLee
 */
public class SensitiveWordInit {
    /**
     * Root node of the sensitive-word trie. Populated by
     * {@link #initKeyWord(List)}; never null after that call.
     */
    @SuppressWarnings("rawtypes")
    public HashMap sensitiveWordMap;

    /**
     * Initializes the lexicon from a list of sensitive-word entities.
     *
     * <p>The original wrapped the whole build in {@code catch (Exception)} with
     * {@code printStackTrace()}, silently returning a half-built (or null) map
     * when any entity had a null title. Null entities and null/blank titles are
     * now skipped explicitly instead.</p>
     *
     * @param sensitiveWords entities whose {@code titleCn} holds the word; may be null
     * @return the root map of the trie (never null)
     */
    @SuppressWarnings("rawtypes")
    public Map initKeyWord(List<CommonDataEntity> sensitiveWords) {
        // Collect distinct, trimmed, non-empty words before building the trie.
        Set<String> keyWordSet = new HashSet<String>();
        if (sensitiveWords != null) {
            for (CommonDataEntity s : sensitiveWords) {
                if (s == null || s.getTitleCn() == null) {
                    continue; // skip bad rows instead of aborting the whole build
                }
                String word = s.getTitleCn().trim();
                if (!word.isEmpty()) {
                    keyWordSet.add(word);
                }
            }
        }
        // Build the DFA model from the collected words.
        addSensitiveWordToHashMap(keyWordSet);
        return sensitiveWordMap;
    }

    /**
     * Inserts every word of the set into the trie (DFA model).
     *
     * @param keyWordSet distinct, trimmed sensitive words
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
        // Pre-size the root; the number of distinct first characters is at most
        // the number of words.
        sensitiveWordMap = new HashMap(keyWordSet.size());
        for (String key : keyWordSet) {
            // Trie nodes are shared with sensitiveWordMap (same object graph),
            // so mutating nowMap extends the global lexicon in place.
            Map nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++) {
                // Each character of the word is a (boxed Character) map key.
                char keyChar = key.charAt(i);
                Object wordMap = nowMap.get(keyChar);
                if (wordMap != null) {
                    // Prefix already present: descend into the existing branch.
                    nowMap = (Map) wordMap;
                } else {
                    // New branch: create a child node, not yet a word end.
                    Map<String, String> newWordMap = new HashMap<String, String>();
                    newWordMap.put("isEnd", "0");
                    nowMap.put(keyChar, newWordMap);
                    nowMap = newWordMap;
                }
                // Last character of this word: mark the node as a word end.
                if (i == key.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
    }
}
第二步:筛选敏感词
/**
 * DFA-based sensitive-word filter operating on the trie produced by
 * {@link SensitiveWordInit} (Character keys + String "isEnd" marker).
 */
public class SensitivewordFilter {
    /** Root of the sensitive-word trie; null until a word list is supplied. */
    @SuppressWarnings("rawtypes")
    public Map sensitiveWordMap = null;
    /** 最小匹配规则: stop at the first (shortest) complete word. */
    public static int minMatchTYpe = 1;
    /** 最大匹配规则: keep scanning for the longest complete word. */
    public static int maxMatchType = 2;
    /** Source entities, used to map matched text back to its entity. */
    public List<CommonDataEntity> list;

    public SensitivewordFilter() {
    }

    /**
     * Builds the filter and initializes the lexicon from the given entities.
     *
     * @param list sensitive-word entities ({@code titleCn} holds the word)
     */
    public SensitivewordFilter(List<CommonDataEntity> list) {
        this.list = list;
        sensitiveWordMap = new SensitiveWordInit().initKeyWord(list);
    }

    /**
     * 判断文字是否包含敏感字符.
     *
     * @param txt 文字 — the text to scan
     * @param matchType 匹配规则 1:最小匹配规则, 2:最大匹配规则
     * @return true if any sensitive word occurs in the text
     * @version 1.0
     */
    public boolean isContaintSensitiveWord(String txt, int matchType) {
        for (int i = 0; i < txt.length(); i++) {
            // First hit suffices; the original kept scanning the rest of the
            // text even after the answer was known.
            if (this.CheckSensitiveWord(txt, i, matchType) > 0) {
                return true;
            }
        }
        return false;
    }

    /**
     * 检查文字中是否包含敏感字符 — checks for a sensitive word starting at
     * {@code beginIndex}.
     *
     * <p>Fixes two defects of the original implementation:</p>
     * <ul>
     *   <li>{@code if (matchFlag < 2 || !flag)} rejected every single-character
     *       sensitive word, contradicting its own "length &gt;= 1" comment;</li>
     *   <li>in max-match mode the walk depth of a <em>partially</em> matched
     *       longer word was returned (e.g. dict {"ab","abcd"}, text "abc"
     *       returned 3), so callers sliced a non-word substring. The length of
     *       the last <em>complete</em> word is returned instead.</li>
     * </ul>
     *
     * @param txt text to scan
     * @param beginIndex index the match must start at
     * @param matchType 1: minimum match, 2: maximum match
     * @return length of the matched sensitive word, or 0 if none starts here
     * @version 1.0
     */
    @SuppressWarnings({ "rawtypes" })
    public int CheckSensitiveWord(String txt, int beginIndex, int matchType) {
        if (sensitiveWordMap == null || txt == null) {
            return 0; // no lexicon loaded (no-arg constructor) or nothing to scan
        }
        int depth = 0;   // characters walked in the trie from beginIndex
        int matched = 0; // length of the last complete word seen on this path
        Map nowMap = sensitiveWordMap;
        for (int i = beginIndex; i < txt.length(); i++) {
            nowMap = (Map) nowMap.get(txt.charAt(i));
            if (nowMap == null) {
                break; // path left the trie: no longer word is possible
            }
            depth++;
            if ("1".equals(nowMap.get("isEnd"))) {
                matched = depth; // a complete word ends at this character
                if (SensitivewordFilter.minMatchTYpe == matchType) {
                    break; // minimum rule: the first complete word wins
                }
            }
        }
        return matched;
    }

    /**
     * 获取文字中的敏感词 — collects the entities of all sensitive words found
     * in the text.
     *
     * @param txt text to scan
     * @param matchType 匹配规则 1:最小匹配规则, 2:最大匹配规则
     * @return entities whose {@code titleCn} occurs in the text
     * @version 1.0
     */
    public List<CommonDataEntity> getSensitiveWord(String txt, int matchType) {
        List<CommonDataEntity> sensitiveWordList = new ArrayList<CommonDataEntity>();
        for (int i = 0; i < txt.length(); i++) {
            int length = CheckSensitiveWord(txt, i, matchType);
            if (length > 0) {
                // Hoisted out of the inner loop: the matched substring is
                // invariant while scanning the entity list.
                String word = txt.substring(i, i + length);
                // Map the matched text back to its source entity/entities.
                for (CommonDataEntity commonDataEntity : list) {
                    if (commonDataEntity.getTitleCn().equals(word)) {
                        sensitiveWordList.add(commonDataEntity);
                    }
                }
                i = i + length - 1; // -1 because the for loop increments i
            }
        }
        return sensitiveWordList;
    }
}
第三步:查询敏感词数据
/**
 * 过滤敏感词服务 — scans the submitted text and returns the sensitive-word
 * entities it contains.
 *
 * @param condition search model: {@code k} is the text to scan,
 *        {@code sesitiveWord} is an optional array of words to ignore
 * @return the matched sensitive-word entities wrapped in a ReturnResult
 * @throws Exception propagated from the data service
 */
@RequestMapping(value="/sensitive",method=RequestMethod.POST)
public ReturnResult filterSensitiveWord( @RequestBody LemmasSearchModel condition) throws Exception {
    // Load all sensitive-word entries. NOTE(review): type 5 presumably selects
    // the sensitive-word category — confirm against the common-data table.
    CommonDataEntity commonDataEntity = new CommonDataEntity();
    commonDataEntity.setType(5);
    List<CommonDataEntity> list = commonDataService.list(commonDataEntity);
    // Drop the words the caller asked to ignore. The original advanced j past
    // the element that shifted into the removed slot, so adjacent duplicates
    // of an ignored word survived; step j back after each removal.
    if (condition.getSesitiveWord() != null) {
        for (int i = 0; i < condition.getSesitiveWord().length; i++) {
            for (int j = 0; j < list.size(); j++) {
                if (list.get(j).getTitleCn().equals(condition.getSesitiveWord()[i])) {
                    list.remove(j);
                    j--; // re-examine the element that shifted into index j
                }
            }
        }
    }
    SensitivewordFilter s = new SensitivewordFilter(list);
    // Collect every sensitive word using the maximum-match rule (named
    // constant instead of the magic number 2).
    List<CommonDataEntity> sensitiveWordList = s.getSensitiveWord(condition.getK(), SensitivewordFilter.maxMatchType);
    return new ReturnResult(sensitiveWordList);
}