关键词过滤

13 篇文章 0 订阅

关键词库创建思路:采用树形结构(DFA/字典树,每个节点可有多个子节点,并非二叉树)


实现代码:

1.关键词库初始化:

package com.slince.test2;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

/**
 * Initializes the sensitive-word library: reads words from a plain-text file
 * (one word per line) and builds the nested-HashMap trie ("DFA model") that
 * {@code SensitivewordFilter} walks during matching.
 *
 * @author chenming
 * @version 1.0 (2014-04-20)
 */
public class SensitiveWordInit {
   private String ENCODING = "GBK";                        // charset of the word-list file
   private String wordFilePath = "D:\\SensitiveWord.txt";  // default word-list location
   // Trie root. Each node maps: next char -> child node, plus the String key
   // "isEnd" -> "1"/"0" marking whether a complete word terminates there.
   @SuppressWarnings("rawtypes")
   public HashMap sensitiveWordMap;

   public SensitiveWordInit(){
      super();
   }

   /**
    * Backward-compatible constructor allowing callers to point at a different
    * word-list file and charset (portability and testing).
    *
    * @param wordFilePath path of the word-list file, one word per line
    * @param encoding     charset of that file, e.g. "GBK" or "UTF-8"
    */
   public SensitiveWordInit(String wordFilePath, String encoding){
      this.wordFilePath = wordFilePath;
      this.ENCODING = encoding;
   }

   /**
    * Loads the word list and builds the trie.
    *
    * @return the trie root map, or {@code null} if loading failed
    */
   @SuppressWarnings("rawtypes")
   public Map initKeyWord(){
      try {
         // Read the sensitive-word file into a set.
         Set<String> keyWordSet = readSensitiveWordFile();
         // Build the nested-HashMap trie from the set.
         // FIX: in the published listing this call had been merged into the
         // preceding comment line, so the map was never populated.
         addSensitiveWordToHashMap(keyWordSet);
      } catch (Exception e) {
         e.printStackTrace();   // best-effort: callers receive null on failure
      }
      return sensitiveWordMap;
   }

   /**
    * Builds the DFA trie from the word set. Example for words {"ab", "abc"}:
    * <pre>
    * { a = { isEnd = "0",
    *         b = { isEnd = "1",
    *               c = { isEnd = "1" } } } }
    * </pre>
    *
    * @param keyWordSet the sensitive words to insert
    */
   @SuppressWarnings({ "rawtypes", "unchecked" })
   private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
      sensitiveWordMap = new HashMap(keyWordSet.size());   // presize to limit rehashing
      Map nowMap;
      for (String key : keyWordSet) {
         nowMap = sensitiveWordMap;                        // start at the root for each word
         for (int i = 0 ; i < key.length() ; i++) {
            char keyChar = key.charAt(i);                  // next character of the word
            Object wordMap = nowMap.get(keyChar);          // existing child node, if any

            if (wordMap != null) {                         // path already exists: descend
               nowMap = (Map) wordMap;
            }
            else {                                         // create a new, non-terminal node
               Map<String, String> newWordMap = new HashMap<String, String>();
               newWordMap.put("isEnd", "0");
               nowMap.put(keyChar, newWordMap);
               nowMap = newWordMap;
            }

            if (i == key.length() - 1) {
               nowMap.put("isEnd", "1");                   // a complete word ends at this node
            }
         }
      }
   }

   /**
    * Reads the word-list file, one word per line, into a set.
    *
    * @return set of sensitive words (one per line of the file)
    * @throws Exception if the file does not exist or cannot be read
    */
   private Set<String> readSensitiveWordFile() throws Exception{
      File file = new File(wordFilePath);
      // FIX: check existence BEFORE opening the stream; the original opened
      // the FileInputStream first, so a missing file surfaced as a raw
      // FileNotFoundException instead of the intended message.
      if (!file.isFile() || !file.exists()) {
         throw new Exception("敏感词库文件不存在");
      }
      Set<String> set = new HashSet<String>();
      InputStreamReader read = new InputStreamReader(new FileInputStream(file), ENCODING);
      try {
         BufferedReader bufferedReader = new BufferedReader(read);
         String txt;
         while ((txt = bufferedReader.readLine()) != null) {
            // FIX: in the published listing this add had been merged into the
            // loop comment, so the set stayed empty.
            set.add(txt);
         }
      } finally {
         read.close();   // closing the underlying reader releases the stream
      }
      return set;
   }
}

2.关键词检索和测试:

package com.slince.test2;

import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

/**
 * Sensitive-word filter: walks the nested-map trie built by
 * {@code SensitiveWordInit} to detect, extract, and mask sensitive words.
 *
 * @author chenming
 * @version 1.0 (2014-04-20)
 */
public class SensitivewordFilter {
   @SuppressWarnings("rawtypes")
   private Map sensitiveWordMap = null;   // trie root: char -> node, "isEnd" -> "1"/"0"
   public static int minMatchTYpe = 1;    // stop at the first complete word
   public static int maxMatchType = 2;    // keep extending to the longest complete word

   /**
    * Default constructor: loads the word library from its default location.
    */
   public SensitivewordFilter(){
      sensitiveWordMap = new SensitiveWordInit().initKeyWord();
   }

   /**
    * Backward-compatible constructor for injecting a pre-built trie
    * (testing, or alternative loaders).
    *
    * @param sensitiveWordMap trie root as produced by {@code SensitiveWordInit}
    */
   @SuppressWarnings("rawtypes")
   public SensitivewordFilter(Map sensitiveWordMap){
      this.sensitiveWordMap = sensitiveWordMap;
   }

   /**
    * Checks whether the text contains at least one sensitive word.
    *
    * @param txt       text to scan
    * @param matchType 1 = minimum match, 2 = maximum match
    * @return {@code true} if a sensitive word occurs anywhere in the text
    */
   public boolean isContaintSensitiveWord(String txt, int matchType){
      for (int i = 0 ; i < txt.length() ; i++) {
         // FIX: return immediately on the first hit; the original kept
         // scanning the rest of the text after the answer was known.
         if (CheckSensitiveWord(txt, i, matchType) > 0) {
            return true;
         }
      }
      return false;
   }

   /**
    * Extracts all sensitive words occurring in the text.
    *
    * @param txt       text to scan
    * @param matchType 1 = minimum match, 2 = maximum match
    * @return set of distinct sensitive words found
    */
   public Set<String> getSensitiveWord(String txt, int matchType){
      Set<String> sensitiveWordList = new HashSet<String>();

      for (int i = 0 ; i < txt.length() ; i++) {
         int length = CheckSensitiveWord(txt, i, matchType);   // match length at position i
         if (length > 0) {
            // FIX: in the published listing this add had been merged into the
            // comment line, so no words were ever collected.
            sensitiveWordList.add(txt.substring(i, i + length));
            i = i + length - 1;   // -1 because the for loop increments i
         }
      }

      return sensitiveWordList;
   }

   /**
    * Replaces every sensitive word in the text with a mask of equal length.
    *
    * @param txt         text to clean
    * @param matchType   1 = minimum match, 2 = maximum match
    * @param replaceChar mask character, e.g. "*"
    * @return the masked text
    */
   public String replaceSensitiveWord(String txt, int matchType, String replaceChar){
      String resultTxt = txt;
      Set<String> set = getSensitiveWord(txt, matchType);   // all sensitive words present
      for (String word : set) {
         String replaceString = getReplaceChars(replaceChar, word.length());
         // FIX: use replace(), not replaceAll() — replaceAll treats the word
         // as a regex, so words containing metacharacters (e.g. "$", "(")
         // would mis-replace or throw PatternSyntaxException.
         resultTxt = resultTxt.replace(word, replaceString);
      }

      return resultTxt;
   }

   /**
    * Builds a mask string: {@code replaceChar} repeated {@code length} times.
    *
    * @param replaceChar mask character
    * @param length      number of repetitions
    * @return the mask string
    */
   private String getReplaceChars(String replaceChar, int length){
      StringBuilder resultReplace = new StringBuilder(replaceChar);
      for (int i = 1 ; i < length ; i++) {
         resultReplace.append(replaceChar);
      }

      return resultReplace.toString();
   }

   /**
    * Walks the trie from {@code beginIndex} and reports the length of the
    * sensitive word starting there.
    *
    * @param txt        text to scan
    * @param beginIndex position at which a word must start
    * @param matchType  1 = minimum match, 2 = maximum match
    * @return length of the matched word, or 0 if none starts at beginIndex
    */
   @SuppressWarnings({ "rawtypes"})
   public int CheckSensitiveWord(String txt, int beginIndex, int matchType){
      int matchFlag = 0;    // characters matched along the current trie path
      int matchedLen = 0;   // length of the last COMPLETE word seen
      Map nowMap = sensitiveWordMap;
      if (nowMap == null) { // library failed to load: nothing can match
         return 0;
      }
      for (int i = beginIndex ; i < txt.length() ; i++) {
         char word = txt.charAt(i);
         nowMap = (Map) nowMap.get(word);   // descend to the child for this char
         if (nowMap == null) {              // path broken: no further word possible
            break;
         }
         matchFlag++;
         if ("1".equals(nowMap.get("isEnd"))) {
            matchedLen = matchFlag;         // a complete word ends here
            if (SensitivewordFilter.minMatchTYpe == matchType) {
               break;                       // shortest match requested: stop now
            }
         }
      }
      // FIX: the original's "matchFlag < 2 || !flag" reset rejected
      // single-character sensitive words, and under maxMatchType matchFlag
      // could over-run the last complete word (counting a partial prefix of a
      // longer word). Returning matchedLen fixes both.
      return matchedLen;
   }

   public static void main(String[] args) {
      SensitivewordFilter filter = new SensitivewordFilter();
      System.out.println("敏感词的数量:" + filter.sensitiveWordMap.size());
      String string = "老虎机,游戏机,假币,毛泽东";
      System.out.println("待检测语句字数:" + string.length());
      long beginTime = System.currentTimeMillis();
      Set<String> set = filter.getSensitiveWord(string, 1);
      long endTime = System.currentTimeMillis();
      System.out.println("语句中包含敏感词的个数为:" + set.size() + "。包含:" + set);
      System.out.println("总共消耗时间为:" + (endTime - beginTime));
   }
}
3.加载的关键词库的格式:简单文本格式(一行代表一个关键词)

  • 0
    点赞
  • 1
    收藏
    觉得还不错? 一键收藏
  • 0
    评论

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值