
Implementing the DFA Algorithm in Java: a Code Example

Published: 2020-09-28 21:46:46 · Source: 脚本之家 · Author: NopSmile · Category: Development

DFA in Brief

DFA stands for Deterministic Finite Automaton. For sensitive word filtering, the word list is compiled ahead of time into a set of states, so a text can be scanned character by character: each character either advances to a follow-up state or shows immediately that no sensitive word starts at the current position. The implementation below represents the states as nested HashMaps, one level per character, with an "isEnd" flag marking where a complete sensitive word ends.
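
To make that layout concrete, here is a minimal, hand-built sketch for a single word, "坏人". The class name DfaSketch and the sample text are throwaway illustrations and not part of the article's code; they only show the nested-HashMap structure that the classes below build automatically.

import java.util.HashMap;
import java.util.Map;

public class DfaSketch {
  public static void main(String[] args) {
    // Dictionary fragment for the single word "坏人":
    // root -> '坏' -> { isEnd=0, '人' -> { isEnd=1 } }
    Map<Object, Object> end = new HashMap<Object, Object>();
    end.put("isEnd", "1");
    Map<Object, Object> first = new HashMap<Object, Object>();
    first.put("isEnd", "0");
    first.put('人', end);
    Map<Object, Object> root = new HashMap<Object, Object>();
    root.put('坏', first);

    // Scan a text character by character, following the maps while they match
    Map<Object, Object> now = root;
    int matched = 0;
    for (char c : "坏人不坏".toCharArray()) {
      Object next = now.get(c);
      if (next == null) {
        break;
      }
      now = (Map<Object, Object>) next;
      matched++;
      if ("1".equals(now.get("isEnd"))) {
        break; // a complete sensitive word of length `matched` was found
      }
    }
    System.out.println(matched); // prints 2
  }
}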

Straight to the code:

The sensitive word entity class

package com.nopsmile.dfa;

/**
 * Sensitive word entity.
 */
public class Keywords {
  private String pid;
  private String content;

  public Keywords() {
  }

  public Keywords(String content) {
    this.content = content;
  }

  public String getContent() {
    return content;
  }

  public void setContent(String content) {
    this.content = content;
  }

  public String getPid() {
    return pid;
  }

  public void setPid(String pid) {
    this.pid = pid;
  }

}

Initializing the sensitive word dictionary

package com.nopsmile.dfa;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Builds the sensitive word dictionary.
 * 
 */
public class SensitiveWordInit{

  /**
   * The sensitive word dictionary (a trie of nested HashMaps)
   */
  public HashMap sensitiveWordMap;

  /**
   * Initializes the dictionary from a list of Keywords
   */
  public Map initKeyWord(List<Keywords> sensitiveWords) {
    try {
      // Copy the word contents into a Set to drop duplicates
      Set<String> keyWordSet = new HashSet<String>();
      for (Keywords s : sensitiveWords) {
        keyWordSet.add(s.getContent().trim());
      }
      // Build the nested-HashMap dictionary from the set
      addSensitiveWordToHashMap(keyWordSet);
    } catch (Exception e) {
      e.printStackTrace();
    }
    return sensitiveWordMap;
  }

  /**
   * Builds the dictionary: one level of nested HashMap per character
   */
  private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
    // Create the root map, sized by the number of words
    sensitiveWordMap = new HashMap(keyWordSet.size());
    // The current sensitive word
    String key = null;
    // The node the construction is currently at
    Map nowMap = null;
    // A helper map for newly created nodes
    Map<String, String> newWorMap = null;
    // Iterate over the word set
    Iterator<String> iterator = keyWordSet.iterator();
    while (iterator.hasNext()) {
      key = iterator.next();
      // Start every word at the root; nowMap and sensitiveWordMap reference the
      // same object, so changes made through nowMap are visible in the dictionary
      nowMap = sensitiveWordMap;
      for (int i = 0; i < key.length(); i++) {
        // Each character of the word becomes a key of a nested HashMap
        char keyChar = key.charAt(i);

        // Does this character already have a node under the current one?
        Object wordMap = nowMap.get(keyChar);
        if (wordMap != null) {
          nowMap = (Map) wordMap;
        } else {
          // No: create a new node, initially not an end-of-word node
          newWorMap = new HashMap<String, String>();
          newWorMap.put("isEnd", "0");
          nowMap.put(keyChar, newWorMap);
          nowMap = newWorMap;
        }

        // The last character of the word marks the end of a sensitive word
        if (i == key.length() - 1) {
          nowMap.put("isEnd", "1");
        }
      }
    }
  }
}
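
As a quick sanity check, here is a minimal sketch that builds the dictionary for two sample words and prints the nested structure. The class name InitDemo and the sample words are placeholders for illustration; it assumes the two classes above are compiled in the same package.

package com.nopsmile.dfa;

import java.util.Arrays;
import java.util.Map;

public class InitDemo {
  public static void main(String[] args) {
    SensitiveWordInit init = new SensitiveWordInit();
    // Build the dictionary for two sample words that share a first character
    Map dictionary = init.initKeyWord(
        Arrays.asList(new Keywords("坏人"), new Keywords("坏蛋")));
    // Prints something like {坏={isEnd=0, 人={isEnd=1}, 蛋={isEnd=1}}}
    System.out.println(dictionary);
  }
}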

The custom utility class

package com.nopsmile.dfa;

import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Sensitive word filtering utility.
 * 
 * @author AlanLee
 *
 */
public class SensitivewordUtils {
  /**
   * The sensitive word dictionary (nested HashMaps built by SensitiveWordInit)
   */
  public static Map sensitiveWordMap = null;

  /**
   * Match type: stop at the shortest sensitive word
   */
  public static int minMatchType = 1;

  /**
   * Match type: match the longest sensitive word
   */
  public static int maxMatchType = 2;

  /**
   * Size of the dictionary's root level (the number of distinct first characters)
   *
   * @return
   */
  public static int getWordSize() {
    if (SensitivewordUtils.sensitiveWordMap == null) {
      return 0;
    }
    return SensitivewordUtils.sensitiveWordMap.size();
  }

  /**
   * Checks whether the text contains any sensitive word
   *
   */
  public static boolean containsSensitiveWord(String txt, int matchType) {
    for (int i = 0; i < txt.length(); i++) {
      int matchFlag = checkSensitiveWord(txt, i, matchType);
      if (matchFlag > 0) {
        return true;
      }
    }
    return false;
  }

  /**
   * Collects the sensitive words contained in a text
   *
   * @param txt
   * @param matchType
   * @return the set of sensitive words found
   */
  public static Set<String> getSensitiveWord(String txt, int matchType) {
    Set<String> sensitiveWordList = new HashSet<String>();

    for (int i = 0; i < txt.length(); i++) {
      int length = checkSensitiveWord(txt, i, matchType);
      if (length > 0) {
        // Save the detected sensitive word and skip past it
        sensitiveWordList.add(txt.substring(i, i + length));
        i = i + length - 1;
      }
    }

    return sensitiveWordList;
  }

  /**
   * Replaces every sensitive word in the text with the replacement character
   *
   */
  public static String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
    String resultTxt = txt;
    Set<String> set = getSensitiveWord(txt, matchType);
    Iterator<String> iterator = set.iterator();
    String word = null;
    String replaceString = null;
    while (iterator.hasNext()) {
      word = iterator.next();
      replaceString = getReplaceChars(replaceChar, word.length());
      // Use replace (not replaceAll) so the word is treated literally, not as a regex
      resultTxt = resultTxt.replace(word, replaceString);
    }

    return resultTxt;
  }

  /**
   * Builds the replacement string: the replacement character repeated to the word's length
   *
   */
  private static String getReplaceChars(String replaceChar, int length) {
    String resultReplace = replaceChar;
    for (int i = 1; i < length; i++) {
      resultReplace += replaceChar;
    }

    return resultReplace;
  }

  /**
   * Checks whether a sensitive word starts at beginIndex and returns its length
   * (0 if no complete sensitive word starts there)
   *
   */
  public static int checkSensitiveWord(String txt, int beginIndex, int matchType) {
    // Length of the last complete sensitive word found on this path
    int matchLength = 0;
    // Number of characters matched along the current path
    int currentLength = 0;
    Map nowMap = SensitivewordUtils.sensitiveWordMap;
    if (nowMap == null) {
      return 0;
    }
    for (int i = beginIndex; i < txt.length(); i++) {
      char word = txt.charAt(i);
      // Does the dictionary have a node for this character under the current node?
      nowMap = (Map) nowMap.get(word);
      if (nowMap == null) {
        break;
      }
      currentLength++;
      // If this node ends a sensitive word, remember the matched length
      if ("1".equals(nowMap.get("isEnd"))) {
        matchLength = currentLength;
        // For the minimum-match strategy, stop at the first complete word;
        // otherwise keep scanning for a longer match
        if (SensitivewordUtils.minMatchType == matchType) {
          break;
        }
      }
    }
    return matchLength;
  }

  /**
   * Counts how many times each sensitive word occurs
   * Returns a map of "word" = "number of occurrences"
   *
   */
  public static Map getSensitiveWordSum(String txt, int matchType) {
    Map<String,Integer> map = new HashMap<String,Integer>();
    for (int i = 0; i < txt.length(); i++) {
      int length = checkSensitiveWord(txt, i, matchType);
      if (length > 0) {
        // Record the detected sensitive word, bump its counter, and skip past it
        String str = txt.substring(i, i + length);
        if (map.containsKey(str)) {
          map.put(str, map.get(str).intValue() + 1);
        } else {
          map.put(str, Integer.valueOf(1));
        }
        i = i + length - 1;
      }
    }
    return map;
  }

  /**
   * Sorts the map entries by value in descending order.
   * If isCondition is true, only the top `condition` entries are returned;
   * otherwise all entries are returned.
   * @param unsortMap
   * @return
   */
  public static Map<String, Integer> sortByValue(Map<String, Integer> unsortMap, int condition, boolean isCondition) {

    // 1. Convert the map to a list of entries
    List<Map.Entry<String, Integer>> list =
        new LinkedList<Map.Entry<String, Integer>>(unsortMap.entrySet());

    // 2. Sort the list with a custom Comparator
    //  (swap o1 and o2 for the opposite order)
    Collections.sort(list, new Comparator<Map.Entry<String, Integer>>() {
      public int compare(Map.Entry<String, Integer> o1,
                Map.Entry<String, Integer> o2) {
        return (o2.getValue()).compareTo(o1.getValue());
      }
    });

    // 3. Copy the sorted entries into a LinkedHashMap to preserve the order
    Map<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
    int limit = isCondition ? Math.min(condition, list.size()) : list.size();
    for (int i = 0; i < limit; i++) {
      sortedMap.put(list.get(i).getKey(), list.get(i).getValue());
    }
    return sortedMap;
  }
}

Putting the classes above to use:

Keywords ss = new Keywords("好");
List<Keywords> list = new ArrayList<Keywords>();
list.add(ss);

SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
Map sensitiveWordMap = sensitiveWordInit.initKeyWord(list);

// Hand the dictionary to SensitivewordUtils
SensitivewordUtils.sensitiveWordMap = sensitiveWordMap;

SensitivewordUtils.getSensitiveWordSum("the text to be checked", SensitivewordUtils.maxMatchType);
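
The remaining utility methods are called the same way. A minimal sketch, reusing the dictionary set above; the variable names and the placeholder text string are only for illustration:

String text = "the text to be checked";  // placeholder input

// All sensitive words found in the text
Set<String> hits = SensitivewordUtils.getSensitiveWord(text, SensitivewordUtils.maxMatchType);

// The text with every sensitive word masked by '*' of the same length
String masked = SensitivewordUtils.replaceSensitiveWord(text, SensitivewordUtils.maxMatchType, "*");

// Per-word counts, sorted by frequency and trimmed to the top 10
Map<String, Integer> counts = SensitivewordUtils.getSensitiveWordSum(text, SensitivewordUtils.maxMatchType);
Map<String, Integer> top10 = SensitivewordUtils.sortByValue(counts, 10, true);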

That's all for this article. I hope it is helpful for your learning.
