DFA stands for Deterministic Finite Automaton. Applied to sensitive-word filtering, the word library is built into a character-by-character tree of nested maps, so the input text can be scanned in a single pass instead of testing every keyword against every position.
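To make the idea concrete, here is a minimal standalone sketch (not part of the original code) that hand-builds, for the hypothetical keywords "abc" and "abd", the same trie-style nested-map structure that the word library below produces: each character maps to a child map, and an "isEnd" value of "1" marks the last character of a complete keyword.

import java.util.HashMap;
import java.util.Map;

// Illustrative sketch only: the nested-map layout that SensitiveWordInit (below)
// builds for the hypothetical keywords "abc" and "abd".
public class DfaStructureDemo {
    public static void main(String[] args) {
        Map<Object, Object> c = new HashMap<Object, Object>();
        c.put("isEnd", "1");                      // "abc" ends here
        Map<Object, Object> d = new HashMap<Object, Object>();
        d.put("isEnd", "1");                      // "abd" ends here
        Map<Object, Object> b = new HashMap<Object, Object>();
        b.put("isEnd", "0");
        b.put('c', c);
        b.put('d', d);
        Map<Object, Object> a = new HashMap<Object, Object>();
        a.put("isEnd", "0");
        a.put('b', b);
        Map<Object, Object> root = new HashMap<Object, Object>();
        root.put('a', a);
        // Prints something like {a={b={isEnd=0, c={isEnd=1}, d={isEnd=1}}, isEnd=0}}
        // (HashMap entry order may vary)
        System.out.println(root);
    }
}

Matching then walks the input once, descending one map level per character and stopping as soon as a character is missing from the current level or an end flag is reached.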
First, the keyword entity class:

package com.nopsmile.dfa;

/**
 * Sensitive keyword entity.
 */
public class Keywords {

    private String pid;
    private String content;

    public Keywords() {
    }

    public Keywords(String content) {
        super();
        this.content = content;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getPid() {
        return pid;
    }

    public void setPid(String pid) {
        this.pid = pid;
    }
}
Next, the class that builds the word library:

package com.nopsmile.dfa;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Initializes the sensitive-word library.
 */
public class SensitiveWordInit {

    /**
     * The sensitive-word library (a trie built out of nested maps).
     */
    public HashMap sensitiveWordMap;

    /**
     * Initialize the library from a list of Keywords entities.
     */
    public Map initKeyWord(List<Keywords> sensitiveWords) {
        try {
            // Collect the keyword strings into a Set
            Set<String> keyWordSet = new HashSet<String>();
            for (Keywords s : sensitiveWords) {
                keyWordSet.add(s.getContent().trim());
            }
            // Build the nested-map library from the keyword set
            addSensitiveWordToHashMap(keyWordSet);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return sensitiveWordMap;
    }

    /**
     * Build the nested-map (trie) representation of the keyword set.
     */
    private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
        // Size the root map to the number of keywords
        sensitiveWordMap = new HashMap(keyWordSet.size());
        // Current keyword
        String key = null;
        // Map level currently being filled
        Map nowMap = null;
        // Helper map for newly created levels
        Map<String, String> newWordMap = null;
        // Iterate over the keyword set
        Iterator<String> iterator = keyWordSet.iterator();
        while (iterator.hasNext()) {
            key = iterator.next();
            // Start each keyword from the root; nowMap and sensitiveWordMap reference
            // the same object, so changes made through nowMap update the library
            nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++) {
                // Each character of the keyword becomes a map key
                char keyChar = key.charAt(i);
                // Does this character already exist at the current level?
                Object wordMap = nowMap.get(keyChar);
                if (wordMap != null) {
                    nowMap = (Map) wordMap;
                } else {
                    newWordMap = new HashMap<String, String>();
                    newWordMap.put("isEnd", "0");
                    nowMap.put(keyChar, newWordMap);
                    nowMap = newWordMap;
                }
                // Mark the last character of the keyword as an end node
                if (i == key.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
    }
}
Finally, the filtering utility class, which works against the library built above (the unused JSON imports from the original listing have been dropped):

package com.nopsmile.dfa;

import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Sensitive-word filtering utilities.
 *
 * @author AlanLee
 *
 */
public class SensitivewordUtils {
    /**
     * The sensitive-word library.
     */
    public static Map sensitiveWordMap = null;

    /**
     * Match only the shortest sensitive word at each position.
     */
    public static int minMatchType = 1;

    /**
     * Match the longest sensitive word at each position.
     */
    public static int maxMatchType = 2;

    /**
     * Number of top-level entries in the sensitive-word library.
     *
     * @return 0 if the library has not been initialized
     */
    public static int getWordSize() {
        if (SensitivewordUtils.sensitiveWordMap == null) {
            return 0;
        }
        return SensitivewordUtils.sensitiveWordMap.size();
    }
    /**
     * Check whether the text contains any sensitive word.
     */
    public static boolean containsSensitiveWord(String txt, int matchType) {
        for (int i = 0; i < txt.length(); i++) {
            int matchFlag = checkSensitiveWord(txt, i, matchType);
            if (matchFlag > 0) {
                // One match is enough; no need to scan further
                return true;
            }
        }
        return false;
    }
    /**
     * Collect the sensitive words found in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchType or maxMatchType
     * @return the set of sensitive words found
     */
    public static Set<String> getSensitiveWord(String txt, int matchType) {
        Set<String> sensitiveWordList = new HashSet<String>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                // Save the detected sensitive word and skip past it
                sensitiveWordList.add(txt.substring(i, i + length));
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }
    /**
     * Replace every sensitive word in the text with a masking string.
     */
    public static String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
        String resultTxt = txt;
        Set<String> set = getSensitiveWord(txt, matchType);
        Iterator<String> iterator = set.iterator();
        String word = null;
        String replaceString = null;
        while (iterator.hasNext()) {
            word = iterator.next();
            replaceString = getReplaceChars(replaceChar, word.length());
            // Use replace (literal) rather than replaceAll (regex) so that words
            // containing regex metacharacters are handled correctly
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }
    /**
     * Build the masking string: the replacement character repeated length times.
     */
    private static String getReplaceChars(String replaceChar, int length) {
        String resultReplace = replaceChar;
        for (int i = 1; i < length; i++) {
            resultReplace += replaceChar;
        }
        return resultReplace;
    }
    /**
     * Check whether a sensitive word starts at the given position.
     *
     * @return the length of the matched sensitive word, or 0 if none starts here
     */
    public static int checkSensitiveWord(String txt, int beginIndex, int matchType) {
        // Length of the last complete sensitive word found so far
        int matchLength = 0;
        // Number of characters matched along the current path in the library
        int currentLength = 0;
        char word = 0;
        Map nowMap = SensitivewordUtils.sensitiveWordMap;
        if (nowMap == null) {
            return 0;
        }
        for (int i = beginIndex; i < txt.length(); i++) {
            word = txt.charAt(i);
            // Descend one level in the library for this character
            nowMap = (Map) nowMap.get(word);
            if (nowMap != null) {
                currentLength++;
                // If this character ends a sensitive word, record the matched length
                if ("1".equals(nowMap.get("isEnd"))) {
                    matchLength = currentLength;
                    // For minimum matching, stop at the first complete word;
                    // otherwise keep going to look for a longer match
                    if (SensitivewordUtils.minMatchType == matchType) {
                        break;
                    }
                }
            } else {
                break;
            }
        }
        // Only lengths that end exactly on a complete word are returned
        return matchLength;
    }
    /**
     * Count occurrences of each sensitive word.
     *
     * @return a map of keyword -> number of occurrences
     */
    public static Map<String, Integer> getSensitiveWordSum(String txt, int matchType) {
        Map<String, Integer> map = new HashMap<String, Integer>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                // Record the detected sensitive word and bump its count
                String str = txt.substring(i, i + length);
                if (map.containsKey(str)) {
                    map.put(str, map.get(str) + 1);
                } else {
                    map.put(str, 1);
                }
                i = i + length - 1;
            }
        }
        return map;
    }
    /**
     * Sort a map by value in descending order. The map is always sorted;
     * when isCondition is true, only the top `condition` entries are kept
     * (e.g. condition = 10 for a top-10 list), otherwise all entries are returned.
     *
     * @param unsortMap the map to sort
     * @return a LinkedHashMap in descending value order
     */
    public static Map<String, Integer> sortByValue(Map<String, Integer> unsortMap, int condition, boolean isCondition) {
        // 1. Copy the map entries into a list
        List<Map.Entry<String, Integer>> list =
                new LinkedList<Map.Entry<String, Integer>>(unsortMap.entrySet());
        // 2. Sort the list with a comparator on the values (descending);
        //    swap o1 and o2 for ascending order
        Collections.sort(list, new Comparator<Map.Entry<String, Integer>>() {
            public int compare(Map.Entry<String, Integer> o1,
                               Map.Entry<String, Integer> o2) {
                return (o2.getValue()).compareTo(o1.getValue());
            }
        });
        // 3. Copy the sorted entries into a LinkedHashMap to preserve the order
        Map<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
        if (isCondition) {
            for (int i = 0; i < list.size() && i < condition; i++) {
                sortedMap.put(list.get(i).getKey(), list.get(i).getValue());
            }
        } else {
            for (int i = 0; i < list.size(); i++) {
                sortedMap.put(list.get(i).getKey(), list.get(i).getValue());
            }
        }
        return sortedMap;
    }
}
Usage example: build a small word library and hand it to the utility class.

Keywords ss = new Keywords("好");
List<Keywords> list = new ArrayList<Keywords>();
list.add(ss);
SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();
Map sensitiveWordMap = sensitiveWordInit.initKeyWord(list);
// Hand the word library to SensitivewordUtils
SensitivewordUtils.sensitiveWordMap = sensitiveWordMap;
SensitivewordUtils.getSensitiveWordSum("需要檢測的文本", 2);
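The remaining helpers are used the same way. The following is a small sketch (not from the original article) that assumes a hypothetical keyword "bad" has been loaded into the library exactly as shown above, and simply calls each method on a sample text:

// Sketch only: assumes new Keywords("bad") was added to the library above.
String text = "this is a bad example with another bad word";
boolean hit = SensitivewordUtils.containsSensitiveWord(text, SensitivewordUtils.maxMatchType);
Set<String> found = SensitivewordUtils.getSensitiveWord(text, SensitivewordUtils.maxMatchType);
String masked = SensitivewordUtils.replaceSensitiveWord(text, SensitivewordUtils.maxMatchType, "*");
Map<String, Integer> counts = SensitivewordUtils.getSensitiveWordSum(text, SensitivewordUtils.maxMatchType);
System.out.println(hit);     // true
System.out.println(found);   // [bad]
System.out.println(masked);  // this is a *** example with another *** word
System.out.println(counts);  // {bad=2}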