日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 编程资源 > 编程问答 >内容正文

编程问答

java dfa_java 实现DFA 算法(理论百度搜索)

發布時間:2023/12/19 编程问答 44 豆豆
生活随笔 收集整理的這篇文章主要介紹了 java dfa_java 实现DFA 算法(理论百度搜索) 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

DFA簡介

DFA全稱為:Deterministic Finite Automaton,即確定有窮自動機。(自己百度吧)

直接代碼:

敏感詞實體類

package com.nopsmile.dfa;

/**
 * Sensitive-word entity: holds one keyword string plus an optional record id.
 */
public class Keywords {

    /** Optional identifier of this keyword record. */
    private String pid;

    /** The sensitive word itself (renamed from "Content" to lowerCamelCase). */
    private String content;

    /** No-arg constructor, kept for bean-style instantiation. */
    public Keywords() {
    }

    /**
     * Creates an entity holding the given keyword text.
     *
     * @param content the sensitive word
     */
    public Keywords(String content) {
        this.content = content;
    }

    public String getContent() {
        return content;
    }

    public void setContent(String content) {
        this.content = content;
    }

    public String getPid() {
        return pid;
    }

    public void setPid(String pid) {
        this.pid = pid;
    }

}

敏感詞庫初始化

package com.nopsmile.dfa;

import java.util.HashMap;

import java.util.HashSet;

import java.util.Iterator;

import java.util.List;

import java.util.Map;

import java.util.Set;

/**
 * Builds the DFA-style sensitive-word dictionary.
 *
 * <p>The dictionary is a nested map (a trie): each key is one character of a
 * word and maps to the map of possible next characters; the special key
 * {@code "isEnd"} ({@code "1"}/{@code "0"}) marks whether the path walked so
 * far spells a complete sensitive word.
 */
public class SensitiveWordInit {

    /** Root of the nested-map dictionary; populated by {@link #initKeyWord}. */
    public HashMap sensitiveWordMap;

    /**
     * Initializes the dictionary from a list of {@link Keywords} entities.
     *
     * @param sensitiveWords keyword records; each content string is trimmed
     * @return the root dictionary map (also retained in {@link #sensitiveWordMap})
     */
    public Map initKeyWord(List<Keywords> sensitiveWords) {
        try {
            // Collect the distinct, trimmed word strings.
            Set<String> keyWordSet = new HashSet<String>();
            for (Keywords s : sensitiveWords) {
                keyWordSet.add(s.getContent().trim());
            }
            // Fold the word set into the nested-map trie.
            addSensitiveWordToHashMap(keyWordSet);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return sensitiveWordMap;
    }

    /**
     * Inserts every word of the set into the nested-map trie.
     *
     * @param keyWordSet distinct sensitive words
     */
    private void addSensitiveWordToHashMap(Set<String> keyWordSet) {
        // Presize the root map to the number of words.
        sensitiveWordMap = new HashMap(keyWordSet.size());
        for (String key : keyWordSet) {
            // Walk from the root for each word; nowMap aliases nodes inside
            // sensitiveWordMap, so mutations below build the shared trie.
            Map nowMap = sensitiveWordMap;
            for (int i = 0; i < key.length(); i++) {
                char keyChar = key.charAt(i);
                Object wordMap = nowMap.get(keyChar);
                if (wordMap != null) {
                    // Prefix already present: descend into the existing node.
                    nowMap = (Map) wordMap;
                } else {
                    // New branch: create a node not yet marked as a word end.
                    Map newWordMap = new HashMap();
                    newWordMap.put("isEnd", "0");
                    nowMap.put(keyChar, newWordMap);
                    nowMap = newWordMap;
                }
                // Last character of this word: mark the node as a word end.
                if (i == key.length() - 1) {
                    nowMap.put("isEnd", "1");
                }
            }
        }
    }

}

自定義的工具類

package com.nopsmile.dfa;

import java.util.ArrayList;

import java.util.Collections;

import java.util.Comparator;

import java.util.HashMap;

import java.util.HashSet;

import java.util.Iterator;

import java.util.LinkedHashMap;

import java.util.LinkedList;

import java.util.List;

import java.util.Map;

import java.util.Set;

import com.alibaba.fastjson.JSONArray;

import net.sf.json.JSONObject;

/**
 * Sensitive-word filtering utility driven by a DFA-style nested-map dictionary
 * (see SensitiveWordInit for the dictionary layout: char -> next-level map,
 * with "isEnd" = "1"/"0" marking complete words).
 *
 * @author AlanLee
 */
public class SensitivewordUtils {

    /** Root of the nested-map dictionary; must be assigned before use. */
    public static Map sensitiveWordMap = null;

    /**
     * Match type: stop at the first (shortest) matching word.
     * NOTE: the typo in the name ("TYpe") is kept for caller compatibility.
     */
    public static int minMatchTYpe = 1;

    /** Match type: keep scanning to find the longest matching word. */
    public static int maxMatchType = 2;

    /**
     * Number of first-character entries in the dictionary.
     *
     * @return 0 when the dictionary is not initialized, else its size
     */
    public static int getWordSize() {
        if (SensitivewordUtils.sensitiveWordMap == null) {
            return 0;
        }
        return SensitivewordUtils.sensitiveWordMap.size();
    }

    /**
     * Checks whether the text contains at least one sensitive word.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return true if any sensitive word occurs in txt
     */
    public static boolean isContaintSensitiveWord(String txt, int matchType) {
        for (int i = 0; i < txt.length(); i++) {
            if (checkSensitiveWord(txt, i, matchType) > 0) {
                // One hit is enough; no need to scan the rest of the text.
                return true;
            }
        }
        return false;
    }

    /**
     * Extracts every sensitive word occurring in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return the set of distinct sensitive words found
     */
    public static Set<String> getSensitiveWord(String txt, int matchType) {
        Set<String> sensitiveWordList = new HashSet<String>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                sensitiveWordList.add(txt.substring(i, i + length));
                // Resume scanning after the matched word.
                i = i + length - 1;
            }
        }
        return sensitiveWordList;
    }

    /**
     * Replaces every sensitive word in the text with a run of replaceChar
     * of the same length as the word.
     *
     * @param txt         text to filter
     * @param matchType   minMatchTYpe or maxMatchType
     * @param replaceChar replacement string for each character of a word
     * @return the filtered text
     */
    public static String replaceSensitiveWord(String txt, int matchType, String replaceChar) {
        String resultTxt = txt;
        Set<String> set = getSensitiveWord(txt, matchType);
        for (String word : set) {
            String replaceString = getReplaceChars(replaceChar, word.length());
            // Bug fix: use literal replace() instead of replaceAll() so words
            // containing regex metacharacters (e.g. "$", "(") cannot corrupt
            // the replacement or throw a PatternSyntaxException.
            resultTxt = resultTxt.replace(word, replaceString);
        }
        return resultTxt;
    }

    /**
     * Builds the replacement string: replaceChar repeated length times.
     */
    private static String getReplaceChars(String replaceChar, int length) {
        StringBuilder resultReplace = new StringBuilder(replaceChar);
        for (int i = 1; i < length; i++) {
            resultReplace.append(replaceChar);
        }
        return resultReplace.toString();
    }

    /**
     * Walks the dictionary from beginIndex and returns the length of the
     * sensitive word starting there, or 0 if none starts at that position.
     *
     * @param txt        text to scan
     * @param beginIndex position in txt where matching starts
     * @param matchType  minMatchTYpe stops at the first complete word;
     *                   maxMatchType keeps walking for a longer one
     * @return length of the matched word, 0 when no complete word matches
     */
    public static int checkSensitiveWord(String txt, int beginIndex, int matchType) {
        // Length of the longest COMPLETE word found so far.
        int matchedLength = 0;
        // Number of characters matched along the current trie path.
        int pathLength = 0;
        Map nowMap = SensitivewordUtils.sensitiveWordMap;
        for (int i = beginIndex; i < txt.length(); i++) {
            char word = txt.charAt(i);
            nowMap = (Map) nowMap.get(word);
            if (nowMap == null) {
                // Character not in the dictionary at this node: stop walking.
                break;
            }
            pathLength++;
            if ("1".equals(nowMap.get("isEnd"))) {
                // Complete word reached. Bug fix: remember ITS length rather
                // than the raw path length — previously, in max-match mode, a
                // longer partial path (e.g. "abc" with dictionary {"ab","abcd"})
                // was returned, extracting text that is not a sensitive word.
                matchedLength = pathLength;
                if (SensitivewordUtils.minMatchTYpe == matchType) {
                    break;
                }
            }
        }
        return matchedLength;
    }

    /**
     * Counts the occurrences of each sensitive word in the text.
     *
     * @param txt       text to scan
     * @param matchType minMatchTYpe or maxMatchType
     * @return map of "word" -> occurrence count
     */
    public static Map getSensitiveWordSum(String txt, int matchType) {
        Map<String, Integer> map = new HashMap<String, Integer>();
        for (int i = 0; i < txt.length(); i++) {
            int length = checkSensitiveWord(txt, i, matchType);
            if (length > 0) {
                String str = txt.substring(i, i + length);
                Integer count = map.get(str);
                map.put(str, count == null ? Integer.valueOf(1) : Integer.valueOf(count.intValue() + 1));
                // Resume scanning after the matched word.
                i = i + length - 1;
            }
        }
        return map;
    }

    /**
     * Sorts the map by value in descending order; when isCondition is true,
     * only the first {@code condition} entries are kept.
     *
     * @param unsortMap   map of word -> count
     * @param condition   maximum number of entries to keep (when enabled)
     * @param isCondition whether the entry limit applies
     * @return a LinkedHashMap in descending-value order
     */
    public static Map sortByValue(Map unsortMap, int condition, boolean isCondition) {
        // 1. Copy the entries into a list we can sort.
        List<Map.Entry<String, Integer>> list =
                new LinkedList<Map.Entry<String, Integer>>(unsortMap.entrySet());

        // 2. Sort descending by value (o2 compared to o1).
        Collections.sort(list, new Comparator<Map.Entry<String, Integer>>() {
            public int compare(Map.Entry<String, Integer> o1,
                               Map.Entry<String, Integer> o2) {
                return o2.getValue().compareTo(o1.getValue());
            }
        });

        // 3. Re-insert in sorted order; LinkedHashMap preserves insertion order.
        Map<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
        int limit = isCondition ? Math.min(condition, list.size()) : list.size();
        for (int i = 0; i < limit; i++) {
            sortedMap.put(list.get(i).getKey(), list.get(i).getValue());
        }
        return sortedMap;
    }

}

使用上面類流程代碼

// Example usage: build a one-word dictionary and scan a text with it.
Keywords ss=new Keywords("好");

List list = new ArrayList();

list.add(ss);

// Build the nested-map (DFA) dictionary from the keyword list.
SensitiveWordInit sensitiveWordInit = new SensitiveWordInit();

Map sensitiveWordMap = sensitiveWordInit.initKeyWord(list);

// Hand the dictionary to the SensitivewordUtils filter class.
SensitivewordUtils.sensitiveWordMap = sensitiveWordMap;

// Count occurrences of each sensitive word (match type 2 = longest match).
SensitivewordUtils.getSensitiveWordSum("需要檢測的文本", 2) ;

總結

以上是生活随笔為你收集整理的java dfa_java 实现DFA 算法(理论百度搜索)的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。