dengsixing hace 3 años
padre
commit
ef51f334bc

+ 7 - 0
4dkankan-common-utils/pom.xml

@@ -144,6 +144,13 @@
             <version>1.2.2</version>
         </dependency>
 
+        <dependency>
+            <groupId>com.baomidou</groupId>
+            <artifactId>mybatis-plus-extension</artifactId>
+            <scope>compile</scope>
+            <version>3.4.3.4</version>
+        </dependency>
+
 
 
     </dependencies>

+ 1 - 0
4dkankan-common-utils/src/main/java/com/fdkankan/common/constant/ServerCode.java

@@ -6,6 +6,7 @@ public enum ServerCode {
 	SYSTEM_ERROR(-1, "服务器异常"),
 	PARAM_ERROR(-2, "解析请求参数出错"),
 	PARAM_REQUIRED(-3, "缺少必要参数"),
+	FEIGN_REQUEST_FAILD(-4, "跨服务请求失败"),
 
 	AUTH_FAIL(3000, "鉴权失败!"),
 	NON_TOKEN(3001, "无token,请重新登录"),

+ 1 - 1
4dkankan-common-utils/src/main/java/com/fdkankan/common/request/BasePageParam.java

@@ -5,7 +5,7 @@ import lombok.Data;
 import java.io.Serializable;
 
 @Data
-public class BasePageParam implements Serializable {
+public class RequestBase implements Serializable {
 
     private int pageNum = 1;
 

+ 10 - 0
4dkankan-common-utils/src/main/java/com/fdkankan/common/response/PageInfo.java

@@ -1,5 +1,6 @@
 package com.fdkankan.common.response;
 
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
 import lombok.AllArgsConstructor;
 import lombok.Builder;
 import lombok.Data;
@@ -19,4 +20,13 @@ public class PageInfo<T> {
 
     private T list;
 
+    /**
+     * Adapts a MyBatis-Plus {@link Page} result into a {@code PageInfo},
+     * copying the paging metadata (current page, page size, total count)
+     * and the record list.
+     *
+     * @param page the MyBatis-Plus page result; must not be {@code null}
+     * @param <T>  the record type held by the page
+     * @return a populated {@code PageInfo} wrapping {@code page.getRecords()}
+     */
+    public static <T> PageInfo<java.util.List<T>> PageInfo(Page<T> page){
+        // Explicit type witness keeps the Lombok builder typed; raw-typed
+        // callers of the original signature still compile unchanged.
+        return PageInfo.<java.util.List<T>>builder()
+            .pageNum(page.getCurrent())
+            .pageSize(page.getSize())
+            .total(page.getTotal())
+            .list(page.getRecords())
+            .build();
+    }
+
 }

+ 31 - 0
4dkankan-common-utils/src/main/java/com/fdkankan/common/util/PageInfoUtils.java

@@ -0,0 +1,31 @@
+package com.fdkankan.common.util;
+
+import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * Created by Hb_zzZ on 2021/2/18.
+ */
+public class PageInfoUtils {
+
+    /**
+     * Paginates an in-memory list into a MyBatis-Plus {@link Page}.
+     *
+     * @param arrayList the full result list to slice; must not be {@code null}
+     * @param pageNum   1-based page number
+     * @param pageSize  records per page; must be positive (division below)
+     * @param <T>       element type
+     * @return a {@code Page} whose records are the requested slice and whose
+     *         total/pages/current/size metadata describe {@code arrayList}
+     */
+    public static <T> Page<T> list2PageInfo(List<T> arrayList, Integer pageNum, Integer pageSize) {
+        int total = arrayList.size();
+        // (pageNum - 1) * pageSize is already 0 for page 1, so the original
+        // "pageNum == 1 ? 0 : ..." special case was redundant.
+        int pageStart = (pageNum - 1) * pageSize;
+        int pageEnd = Math.min(total, pageNum * pageSize);
+        // Copy the slice instead of handing out a subList view, so later
+        // mutation of the source list cannot corrupt the page records.
+        List<T> pageResult = new LinkedList<T>();
+        if (total > pageStart) {
+            pageResult.addAll(arrayList.subList(pageStart, pageEnd));
+        }
+        Page<T> pageInfo = new Page<T>();
+        pageInfo.setRecords(pageResult);
+        pageInfo.setTotal(total);
+        // Ceiling division replaces the modulo branch; yields 0 pages for an
+        // empty list, matching the original arithmetic.
+        pageInfo.setPages((total + pageSize - 1) / pageSize);
+        pageInfo.setCurrent(pageNum);
+        pageInfo.setSize(pageSize);
+        return pageInfo;
+    }
+}

+ 13 - 2
4dkankan-common-utils/src/main/java/com/fdkankan/common/validation/SensitiveWord.java

@@ -2,14 +2,20 @@ package com.fdkankan.common.validation;
 
 import com.fdkankan.common.constant.ConstantFileName;
 
+import com.fdkankan.common.constant.ServerCode;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Set;
+import javax.annotation.PostConstruct;
+import lombok.extern.slf4j.Slf4j;
+import net.sf.jsqlparser.schema.Server;
+import org.springframework.core.io.ClassPathResource;
 
 /**
  *  敏感词过滤
  */
+@Slf4j
 public class SensitiveWord {
     @SuppressWarnings("rawtypes")
     public Map sensitiveWordMap = null;
@@ -21,11 +27,16 @@ public class SensitiveWord {
      */
     public SensitiveWord(){
         // Lazily builds the shared word map; once populated, later
         // constructions reuse it without re-reading the file.
         if (sensitiveWordMap == null){
-            String txtPath = this.getClass().getResource("/static/txt/"+ ConstantFileName.BBS_SENSITIVE).getPath();
-            sensitiveWordMap = new SensitiveWordConfig().initKeyWord(txtPath);
+            // Switched from getResource().getPath() to ClassPathResource so the
+            // lookup no longer depends on a raw filesystem path.
+            // NOTE(review): ClassPathResource.getFile() still fails for an entry
+            // inside a packaged jar -- consider getInputStream(); verify in a jar deploy.
+            ClassPathResource classPathResource = new ClassPathResource("static/txt/" + ConstantFileName.BBS_SENSITIVE);
+            try {
+                sensitiveWordMap = new SensitiveWordConfig().initKeyWord(classPathResource.getFile());
+            }catch (Exception e){
+                // NOTE(review): on failure sensitiveWordMap remains null -- later
+                // lookups on it may NPE; confirm callers tolerate this.
+                log.error("初始化敏感词失败!", e);
+            }
         }
     }
 
+
     /**
      * 判断文字是否包含敏感字符
      * @author chenming

+ 38 - 0
4dkankan-common-utils/src/main/java/com/fdkankan/common/validation/SensitiveWordConfig.java

@@ -36,6 +36,19 @@ public class SensitiveWordConfig {
         return sensitiveWordMap;
     }
 
+    /**
+     * Builds the sensitive-word map from the given word-list file.
+     *
+     * @param file word-list file, one word per line
+     * @return the populated map, or the field's prior value (possibly null)
+     *         if reading failed
+     */
+    public Map initKeyWord(File file){
+        try {
+            // Read the sensitive-word lexicon into a set.
+            Set<String> keyWordSet = readSensitiveWordFile(file);
+            // Fold the words into the HashMap-based DFA structure.
+            addSensitiveWordToHashMap(keyWordSet);
+            // (Spring could fetch the application and cache the map via
+            // application.setAttribute("sensitiveWordMap", sensitiveWordMap).)
+        } catch (Exception e) {
+            // NOTE(review): failure is swallowed (stack trace only) and a
+            // possibly-null map is returned, mirroring the String overload --
+            // consider logging via a Logger and/or rethrowing; verify callers.
+            e.printStackTrace();
+        }
+        return sensitiveWordMap;
+    }
+
     /**
      * 读取敏感词库,将敏感词放入HashSet中,构建一个DFA算法模型:<br>
      * 中 = {
@@ -128,4 +141,29 @@ public class SensitiveWordConfig {
         }
         return set;
     }
+
+    /**
+     * Reads one sensitive word per line from {@code file} using the
+     * class-level {@code ENCODING} charset.
+     *
+     * @param file word-list file; must exist and be a regular file
+     * @return the set of words read (duplicates collapsed)
+     * @throws Exception if the file is missing or cannot be read
+     */
+    private Set<String> readSensitiveWordFile(File file) throws Exception{
+        // Validate before opening the stream: the original opened the
+        // FileInputStream first, so a missing file surfaced as a raw
+        // FileNotFoundException instead of this descriptive message.
+        if(!file.isFile() || !file.exists()){
+            throw new Exception("敏感词库文件不存在");
+        }
+        Set<String> set = new HashSet<String>();
+        // try-with-resources closes the BufferedReader (and the wrapped
+        // streams) on every path; the original leaked the BufferedReader and
+        // carried a no-op catch { throw e; }.
+        try (BufferedReader bufferedReader = new BufferedReader(
+                new InputStreamReader(new FileInputStream(file), ENCODING))) {
+            String txt;
+            while((txt = bufferedReader.readLine()) != null){
+                set.add(txt);
+            }
+        }
+        return set;
+    }
 }