
Shore power report generation branch: initial commit

sbj · 3 weeks ago
commit 693dcd7e9c

+ 0 - 54
src/main/java/com/shcd/scheduling/DredgingSchedule.java

@@ -1,54 +0,0 @@
-//package com.shcd.scheduling;
-//
-//
-//import com.shcd.entity.dredging.DredgingImportRule;
-//import com.shcd.service.dredging.DredgingService;
-//import com.xxl.job.core.biz.model.ReturnT;
-//import com.xxl.job.core.handler.annotation.XxlJob;
-//import com.xxl.job.core.log.XxlJobLogger;
-//import groovy.util.logging.Slf4j;
-//import org.springframework.scheduling.annotation.Scheduled;
-//import org.springframework.stereotype.Component;
-//
-//import javax.annotation.Resource;
-//import java.util.List;
-//
-//@Component
-//@Slf4j
-//public class DredgingSchedule {
-//    @Resource
-//    private DredgingService dredgingService;
-//    @XxlJob("Dredging")
-//    //@Scheduled(cron = "0 0 1 * * ?")
-//    public ReturnT<String> dredging(String param){
-//       List<DredgingImportRule> dredgingImportRules =  dredgingService.getAllProjectId();
-//       if (dredgingImportRules != null && dredgingImportRules.size() > 0){
-//           for (DredgingImportRule dredgingImportRule : dredgingImportRules) {
-//               try {
-//                   dredgingService.importData(dredgingImportRule);
-//               } catch (Exception e) {
-//                   e.printStackTrace();
-//                   XxlJobLogger.log(e.getMessage() + "疏浚数据导入失败");
-//                   return ReturnT.FAIL;
-//               }
-//           }
-//           System.out.println("ok");
-//       }
-//       return ReturnT.SUCCESS;
-//    }
-//
-//    @XxlJob("DredgingCamera")
-//    public ReturnT<String> dredgingCamera(){
-//        List<DredgingImportRule> dredgingImportRules =  dredgingService.getAllProjectId();
-//        if (!dredgingImportRules.isEmpty()){
-//            for (DredgingImportRule dredgingImportRule : dredgingImportRules) {
-//                try {
-//                    dredgingService.updateShipCamera(dredgingImportRule.getProjectOverviewId());
-//                }catch (Exception e){
-//                    e.printStackTrace();
-//                    XxlJobLogger.log("刷新船舶失败 : {}" ,  e.getMessage());
-//                }
-//            }
-//        }
-//    }
-//}
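
Worth noting before the file disappears: both removed handlers follow the same XXL-Job pattern (fetch all DredgingImportRule rows, process each inside its own try/catch, report via ReturnT), but dredgingCamera() is declared to return ReturnT<String> and never returns a value, and the class imports groovy.util.logging.Slf4j rather than Lombok's annotation, so it would not have compiled if the comment markers were ever stripped. A minimal sketch of that handler with the missing return added, reusing the project types and XXL-Job calls from the removed code (the class name DredgingCameraJob is illustrative, not part of this commit):

package com.shcd.scheduling;

import com.shcd.entity.dredging.DredgingImportRule;
import com.shcd.service.dredging.DredgingService;
import com.xxl.job.core.biz.model.ReturnT;
import com.xxl.job.core.handler.annotation.XxlJob;
import com.xxl.job.core.log.XxlJobLogger;
import java.util.List;
import javax.annotation.Resource;
import org.springframework.stereotype.Component;

@Component
public class DredgingCameraJob {

    @Resource
    private DredgingService dredgingService;

    // Refresh ship cameras for every project; a failure for one project is
    // logged and the loop continues instead of aborting the whole run.
    @XxlJob("DredgingCamera")
    public ReturnT<String> dredgingCamera(String param) {
        List<DredgingImportRule> rules = dredgingService.getAllProjectId();
        if (rules == null || rules.isEmpty()) {
            return ReturnT.SUCCESS;
        }
        for (DredgingImportRule rule : rules) {
            try {
                dredgingService.updateShipCamera(rule.getProjectOverviewId());
            } catch (Exception e) {
                XxlJobLogger.log("camera refresh failed: {}", e.getMessage());
            }
        }
        return ReturnT.SUCCESS; // the removed version omitted this return
    }
}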

+ 136 - 136
src/main/java/com/shcd/scheduling/ShorePowerReportGenerateScheduler.java

@@ -1,136 +1,136 @@
-//package com.shcd.scheduling;
-//
-//import com.shcd.entity.ShorePowerDeviceUsage;
-//import com.shcd.enums.AreaSortEnum;
-//import com.shcd.service.ShorePowerDeviceUsageService;
-//import com.shcd.service.SpStatementInlandLowService;
-//import com.shcd.service.SpStatementQuarterReportService;
-//import com.shcd.service.SpStatementShipUsageMonthService;
-//import java.text.ParseException;
-//import java.time.LocalDate;
-//import java.time.format.DateTimeFormatter;
-//import javax.annotation.Resource;
-//import lombok.extern.slf4j.Slf4j;
-//import org.springframework.beans.factory.annotation.Autowired;
-//import org.springframework.scheduling.annotation.Scheduled;
-//import org.springframework.stereotype.Component;
-//
-//@Component
-//@Slf4j
-//public class ShorePowerReportGenerateScheduler {
-//
-//  @Resource
-//  SpStatementShipUsageMonthService spStatementShipUsageMonthService;
-//
-//  @Resource
-//  SpStatementInlandLowService spStatementInlandLowService;
-//
-//  @Autowired
-//  private SpStatementQuarterReportService spStatementQuarterReportService;
-//
-//  @Resource
-//  private ShorePowerDeviceUsageService shorePowerDeviceUsageService;
-//
-//  //  @PostConstruct
-//  @Scheduled(cron = "0 0 0 25 * ?")
-//  public void generateSpInlandLowStatement() {
-//    try {
-//      // 生成各区上报数据   内河低压分析表
-//      log.warn("开始生成 内河低压分析表-各区上报数据  ========");
-//      generateReport1();
-//      log.warn("内河低压分析表-各区上报数据  ======== 生成结束");
-//      log.warn("生成下个月的内河非标岸电使用情况表  ======");
-//      generateDeviceUsage();
-//      log.warn("生成下个月的内河非标岸电使用情况表  ====== 生成结束");
-//////       生成市级平台自动上传数据   内河低压分析表
-//      log.warn("开始生成 内河低压分析表-市级平台自动上传数据  ========");
-//      generateReport2();
-//      log.warn("内河低压分析表-市级平台自动上传数据  ========生成结束");
-//      //生成上海市长江经济带船舶使用岸电情况报表
-//      log.warn(
-//          "开始生成 上海市长江经济带船舶使用岸电情况报表   以及  上海港船舶使用岸电情况分年度对比分析表  以及  靠泊上港集团码头的集装箱船舶(2024年上海市岸电使用指标Part2) ========");
-//      generateReport3();
-//      log.warn("上海市长江经济带船舶使用岸电情况报表 ========生成结束");
-//      log.warn("开始生成  岸电季度报表   ================");
-//      generateSpQuarterReport();
-//      log.warn("岸电季度报表 ========生成结束");
-//    } catch (Exception e) {
-//      log.error("中间表生成逻辑出错....原因:{}", e.getMessage());
-//    }
-//  }
-//
-//  private void generateDeviceUsage() throws ParseException {
-//    try {
-//      shorePowerDeviceUsageService.generateDeviceUsageNextMonth(getShorePowerNextMonthDate());
-//    } catch (Exception e) {
-//      log.error("生成下个月的内河非标岸电使用情况表出错....原因:{}", e.getMessage());
-//    }
-//
-//  }
-//
-//  //@PostConstruct
-//  public void generateSpQuarterReport() {
-//    try {
-//      spStatementQuarterReportService.generateReportSpQuarterReport(getShorePowerDate());
-//    } catch (Exception e) {
-//      log.error("季度表出错....原因:{}", e.getMessage());
-//    }
-//  }
-//
-//  private void generateReport3() {
-//    try {
-//      spStatementShipUsageMonthService.makeShangHaiShorePowerUsageRecord(getShorePowerDate());
-//    } catch (Exception e) {
-//      log.error("上海市长江经济带船舶使用岸电情况报表出错....原因:{}", e.getMessage());
-//    }
-//  }
-//
-//  private void generateReport1() {
-//    try {
-//      spStatementInlandLowService.makeReport1(getShorePowerDate(), getShorePowerLastMonthDate());
-//    } catch (Exception e) {
-//      log.error("内河低压分析表-市级平台自动上传数据出错....原因:{}", e.getMessage());
-//    }
-//  }
-//
-//  private void generateReport2() {
-//    try {
-//      spStatementInlandLowService.makeReport2(getShorePowerDate(), getShorePowerLastMonthDate(),
-//          getShorePowerLast2MonthDate(), getShorePowerLast3MonthDate());
-//    } catch (Exception e) {
-//      log.error("内河低压分析表-各区上报出错....原因:{}", e.getMessage());
-//    }
-//  }
-//
-//  private static String getShorePowerDate() {
-//    String date = null;
-//    LocalDate now = LocalDate.now();
-//    date = DateTimeFormatter.ofPattern("yyyy-MM").format(now);
-//    return date;
-//  }
-//
-//  private static String getShorePowerNextMonthDate() {
-//    String date = null;
-//    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(1));
-//    return date;
-//  }
-//
-//  private static String getShorePowerLastMonthDate() {
-//    String date = null;
-//    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(-1));
-//    return date;
-//  }
-//
-//
-//  private static String getShorePowerLast2MonthDate() {
-//    String date = null;
-//    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(-2));
-//    return date;
-//  }
-//
-//  private static String getShorePowerLast3MonthDate() {
-//    String date = null;
-//    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(-3));
-//    return date;
-//  }
-//}
+package com.shcd.scheduling;
+
+import com.shcd.entity.ShorePowerDeviceUsage;
+import com.shcd.enums.AreaSortEnum;
+import com.shcd.service.ShorePowerDeviceUsageService;
+import com.shcd.service.SpStatementInlandLowService;
+import com.shcd.service.SpStatementQuarterReportService;
+import com.shcd.service.SpStatementShipUsageMonthService;
+import java.text.ParseException;
+import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
+import javax.annotation.Resource;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.scheduling.annotation.Scheduled;
+import org.springframework.stereotype.Component;
+
+@Component
+@Slf4j
+public class ShorePowerReportGenerateScheduler {
+
+  @Resource
+  SpStatementShipUsageMonthService spStatementShipUsageMonthService;
+
+  @Resource
+  SpStatementInlandLowService spStatementInlandLowService;
+
+  @Autowired
+  private SpStatementQuarterReportService spStatementQuarterReportService;
+
+  @Resource
+  private ShorePowerDeviceUsageService shorePowerDeviceUsageService;
+
+  //  @PostConstruct
+  @Scheduled(cron = "0 0 0 25 * ?")
+  public void generateSpInlandLowStatement() {
+    try {
+      // 生成各区上报数据   内河低压分析表
+      log.warn("开始生成 内河低压分析表-各区上报数据  ========");
+      generateReport1();
+      log.warn("内河低压分析表-各区上报数据  ======== 生成结束");
+      log.warn("生成下个月的内河非标岸电使用情况表  ======");
+      generateDeviceUsage();
+      log.warn("生成下个月的内河非标岸电使用情况表  ====== 生成结束");
+////       生成市级平台自动上传数据   内河低压分析表
+      log.warn("开始生成 内河低压分析表-市级平台自动上传数据  ========");
+      generateReport2();
+      log.warn("内河低压分析表-市级平台自动上传数据  ========生成结束");
+      //生成上海市长江经济带船舶使用岸电情况报表
+      log.warn(
+          "开始生成 上海市长江经济带船舶使用岸电情况报表   以及  上海港船舶使用岸电情况分年度对比分析表  以及  靠泊上港集团码头的集装箱船舶(2024年上海市岸电使用指标Part2) ========");
+      generateReport3();
+      log.warn("上海市长江经济带船舶使用岸电情况报表 ========生成结束");
+      log.warn("开始生成  岸电季度报表   ================");
+      generateSpQuarterReport();
+      log.warn("岸电季度报表 ========生成结束");
+    } catch (Exception e) {
+      log.error("中间表生成逻辑出错....原因:{}", e.getMessage());
+    }
+  }
+
+  private void generateDeviceUsage() throws ParseException {
+    try {
+      shorePowerDeviceUsageService.generateDeviceUsageNextMonth(getShorePowerNextMonthDate());
+    } catch (Exception e) {
+      log.error("生成下个月的内河非标岸电使用情况表出错....原因:{}", e.getMessage());
+    }
+
+  }
+
+  //@PostConstruct
+  public void generateSpQuarterReport() {
+    try {
+      spStatementQuarterReportService.generateReportSpQuarterReport(getShorePowerDate());
+    } catch (Exception e) {
+      log.error("季度表出错....原因:{}", e.getMessage());
+    }
+  }
+
+  private void generateReport3() {
+    try {
+      spStatementShipUsageMonthService.makeShangHaiShorePowerUsageRecord(getShorePowerDate());
+    } catch (Exception e) {
+      log.error("上海市长江经济带船舶使用岸电情况报表出错....原因:{}", e.getMessage());
+    }
+  }
+
+  private void generateReport1() {
+    try {
+      spStatementInlandLowService.makeReport1(getShorePowerDate(), getShorePowerLastMonthDate());
+    } catch (Exception e) {
+      log.error("内河低压分析表-市级平台自动上传数据出错....原因:{}", e.getMessage());
+    }
+  }
+
+  private void generateReport2() {
+    try {
+      spStatementInlandLowService.makeReport2(getShorePowerDate(), getShorePowerLastMonthDate(),
+          getShorePowerLast2MonthDate(), getShorePowerLast3MonthDate());
+    } catch (Exception e) {
+      log.error("内河低压分析表-各区上报出错....原因:{}", e.getMessage());
+    }
+  }
+
+  private static String getShorePowerDate() {
+    String date = null;
+    LocalDate now = LocalDate.now();
+    date = DateTimeFormatter.ofPattern("yyyy-MM").format(now);
+    return date;
+  }
+
+  private static String getShorePowerNextMonthDate() {
+    String date = null;
+    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(1));
+    return date;
+  }
+
+  private static String getShorePowerLastMonthDate() {
+    String date = null;
+    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(-1));
+    return date;
+  }
+
+
+  private static String getShorePowerLast2MonthDate() {
+    String date = null;
+    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(-2));
+    return date;
+  }
+
+  private static String getShorePowerLast3MonthDate() {
+    String date = null;
+    date = DateTimeFormatter.ofPattern("yyyy-MM").format(LocalDate.now().plusMonths(-3));
+    return date;
+  }
+}
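
The re-enabled scheduler fires at 00:00 on the 25th of each month (@Scheduled(cron = "0 0 0 25 * ?")) and hands every report generator a month key in yyyy-MM form. The five static helpers at the bottom differ only in the offset passed to plusMonths, so they could collapse into one parameterized method. A standalone sketch of that consolidation, not part of the commit (class and method names are illustrative):

import java.time.LocalDate;
import java.time.format.DateTimeFormatter;

public class ShorePowerMonthKeys {

    private static final DateTimeFormatter MONTH = DateTimeFormatter.ofPattern("yyyy-MM");

    // offset 0 = current month, -1 = last month, +1 = next month, ...
    static String monthKey(int offset) {
        return MONTH.format(LocalDate.now().plusMonths(offset));
    }

    public static void main(String[] args) {
        // The same four values generateReport2(...) receives in the scheduler:
        // the current month followed by the three preceding months.
        System.out.println(monthKey(0));
        System.out.println(monthKey(-1));
        System.out.println(monthKey(-2));
        System.out.println(monthKey(-3));
    }
}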

+ 0 - 1221
src/main/java/com/shcd/scheduling/ShorePowerScheduler.java

File diff suppressed because it is too large


+ 72 - 0
src/main/resources/profile/pro/application.properties

@@ -0,0 +1,72 @@
+server.port=9096
+# Database connection settings
+spring.datasource.url=jdbc:oracle:thin:@//172.17.190.5/orcl
+#spring.datasource.url=jdbc:oracle:thin:@//198.39.1.218/shhwdb
+#spring.datasource.url=jdbc:oracle:thin:@//10.114.4.56/shhwdb
+spring.datasource.username=ghjg_basics
+spring.datasource.password=GHJG@123
+# Spatial coordinate system parameter (production database)
+spaceCoordinatesSystem=300003
+# Spatial coordinate system parameter (test database)
+#spaceCoordinatesSystem=300015
+swagger2.enable=true
+
+# Maximum Tomcat HTTP POST size (-1 = unlimited)
+server.tomcat.max-http-post-size=-1
+
+
+spring.mvc.pathmatch.matching-strategy=ant_path_matcher
+spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
+spring.datasource.hikari.maximum-pool-size=60
+spring.datasource.hikari.minimum-idle=15
+spring.datasource.hikari.idle-timeout=30000
+# MyBatis settings
+mybatis.mapper-locations=classpath*:mapper/*.xml
+mybatis.type-aliases-package=com.shcd.entity
+mybatis.configuration.useGeneratedkeys=true
+mybatis.configuration.mapUnderscoreToCamelCase=true
+mybatis.configuration.call-setters-on-nulls=true
+#mybatis-plus.type-enums-package=com.shcd.ghjg.eum
+
+# Log SQL from the DAO layer at debug level
+logging.level.com.shcd.ghjg.dao=debug
+
+spring.jmx.enabled=false
+
+
+xxl.job.admin.addresses=http://127.0.0.1:8091/xxl-job-admin
+xxl.job.executor.appname=
+xxl.job.executor.ip=127.0.0.1
+xxl.job.executor.port=
+xxl.job.accessToken=
+xxl.job.executor.logpath=E:/xxl-job/applogs/xxl-job
+xxl.job.executor.logretentiondays=30
+
+# Redis settings
+spring.redis.database=1
+# Production Redis
+spring.redis.host=172.17.159.1
+# Checkpoint-system Redis
+#spring.redis.host=localhost
+spring.redis.port=6379
+spring.redis.password=b7dz82g#psNEvvuc
+# Maximum number of connections
+spring.redis.jedis.pool.max-active=200
+# Maximum blocking wait time (negative = no limit)
+spring.redis.jedis.pool.max-wait=-1
+# Maximum idle connections
+spring.redis.jedis.pool.max-idle=10
+# Minimum idle connections
+spring.redis.jedis.pool.min-idle=-1
+# Connection timeout (ms)
+spring.redis.timeout=10000
+
+dredgingTokenUrl=http://172.31.233.22:8093/dredging/token/getToken
+#dredgingClientId=34cd20d8489f453d9a9671f1099d53f4
+#dredgingClientSecret=43f111ec3451405c97b914450a91a18c
+dredgingProjectUrl=http://172.31.233.22:8093/dredging/projectOverview/GetProjectOverview
+dredgingElectricFenUrl=http://172.31.233.22:8093/dredging/elecfence/GetElectronicFence
+dredgingWorkOrderUrl=http://172.31.233.22:8093/dredging/workOrder/GetWorkOrderList
+dredgingWorkTraceUrl=http://172.31.233.22:8093/dredging/workOrderHistory/getWorkHistory
+dredgingWorkCameraUrl=http://172.31.233.22:8093/dredging/getProjectCameraList
+#geographyChangeUrl=http://172.17.231.65:8080/NHJTWebservice/common/CommonWebservice.asmx/ChangeDataCoordinateNoLog
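
Two things stand out in this production profile: the XXL-Job executor keys (xxl.job.executor.appname, xxl.job.executor.port, xxl.job.accessToken) are committed with empty values, and the dredging endpoints at the bottom are flat, top-level keys rather than a prefixed group, so they would typically be injected with @Value. A minimal sketch of such a holder, assuming plain Spring property injection (the class and field names below are illustrative; the real consumers in com.shcd are not shown in this diff):

package com.shcd.config;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

// Illustrative holder for the flat dredging* keys defined in
// profile/pro/application.properties; the actual consumer classes
// are not part of this commit.
@Component
public class DredgingEndpointProperties {

    @Value("${dredgingTokenUrl}")
    private String tokenUrl;

    @Value("${dredgingProjectUrl}")
    private String projectUrl;

    @Value("${dredgingWorkOrderUrl}")
    private String workOrderUrl;

    public String getTokenUrl() {
        return tokenUrl;
    }

    public String getProjectUrl() {
        return projectUrl;
    }

    public String getWorkOrderUrl() {
        return workOrderUrl;
    }
}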

+ 198 - 0
src/main/resources/profile/pro/logback-spring.xml

@@ -0,0 +1,198 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- 日志级别从低到高分为TRACE < DEBUG < INFO < WARN < ERROR < FATAL,如果设置为WARN,则低于WARN的信息都不会输出 -->
+<!-- scan:当此属性设置为true时,配置文档如果发生改变,将会被重新加载,默认值为true -->
+<!-- scanPeriod:设置监测配置文档是否有修改的时间间隔,如果没有给出时间单位,默认单位是毫秒。
+                 当scan为true时,此属性生效。默认的时间间隔为1分钟。 -->
+<!-- debug:当此属性设置为true时,将打印出logback内部日志信息,实时查看logback运行状态。默认值为false。 -->
+<configuration  scan="true" scanPeriod="10 seconds">
+    <contextName>logback</contextName>
+
+    <!-- name的值是变量的名称,value的值时变量定义的值。通过定义的值会被插入到logger上下文中。定义后,可以使“${}”来使用变量。 -->
+    <property name="log.path" value="./logFiles" />
+
+    <!--0. 日志格式和颜色渲染 -->
+    <!-- 彩色日志依赖的渲染类 -->
+    <conversionRule conversionWord="clr" converterClass="org.springframework.boot.logging.logback.ColorConverter" />
+    <conversionRule conversionWord="wex" converterClass="org.springframework.boot.logging.logback.WhitespaceThrowableProxyConverter" />
+    <conversionRule conversionWord="wEx" converterClass="org.springframework.boot.logging.logback.ExtendedWhitespaceThrowableProxyConverter" />
+    <!-- 彩色日志格式 -->
+    <property name="CONSOLE_LOG_PATTERN" value="${CONSOLE_LOG_PATTERN:-%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}}"/>
+
+    <!--1. 输出到控制台-->
+    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
+        <!--此日志appender是为开发使用,只配置最底级别,控制台输出的日志级别是大于或等于此级别的日志信息-->
+        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
+            <level>debug</level>
+
+        </filter>
+        <encoder>
+            <Pattern>${CONSOLE_LOG_PATTERN}</Pattern>
+            <!-- 设置字符集 -->
+            <charset>UTF-8</charset>
+        </encoder>
+    </appender>
+
+    <!--2. 输出到文档-->
+    <!-- 2.1 level为 DEBUG 日志,时间滚动输出  -->
+    <appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- 正在记录的日志文档的路径及文档名 -->
+        <file>${log.path}/web_debug.log</file>
+        <!--日志文档输出格式-->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- 设置字符集 -->
+        </encoder>
+        <!-- 日志记录器的滚动策略,按日期,按大小记录 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- 日志归档 -->
+            <fileNamePattern>${log.path}/web-debug-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>100MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!--日志文档保留天数-->
+            <maxHistory>5</maxHistory>
+        </rollingPolicy>
+        <!-- 此日志文档只记录debug级别的 -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>debug</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- 2.2 level为 INFO 日志,时间滚动输出  -->
+    <appender name="INFO_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- 正在记录的日志文档的路径及文档名 -->
+        <file>${log.path}/web_info.log</file>
+        <!--日志文档输出格式-->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset>
+        </encoder>
+        <!-- 日志记录器的滚动策略,按日期,按大小记录 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <!-- 每天日志归档路径以及格式 -->
+            <fileNamePattern>${log.path}/web-info-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>20MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!--日志文档保留天数-->
+            <maxHistory>10</maxHistory>
+        </rollingPolicy>
+        <!-- 此日志文档只记录info级别的 -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>info</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- 2.3 level为 WARN 日志,时间滚动输出  -->
+    <appender name="WARN_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- 正在记录的日志文档的路径及文档名 -->
+        <file>${log.path}/web_warn.log</file>
+        <!--日志文档输出格式-->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- 此处设置字符集 -->
+        </encoder>
+        <!-- 日志记录器的滚动策略,按日期,按大小记录 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${log.path}/web-warn-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>20MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!--日志文档保留天数-->
+            <maxHistory>5</maxHistory>
+        </rollingPolicy>
+        <!-- 此日志文档只记录warn级别的 -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>warn</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <!-- 2.4 level为 ERROR 日志,时间滚动输出  -->
+    <appender name="ERROR_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
+        <!-- 正在记录的日志文档的路径及文档名 -->
+        <file>${log.path}/web_error.log</file>
+        <!--日志文档输出格式-->
+        <encoder>
+            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{50} - %msg%n</pattern>
+            <charset>UTF-8</charset> <!-- 此处设置字符集 -->
+        </encoder>
+        <!-- 日志记录器的滚动策略,按日期,按大小记录 -->
+        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
+            <fileNamePattern>${log.path}/web-error-%d{yyyy-MM-dd}.%i.log</fileNamePattern>
+            <timeBasedFileNamingAndTriggeringPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedFNATP">
+                <maxFileSize>20MB</maxFileSize>
+            </timeBasedFileNamingAndTriggeringPolicy>
+            <!--日志文档保留天数-->
+            <maxHistory>5</maxHistory>
+        </rollingPolicy>
+        <!-- 此日志文档只记录ERROR级别的 -->
+        <filter class="ch.qos.logback.classic.filter.LevelFilter">
+            <level>ERROR</level>
+            <onMatch>ACCEPT</onMatch>
+            <onMismatch>DENY</onMismatch>
+        </filter>
+    </appender>
+
+    <logger name="com.InterfaceServices.mapper" level="DEBUG" />
+    <logger name="io.lettuce.core.protocol" level="ERROR">
+        <appender-ref ref="ERROR_FILE" />
+    </logger>
+    <!--
+        <logger>用来设置某一个包或者具体的某一个类的日志打印级别、
+        以及指定<appender>。<logger>仅有一个name属性,
+        一个可选的level和一个可选的addtivity属性。
+        name:用来指定受此logger约束的某一个包或者具体的某一个类。
+        level:用来设置打印级别,大小写无关:TRACE, DEBUG, INFO, WARN, ERROR, ALL 和 OFF,
+              还有一个特俗值INHERITED或者同义词NULL,代表强制执行上级的级别。
+              如果未设置此属性,那么当前logger将会继承上级的级别。
+        addtivity:是否向上级logger传递打印信息。默认是true。
+        <logger name="org.springframework.web" level="info"/>
+        <logger name="org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor" level="INFO"/>
+    -->
+
+    <!--
+        使用mybatis的时候,sql语句是debug下才会打印,而这里我们只配置了info,所以想要查看sql语句的话,有以下两种操作:
+        第一种把<root level="info">改成<root level="DEBUG">这样就会打印sql,不过这样日志那边会出现很多其他消息
+        第二种就是单独给dao下目录配置debug模式,代码如下,这样配置sql语句会打印,其他还是正常info级别:
+        【logging.level.org.mybatis=debug logging.level.dao=debug】
+     -->
+
+    <!--
+        root节点是必选节点,用来指定最基础的日志输出级别,只有一个level属性
+        level:用来设置打印级别,大小写无关:TRACE, DEBUG, INFO, WARN, ERROR, ALL 和 OFF,
+        不能设置为INHERITED或者同义词NULL。默认是DEBUG
+        可以包含零个或多个元素,标识这个appender将会添加到这个logger。
+    -->
+
+    <!-- 4. 最终的策略 -->
+    <!-- 4.1 开发环境:打印控制台-->
+    <!-- <springProfile name="dev">
+        <logger name="com.sdcm.pmp" level="debug"/>
+    </springProfile> -->
+
+    <root level="info">
+        <appender-ref ref="CONSOLE" />
+        <appender-ref ref="DEBUG_FILE" />
+        <appender-ref ref="INFO_FILE" />
+        <appender-ref ref="WARN_FILE" />
+        <appender-ref ref="ERROR_FILE" />
+    </root>
+
+    <!-- 4.2 生产环境:输出到文档
+    <springProfile name="pro">
+        <root level="info">
+            <appender-ref ref="CONSOLE" />
+            <appender-ref ref="DEBUG_FILE" />
+            <appender-ref ref="INFO_FILE" />
+            <appender-ref ref="ERROR_FILE" />
+            <appender-ref ref="WARN_FILE" />
+        </root>
+    </springProfile> -->
+
+</configuration>
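
Routing summary for the configuration above: the console appender accepts debug and above, each LevelFilter file appender accepts exactly one level (web_debug.log, web_info.log, web_warn.log, web_error.log under ./logFiles), and the root logger is set to info, so debug output only appears for loggers explicitly raised to DEBUG such as com.InterfaceServices.mapper. A quick sanity check, assuming SLF4J is on the classpath as in the rest of the project (the class below is illustrative):

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogbackRoutingCheck {

    private static final Logger log = LoggerFactory.getLogger(LogbackRoutingCheck.class);

    public static void main(String[] args) {
        // With <root level="info">, this debug call is dropped before it
        // reaches any appender (this class has no explicit DEBUG logger).
        log.debug("not written anywhere");
        // Each of these lands in its own web_*.log file plus the console.
        log.info("goes to web_info.log");
        log.warn("goes to web_warn.log");
        log.error("goes to web_error.log");
    }
}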