宿州改动

ds-bozhou
luyya 2025-06-05 17:39:46 +08:00
parent 0ff37e767e
commit 0d1218f655
306 changed files with 583 additions and 672 deletions

View File

@ -376,7 +376,7 @@
<modules> <modules>
<module>stwzhj-auth</module> <module>stwzhj-auth</module>
<module>stwzhj-gateway</module> <module>wzhj-gateway</module>
<module>stwzhj-visual</module> <module>stwzhj-visual</module>
<module>stwzhj-modules</module> <module>stwzhj-modules</module>
<module>stwzhj-api</module> <module>stwzhj-api</module>

View File

@ -2,6 +2,7 @@
<configuration scan="true" scanPeriod="60 seconds" debug="false"> <configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 --> <!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" /> <property name="log.path" value="logs/${project.artifactId}" />
<property name="log.file" value="${project.artifactId}" />
<!-- 日志输出格式 --> <!-- 日志输出格式 -->
<property name="console.log.pattern" <property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/> value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
@ -37,13 +38,13 @@
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天 如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。 的日志改名为今天的日期。即,<File> 的日志都是当天的。
--> -->
<File>${LOG_PATH}${LOG_FILE}</File> <File>${log.path}${log.file}</File>
<encoder> <encoder>
<charset>UTF-8</charset> <charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern> <pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder> </encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern> <fileNamePattern>${log.path}info/${log.file}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize> <maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留20天 --> <maxHistory>20</maxHistory> <!-- 保留20天 -->
</rollingPolicy> </rollingPolicy>
@ -59,11 +60,11 @@
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天 如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。 的日志改名为今天的日期。即,<File> 的日志都是当天的。
--> -->
<File>${LOG_PATH}error.${LOG_FILE}</File> <File>${log.path}error.${log.file}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy--> <!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy "> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间--> <!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern> <FileNamePattern>${log.path}error/${log.file}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志--> <!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize> <maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory> <maxHistory>180</maxHistory>

View File

@ -9,6 +9,7 @@ import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.socket.WebSocketHandler; import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.config.annotation.EnableWebSocket; import org.springframework.web.socket.config.annotation.EnableWebSocket;
import org.springframework.web.socket.config.annotation.WebSocketConfigurer; import org.springframework.web.socket.config.annotation.WebSocketConfigurer;
@ -19,7 +20,7 @@ import org.springframework.web.socket.server.HandshakeInterceptor;
* *
* @author zendwang * @author zendwang
*/ */
@AutoConfiguration @Configuration
@ConditionalOnProperty(value = "websocket.enabled", havingValue = "true") @ConditionalOnProperty(value = "websocket.enabled", havingValue = "true")
@EnableConfigurationProperties(WebSocketProperties.class) @EnableConfigurationProperties(WebSocketProperties.class)
@EnableWebSocket @EnableWebSocket

View File

@ -1,86 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- &lt;!&ndash; 控制台输出 &ndash;&gt;
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>-->
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error-->
<level>ERROR</level>
<!--匹配到就禁止-->
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}${LOG_FILE}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留180天 -->
</rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}error.${LOG_FILE}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy ">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
<appender-ref ref="ERRORFILE"/>
</root>
</configuration>

View File

@ -9,18 +9,18 @@
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<modules> <modules>
<module>stwzhj-system</module> <module>wzhj-system</module>
<module>stwzhj-gen</module> <module>stwzhj-gen</module>
<module>stwzhj-job</module> <module>stwzhj-job</module>
<module>stwzhj-resource</module> <module>stwzhj-resource</module>
<module>stwzhj-workflow</module> <module>stwzhj-workflow</module>
<module>stwzhj-data2es</module> <module>wzhj-data2es</module>
<module>stwzhj-baseToSt</module> <module>stwzhj-baseToSt</module>
<module>stwzhj-consumer</module> <module>stwzhj-consumer</module>
<module>stwzhj-location</module> <module>wzhj-location</module>
<module>stwzhj-dataToGas</module> <module>stwzhj-dataToGas</module>
<module>stwzhj-kafkaToWebsocket</module> <module>stwzhj-kafkaToWebsocket</module>
<module>wzhj-webscoket</module> <module>wzhj-websocket</module>
<module>wzhj-extract</module> <module>wzhj-extract</module>
<module>wzhj-udp</module> <module>wzhj-udp</module>
</modules> </modules>

View File

@ -1,86 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false"> <configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 --> <!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" /> <property name="log.path" value="logs" />
<!-- 日志输出格式 --> <property name="log.file" value="consumer" />
<property name="console.log.pattern" <property name="MAX_FILE_SIZE" value="10MB" />
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/> <property name="MAX_HISTORY" value="30" />
<!-- 日志输出格式 -->
<!-- &lt;!&ndash; 控制台输出 &ndash;&gt; <!-- INFO日志Appender -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender"> <appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<encoder> <file>${log.path}/info.${log.file}.log</file>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>-->
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter"> <filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error--> <level>INFO</level>
<level>ERROR</level> <onMatch>ACCEPT</onMatch>
<!--匹配到就禁止--> <onMismatch>DENY</onMismatch>
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter> </filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}${LOG_FILE}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern> <fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize> <maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留180天 --> <maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy> </rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}error.${LOG_FILE}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy ">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder> <encoder>
<charset>UTF-8</charset> <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder> </encoder>
</appender> </appender>
<!-- ERROR日志Appender -->
<appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- 根Logger配置禁用控制台输出 -->
<root level="INFO"> <root level="INFO">
<appender-ref ref="CONSOLE"/> <appender-ref ref="FILE_INFO" />
<appender-ref ref="FILE"/> <appender-ref ref="FILE_ERROR" />
<appender-ref ref="ERRORFILE"/>
</root> </root>
</configuration> </configuration>

View File

@ -1,86 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- &lt;!&ndash; 控制台输出 &ndash;&gt;
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>-->
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error-->
<level>ERROR</level>
<!--匹配到就禁止-->
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}${LOG_FILE}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留180天 -->
</rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}error.${LOG_FILE}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy ">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
<appender-ref ref="ERRORFILE"/>
</root>
</configuration>

View File

@ -1,28 +1,106 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false"> <configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 --> <!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}"/> <property name="log.path" value="logs/${project.artifactId}" />
<property name="log.file" value="${project.artifactId}" />
<!-- 日志输出格式 --> <!-- 日志输出格式 -->
<property name="console.log.pattern" <property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/> value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<!-- 控制台输出 --> <!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender"> <appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/console.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大 1天 -->
<maxHistory>1</maxHistory>
</rollingPolicy>
<encoder> <encoder>
<pattern>${console.log.pattern}</pattern> <pattern>${log.pattern}</pattern>
<charset>utf-8</charset> <charset>utf-8</charset>
</encoder> </encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
</filter>
</appender>
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error-->
<level>ERROR</level>
<!--匹配到就禁止-->
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${log.path}${log.file}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}info/${log.file}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留20天 -->
</rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${log.path}error.${log.file}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${log.path}error/${log.file}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
</appender> </appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" /> <include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 --> <!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" /> <include resource="logback-skylog.xml" />
<!--系统操作日志--> <root level="INFO">
<root level="info">
<appender-ref ref="console"/> <appender-ref ref="console"/>
<appender-ref ref="FILE"/>
<appender-ref ref="ERRORFILE"/>
<appender-ref ref="file_console"/>
</root> </root>
</configuration> </configuration>

View File

@ -1,86 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- &lt;!&ndash; 控制台输出 &ndash;&gt;
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>-->
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error-->
<level>ERROR</level>
<!--匹配到就禁止-->
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}${LOG_FILE}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留180天 -->
</rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}error.${LOG_FILE}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy ">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
<appender-ref ref="ERRORFILE"/>
</root>
</configuration>

View File

@ -9,10 +9,10 @@
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-data2es</artifactId> <artifactId>wzhj-data2es</artifactId>
<description> <description>
stwzhj-data2es位置汇聚数据处理 wzhj-data2es位置汇聚数据处理
</description> </description>
<dependencies> <dependencies>

View File

@ -68,7 +68,7 @@ public class ElasticsearchConfig {
RestClientBuilder builder = RestClient.builder(httpHost); RestClientBuilder builder = RestClient.builder(httpHost);
// 设置用户名、密码 // 设置用户名、密码
CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
// credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password)); credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password));
// 连接延时配置 // 连接延时配置
builder.setRequestConfigCallback(requestConfigBuilder -> { builder.setRequestConfigCallback(requestConfigBuilder -> {
requestConfigBuilder.setConnectTimeout(connectTimeOut); requestConfigBuilder.setConnectTimeout(connectTimeOut);

View File

@ -27,7 +27,7 @@ public class KafkaConfig {
// private String kafkaServers = "140.168.2.31:21007,140.168.2.32:21007,140.168.2.33:21007"; // private String kafkaServers = "140.168.2.31:21007,140.168.2.32:21007,140.168.2.33:21007";
// private String kafkaServers = "53.208.61.105:6667,53.208.61.106:6667,53.208.61.107:6667";//六安GA网 // private String kafkaServers = "53.208.61.105:6667,53.208.61.106:6667,53.208.61.107:6667";//六安GA网
// private String kafkaServers = "34.72.62.93:9092";//六安视频网 // private String kafkaServers = "34.72.62.93:9092";//六安视频网
private String kafkaServers = "127.0.0.1:9092";//本地 private String kafkaServers = "53.176.146.99:9092,53.176.146.100:9092,53.176.146.101:9092";//集群环境
// private String kafkaServers = "53.238.79.4:9092,53.238.79.5:9092,53.238.79.6:9092";//省厅 马伟提供 // private String kafkaServers = "53.238.79.4:9092,53.238.79.5:9092,53.238.79.6:9092";//省厅 马伟提供
private String groupId = "ruansiProducer"; private String groupId = "ruansiProducer";

View File

@ -110,11 +110,12 @@ public class GpsServiceImpl implements IGpsService {
List<String> deleteKeys = new ArrayList<>(); List<String> deleteKeys = new ArrayList<>();
BulkRequest bulkRequest = new BulkRequest(); BulkRequest bulkRequest = new BulkRequest();
for (EsGpsInfoVO2 info : list) { for (EsGpsInfoVO2 info : list) {
String deviceType = info.getDeviceType();
String deviceCode = info.getDeviceCode();
//设置地市zzjgdm //设置地市zzjgdm
info = getInfo(info); info = getInfo(info);
if (Objects.isNull(info)) { if (Objects.isNull(info)) {
logger.error("redis中的Object=nulldeviceType={},deviceCode={}",info.getDeviceType(),info.getDeviceCode()); logger.error("redis或者mysql中的Object=nulldeviceType={},deviceCode={}",deviceType,deviceCode);
continue; continue;
} }
//redis //redis
@ -144,6 +145,9 @@ public class GpsServiceImpl implements IGpsService {
public R saveData(EsGpsInfoVO2 info) throws ExecutionException, InterruptedException{ public R saveData(EsGpsInfoVO2 info) throws ExecutionException, InterruptedException{
//设置地市zzjgdm //设置地市zzjgdm
info = getInfo(info); info = getInfo(info);
if (null == info){
return R.fail(-1,"保存失败,暂无基础信息");
}
IndexRequest indexRequest = buildEsIndexRequest(info); IndexRequest indexRequest = buildEsIndexRequest(info);
//存es //存es
CompletableFuture<EsGpsInfo> esFuture = doRequest(info); CompletableFuture<EsGpsInfo> esFuture = doRequest(info);

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs" />
<property name="log.file" value="data2es" />
<property name="MAX_FILE_SIZE" value="10MB" />
<property name="MAX_HISTORY" value="30" />
<!-- 日志输出格式 -->
<!-- INFO日志Appender -->
<appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- ERROR日志Appender -->
<appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- 根Logger配置禁用控制台输出 -->
<root level="INFO">
<appender-ref ref="FILE_INFO" />
<appender-ref ref="FILE_ERROR" />
</root>
</configuration>

View File

@ -1,86 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false"> <configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 --> <!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" /> <property name="log.path" value="logs" />
<property name="log.file" value="extract" />
<property name="MAX_FILE_SIZE" value="10MB" />
<property name="MAX_HISTORY" value="30" />
<!-- 日志输出格式 --> <!-- 日志输出格式 -->
<property name="console.log.pattern" <!-- INFO日志Appender -->
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/> <appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.${log.file}.log</file>
<!-- &lt;!&ndash; 控制台输出 &ndash;&gt;
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>-->
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter"> <filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error--> <level>INFO</level>
<level>ERROR</level> <onMatch>ACCEPT</onMatch>
<!--匹配到就禁止--> <onMismatch>DENY</onMismatch>
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter> </filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}${LOG_FILE}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy"> <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern> <fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize> <maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留180天 --> <maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy> </rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}error.${LOG_FILE}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy ">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder> <encoder>
<charset>UTF-8</charset> <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder> </encoder>
</appender> </appender>
<!-- ERROR日志Appender -->
<appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- 根Logger配置禁用控制台输出 -->
<root level="INFO"> <root level="INFO">
<appender-ref ref="CONSOLE"/> <appender-ref ref="FILE_INFO" />
<appender-ref ref="FILE"/> <appender-ref ref="FILE_ERROR" />
<appender-ref ref="ERRORFILE"/>
</root> </root>
</configuration> </configuration>

View File

@ -9,10 +9,10 @@
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-location</artifactId> <artifactId>wzhj-location</artifactId>
<description> <description>
stwzhj-location位置汇聚坐标点位历史轨迹 wzhj-location位置汇聚坐标点位历史轨迹
</description> </description>
<dependencies> <dependencies>

View File

@ -27,6 +27,7 @@ import redis.clients.jedis.resps.GeoRadiusResponse;
import javax.annotation.Resource; import javax.annotation.Resource;
import java.util.*; import java.util.*;
import java.util.stream.Collectors;
@Configuration @Configuration
@RestController @RestController
@ -49,7 +50,6 @@ public class LocationController {
* */ * */
@PostMapping("/getAllLocation") @PostMapping("/getAllLocation")
public R getAllLocaltion(@RequestBody Map<String,Object> params){ public R getAllLocaltion(@RequestBody Map<String,Object> params){
String now = DateUtil.format(new Date(),"YYYY-MM-dd");
String keys = "online_users:"; String keys = "online_users:";
String key = null; // 在不同条件下赋值 最后根据此key取值 String key = null; // 在不同条件下赋值 最后根据此key取值
if(CollectionUtils.isEmpty(params)){ if(CollectionUtils.isEmpty(params)){
@ -62,9 +62,9 @@ public class LocationController {
} }
if (null != params.get("type")){ if (null != params.get("type")){
String type = params.get("type").toString(); String type = params.get("type").toString();
key = keys + "*:[" +type+"]:*"; key = keys + "[" +type+"]:*";
if (null != params.get("deptId")){ if (null != params.get("deptId")){
key = keys + params.get("deptId").toString() + "*:["+type+"]:*"; // key值为 online_users:2022-04-20:3401*:[01,02]:* key = keys + params.get("deptId").toString() + ":["+type+"]:*"; // key值为 online_users:2022-04-20:3401*:[01,02]:*
} }
}else { }else {
@ -98,33 +98,34 @@ public class LocationController {
times = params.get("times").toString(); times = params.get("times").toString();
} }
String zzjgdms = ""; String zzjgdms = "";
List<Object> dlist = new ArrayList<>(); List<JSONObject> dlist = new ArrayList<>();
if (null != params.get("type")){ //类型不为空时 查询Redis数据 if (null != params.get("type")){ //类型不为空时 查询Redis数据
String type = params.get("type").toString(); String type = params.get("type").toString();
// String[] types = Convert.toStrArray(type); // String[] types = Convert.toStrArray(type);
String key = keys + "["+type+"]:*"; // key值为 online_users:[01,02]:*
zzjgdms = params.get("zzjgdm").toString(); zzjgdms = params.get("zzjgdm").toString();
String[] zzjgdm = Convert.toStrArray(zzjgdms); String[] zzjgdm = Convert.toStrArray(zzjgdms);
for (String s : zzjgdm) { Set<String> targetSet = new HashSet<>(List.of(zzjgdm)); // 转为HashSet提高查询效率
s = deptIdSub(s); List<JSONObject> list = RedisUtils.searchAndGetKeysValues(key);
String key = keys + s + ":["+type+"]:*"; // key值为 online_users:3401xxxx:[01,02]:* list.removeAll(Collections.singleton(null));
List<JSONObject> list = RedisUtils.searchAndGetKeysValues(key); List<JSONObject> matchedObjects = list.stream()
list.removeAll(Collections.singleton(null)); .filter(obj -> targetSet.contains(obj.getStr("zzjgdm")))
dlist.addAll(list); .collect(Collectors.toList()); //根据前端勾选机构和全部的redis记录匹配是否包含 这样减少多次查询和查库
} dlist.addAll(matchedObjects);
}else { }else {
String key = keys + "*"; // key值为 online_users:*
zzjgdms = params.get("zzjgdm").toString(); zzjgdms = params.get("zzjgdm").toString();
String[] zzjgdm = Convert.toStrArray(zzjgdms); String[] zzjgdm = Convert.toStrArray(zzjgdms);
for (String s : zzjgdm) { Set<String> targetSet = new HashSet<>(List.of(zzjgdm)); // 转为HashSet提高查询效率
s = deptIdSub(s); List<JSONObject> list = RedisUtils.searchAndGetKeysValues(key);
String key = keys + s + ":*"; list.removeAll(Collections.singleton(null));
List<JSONObject> list = RedisUtils.searchAndGetKeysValues(key); List<JSONObject> matchedObjects = list.stream()
list.removeAll(Collections.singleton(null)); .filter(obj -> targetSet.contains(obj.getStr("zzjgdm")))
dlist.addAll(list); .collect(Collectors.toList()); //根据前端勾选机构和全部的redis记录匹配是否包含 这样减少多次查询和查库
} dlist.addAll(matchedObjects);
} }
JSONArray.toJSONString(dlist); JSONArray.toJSONString(dlist);
if ("1".equals(times)){ if ("1".equals(times)){ List<Object> nearList = new ArrayList<>();
List<Object> nearList = new ArrayList<>();
for (Object o : dlist) { for (Object o : dlist) {
JSONObject object = JSONUtil.parseObj(o); JSONObject object = JSONUtil.parseObj(o);
Integer online = object.getInt("online"); Integer online = object.getInt("online");

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Logback configuration for the "location" module.
  Writes INFO-and-above events to a rolling info log and ERROR events to a
  separate rolling error log; console output is intentionally disabled.
-->
<configuration scan="true" scanPeriod="60 seconds" debug="false">
    <!-- Log storage path and base file name -->
    <property name="log.path" value="logs" />
    <property name="log.file" value="location" />
    <!-- Per-file size cap and number of days of archives to keep -->
    <property name="MAX_FILE_SIZE" value="10MB" />
    <property name="MAX_HISTORY" value="30" />
    <!-- INFO appender: current log in info.<name>.log, daily + size-indexed gzip archives. -->
    <appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/info.${log.file}.log</file>
        <!-- FIX(review): the original LevelFilter (level=INFO, onMatch=ACCEPT,
             onMismatch=DENY) accepted ONLY events at exactly INFO level. Since the
             error appender below passes only ERROR and above, WARN events were
             silently dropped by the whole configuration. A ThresholdFilter at INFO
             keeps INFO, WARN and ERROR in this file (ERROR also goes to FILE_ERROR). -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>INFO</level>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <!-- One archive per day; %i increments when a file exceeds maxFileSize within a day. -->
            <fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
            <maxHistory>${MAX_HISTORY}</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>
    <!-- ERROR appender: ERROR (and above) only, mirrored layout under error/. -->
    <appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <file>${log.path}/error.${log.file}.log</file>
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>ERROR</level>
        </filter>
        <rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
            <fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
            <maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
            <maxHistory>${MAX_HISTORY}</maxHistory>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
        </encoder>
    </appender>
    <!-- Root logger: file appenders only — no console output by design. -->
    <root level="INFO">
        <appender-ref ref="FILE_INFO" />
        <appender-ref ref="FILE_ERROR" />
    </root>
</configuration>

View File

@ -9,10 +9,10 @@
</parent> </parent>
<modelVersion>4.0.0</modelVersion> <modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-system</artifactId> <artifactId>wzhj-system</artifactId>
<description> <description>
stwzhj-system系统模块 wzhj-system系统模块
</description> </description>
<dependencies> <dependencies>

View File

@ -1,7 +1,8 @@
package org.dromara.system.controller; package org.dromara.system.controller.system;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Objects;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import jakarta.servlet.http.HttpServletResponse; import jakarta.servlet.http.HttpServletResponse;
@ -11,6 +12,7 @@ import org.dromara.common.excel.core.ExcelResult;
import org.dromara.system.domain.vo.SysUserImportVo; import org.dromara.system.domain.vo.SysUserImportVo;
import org.dromara.system.domain.vo.TDeviceExportVo; import org.dromara.system.domain.vo.TDeviceExportVo;
import org.dromara.system.domain.vo.TDeviceImportVo; import org.dromara.system.domain.vo.TDeviceImportVo;
import org.dromara.system.exception.MyBusinessException;
import org.dromara.system.listener.SysUserImportListener; import org.dromara.system.listener.SysUserImportListener;
import org.dromara.system.listener.TDeviceImportListener; import org.dromara.system.listener.TDeviceImportListener;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
@ -55,6 +57,17 @@ public class TDeviceController extends BaseController {
return tDeviceService.queryPageList(bo, pageQuery); return tDeviceService.queryPageList(bo, pageQuery);
} }
@PostMapping("/getDeviceList")
public TableDataInfo<TDeviceVo> getDeviceList(@RequestBody TDeviceBo bo){
PageQuery pageQuery = new PageQuery();
if (Objects.isNull(bo)) {
throw new MyBusinessException("请求参数不能为空");
}
pageQuery.setPageNum(bo.getPageNum());
pageQuery.setPageSize(bo.getPageSize());
return tDeviceService.queryPageList(bo, pageQuery);
}
/** /**
* device * device
*/ */

Some files were not shown because too many files have changed in this diff Show More