Compare commits

..

29 Commits

Author SHA1 Message Date
luyya 3be8f33ba4 宣城kafka添加认证 2026-03-24 18:24:01 +08:00
luyya 359b92a7f1 宣城位置汇聚处理带字母机构 2026-02-02 09:58:13 +08:00
luyya 81e9cdfdc1 宣城位置汇聚本地调试修改 2025-10-30 11:30:34 +08:00
luyya 7e6d204b07 宣城位置汇聚周边查询 2025-10-25 15:23:54 +08:00
luyya 13d69aed79 宣城用户添加身份证显示和新增功能 2025-09-16 17:10:39 +08:00
luyya 634b80fb8f 宣城修改县分局登录在线数不对和图上数据全部展示问题 2025-09-11 17:47:23 +08:00
luyya 57f6441457 宣城位置汇聚记录仪同步海康仅新增设备去对照表更新单位 2025-09-05 16:57:40 +08:00
luyya ac992639ed 宣城导入修改设备设置valid为1 2025-08-05 18:43:29 +08:00
luyya 588bb41537 宣城位置汇聚redis存入没有devicename字段处理 2025-07-31 17:11:05 +08:00
luyya fe00bd7311 宣城导入错误问题和redis切换主从监听失效问题处理 2025-07-17 17:07:45 +08:00
luyya 3fd5bc2f17 宣城培训会BUG修改 2025-07-16 20:02:55 +08:00
luyya 8c53620702 宣城位置汇聚 2025-07-07 10:39:57 +08:00
luyya 13f78c74b0 宣城汇聚redis过期监听修改以及新添device_name 2025-07-04 14:55:45 +08:00
luyya 13f517251b 宣城汇聚0703改动 2025-07-04 09:27:32 +08:00
luyya 9b87b40361 宣城汇聚0703改动 2025-07-03 16:37:03 +08:00
luyya 69900614a5 宣城修改 2025-06-27 16:43:42 +08:00
luyya 7553cd9d56 cosumer和websocket修改 2025-06-27 11:05:53 +08:00
luyya 86bf08da8c 把在宿州分支改的宣城变动同步到宣城 2025-06-27 10:24:12 +08:00
luyya 2c65144b04 宣城同步宿州历史轨迹查询不到数据问题 2025-06-12 09:58:51 +08:00
luyya 2f1536afd9 宣城同步宿州的一些改动 2025-06-11 10:07:38 +08:00
luyya b9e405d599 宣城修改module名 2025-06-06 09:43:25 +08:00
luyya 5b9e36dc3f 宣城修改module名 2025-06-05 18:11:09 +08:00
luyya caf03c1efb 宣城修改module名 2025-06-05 17:52:16 +08:00
luyya d04bc848ea 添加监听服务异常发送短信功能(待短信服务提供) 2025-05-12 09:36:53 +08:00
luyya fbdae27daf 添加监听服务异常发送短信功能(待短信服务提供) 2025-05-10 14:34:03 +08:00
luyya f9243af818 宣城新版位置汇聚修改 2025-04-28 10:36:44 +08:00
luyya 65a6e03c1d 宣城新版位置汇聚修改 2025-04-27 09:46:59 +08:00
luyya 86666b8a04 宣城新版位置汇聚 2025-03-25 11:57:14 +08:00
luyya 692c2a31bd 宣城新版位置汇聚 2025-03-03 11:00:15 +08:00
413 changed files with 9698 additions and 2657 deletions

View File

@ -89,12 +89,12 @@
<id>prod</id>
<properties>
<profiles.active>prod</profiles.active>
<nacos.server>127.0.0.1:8848</nacos.server>
<nacos.server>53.238.79.33:8848</nacos.server>
<nacos.discovery.group>DEFAULT_GROUP</nacos.discovery.group>
<nacos.config.group>DEFAULT_GROUP</nacos.config.group>
<nacos.username>nacos</nacos.username>
<nacos.password>nacos</nacos.password>
<logstash.address>127.0.0.1:4560</logstash.address>
<nacos.password>Ycgis!2509</nacos.password>
<logstash.address>53.238.79.33:4560</logstash.address>
</properties>
</profile>
</profiles>
@ -376,7 +376,7 @@
<modules>
<module>stwzhj-auth</module>
<module>stwzhj-gateway</module>
<module>wzhj-gateway</module>
<module>stwzhj-visual</module>
<module>stwzhj-modules</module>
<module>stwzhj-api</module>

View File

@ -14,6 +14,7 @@
<module>stwzhj-api-resource</module>
<module>stwzhj-api-workflow</module>
<module>stwzhj-api-data2es</module>
<module>stwzhj-api-location</module>
</modules>
<artifactId>stwzhj-api</artifactId>

View File

@ -47,6 +47,12 @@
<version>${revision}</version>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api-location</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@ -4,8 +4,15 @@ import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import java.util.List;
import java.util.concurrent.ExecutionException;
/**
 * Remote (RPC) service for writing GPS location records into Elasticsearch.
 * NOTE(review): R is the project's generic result wrapper (org.dromara.common.core.domain.R);
 * payload semantics of each R are not visible here — confirm against the implementation.
 */
public interface RemoteDataToEsService {
/**
 * Persists a batch of GPS records.
 *
 * @param gpsInfoList records to save; batch-failure semantics (all-or-nothing vs partial) not visible here — confirm
 * @return result wrapper
 */
R saveDataBatch(List<RemoteGpsInfo> gpsInfoList);
/**
 * Persists a single GPS record.
 *
 * @param gpsInfo record to save
 * @return result wrapper
 * @throws Exception propagated from the underlying ES write (declared broadly by the original API)
 */
R saveData(RemoteGpsInfo gpsInfo) throws Exception;
/**
 * Updates the online status for a batch of devices/users.
 *
 * @param gpsInfoList records whose online flag should be applied
 * @return result wrapper
 */
R updateOnlineStatusBatch(List<RemoteGpsInfo> gpsInfoList);
/**
 * Updates the online status for a single device/user.
 *
 * @param gpsInfo record whose online flag should be applied
 * @return result wrapper
 */
R updateOnlineStatus(RemoteGpsInfo gpsInfo);
}

View File

@ -37,6 +37,7 @@ public class RemoteGpsInfo implements Serializable {
private String policeNo;
private String policeName;
private String phoneNum;
private String deviceName;
private String carNum;
private Integer online;

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api</artifactId>
<version>${revision}</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-api-location</artifactId>
<description>
stwzhj-api-location
</description>
<dependencies>
<!-- stwzhj Common Core-->
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-core</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-excel</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,8 @@
package org.dromara.location.api;
import java.util.List;
/**
 * Remote (RPC) service exposed by the location module for checking
 * Elasticsearch data status.
 */
public interface RemoteElasticSearchService {
/**
 * Listens for / reports the data status in Elasticsearch.
 * NOTE(review): "linsten" is a typo for "listen"; the name is kept because
 * renaming a published remote interface method would break existing consumers.
 *
 * @return list of status strings — exact content/format not visible here; confirm with the implementation
 */
List<String> linstenDataStatus();
}

View File

@ -1,5 +1,10 @@
package org.dromara.system.api;
import org.dromara.system.api.domain.bo.RemoteDeptBo;
import org.dromara.system.api.domain.vo.RemoteDeptVo;
import java.util.List;
/**
*
*
@ -15,4 +20,6 @@ public interface RemoteDeptService {
*/
String selectDeptNameByIds(String deptIds);
List<RemoteDeptVo> selectDept(RemoteDeptBo bo);
}

View File

@ -0,0 +1,70 @@
package org.dromara.system.api.domain.bo;
import io.github.linpeilie.annotations.AutoMapper;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
 * Remote business object carrying department (sys_dept) query/update data
 * across service boundaries.
 *
 * NOTE(review): the jakarta.validation and AutoMapper imports are unused in
 * this view — confirm whether constraint/mapping annotations were intended.
 * NOTE(review): implements Serializable without a serialVersionUID; consider
 * adding one for cross-version RPC compatibility.
 *
 * @author Michelle.Chung
 */
@Data
@NoArgsConstructor
public class RemoteDeptBo implements Serializable {
/**
 * Department id (sys_dept primary key).
 */
private String deptId;
/**
 * Parent department id.
 */
private String parentId;
/**
 * Department name.
 */
private String deptName;
/**
 * Department category code.
 */
private String deptCategory;
/**
 * Display/sort order.
 */
private Integer orderNum;
/**
 * Leader user id.
 */
private Long leader;
/**
 * Contact phone number.
 */
private String phone;
/**
 * Contact email address.
 */
private String email;
/**
 * Department status — presumably 0 = normal, 1 = disabled; confirm against the system dictionary.
 */
private String status;
// Full department name — presumably the ancestor path joined with deptName; TODO confirm against caller.
private String fullName;
}

View File

@ -83,6 +83,12 @@ public class RemoteDeviceBo implements Serializable {
*/
private String remark1;
private String createTime;
private String updateTime;
private String[] zzjgdms;
/**
* 2
*/

View File

@ -0,0 +1,96 @@
package org.dromara.system.api.domain.vo;
import com.alibaba.excel.annotation.ExcelIgnoreUnannotated;
import com.alibaba.excel.annotation.ExcelProperty;
import io.github.linpeilie.annotations.AutoMapper;
import lombok.Data;
import org.dromara.common.excel.annotation.ExcelDictFormat;
import org.dromara.common.excel.convert.ExcelDictConvert;
import java.io.Serial;
import java.io.Serializable;
import java.util.Date;
/**
 * Remote view object for department (sys_dept) data returned across service
 * boundaries.
 *
 * NOTE(review): the EasyExcel/AutoMapper imports are unused in this view —
 * confirm whether export annotations were dropped during extraction.
 *
 * @author Michelle.Chung
 */
@Data
public class RemoteDeptVo implements Serializable {
@Serial
private static final long serialVersionUID = 1L;
/**
 * Department id (sys_dept primary key).
 */
private String deptId;
/**
 * Parent department id.
 */
private String parentId;
/**
 * Parent department name.
 */
private String parentName;
/**
 * Ancestor id chain — presumably a comma-separated list of ancestor dept ids; confirm.
 */
private String ancestors;
/**
 * Department name.
 */
private String deptName;
/**
 * Department category code.
 */
private String deptCategory;
/**
 * Display/sort order.
 */
private Integer orderNum;
/**
 * Leader user id.
 */
private Long leader;
/**
 * Leader display name.
 */
private String leaderName;
/**
 * Contact phone number.
 */
private String phone;
/**
 * Contact email address.
 */
private String email;
/**
 * Department status — presumably 0 = normal, 1 = disabled; confirm against the system dictionary.
 */
private String status;
/**
 * Creation time.
 */
private Date createTime;
// Total count for this department — presumably total devices/users; TODO confirm with caller.
private Integer allCount;
// Online count for this department — presumably currently-online devices/users; TODO confirm with caller.
private Integer onlineCount;
// Full department name — presumably the ancestor path joined with deptName; TODO confirm.
private String fullName;
}

View File

@ -70,6 +70,8 @@ public class RemoteDeviceVo implements Serializable {
private String cardNum;
private String deviceName;
/**
* 01
*/

View File

@ -130,6 +130,8 @@ public class LoginUser implements Serializable {
*/
private String deviceType;
private String manageDeptId;
/**
* id
*/

View File

@ -6,7 +6,7 @@ server:
spring:
application:
# 应用名称
name: stwzhj-auth
name: wzhj-auth
profiles:
# 环境配置
active: @profiles.active@

View File

@ -1,28 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}"/>
<property name="log.path" value="logs" />
<property name="log.file" value="auth" />
<property name="MAX_FILE_SIZE" value="30MB" />
<property name="MAX_HISTORY" value="30" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<!-- INFO日志Appender -->
<appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<include resource="logback-common.xml" />
<!-- ERROR日志Appender -->
<appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>
<!-- 根Logger配置禁用控制台输出 -->
<root level="INFO">
<appender-ref ref="FILE_INFO" />
<appender-ref ref="FILE_ERROR" />
</root>
</configuration>

View File

@ -21,8 +21,12 @@ public class RedisConstants {
public static final long REDIS_ONLINE_USER_NEVER_EXPIRE = -1;
public static final long REDIS_NEVER_EXPIRE = 0L;
public static final long FIVE_MINUTES_REDIS_ONLINE_USER_EXPIRE_TIME = 60 * 5;
public static final String ONLINE_USERS_TEN = "ten:online_users:";
public static String getUserTokenKey(String token) {
return CCL_CODING_SSO_TOKEN + token;

View File

@ -49,10 +49,15 @@ import static org.apache.dubbo.metadata.report.support.Constants.DEFAULT_METADAT
public class RedisMetadataReport extends AbstractMetadataReport {
private static final String REDIS_DATABASE_KEY = "database";
private static final String SENTINEL_KEY = "sentinel";
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(RedisMetadataReport.class);
// protected , for test
protected JedisPool pool;
protected JedisSentinelPool sentinelPool;
private Set<HostAndPort> jedisClusterNodes;
private int timeout;
private String password;
@ -75,6 +80,14 @@ public class RedisMetadataReport extends AbstractMetadataReport {
for (URL tmpUrl : urls) {
jedisClusterNodes.add(new HostAndPort(tmpUrl.getHost(), tmpUrl.getPort()));
}
} else if (url.getParameter(SENTINEL_KEY,false)) {
Set<String> sentinels = new HashSet<>();
List<URL> urls = url.getBackupUrls();
for (URL tmpUrl : urls) {
sentinels.add(tmpUrl.getHost()+":"+ tmpUrl.getPort());
}
int database = url.getParameter(REDIS_DATABASE_KEY, 0);
sentinelPool = new JedisSentinelPool("mymaster",sentinels ,new GenericObjectPoolConfig<>(), timeout, password, database);
} else {
int database = url.getParameter(REDIS_DATABASE_KEY, 0);
pool = new JedisPool(new JedisPoolConfig(), url.getHost(), url.getPort(), timeout, password, database);
@ -128,11 +141,25 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private void storeMetadata(BaseMetadataIdentifier metadataIdentifier, String v) {
if (pool != null) {
storeMetadataStandalone(metadataIdentifier, v);
}else if(sentinelPool != null) {
storeMetadataInSentinel(metadataIdentifier, v);
} else {
storeMetadataInCluster(metadataIdentifier, v);
}
}
private void storeMetadataInSentinel(BaseMetadataIdentifier metadataIdentifier, String v) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.set(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY), v, jedisParams);
} catch (Throwable e) {
String msg =
"Failed to put " + metadataIdentifier + " to redis cluster " + v + ", cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
}
private void storeMetadataInCluster(BaseMetadataIdentifier metadataIdentifier, String v) {
try (JedisCluster jedisCluster =
new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {
@ -158,11 +185,24 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private void deleteMetadata(BaseMetadataIdentifier metadataIdentifier) {
if (pool != null) {
deleteMetadataStandalone(metadataIdentifier);
}else if(sentinelPool != null) {
deleteMetadataSentinel(metadataIdentifier);
} else {
deleteMetadataInCluster(metadataIdentifier);
}
}
private void deleteMetadataSentinel(BaseMetadataIdentifier metadataIdentifier) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.del(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY));
} catch (Throwable e) {
String msg = "Failed to delete " + metadataIdentifier + " from redis , cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
}
private void deleteMetadataInCluster(BaseMetadataIdentifier metadataIdentifier) {
try (JedisCluster jedisCluster =
new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {
@ -187,11 +227,24 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private String getMetadata(BaseMetadataIdentifier metadataIdentifier) {
if (pool != null) {
return getMetadataStandalone(metadataIdentifier);
}else if(sentinelPool != null) {
return getMetadataSentinel(metadataIdentifier);
} else {
return getMetadataInCluster(metadataIdentifier);
}
}
private String getMetadataSentinel(BaseMetadataIdentifier metadataIdentifier) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
return jedisSentinel.get(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY));
} catch (Throwable e) {
String msg = "Failed to get " + metadataIdentifier + " from redis , cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
}
private String getMetadataInCluster(BaseMetadataIdentifier metadataIdentifier) {
try (JedisCluster jedisCluster =
new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {
@ -243,6 +296,8 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private boolean storeMapping(String key, String field, String value, String ticket) {
if (pool != null) {
return storeMappingStandalone(key, field, value, ticket);
}else if(sentinelPool != null) {
return storeMappingSentinel(key, field, value, ticket);
} else {
return storeMappingInCluster(key, field, value, ticket);
}
@ -278,6 +333,33 @@ public class RedisMetadataReport extends AbstractMetadataReport {
return false;
}
/**
* use 'watch' to implement cas.
* Find information about slot distribution by key.
*/
private boolean storeMappingSentinel(String key, String field, String value, String ticket) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.watch(key);
String oldValue = jedisSentinel.hget(key, field);
if (null == oldValue || null == ticket || oldValue.equals(ticket)) {
Transaction transaction = jedisSentinel.multi();
transaction.hset(key, field, value);
List<Object> result = transaction.exec();
if (null != result) {
jedisSentinel.publish(buildPubSubKey(), field);
return true;
}
}
jedisSentinel.unwatch();
} catch (Throwable e) {
String msg = "Failed to put " + key + ":" + field + " to redis " + value + ", cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
return false;
}
/**
* use 'watch' to implement cas.
* Find information about slot distribution by key.
@ -339,6 +421,8 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private String getMappingData(String key, String field) {
if (pool != null) {
return getMappingDataStandalone(key, field);
}else if(sentinelPool != null) {
return getMappingDataSentinel(key, field);
} else {
return getMappingDataInCluster(key, field);
}
@ -355,6 +439,17 @@ public class RedisMetadataReport extends AbstractMetadataReport {
}
}
private String getMappingDataSentinel(String key, String field) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
return jedisSentinel.hget(key, field);
} catch (Throwable e) {
String msg = "Failed to get " + key + ":" + field + " from redis , cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
}
private String getMappingDataStandalone(String key, String field) {
try (Jedis jedis = pool.getResource()) {
return jedis.hget(key, field);
@ -502,6 +597,14 @@ public class RedisMetadataReport extends AbstractMetadataReport {
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
} else if (sentinelPool != null) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.subscribe(notifySub, path);
} catch (Throwable e) {
String msg = "Failed to subscribe " + path + ", cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
} else {
try (JedisCluster jedisCluster = new JedisCluster(
jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {

View File

@ -23,15 +23,27 @@ dubbo:
address: redis://${spring.data.redis.host}:${spring.data.redis.port}
group: DUBBO_GROUP
username: dubbo
password: ${spring.data.redis.password}
password: ruoyi123
# 集群开关
cluster: false
sentinel: true
parameters:
namespace: ${spring.profiles.active}
database: ${spring.data.redis.database}
timeout: ${spring.data.redis.timeout}
# 集群地址 cluster 为 true 生效
backup: 127.0.0.1:6379,127.0.0.1:6381
backup: 53.238.79.33:26380,53.238.79.34:26380,53.238.79.35:26380
# metadata-report:
# address: redis://${spring.data.redis.host}:${spring.data.redis.port}
# group: DUBBO_GROUP
# username: dubbo
# password: ${spring.data.redis.password}
# # 集群开关
# cluster: false
# parameters:
# namespace: ${spring.profiles.active}
# database: ${spring.data.redis.database}
# timeout: ${spring.data.redis.timeout}
# # 集群地址 cluster 为 true 生效
# backup: 127.0.0.1:6379,127.0.0.1:6381
# 消费者相关配置
consumer:
# 结果缓存(LRU算法)
@ -43,3 +55,12 @@ dubbo:
retries: 0
# 初始化检查
check: false
logging:
level:
# 设置 Dubbo 核心包的日志级别为 DEBUG
org.apache.dubbo: DEBUG
# 如果需要更细粒度的调试,可指定元数据报告模块
org.apache.dubbo.metadata: DEBUG
# Redis 客户端日志(可选)
io.lettuce.core: WARN # 避免 Redis 连接日志过多

View File

@ -38,12 +38,12 @@ public enum DataScopeType {
/**
*
*/
DEPT("3", " #{#deptName} = #{#user.deptId} ", " 1 = 0 "),
DEPT("3", " #{#deptName} = #{#user.manageDeptId} ", " 1 = 0 "),
/**
*
*/
DEPT_AND_CHILD("4", " #{#deptName} IN ( #{@sdss.getDeptAndChild( #user.deptId )} )", " 1 = 0 "),
DEPT_AND_CHILD("4", " #{#deptName} IN ( #{@sdss.getDeptAndChild( #user.manageDeptId )} )", " 1 = 0 "),
/**
*

View File

@ -25,6 +25,7 @@ import org.springframework.core.task.VirtualThreadTaskExecutor;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Objects;
import java.util.TimeZone;
/**
@ -94,6 +95,22 @@ public class RedisConfiguration {
.setReadMode(clusterServersConfig.getReadMode())
.setSubscriptionMode(clusterServersConfig.getSubscriptionMode());
}
// 哨兵模式
RedissonProperties.Sentinel sentinel = redissonProperties.getSentinel();
if (Objects.nonNull(sentinel)) {
config.useSentinelServers()
.setNameMapper(new KeyPrefixHandler(redissonProperties.getKeyPrefix()))
.setTimeout(sentinel.getTimeout())
.setClientName(sentinel.getClientName())
.setIdleConnectionTimeout(sentinel.getIdleConnectionTimeout())
.setSubscriptionConnectionPoolSize(sentinel.getSubscriptionConnectionPoolSize())
.setMasterConnectionMinimumIdleSize(sentinel.getMasterConnectionMinimumIdleSize())
.setMasterConnectionPoolSize(sentinel.getMasterConnectionPoolSize())
.setSlaveConnectionMinimumIdleSize(sentinel.getSlaveConnectionMinimumIdleSize())
.setSlaveConnectionPoolSize(sentinel.getSlaveConnectionPoolSize())
.setReadMode(sentinel.getReadMode())
.setSubscriptionMode(sentinel.getSubscriptionMode());
}
log.info("初始化 redis 配置");
};
}

View File

@ -40,6 +40,8 @@ public class RedissonProperties {
*/
private ClusterServersConfig clusterServersConfig;
private Sentinel sentinel;
@Data
@NoArgsConstructor
public static class SingleServerConfig {
@ -132,4 +134,60 @@ public class RedissonProperties {
}
@Data
@NoArgsConstructor
public static class Sentinel {
/**
*
*/
private String clientName;
/**
* master
*/
private int masterConnectionMinimumIdleSize;
/**
* master
*/
private int masterConnectionPoolSize;
/**
* slave
*/
private int slaveConnectionMinimumIdleSize;
/**
* slave
*/
private int slaveConnectionPoolSize;
/**
*
*/
private int idleConnectionTimeout;
/**
*
*/
private int timeout;
/**
*
*/
private int subscriptionConnectionPoolSize;
/**
*
*/
private ReadMode readMode;
/**
*
*/
private SubscriptionMode subscriptionMode;
}
}

View File

@ -1,9 +1,11 @@
package org.dromara.common.redis.utils;
import cn.hutool.core.date.DateUnit;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.dromara.common.core.utils.RedisConstants;
import org.dromara.common.core.utils.SpringUtils;
import org.redisson.api.*;
import org.springframework.dao.DataAccessException;
@ -345,6 +347,29 @@ public class RedisUtils {
return rSet.add(data);
}
public static <T> void set(final String key, String data,long time) {
if (time > 0){
CLIENT.getBucket(key).set(data, time, TimeUnit.SECONDS);
}else {
CLIENT.getBucket(key).set(data);
}
/*RSet<T> rSet = CLIENT.getSet(key);
if (time > 0){
rSet.expireAsync(time,TimeUnit.SECONDS);
}
return rSet.add(data);*/
}
public static <T> void del(final String key) {
CLIENT.getBucket(key).delete();
/*RSet<T> rSet = CLIENT.getSet(key);
if (time > 0){
rSet.expireAsync(time,TimeUnit.SECONDS);
}
return rSet.add(data);*/
}
/**
* Set
* <p>
@ -573,6 +598,73 @@ public class RedisUtils {
System.out.println("redis:"+list);
}
/**
* RMap
*
* @param data
* @param timeout
* @param timeUnit
*/
public static void batchPutWithExpire(Map<String, String> data, long timeout, TimeUnit timeUnit) {
// 创建 RBatch 实例
RBatch batch = CLIENT.createBatch();
// 获取 RMapAsync 对象
RMapAsync<Object, Object> mapAsync = batch.getMap("myMap");
// 批量操作:将多个数据添加到 map 中
for (Map.Entry<String, String> entry : data.entrySet()) {
mapAsync.putAsync(entry.getKey(), entry.getValue());
}
// 执行批量操作
batch.execute();
// 获取同步的 RMap 对象并设置过期时间
RMap<Object, Object> mapSync = CLIENT.getMap("myMap");
mapSync.expire(timeout, timeUnit);
}
/**
*
*
* @param data
*/
public static void batchPut(Map<String, String> data) {
// 创建 RBatch 实例
RBatch batch = CLIENT.createBatch();
// 获取 RMapAsync 对象
RMapAsync<Object, Object> mapAsync = batch.getMap("myMap");
// 批量操作:将多个数据添加到 map 中
for (Map.Entry<String, String> entry : data.entrySet()) {
mapAsync.putAsync(entry.getKey(), entry.getValue());
}
// 执行批量操作
batch.execute();
}
/**
* key
*
* @param key key
* @return
*/
public static JSONObject getData(String key) {
// 获取同步的 RMap 对象
RMap<Object, Object> map = CLIENT.getMap("myMap");
// 根据 key 获取数据
Object value = map.get(key);
if (null == value){
return null;
}
return JSONUtil.parseObj(value.toString());
}
/*
*
* */
@ -596,12 +688,42 @@ public class RedisUtils {
return list;
}
/**
* Redis keys key
*
* @param pattern "user:*"
* @return key value
*/
public static List<JSONObject> getMatchingKeysAndValues(String pattern) {
RKeys rKeys = CLIENT.getKeys();
Iterable<String> keysIterable = rKeys.getKeysByPattern(pattern); // 获取匹配的 key
// 获取匹配的键值对
RMap<String, String> map = CLIENT.getMap("myMap");
List<JSONObject> list = new ArrayList<>();
// RBatch batch = CLIENT.createBatch();
// 批量获取这些key的值
for (String key : keysIterable) {
String value = map.get(key); // 获取每个 key 对应的 value
if (null != value){
JSONObject jsonObject = JSONUtil.parseObj(value);
list.add(jsonObject);
}
}
return list;
}
/*
* keyRBucket
* */
public static JSONObject getBucket(String key){
RBucket<Object> bucket = CLIENT.getBucket(key);
Object value = bucket.get();
if (null == value){
return null;
}
return JSONUtil.parseObj(value.toString());
}
@ -623,12 +745,18 @@ public class RedisUtils {
/*
* GEO
* */
/*public static void batchGeoAdd(Map<String, GeoEntry> entryMap){
RGeo<RMap<String, String>> geo = CLIENT.getGeo("myGeo");
Map<String, GeoEntry> entries = new HashMap<>();
entries.put("place1", new GeoEntry(13.361389, 38.115556, "Palermo"));
entries.put("place2", new GeoEntry(15.087269, 37.502669, "Catania"));
geo.p(entries);
}*/
public static long geoAdd(Double lng,Double lat,String member){
RGeo<String> geo = CLIENT.getGeo(RedisConstants.ONLINE_USERS_GEO);
long count1 = geo.add(lng, lat, member);
return count1;
}
// 查询半径周边 米内的成员
public static List<String> nearByXYReadonly(double centerLon,double centerLat,double distance){
RGeo<String> geo = CLIENT.getGeo(RedisConstants.ONLINE_USERS_GEO);
List<String> members = geo.radius(centerLon, centerLat, distance, GeoUnit.METERS);
return members;
}
}

View File

@ -36,6 +36,8 @@ public class LoginHelper {
public static final String USER_NAME_KEY = "userName";
public static final String DEPT_KEY = "deptId";
public static final String DEPT_NAME_KEY = "deptName";
public static final String MANAGE_DEPT__KEY = "manageDeptId";
public static final String DEPT_CATEGORY_KEY = "deptCategory";
public static final String CLIENT_KEY = "clientid";
@ -53,6 +55,7 @@ public class LoginHelper {
.setExtra(USER_KEY, loginUser.getUserId())
.setExtra(USER_NAME_KEY, loginUser.getUsername())
.setExtra(DEPT_KEY, loginUser.getDeptId())
.setExtra(MANAGE_DEPT__KEY,loginUser.getManageDeptId())
.setExtra(DEPT_NAME_KEY, loginUser.getDeptName())
.setExtra(DEPT_CATEGORY_KEY, loginUser.getDeptCategory())
);

View File

@ -1,114 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}"/>
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<!-- 控制台输出 -->
<appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/console.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大 1天 -->
<maxHistory>1</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
</filter>
</appender>
<!-- 系统日志输出 -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
<!-- 匹配时的操作:接收(记录) -->
<onMatch>ACCEPT</onMatch>
<!-- 不匹配时的操作:拒绝(不记录) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>ERROR</level>
<!-- 匹配时的操作:接收(记录) -->
<onMatch>ACCEPT</onMatch>
<!-- 不匹配时的操作:拒绝(不记录) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- info异步输出 -->
<appender name="async_info" class="ch.qos.logback.classic.AsyncAppender">
<!-- 不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
<queueSize>512</queueSize>
<!-- 添加附加的appender,最多只能添加一个 -->
<appender-ref ref="file_info"/>
</appender>
<!-- error异步输出 -->
<appender name="async_error" class="ch.qos.logback.classic.AsyncAppender">
<!-- 不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
<queueSize>512</queueSize>
<!-- 添加附加的appender,最多只能添加一个 -->
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>
<appender-ref ref="async_info"/>
<appender-ref ref="async_error"/>
<appender-ref ref="file_console"/>
</root>
</configuration>

View File

@ -9,13 +9,19 @@
<modelVersion>4.0.0</modelVersion>
<modules>
<module>stwzhj-system</module>
<module>wzhj-system</module>
<module>stwzhj-gen</module>
<module>stwzhj-job</module>
<module>stwzhj-resource</module>
<module>stwzhj-workflow</module>
<module>stwzhj-data2es</module>
<module>wzhj-data2es</module>
<module>stwzhj-baseToSt</module>
<module>wzhj-consumer</module>
<module>wzhj-location</module>
<module>stwzhj-dataToGas</module>
<module>wzhj-websocket</module>
<module>wzhj-extract</module>
<module>wzhj-udp</module>
</modules>
<artifactId>stwzhj-modules</artifactId>

View File

@ -1,34 +0,0 @@
package org.dromara.kafka.consumer;
import com.ruansee.redis.JedisConfig;
import com.ruansee.redis.RedisConfig;
import com.ruansee.redis.RedisUtil;
import com.ruansee.redis.RedissionLockUtil;
import org.dromara.kafka.consumer.config.KafkaPropertiesConfig;
import org.redisson.spring.starter.RedissonAutoConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;
import org.springframework.boot.autoconfigure.data.redis.RedisReactiveAutoConfiguration;
import org.springframework.boot.autoconfigure.data.redis.RedisRepositoriesAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.scheduling.annotation.EnableAsync;

/**
 * Entry point for the Kafka consumer service. Enables async execution,
 * servlet-component scanning and binds {@link KafkaPropertiesConfig}
 * from external configuration.
 *
 * @author chenle
 * @date 2021-09-06 11:12
 */
@SpringBootApplication
@EnableAsync
@EnableConfigurationProperties({KafkaPropertiesConfig.class})
@ServletComponentScan
public class KafkaConsumerApplication {

    /**
     * Boots the Spring application context.
     *
     * @param args command-line arguments forwarded to Spring
     */
    public static void main(String[] args) {
        SpringApplication application = new SpringApplication(KafkaConsumerApplication.class);
        application.run(args);
    }
}

View File

@ -1,136 +0,0 @@
package org.dromara.kafka.consumer.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

/**
 * Loads Kafka configuration from optional properties files under
 * {@code <working dir>/src/main/resources} (producer/consumer/server/client
 * .properties) and exposes key lookup with a fallback chain:
 * server -> producer -> consumer -> client.
 *
 * <p>Thread-safety: instance creation is synchronized; the backing
 * {@link Properties} objects are populated once in the constructor.
 */
public final class KafkaProperties
{
    private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class);

    // Topic name; in security mode an administrator must grant the current
    // user access to this topic.
    public final static String TOPIC = "t_gps_realtime";

    private static final Properties serverProps = new Properties();
    private static final Properties producerProps = new Properties();
    private static final Properties consumerProps = new Properties();
    private static final Properties clientProps = new Properties();

    private static KafkaProperties instance = null;

    private KafkaProperties()
    {
        String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator
                + "main" + File.separator + "resources" + File.separator;
        try
        {
            loadIfExists(producerProps, filePath + "producer.properties");
            // BUGFIX: the original checked for producer.properties before loading
            // consumer.properties, so consumer settings were silently skipped
            // whenever producer.properties was absent.
            loadIfExists(consumerProps, filePath + "consumer.properties");
            loadIfExists(serverProps, filePath + "server.properties");
            loadIfExists(clientProps, filePath + "client.properties");
        }
        catch (IOException e)
        {
            LOG.info("The Exception occured.", e);
        }
    }

    /**
     * Loads {@code file} into {@code target} when it exists; the stream is
     * always closed (the original leaked every FileInputStream).
     */
    private static void loadIfExists(Properties target, String file) throws IOException
    {
        File f = new File(file);
        if (f.exists())
        {
            try (FileInputStream in = new FileInputStream(f))
            {
                target.load(in);
            }
        }
    }

    /** Lazily creates the singleton; synchronized for thread safety. */
    public synchronized static KafkaProperties getInstance()
    {
        if (null == instance)
        {
            instance = new KafkaProperties();
        }
        return instance;
    }

    /**
     * Looks up {@code key} across the loaded properties files.
     *
     * @param key      properties key to look up; a null key only logs an error
     * @param defValue value returned when the key is absent or null
     * @return the configured value, or {@code defValue} when missing
     */
    public String getValues(String key, String defValue)
    {
        String rtValue = null;
        if (null == key)
        {
            LOG.error("key is null");
        }
        else
        {
            rtValue = getPropertiesValue(key);
        }
        if (null == rtValue)
        {
            LOG.warn("KafkaProperties.getValues return null, key is {}", key);
            rtValue = defValue;
        }
        LOG.info("KafkaProperties.getValues: key is {}; Value is {}", key, rtValue);
        return rtValue;
    }

    /**
     * Resolves {@code key} in order: server.properties, then producer, then
     * consumer, then client. Returns null when no file defines the key.
     */
    private String getPropertiesValue(String key)
    {
        String rtValue = serverProps.getProperty(key);
        if (null == rtValue)
        {
            rtValue = producerProps.getProperty(key);
        }
        if (null == rtValue)
        {
            rtValue = consumerProps.getProperty(key);
        }
        if (null == rtValue)
        {
            rtValue = clientProps.getProperty(key);
        }
        return rtValue;
    }
}

View File

@ -1,35 +0,0 @@
package org.dromara.kafka.consumer.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Profile;

/**
 * Externalized Kafka settings bound from the {@code mykafka.*} configuration
 * prefix; only active under the {@code dev} profile.
 *
 * @author chenle
 * @date 2021-09-06 15:13
 */
@ConfigurationProperties(prefix = "mykafka")
@Profile(value = "dev")
public class KafkaPropertiesConfig {

    // Kafka broker address, bound from mykafka.server-url.
    private String serverUrl;

    // Nested consumer settings, bound from mykafka.consumer-properties.*;
    // defaults to an empty holder so callers never see null.
    private MyConsumerProperties consumerProperties = new MyConsumerProperties();

    public String getServerUrl() {
        return serverUrl;
    }

    public void setServerUrl(String serverUrl) {
        this.serverUrl = serverUrl;
    }

    public MyConsumerProperties getConsumerProperties() {
        return consumerProperties;
    }

    public void setConsumerProperties(MyConsumerProperties consumerProperties) {
        this.consumerProperties = consumerProperties;
    }
}

View File

@ -1,28 +0,0 @@
package org.dromara.kafka.consumer.config;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-09-07 14:54
*/
public class MyConsumerProperties {
private String clientId;
private String groupId = "222";
public String getClientId() {
return clientId;
}
public void setClientId(String clientId) {
this.clientId = clientId;
}
public String getGroupId() {
return groupId;
}
public void setGroupId(String groupId) {
this.groupId = groupId;
}
}

View File

@ -1,159 +0,0 @@
package org.dromara.kafka.consumer.config;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;
import org.dromara.kafka.consumer.handler.KafkaSecurityUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Standalone Kafka consumer thread for the GPS realtime topic.
 *
 * <p>BUGFIXES over the original:
 * <ul>
 *   <li>{@code start()} was overridden to call {@code doWork()} synchronously on
 *       the caller's thread, so no consumer thread was ever spawned; the override
 *       is removed and the poll loop now lives in {@link #run()}.</li>
 *   <li>{@code shutdown()} interrupted {@code Thread.currentThread()} (the caller,
 *       not the consumer); it now uses {@link KafkaConsumer#wakeup()}, the
 *       supported way to abort a blocked poll.</li>
 *   <li>{@code main} closed the consumer from a foreign thread while it might be
 *       polling (KafkaConsumer is not thread-safe); the consumer is now closed by
 *       the polling thread itself in {@code run()}'s finally block.</li>
 *   <li>Deprecated {@code poll(long)} replaced with {@code poll(Duration)}.</li>
 * </ul>
 */
public class NewConsumer extends Thread {

    private static final Logger LOG = LoggerFactory.getLogger(NewConsumer.class);

    private final KafkaConsumer<Integer, String> consumer;

    private final String topic;

    // Signals the poll loop to exit; volatile because it is written by the
    // shutting-down thread and read by the polling thread.
    private volatile boolean running = true;

    // Maximum wait per poll request, in milliseconds.
    private final int waitTime = 10000;

    // Broker connection address property key.
    private final String bootstrapServers = "bootstrap.servers";

    // Group id property key.
    private final String groupId = "group.id";

    // Deserializer class for message values.
    private final String valueDeserializer = "value.deserializer";

    // Deserializer class for message keys.
    private final String keyDeserializer = "key.deserializer";

    // Protocol type: SASL_PLAINTEXT or PLAINTEXT.
    private final String securityProtocol = "security.protocol";

    // Kerberos service name property key.
    private final String saslKerberosServiceName = "sasl.kerberos.service.name";

    // Kerberos domain name property key.
    private final String kerberosDomainName = "kerberos.domain.name";

    // Whether to auto-commit offsets.
    private final String enableAutoCommit = "enable.auto.commit";

    // Auto-commit interval property key.
    private final String autoCommitIntervalMs = "auto.commit.interval.ms";

    // Session timeout property key.
    private final String sessionTimeoutMs = "session.timeout.ms";

    /**
     * Keytab file name. NOTE(review): unused here; kept for parity with the
     * security setup in KafkaSecurityUtil — confirm before removing.
     */
    private static final String USER_KEYTAB_FILE = "user.keytab";

    /**
     * Principal name. NOTE(review): unused here; see note above.
     */
    private static final String USER_PRINCIPAL = "aqdsj_ruansi";

    /**
     * Builds a consumer for the given topic using properties resolved via
     * {@link KafkaProperties}, with hard-coded defaults as fallback.
     *
     * @param topic topic to subscribe to
     */
    public NewConsumer(String topic) {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();
        // Broker connection address.
        props.put(bootstrapServers,
            kafkaProc.getValues(bootstrapServers, "localhost:21007"));
        // Group id.
        props.put(groupId, "DemoConsumer");
        // Auto-commit offsets.
        props.put(enableAutoCommit, "true");
        // Auto-commit interval.
        props.put(autoCommitIntervalMs, "1000");
        // Session timeout.
        props.put(sessionTimeoutMs, "30000");
        // Key deserializer.
        props.put(keyDeserializer,
            "org.apache.kafka.common.serialization.IntegerDeserializer");
        // Value deserializer.
        props.put(valueDeserializer,
            "org.apache.kafka.common.serialization.StringDeserializer");
        // Security protocol type.
        props.put(securityProtocol, kafkaProc.getValues(securityProtocol, "SASL_PLAINTEXT"));
        // Kerberos service name.
        props.put(saslKerberosServiceName, "kafka");
        // Kerberos domain name.
        props.put(kerberosDomainName, kafkaProc.getValues(kerberosDomainName, "hadoop.hadoop.com"));
        consumer = new KafkaConsumer<Integer, String>(props);
        this.topic = topic;
    }

    /**
     * Performs one subscribe-and-poll cycle and logs every received record.
     * Kept public for backward compatibility with existing callers.
     */
    public void doWork()
    {
        consumer.subscribe(Collections.singletonList(this.topic));
        ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofMillis(waitTime));
        for (ConsumerRecord<Integer, String> record : records)
        {
            LOG.info("[NewConsumerExample], Received message: (" + record.key() + ", " + record.value()
                + ") at offset " + record.offset());
        }
    }

    /**
     * Poll loop: runs {@link #doWork()} until {@link #shutdown()} is called,
     * then closes the consumer on this (the owning) thread.
     */
    @Override
    public void run() {
        try {
            while (running) {
                doWork();
            }
        } catch (WakeupException e) {
            // Expected when shutdown() aborts a blocked poll; rethrow only if
            // the wakeup was not requested by us.
            if (running) {
                throw e;
            }
        } finally {
            consumer.close();
        }
    }

    public static void main(String[] args)
    {
        if (KafkaSecurityUtil.isSecurityModel())
        {
            try
            {
                LOG.info("Securitymode start.");
                // NOTE: for security authentication, replace the principal with
                // your own machine-machine account.
                KafkaSecurityUtil.securityPrepare();
            }
            catch (IOException e)
            {
                LOG.error("Security prepare failure.");
                LOG.error("The IOException occured : {}.", e);
                return;
            }
            LOG.info("Security prepare success.");
        }
        NewConsumer consumerThread = new NewConsumer(KafkaProperties.TOPIC);
        consumerThread.start();
        // Let the consumer run for 60s, then shut it down; adjust as needed.
        try
        {
            Thread.sleep(60000);
        }
        catch (InterruptedException e)
        {
            LOG.info("The InterruptedException occured : {}.", e);
            Thread.currentThread().interrupt();
        }
        finally
        {
            consumerThread.shutdown();
        }
    }

    /** Requests loop exit and wakes the consumer out of a blocked poll. */
    private void shutdown() {
        running = false;
        consumer.wakeup();
    }
}

View File

@ -1,234 +0,0 @@
package org.dromara.kafka.consumer.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import cn.hutool.core.convert.ConvertException;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.ruansee.response.ApiResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfoVO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;

/**
 * Processes one Kafka GPS record: parses the JSON payload, validates the
 * device code and coordinates, stamps the city code, and enqueues the record
 * on the shared {@link #linkedBlockingDeque} for downstream persistence.
 *
 * <p>BUGFIXES over the original:
 * <ul>
 *   <li>{@code luanrequestBatch} used {@code return} inside its record loop,
 *       aborting the whole batch on the first invalid record; it now uses
 *       {@code continue} to skip only that record.</li>
 *   <li>The enqueue result was wrapped in {@code R.ok(offer)}, whose code is
 *       always 200 — a full deque was logged as success. The boolean is now
 *       checked directly.</li>
 *   <li>The triplicated deviceCode/lat/lng validation is factored into
 *       {@link #isValidGps}.</li>
 * </ul>
 *
 * @author chenle
 * @date 2021-09-06 16:44
 */
public class ConsumerWorker implements Runnable {

    private ConsumerRecord<String, Object> record;

    private Logger logger = LoggerFactory.getLogger(ConsumerWorker.class);

    // Shared hand-off queue to the persistence side; capacity-bounded at 5000,
    // so offer() returns false (record dropped) when the downstream lags.
    public static LinkedBlockingDeque linkedBlockingDeque = new LinkedBlockingDeque<>(5000);

    private String cityCode;

    ConsumerWorker(ConsumerRecord<String, Object> record, String cityCode) {
        this.record = record;
        this.cityCode = cityCode;
    }

    @Override
    public void run() {
        // Convention: third-party city feeds pass a cityCode longer than 4
        // characters whose 4-char prefix is the real code; our own cities
        // (e.g. Lu'an, Anqing) pass exactly 4 characters.
        if (cityCode.length() > 4) {
            cityCode = cityCode.substring(0, 4);
            normalRequest();
        } else {
            luanrequest();
            // luanrequestBatch();
        }
    }

    /**
     * Basic sanity checks shared by all payload shapes; logs the rejection
     * reason and returns false for invalid records.
     */
    private boolean isValidGps(String deviceCode, String latitude, String longitude) {
        if (StringUtils.isEmpty(deviceCode) || deviceCode.length() > 100) {
            logger.info("deviceCode:{} is null or is too long ", deviceCode);
            return false;
        }
        if (StringUtils.isEmpty(latitude) || "0.0".equals(latitude)) {
            logger.info("latitude:{} is null or is zero ", latitude);
            return false;
        }
        if (StringUtils.isEmpty(longitude) || "0.0".equals(longitude)) {
            logger.info("longitude:{} is null or is zero ", longitude);
            return false;
        }
        return true;
    }

    /**
     * Batch variant: the payload is a JSON array of records. Invalid records
     * are skipped individually. NOTE(review): the collected list is not yet
     * persisted (saveGpsInfoBatch is commented out) — confirm before enabling.
     */
    private void luanrequestBatch() {
        Object value = record.value();
        String topic = record.topic();
        List<EsGpsInfo> list = new ArrayList<>();
        logger.info("offset={},topic={},value={}", record.offset(), topic, value);
        List<JSONObject> jsonObjects = JSON.parseArray((String) value, JSONObject.class);
        for (JSONObject jsonObject : jsonObjects) {
            EsGpsInfo esGpsInfo;
            try {
                esGpsInfo = JSONUtil.toBean(jsonObject, EsGpsInfo.class);
            } catch (ConvertException e) {
                logger.info("EsGpsInfo=null:error={}", e.getMessage());
                continue; // BUGFIX: was "return", which dropped the rest of the batch
            }
            if (Objects.isNull(esGpsInfo)) {
                logger.info("esGpsInfo=null no error");
                continue;
            }
            if (!isValidGps(esGpsInfo.getDeviceCode(), esGpsInfo.getLat(), esGpsInfo.getLng())) {
                continue;
            }
            esGpsInfo.setInfoSource(cityCode);
            try {
                // gpsTime arrives as epoch milliseconds in the raw JSON.
                esGpsInfo.setGpsTime(new Date(Long.valueOf(jsonObject.getStr("gpsTime"))));
            } catch (Exception e) {
                logger.error("error_msg={}", e.getMessage());
                continue;
            }
            list.add(esGpsInfo);
        }
        // dataToEsService.saveGpsInfoBatch(list);
    }

    /**
     * Single-record variant for our own city feeds; the payload is one JSON
     * object with lat/lng/deviceCode and an epoch-millis gpsTime.
     */
    private void luanrequest() {
        Object value = record.value();
        String topic = record.topic();
        logger.info("offset={},topic={},value={}", record.offset(), topic, value);
        RemoteGpsInfo esGpsInfo;
        JSONObject jsonObject;
        try {
            jsonObject = JSONUtil.parseObj(((String) value));
        } catch (ConvertException e) {
            logger.info("jsonObject=null:error={}", e.getMessage());
            return;
        }
        try {
            esGpsInfo = JSONUtil.toBean(jsonObject, RemoteGpsInfo.class);
        } catch (ConvertException e) {
            logger.info("EsGpsInfo=null:error={}", e.getMessage());
            return;
        }
        if (Objects.isNull(esGpsInfo)) {
            logger.info("esGpsInfo=null no error");
            return;
        }
        if (!isValidGps(esGpsInfo.getDeviceCode(), esGpsInfo.getLat(), esGpsInfo.getLng())) {
            return;
        }
        esGpsInfo.setInfoSource(cityCode);
        try {
            // gpsTime arrives as epoch milliseconds; a malformed value is
            // logged but the record is still forwarded (original behavior).
            esGpsInfo.setGpsTime(new Date(Long.valueOf(jsonObject.getStr("gpsTime"))));
        } catch (Exception e) {
            logger.error("error_msg={}", e.getMessage());
        }
        logger.info("esGpsInfo={}", esGpsInfo);
        boolean offer = linkedBlockingDeque.offer(esGpsInfo);
        // BUGFIX: check the enqueue result directly instead of R.ok(offer),
        // which always reported code 200 even when the deque was full.
        if (offer) {
            logger.info("topic={},data2es={},gpsTime={}", topic, "success", esGpsInfo.getGpsTime());
        } else {
            logger.info("topic={},data2es={}", topic, "offer failed: deque full");
        }
    }

    /**
     * Variant for third-party city feeds: the payload uses the EsGpsInfoVO
     * field names (latitude/longitude/direction, formatted gpsTime) and is
     * mapped onto RemoteGpsInfo before enqueueing.
     */
    private void normalRequest() {
        Object value = record.value();
        String topic = record.topic();
        logger.info("offset={},topic={},value={}", record.offset(), topic, value);
        RemoteGpsInfo esGpsInfo = new RemoteGpsInfo();
        EsGpsInfoVO esGpsInfoVO;
        try {
            esGpsInfoVO = JSONUtil.toBean(((String) value), EsGpsInfoVO.class);
        } catch (ConvertException e) {
            logger.info("esGpsInfoVO=null:error={}", e.getMessage());
            return;
        }
        if (Objects.isNull(esGpsInfoVO)) {
            logger.info("esGpsInfoVO=null no error");
            return;
        }
        try {
            // Parse only to validate the timestamp format; the value is unused.
            DateUtil.parse(esGpsInfoVO.getGpsTime(), "yyyy-MM-dd HH:mm:ss");
        } catch (Exception e) {
            logger.info("gpsTime:{} format error", esGpsInfoVO.getGpsTime());
            return;
        }
        if (!isValidGps(esGpsInfoVO.getDeviceCode(), esGpsInfoVO.getLatitude(), esGpsInfoVO.getLongitude())) {
            return;
        }
        BeanUtil.copyProperties(esGpsInfoVO, esGpsInfo, new CopyOptions());
        esGpsInfo.setLat(esGpsInfoVO.getLatitude());
        esGpsInfo.setLng(esGpsInfoVO.getLongitude());
        esGpsInfo.setOrientation(esGpsInfoVO.getDirection());
        esGpsInfo.setInfoSource(cityCode);
        boolean offer = linkedBlockingDeque.offer(esGpsInfo);
        // BUGFIX: same as luanrequest — report the real enqueue outcome.
        if (offer) {
            logger.info("topic={},data2es={}", topic, "success");
        } else {
            logger.error("topic={},data2es={}", topic, "fail");
        }
    }
}

View File

@ -1,98 +0,0 @@
package org.dromara.kafka.consumer.handler;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.MessageListener;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * <p>description: Long-running Kafka poll loop. Builds a raw KafkaConsumer from
 * the supplied property map, subscribes to the topics under the map's "topics"
 * key, seeks every partition to its latest offset (so only messages arriving
 * after startup are consumed), then polls forever and hands each record to the
 * thread pool as a {@link ConsumerWorker}.</p>
 *
 * <p>NOTE(review): the loop never exits and the consumer is never closed;
 * presumably this runnable lives for the whole process — confirm.</p>
 *
 * @author chenle
 * @date 2021-09-06 16:39
 */
public class KafkaConsumerRunnable implements Runnable {
// Raw consumer config map; also carries a non-Kafka "topics" entry (List of
// topic names) that is read back in run(). Raw type kept as supplied by caller.
private Map props;
// Pool that executes one ConsumerWorker per polled record.
private ThreadPoolExecutor taskExecutor;
// City code forwarded to each ConsumerWorker (selects the payload format).
private String cityCode;
private Logger logger = LoggerFactory.getLogger(KafkaConsumerRunnable.class);
public KafkaConsumerRunnable(Map props, ThreadPoolExecutor taskExecutor,
String cityCode) {
this.props = props;
this.taskExecutor = taskExecutor;
this.cityCode = cityCode;
}
// NOTE(review): the three private factory methods below are not called from
// run() or anywhere visible in this file — likely leftovers from an earlier
// Spring-Kafka listener approach; confirm before removing.
private DefaultKafkaConsumerFactory buildConsumerFactory(){
return new DefaultKafkaConsumerFactory<String, String>(props);
}
private ContainerProperties containerProperties(String[] topic, MessageListener<String, Object> messageListener) {
ContainerProperties containerProperties = new ContainerProperties(topic);
containerProperties.setMessageListener(messageListener);
return containerProperties;
}
private KafkaListenerContainerFactory buildListenerFactory(){
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory();
factory.setConsumerFactory(buildConsumerFactory());
factory.setConcurrency(4);
factory.setBatchListener(true);
factory.getContainerProperties().setPollTimeout(3000);
return factory;
}
@Override
public void run() {
KafkaConsumer<String,Object> consumer = new KafkaConsumer<>(props);
// "topics" is a List<String> smuggled through the config map by the caller.
List topics = (List) props.get("topics");
consumer.subscribe(topics);
// Deprecated poll(0): issued only to force the subscription/partition
// assignment to take effect before listTopics()/seekToEnd below.
consumer.poll(0); // make the subscription take effect
List<TopicPartition> topicPartitions = new ArrayList<>();
Map<String, List<PartitionInfo>> stringListMap = consumer.listTopics();
// Collect every partition of every subscribed topic so seekToEnd can skip
// all backlog and start from fresh messages only.
for (Object topic : topics) {
String topic1 = (String) topic;
List<PartitionInfo> partitionInfos = stringListMap.get(topic1);
for (PartitionInfo partitionInfo : partitionInfos) {
TopicPartition partition = new TopicPartition(topic1, partitionInfo.partition());
topicPartitions.add(partition);
}
}
consumer.seekToEnd(topicPartitions); // passing Collections.emptyList() would seek all assigned partitions to the end
// Poll forever; each record is processed asynchronously on the pool.
while (true) {
ConsumerRecords<String, Object> records = consumer.poll(Duration.ofMillis(100));
for (ConsumerRecord<String, Object> record : records) {
taskExecutor.submit(new ConsumerWorker(record, cityCode));
}
}
}
}

View File

@ -1,108 +0,0 @@
package org.dromara.kafka.consumer.handler;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

/**
 * Kerberos security helpers for the Kafka client: detects whether the
 * deployment runs in security mode (via the "kafkaSecurityMode" classpath
 * resource) and prepares krb5/JAAS system properties.
 *
 * <p>BUGFIXES over the original: the classpath InputStream is now closed
 * (try-with-resources) and a missing resource no longer NPEs into the
 * catch block; the unused private isFileExists helper was removed.
 *
 * @author chenle
 * @date 2021-10-28 14:48
 */
public class KafkaSecurityUtil {

    static Logger logger = LoggerFactory.getLogger(KafkaSecurityUtil.class);

    /**
     * Ad-hoc manual check of hutool date parsing into EsGpsInfo; not part of
     * the runtime path.
     */
    public static void main(String[] args) {
        EsGpsInfo esGpsInfo = new EsGpsInfo();
        String realtime = "2021/11/04 12:00:11";
        DateTime dateTime = DateUtil.parse(realtime);
        esGpsInfo.setGpsTime(dateTime.toJdkDate());
        logger.info("esGpsInfo:{},deviceType={},gpsTime={}", esGpsInfo.toString(),
            esGpsInfo.getDeviceType(), dateTime.toJdkDate().toString());
    }

    /**
     * Keytab file name. NOTE(review): unused in this class; the keytab path is
     * hard-coded below — confirm before removing.
     */
    private static final String USER_KEYTAB_FILE = "user.keytab";

    /**
     * Kerberos principal used for the JAAS login configuration.
     */
    private static final String USER_PRINCIPAL = "aqdsj_ruansi@HADOOP.COM";

    /**
     * Sets the krb5 config, Zookeeper principal and JAAS file system
     * properties from the fixed /gpsstore deployment paths.
     *
     * @throws IOException if any system property cannot be applied
     */
    public static void securityPrepare() throws IOException
    {
        logger.info("entering securityPrepare");
        // Deployment-fixed paths; earlier classpath-based variants are retired.
        String krbFile = "/gpsstore/krb5.conf";
        String userKeyTableFile = "/gpsstore/user.keytab";
        // Escape backslashes for Windows-style paths.
        userKeyTableFile = userKeyTableFile.replace("\\", "\\\\");
        krbFile = krbFile.replace("\\", "\\\\");
        LoginUtil.setKrb5Config(krbFile);
        LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com");
        logger.info("userKeyTableFile path---{}", userKeyTableFile);
        LoginUtil.setJaasFile(USER_PRINCIPAL, userKeyTableFile);
    }

    /**
     * Reads the "kafkaSecurityMode" classpath resource and reports whether
     * kafka.client.security.mode=yes. A missing resource or any read error
     * means non-security mode.
     *
     * @return true only when security mode is explicitly enabled
     */
    public static Boolean isSecurityModel()
    {
        Boolean isSecurity = false;
        try (InputStream inputStream = Thread.currentThread().getContextClassLoader()
                .getResourceAsStream("kafkaSecurityMode"))
        {
            if (inputStream == null)
            {
                // Resource absent: default to non-security mode.
                return isSecurity;
            }
            Properties securityProps = new Properties();
            securityProps.load(inputStream);
            if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode")))
            {
                isSecurity = true;
            }
        }
        catch (Exception e)
        {
            logger.info("The Exception occured : {}.", e);
        }
        return isSecurity;
    }
}

View File

@ -1,215 +0,0 @@
package org.dromara.kafka.consumer.handler;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

/**
 * Utilities for configuring JVM-wide Kerberos login: writes a JAAS
 * configuration file and sets the krb5 / Zookeeper-principal / JAAS system
 * properties that the Kafka and Zookeeper clients read.
 *
 * <p>BUGFIX over the original: {@link #writeJaasFile} no longer discards the
 * underlying IOException cause, and the writer is closed via
 * try-with-resources.
 *
 * @author chenle
 * @date 2021-10-28 15:40
 */
public class LoginUtil
{
    /** Client modules that must appear in the JAAS configuration. */
    public enum Module
    {
        STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client");

        private String name;

        private Module(String name)
        {
            this.name = name;
        }

        public String getName()
        {
            return name;
        }
    }

    /**
     * line operator string
     */
    private static final String LINE_SEPARATOR = System.getProperty("line.separator");

    /**
     * jaas file postfix
     */
    private static final String JAAS_POSTFIX = ".jaas.conf";

    /**
     * is IBM jdk or not
     */
    private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM");

    /**
     * IBM jdk login module
     */
    private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required";

    /**
     * oracle jdk login module
     */
    private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required";

    /**
     * Zookeeper quorum principal.
     */
    public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal";

    /**
     * java security krb5 file path
     */
    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";

    /**
     * java security login file path
     */
    public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config";

    /**
     * Writes a fresh jaas.conf into the JVM temp directory (replacing any
     * previous one) and points java.security.auth.login.config at it.
     *
     * @param principal  Kerberos principal to embed in the JAAS entries
     * @param keytabPath path to the keytab file
     * @throws IOException if the old file cannot be deleted or the new one written
     */
    public static void setJaasFile(String principal, String keytabPath)
        throws IOException
    {
        String jaasPath =
            new File(System.getProperty("java.io.tmpdir")) + File.separator + System.getProperty("user.name")
                + JAAS_POSTFIX;
        // Escape backslashes for Windows-style paths.
        jaasPath = jaasPath.replace("\\", "\\\\");
        // Remove any stale jaas file before rewriting.
        deleteJaasFile(jaasPath);
        writeJaasFile(jaasPath, principal, keytabPath);
        System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath);
    }

    /**
     * Sets the Zookeeper server principal system property and verifies the
     * write took effect.
     *
     * @param zkServerPrincipal e.g. "zookeeper/hadoop.hadoop.com"
     * @throws IOException if the property read back does not match
     */
    public static void setZookeeperServerPrincipal(String zkServerPrincipal)
        throws IOException
    {
        System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal);
        String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL);
        if (ret == null)
        {
            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null.");
        }
        if (!ret.equals(zkServerPrincipal))
        {
            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + ".");
        }
    }

    /**
     * Sets the krb5 configuration file system property and verifies the
     * write took effect.
     *
     * @param krb5ConfFile path to krb5.conf
     * @throws IOException if the property read back does not match
     */
    public static void setKrb5Config(String krb5ConfFile)
        throws IOException
    {
        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile);
        String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF);
        if (ret == null)
        {
            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null.");
        }
        if (!ret.equals(krb5ConfFile))
        {
            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + ".");
        }
    }

    /**
     * Writes the generated JAAS content to {@code jaasPath}.
     *
     * @throws IOException if the file cannot be written; the original cause
     *         is preserved (the original implementation discarded it)
     */
    private static void writeJaasFile(String jaasPath, String principal, String keytabPath)
        throws IOException
    {
        try (FileWriter writer = new FileWriter(new File(jaasPath)))
        {
            writer.write(getJaasConfContext(principal, keytabPath));
            writer.flush();
        }
        catch (IOException e)
        {
            throw new IOException("Failed to create jaas.conf File", e);
        }
    }

    /** Deletes an existing jaas file, failing loudly if it cannot be removed. */
    private static void deleteJaasFile(String jaasPath)
        throws IOException
    {
        File jaasFile = new File(jaasPath);
        if (jaasFile.exists())
        {
            if (!jaasFile.delete())
            {
                throw new IOException("Failed to delete exists jaas file.");
            }
        }
    }

    /** Concatenates one JAAS entry per {@link Module}. */
    private static String getJaasConfContext(String principal, String keytabPath)
    {
        Module[] allModule = Module.values();
        StringBuilder builder = new StringBuilder();
        for (Module modlue : allModule)
        {
            builder.append(getModuleContext(principal, keytabPath, modlue));
        }
        return builder.toString();
    }

    /** Renders a single JAAS entry, using the vendor-appropriate login module. */
    private static String getModuleContext(String userPrincipal, String keyTabPath, Module module)
    {
        StringBuilder builder = new StringBuilder();
        if (IS_IBM_JDK)
        {
            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
            builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR);
            builder.append("credsType=both").append(LINE_SEPARATOR);
            builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR);
            builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR);
            builder.append("debug=true;").append(LINE_SEPARATOR);
            builder.append("};").append(LINE_SEPARATOR);
        }
        else
        {
            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
            builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR);
            builder.append("useKeyTab=true").append(LINE_SEPARATOR);
            builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR);
            builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR);
            builder.append("useTicketCache=false").append(LINE_SEPARATOR);
            builder.append("storeKey=true").append(LINE_SEPARATOR);
            builder.append("debug=true;").append(LINE_SEPARATOR);
            builder.append("};").append(LINE_SEPARATOR);
        }
        return builder.toString();
    }
}

View File

@ -1,130 +0,0 @@
package org.dromara.kafka.consumer.handler;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.dromara.kafka.consumer.config.KafkaPropertiesConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
/**
 * <p>description: Startup hook (CommandLineRunner) that builds the Kafka
 * consumer configuration, optionally prepares Kerberos security, and launches
 * a single {@link KafkaConsumerRunnable} on a dedicated single-thread
 * executor.</p>
 *
 * <p>NOTE(review): broker address, topics, group id and city code are
 * hard-coded below and the command-line override block is commented out —
 * this looks like a local-debug configuration; confirm before deploying.</p>
 *
 * @author chenle
 * @date 2021-09-06 11:15
 */
@Component
public class RealConsumer implements CommandLineRunner {
// Kafka bootstrap servers; overwritten with a hard-coded value in run().
private String kafkaServers;
// Kafka consumer group id; overwritten in run().
private String groupId;
// Comma-separated topic list; overwritten in run().
private String topics;
// City code forwarded to ConsumerWorker; default is overwritten in run().
private String cityCode = "3400";
@Autowired
KafkaPropertiesConfig kafkaPropertiesConfig;
@Autowired
ThreadPoolExecutor dtpExecutor2;
private Logger logger = LoggerFactory.getLogger(RealConsumer.class);
@Override
public void run(String... args) throws Exception {
// Hard-coded local-debug values; see class-level NOTE.
kafkaServers = "127.0.0.1:9092";
topics = "topic.send.2,topic.send.3,topic.send.4,topic.send.5,topic.send.8";
groupId = "group_ruansi_xuancheng";
cityCode = "3418";
// Command-line overrides are currently disabled (body commented out);
// presumably intentional while debugging — confirm before re-enabling.
if(args.length > 0){
/*kafkaServers = args[0];
topics = args[1];
groupId = args[2];
cityCode = args[3];*/
}
// Single dedicated thread for the never-ending poll loop.
ExecutorService executorService = Executors.newSingleThreadExecutor();
Map kafkaProp = getKafkaProp();
if (KafkaSecurityUtil.isSecurityModel())
{
try
{
logger.info("Securitymode start.");
// NOTE: with security authentication, replace the principal with your own machine-machine account.
// Protocol type: SASL_PLAINTEXT or PLAINTEXT
kafkaProp.put("security.protocol","SASL_PLAINTEXT");
// Kerberos service name
kafkaProp.put("sasl.kerberos.service.name","kafka");
// Kerberos domain name
kafkaProp.put("kerberos.domain.name","hadoop.hadoop.com");
KafkaSecurityUtil.securityPrepare();
}
catch (IOException e)
{
logger.error("Security prepare failure.");
logger.error("The IOException occured.", e);
return;
}
logger.info("Security prepare success.");
}
KafkaConsumerRunnable runnable = new KafkaConsumerRunnable(kafkaProp,dtpExecutor2,cityCode);
executorService.execute(runnable);
}
/**
 * Builds the raw Kafka consumer property map. Besides standard consumer
 * settings it stores the parsed topic list under the non-Kafka key
 * "topics", which KafkaConsumerRunnable reads back.
 * @return consumer configuration map
 */
private Map<String, Object> getKafkaProp() {
// Properties map = new Properties();
Map<String, Object> map = new HashMap<>();
map.put("bootstrap.servers",kafkaServers);
map.put("group.id",groupId);
map.put("enable.auto.commit", "true");
map.put("auto.commit.interval.ms", "1000");
map.put("session.timeout.ms", "30000");
map.put("key.deserializer", StringDeserializer.class);
map.put("value.deserializer", StringDeserializer.class);
map.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG,5);
// map.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG,1000 * 5);
// map.put("ack.mode", "manual_immediate");
// // Protocol type: SASL_PLAINTEXT or PLAINTEXT
// map.put("security.protocol","SASL_PLAINTEXT");
// // Kerberos service name
// map.put("sasl.kerberos.service.name","kafka");
// // Kerberos domain name
// map.put("kerberos.domain.name","hadoop.hadoop.com");
String[] split = topics.split(",");
List list = CollectionUtils.arrayToList(split);
map.put("topics", list);
return map;
}
}

View File

@ -1,32 +0,0 @@
# Tomcat
server:
port: 9214
# Spring
spring:
application:
# application name
name: stwzhj-consumer
profiles:
# active environment profile (filled in by Maven filtering)
active: @profiles.active@
--- # nacos configuration
spring:
cloud:
nacos:
# nacos server address
server-addr: @nacos.server@
username: @nacos.username@
password: @nacos.password@
discovery:
# discovery group
group: @nacos.discovery.group@
namespace: ${spring.profiles.active}
config:
# configuration group
group: @nacos.config.group@
namespace: ${spring.profiles.active}
config:
import:
- optional:nacos:application-common.yml

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- log storage path -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- console log output pattern -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- console appender -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- enable skywalking log collection -->
<include resource="logback-skylog.xml" />
<!-- system operation log: root logger routes everything to the console -->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -1,24 +0,0 @@
package org.dromara.data2es.config;
import org.dromara.data2es.handler.RedisExpireListener;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;

/**
 * Wires up the Redis pub/sub infrastructure used to react to key-expiration
 * events: a message listener container plus the expiration listener itself.
 */
@Configuration
public class RedisListenerConfig {

    /**
     * Container that dispatches Redis pub/sub messages to registered listeners.
     *
     * @param connectionFactory the application's Redis connection factory
     */
    @Bean
    RedisMessageListenerContainer listenerContainer(RedisConnectionFactory connectionFactory) {
        RedisMessageListenerContainer container = new RedisMessageListenerContainer();
        container.setConnectionFactory(connectionFactory);
        return container;
    }

    /**
     * Listener for {@code __keyevent@*__:expired} events, backed by the
     * container above.
     */
    @Bean
    KeyExpirationEventMessageListener redisKeyExpirationListener(RedisMessageListenerContainer listenerContainer) {
        return new RedisExpireListener(listenerContainer);
    }
}

View File

@ -1,70 +0,0 @@
package org.dromara.data2es.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.dromara.common.core.utils.RedisConstants;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.data2es.controller.DataToEsController;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.redis.connection.Message;
import org.springframework.data.redis.connection.MessageListener;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.Objects;
/**
 * <p>description: reacts to Redis key-expiration events for org_code keys and
 * marks the corresponding device offline in Elasticsearch.</p>
 *
 * @author chenle
 * @date 2021-11-08 16:40
 */
@Component
public class RedisExpireListener extends KeyExpirationEventMessageListener {

    @Autowired
    DataToEsController dataToEsController;

    Logger logger = LoggerFactory.getLogger(RedisExpireListener.class);

    /**
     * Creates new {@link MessageListener} for {@code __keyevent@*__:expired} messages.
     *
     * @param listenerContainer must not be {@literal null}.
     */
    public RedisExpireListener(RedisMessageListenerContainer listenerContainer) {
        super(listenerContainer);
    }

    /**
     * Handles one expired key. Keys are expected to look like
     * {@code <ORG_CODE_PRE><zzjgdm>:<deviceType>:<deviceCode>} (four
     * colon-separated parts); the matching online_users snapshot is loaded,
     * stamped offline and re-indexed via the data2es controller.
     */
    @Override
    public void onMessage(Message message, byte[] pattern) {
        String expireKey = message.toString();
        if (StringUtils.isNotEmpty(expireKey) &&
            expireKey.startsWith(RedisConstants.ORG_CODE_PRE)) {
            String[] split = expireKey.split(":");
            if (split.length < 4) {
                // Malformed key — previously caused ArrayIndexOutOfBoundsException.
                logger.warn("unexpected expired key format:{}", expireKey);
                return;
            }
            String zzjgdm = split[1];
            String deviceType = split[2];
            String deviceCode = split[3];
            if (StringUtils.isNotEmpty(zzjgdm)) {
                JSONObject object = RedisUtils.getBucket(RedisConstants.ONLINE_USERS + zzjgdm + ":"
                    + deviceType + ":" + deviceCode);
                if (Objects.isNull(object)) {
                    // No cached snapshot exists for this device; nothing to mark offline.
                    logger.warn("no online snapshot for expired key:{}", expireKey);
                    return;
                }
                EsGpsInfoVO2 gpsInfo = BeanUtil.toBean(object, EsGpsInfoVO2.class);
                gpsInfo.setGpsTime(new Date());
                gpsInfo.setOnline(0);
                dataToEsController.saveGpsInfo(gpsInfo);
            }
        }
        logger.info("redis key expired:key={}", expireKey);
    }
}

View File

@ -1,120 +0,0 @@
package org.dromara.data2es.handler;
/*
*
* es redis kafka
* */
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONPObject;
import com.alibaba.fastjson2.util.JSONObject1O;
import jodd.util.StringUtil;
import org.apache.commons.lang.StringUtils;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.data2es.domain.EsGpsInfo;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.dromara.data2es.service.IGpsService;
import org.dromara.data2es.util.ConfigConstants;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
/**
 * Async fan-out helpers for the GPS ingest pipeline: publishes records to
 * Kafka, batch-writes the online-user cache to Redis, and bulk-indexes into
 * Elasticsearch.
 */
@Configuration
public class RequestHandler {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    private Logger logger = LoggerFactory.getLogger(RequestHandler.class);

    /**
     * Publishes one GPS record to two Kafka topics:
     * the per-device-type stream {@code <prefix>.<deviceType>} and the
     * per-city stream {@code <prefix>.<infoSource>.<deviceType>} that
     * downstream city systems subscribe to.
     * A missing deviceType falls back to "99", a missing infoSource to "other".
     */
    @Async
    public void sendToKafka(EsGpsInfoVO2 esGpsInfoVO2) {
        if (Objects.isNull(esGpsInfoVO2)) {
            return;
        }
        String deviceType = esGpsInfoVO2.getDeviceType();
        if (StringUtil.isEmpty(deviceType)) {
            deviceType = "99";
        }
        String infoSource = esGpsInfoVO2.getInfoSource();
        if (StringUtils.isEmpty(infoSource)) {
            infoSource = "other";
        }
        // Serialize once; both sends publish the same payload.
        String payload = JSON.toJSONString(esGpsInfoVO2);
        kafkaTemplate.send(ConfigConstants.KAFKA_TOPIC_SEND_PRE + "." + deviceType, payload);
        kafkaTemplate.send(ConfigConstants.KAFKA_TOPIC_SEND_PRE + "." + infoSource + "." + deviceType, payload);
    }

    /**
     * Batch-inserts online-user entries into Redis with the given TTL.
     *
     * @param map  key/value pairs to store
     * @param time expiry applied to each entry
     */
    @Async
    public void redisOnlineUserBatch(Map<String, String> map, long time) {
        RedisUtils.batchInsert(map, time);
    }

    /** Deletes a batch of Redis keys asynchronously. */
    @Async
    public void redisDeleteBatch(List<String> deleteKeys) {
        RedisUtils.deleteObject(deleteKeys);
    }

    /**
     * Executes an Elasticsearch bulk request. Partial failures are logged at
     * ERROR level (previously they were logged unconditionally at INFO with a
     * misleading "b=" label), and IO failures keep their stack trace.
     */
    @Async
    public void esRealBulkSave(BulkRequest bulkRequest) {
        try {
            BulkResponse response = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
            if (response.hasFailures()) {
                logger.error("bulk insert failures: {}", response.buildFailureMessage());
            }
        } catch (IOException e) {
            logger.error("batchInsert error={}", e.getMessage(), e);
        }
    }
}

View File

@ -1,4 +0,0 @@
package org.dromara.data2es.mapper;
// NOTE(review): empty placeholder class with no behavior; the active device
// mapper appears to be org.dromara.system.mapper.TDeviceMapper — consider
// removing this one if it is unused.
public class TDeviceMapper {
}

View File

@ -1,13 +0,0 @@
package org.dromara.data2es.schedule;
/**
 * <p>description: empty placeholder — no scheduled logic is implemented here.</p>
 *
 * @author chenle
 * @date 2021-05-18 18:23
 */
public class RedisOnlineUserSchedule {
    // NOTE(review): class body is empty; consider removing if unused.
}

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file storage path -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- Console log output pattern (colorized: date, thread, level, logger, message) -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- Console appender -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- Enable SkyWalking log collection -->
<include resource="logback-skylog.xml" />
<!-- System operation log: root logger at INFO, console output only -->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -0,0 +1 @@
package org.dromara.data2gs.service;

View File

@ -1,221 +0,0 @@
package org.dromara.location.controller;
import cn.hutool.core.convert.Convert;
import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONObject;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import org.apache.dubbo.config.annotation.DubboReference;
import org.dromara.common.core.domain.R;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.system.api.RemoteDeviceService;
import org.dromara.system.api.domain.bo.RemoteDeviceBo;
import org.dromara.system.api.domain.vo.RemoteDeviceVo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import redis.clients.jedis.resps.GeoRadiusResponse;
import javax.annotation.Resource;
import java.util.*;
@Configuration
@RestController
public class LocationController {
@DubboReference
private RemoteDeviceService deviceService;
Logger logger = LoggerFactory.getLogger(LocationController.class);
/*
*
* */
@PostMapping("/getAllLocation")
public R getAllLocaltion(@RequestBody Map<String,Object> params){
String now = DateUtil.format(new Date(),"YYYY-MM-dd");
String keys = "online_users:";
String key = null; // 在不同条件下赋值 最后根据此key取值
if(CollectionUtils.isEmpty(params)){
key = keys +"*";
}
List<JSONObject> list = new ArrayList<>();
if (null != params.get("type")){
String type = params.get("type").toString();
key = keys + "*:[" +type+"]:*";
if (null != params.get("deptId")){
key = keys + params.get("deptId").toString() + "*:["+type+"]:*"; // key值为 online_users:2022-04-20:3401*:[01,02]:*
}
}
list = RedisUtils.searchAndGetKeysValues(key);
list.removeAll(Collections.singleton(null));
return R.ok(list);
}
@PostMapping("/getLocation")
public R getLocaltion(@RequestBody Map<String,Object> params){
String keys = "online_users:";
if(CollectionUtils.isEmpty(params)){
return R.fail("参数不能为空");
}
String zzjgdms = "";
List<Object> dlist = new ArrayList<>();
if (null != params.get("type")){ //类型不为空时 查询Redis数据
String type = params.get("type").toString();
// String[] types = Convert.toStrArray(type);
zzjgdms = params.get("zzjgdm").toString();
String[] zzjgdm = Convert.toStrArray(zzjgdms);
for (String s : zzjgdm) {
s = deptIdSub(s);
String key = keys + s + ":["+type+"]:*"; // key值为 online_users:3401xxxx:[01,02]:*
List<JSONObject> list = RedisUtils.searchAndGetKeysValues(key);
list.removeAll(Collections.singleton(null));
dlist.addAll(list);
}
}else {
zzjgdms = params.get("zzjgdm").toString();
String[] zzjgdm = Convert.toStrArray(zzjgdms);
for (String s : zzjgdm) {
s = deptIdSub(s);
String key = keys + s + ":*";
List<JSONObject> list = RedisUtils.searchAndGetKeysValues(key);
list.removeAll(Collections.singleton(null));
dlist.addAll(list);
}
}
JSONArray.toJSONString(dlist);
return R.ok(dlist);
}
/*
* ID
* */
@PostMapping("/getLocationByDeviceId")
public R getLocationByDeviceId(@RequestBody Map<String,Object> params){
if(CollectionUtils.isEmpty(params)){
return R.fail("参数不能为空");
}
String keys = "online_users:";
String key = "";
if (null != params.get("deviceType")){
key = keys + params.get("type").toString()+":*";
}
if (null != params.get("deviceCode")){
key = keys +"*:"+params.get("deviceCode").toString();
}
if (null != params.get("deviceCode") && null != params.get("deviceType")){
key = keys + ":*:" + params.get("deviceType").toString()+":" + params.get("deviceCode").toString();
}
List<RemoteDeviceVo> list = new ArrayList<>();
List<JSONObject> maps = RedisUtils.searchAndGetKeysValues(key);
if (maps.size()>0){
return R.ok(maps.get(0));
}
return R.ok(null);
}
/*
*
* */
@PostMapping("/getLocationByPoint")
public R getLocationByPoint(@RequestBody Map<String,Object> params){
String keys = "online_users:";
if(CollectionUtils.isEmpty(params)){
return R.fail("参数不能为空");
}
String lat = params.get("lat").toString();
String lng = params.get("lng").toString();
String dist = params.get("distance").toString();
/* List<GeoRadiusResponse> geoRadiusResponses = redisUtil.nearByXYReadonly(RedisConstants.ONLINE_USERS_GEO,
Double.parseDouble(lng), Double.parseDouble(lat), Double.parseDouble(dist));
List<Device> list = new ArrayList<>();
for (GeoRadiusResponse geoRadiusRespons : geoRadiusResponses) {
String memberByString = geoRadiusRespons.getMemberByString();
logger.info("member:"+memberByString);
String[] strs = memberByString.split("#");
logger.info("key值:"+keys+":"+strs[0]+":"+strs[1]+":"+strs[2]);
Object object = redisUtil.get(keys+":"+strs[0]+":"+strs[1]+":"+strs[2]);
if (null != object){
Device device = FastJSONUtil.parsePojo(object.toString(), Device.class);
//device = rebuildDevice(device);
list.add(device);
}
}*/
return R.ok();
}
/*+
*
* */
@PostMapping("/getObjByCondition")
public R getObjByCondition(@RequestBody Map<String,Object> params){
if(CollectionUtils.isEmpty(params)){
return R.fail("参数不能为空");
}
String zzjgdms = "";
String name = "";
if (null != params.get("deviceName")){
name = params.get("deviceName").toString();
}
if (null != params.get("zzjgdm")){
zzjgdms = params.get("zzjgdm").toString();
}
if (null != params.get("type")){
String type = params.get("type").toString();
RemoteDeviceBo device = new RemoteDeviceBo();
device.setValid(1);
device.setDeviceType(type);
device.setPoliceName(name);
/*if("01".equals(type) || "02".equals(type) || "06".equals(type) ||"07".equals(type) ||"08".equals(type) || "09".equals(type)){
device.setCarNum(name);
}else{
device.setPoliceNo(name);
}*/
List<RemoteDeviceVo> devices = new ArrayList<>();
if (!"".equals(zzjgdms)){ //前端选择机构时
String[] zzjgdm = zzjgdms.split(",");
for (String s : zzjgdm) {
device.setZzjgdm(s);
List<RemoteDeviceVo> tDeviceList = deviceService.deviceList(device);
devices.addAll(tDeviceList);
}
}
if ("".equals(zzjgdms)){ //前端查询全部时
List<RemoteDeviceVo> tDeviceList = deviceService.deviceList(device);
devices.addAll(tDeviceList);
}
return R.ok(devices);
}
return R.ok();
}
public String deptIdSub(String zzjgdm){
if (zzjgdm.endsWith("0000000000")){ // 省厅 即全部
zzjgdm = zzjgdm.substring(0,2)+"*";
}else if(zzjgdm.endsWith("00000000")){ //地市
zzjgdm = zzjgdm.substring(0,4)+"*";
}else if(zzjgdm.endsWith("000000")){ // 分局
zzjgdm = zzjgdm.substring(0,6)+"*";
}else{ // 支队
zzjgdm = zzjgdm.substring(0,8)+"*";
}
return zzjgdm;
}
}

View File

@ -1,155 +0,0 @@
package org.dromara.location.service.impl;
import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import lombok.RequiredArgsConstructor;
import org.dromara.location.service.ISearchService;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@RequiredArgsConstructor
@Service
public class SearchServiceImpl implements ISearchService {

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    /**
     * Scroll-searches GPS history for one device across the daily
     * {@code gpsinfoYYYYMMDD} indices between startTime and endTime
     * (format "yyyy-MM-dd HH:mm:ss"), sorted by gpsTime ascending.
     * Best-effort: on error the rows collected so far are returned.
     *
     * @return list of raw _source maps for each matching hit
     */
    @Override
    public List<Map> searchCar(String deviceCode, String startTime, String endTime, String deviceType) throws RuntimeException {
        List<Map> sourceList = new ArrayList<Map>();
        List<String> esIndexByTime = findEsIndexByTime(startTime, endTime);
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        DateTime startDate = DateUtil.parse(startTime, "yyyy-MM-dd HH:mm:ss");
        DateTime endDate = DateUtil.parse(endTime, "yyyy-MM-dd HH:mm:ss");
        BoolQueryBuilder boolBuilder = QueryBuilders.boolQuery();
        // Both device identifiers must match, within the time window.
        boolBuilder.must(QueryBuilders.termQuery("deviceCode", deviceCode));
        boolBuilder.must(QueryBuilders.termQuery("deviceType", deviceType));
        boolBuilder.must(QueryBuilders.rangeQuery("gpsTime")
            .gte(format.format(startDate))
            .lte(format.format(endDate)));
        Scroll scroll = new Scroll(TimeValue.timeValueMinutes(1L));
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        sourceBuilder.query(boolBuilder).sort("gpsTime", SortOrder.ASC).size(5000);
        SearchRequest rq = new SearchRequest();
        rq.scroll(scroll);
        rq.source(sourceBuilder);
        rq.indices(esIndexByTime.toArray(new String[0]));
        // Ignore errors caused by daily indices that do not exist.
        rq.indicesOptions(IndicesOptions.lenientExpandOpen());
        String scrollId = null;
        try {
            SearchResponse rp = restHighLevelClient.search(rq, RequestOptions.DEFAULT);
            SearchHit[] searchHits = rp.getHits().getHits();
            for (SearchHit searchHit : searchHits) {
                sourceList.add(searchHit.getSourceAsMap());
            }
            scrollId = rp.getScrollId();
            // Keep pulling pages until the scroll is exhausted. (Previously a
            // swallowed IOException here left rp stale and could loop on the
            // same page; now any failure aborts and returns what we have.)
            while (searchHits != null && searchHits.length > 0) {
                SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
                scrollRequest.scroll(scroll);
                rp = restHighLevelClient.scroll(scrollRequest, RequestOptions.DEFAULT);
                scrollId = rp.getScrollId();
                searchHits = rp.getHits().getHits();
                if (searchHits != null) {
                    for (SearchHit searchHit : searchHits) {
                        sourceList.add(searchHit.getSourceAsMap());
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort contract: log and return the partial result.
            e.printStackTrace();
        } finally {
            // Always release the server-side scroll context. (Previously the
            // cleanup dereferenced a possibly-null ClearScrollResponse.)
            clearScroll(scrollId);
        }
        return sourceList;
    }

    /** Releases a server-side scroll context; failures are non-fatal. */
    private void clearScroll(String scrollId) {
        if (scrollId == null) {
            return;
        }
        ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
        clearScrollRequest.addScrollId(scrollId);
        try {
            restHighLevelClient.clearScroll(clearScrollRequest, RequestOptions.DEFAULT);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Builds the daily index names gpsinfoYYYYMMDD covering
     * [startTime, endTime]. NOTE(review): the result is capped at 7 days,
     * silently truncating longer ranges — confirm this limit is intentional.
     */
    private List<String> findEsIndexByTime(String startTime, String endTime) {
        startTime = startTime.substring(0, 10).replaceAll("-", ""); // yyyyMMdd
        endTime = endTime.substring(0, 10).replaceAll("-", "");
        Date start = DateUtil.parse(startTime, "yyyyMMdd");
        Date end = DateUtil.parse(endTime, "yyyyMMdd");
        List<DateTime> dateTimes = DateUtil.rangeToList(start, end, DateField.DAY_OF_YEAR);
        List<String> list = new ArrayList<>();
        int len = Math.min(dateTimes.size(), 7);
        for (int i = 0; i < len; i++) {
            String day = dateTimes.get(i).toString().substring(0, 10).replaceAll("-", "");
            list.add("gpsinfo" + day);
        }
        return list;
    }

    /**
     * Utility: wraps a BiConsumer so it can be used as a Consumer that also
     * receives a running element index.
     */
    public static <T> Consumer<T> consumerWithIndex(BiConsumer<T, Integer> consumer) {
        class Obj {
            int i;
        }
        Obj obj = new Obj();
        return t -> {
            int index = obj.i++;
            consumer.accept(t, index);
        };
    }
}

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file storage path -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- Console log output pattern (colorized: date, thread, level, logger, message) -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- Console appender -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- Enable SkyWalking log collection -->
<include resource="logback-skylog.xml" />
<!-- System operation log: root logger at INFO, console output only -->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -1,148 +0,0 @@
package org.dromara.system.controller.system;
import jdk.dynalink.linker.LinkerServices;
import lombok.RequiredArgsConstructor;
import org.dromara.common.core.domain.R;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.common.web.core.BaseController;
import org.dromara.system.domain.DeviceRedis;
import org.dromara.system.domain.bo.TDeviceBo;
import org.dromara.system.domain.vo.DeviceStaticsVo;
import org.dromara.system.domain.vo.SysDeptVo;
import org.dromara.system.domain.vo.SysDictDataVo;
import org.dromara.system.service.*;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Dashboard statistics endpoints: device totals and online counts per
 * organisation and per device type.
 */
@RequiredArgsConstructor
@RestController
public class IndexStaticsController extends BaseController {

    private final ISysDeptService deptService;
    private final ITDeviceService deviceService;
    private final ISysDictTypeService dictTypeService;
    private final IDeviceRedisService redisService;

    /**
     * Top panel: device counts for one organisation, taken from the Redis
     * snapshot table.
     */
    @GetMapping("/topPan")
    public R topPan(String zzjgdm) {
        DeviceRedis redis = new DeviceRedis();
        redis.setZzjgdm(zzjgdm);
        return R.ok(redisService.countByCondition(redis));
    }

    /**
     * Online bar chart: per-city device totals joined with live online counts
     * from Redis. Cities without any devices get zero rows so the chart stays
     * complete.
     */
    @PostMapping("/onLineBar")
    public R onLineBar() {
        List<SysDeptVo> deptVoList = deptService.getDsList();
        List<DeviceStaticsVo> staticsVoList = deviceService.countByDs();
        List<DeviceStaticsVo> list = new ArrayList<>(); // merged chart rows
        for (SysDeptVo deptVo : deptVoList) {
            boolean matched = false; // did countByDs produce a row for this dept?
            for (DeviceStaticsVo staticsVo : staticsVoList) {
                // countByDs returns the 4-char city prefix; pad to a full dept id.
                String deptId = staticsVo.getZzjgdm() + "00000000";
                if (deptId.equals(deptVo.getDeptId())) {
                    staticsVo.setZzjgdm(deptId);
                    staticsVo.setZzjgmc(deptVo.getDeptName().replaceAll("公安局", ""));
                    int onlineCo = RedisUtils.searchKeys("org_code:" + staticsVo.getZzjgdm() + "*");
                    staticsVo.setOnlineCo(onlineCo);
                    list.add(staticsVo);
                    matched = true;
                    break;
                }
            }
            if (!matched) {
                DeviceStaticsVo staticsVo = new DeviceStaticsVo();
                staticsVo.setZzjgdm(deptVo.getDeptId());
                staticsVo.setZzjgmc(deptVo.getDeptName().replaceAll("公安局", ""));
                staticsVo.setCo(0);
                staticsVo.setOnlineCo(0);
                list.add(staticsVo);
            }
        }
        return R.ok(list);
    }

    /**
     * Total / online count for one city code; all cities when code is blank.
     */
    @GetMapping("/dsOnlineCount")
    public R dsOnlineCount(String code) {
        TDeviceBo bo = new TDeviceBo();
        bo.setInfoSource(code);
        Long co = deviceService.countByCondition(bo);
        int onlineCo;
        if (null == code || "".equals(code)) {
            onlineCo = RedisUtils.searchKeys("org_code:*");
        } else {
            onlineCo = RedisUtils.searchKeys("org_code:" + code + "*");
        }
        Map<String, Object> map = new HashMap<>();
        map.put("co", co);
        map.put("onlineCo", onlineCo);
        return R.ok(map);
    }

    /**
     * Device totals per device type (dict zd_device_type), optionally limited
     * to one city code.
     */
    @GetMapping("/deviceCount")
    public R deviceCount(String code) {
        List<SysDictDataVo> dataList = dictTypeService.selectDictDataByType("zd_device_type");
        List<Map<String, Object>> list = new ArrayList<>();
        for (SysDictDataVo data : dataList) {
            TDeviceBo deviceInfo = new TDeviceBo();
            deviceInfo.setDeviceType(data.getDictValue());
            deviceInfo.setInfoSource(code);
            Long co = deviceService.countByCondition(deviceInfo);
            Map<String, Object> map = new HashMap<>();
            map.put("name", data.getDictLabel());
            map.put("all", co);
            list.add(map);
        }
        return R.ok(list);
    }

    /** All device-type dict entries. */
    @PostMapping("/getDeviceType")
    public R getDeviceType() {
        return R.ok(dictTypeService.selectDictDataByType("zd_device_type"));
    }

    /** Device totals of one type per city. */
    @GetMapping("/countByType")
    public R countByType(String type) {
        List<Map<String, Object>> list = new ArrayList<>();
        for (SysDeptVo deptVo : deptService.getDsList()) {
            TDeviceBo deviceInfo = new TDeviceBo();
            deviceInfo.setDeviceType(type);
            // countByCondition filters by the 4-char city prefix.
            deviceInfo.setInfoSource(deptVo.getDeptId().substring(0, 4));
            Long co = deviceService.countByCondition(deviceInfo);
            Map<String, Object> map = new HashMap<>();
            map.put("name", deptVo.getDeptName().replaceAll("公安局", ""));
            map.put("all", co);
            list.add(map);
        }
        return R.ok(list);
    }
}

View File

@ -1,20 +0,0 @@
package org.dromara.system.mapper;
import org.dromara.system.domain.TDevice;
import org.dromara.system.domain.vo.DeviceStaticsVo;
import org.dromara.system.domain.vo.TDeviceVo;
import org.dromara.common.mybatis.core.mapper.BaseMapperPlus;
import java.util.List;
/**
 * Mapper for the t_device table (device registry).
 *
 * @author luuy
 * @date 2024-11-19
 */
public interface TDeviceMapper extends BaseMapperPlus<TDevice, TDeviceVo> {
// Device totals grouped by the 4-character city prefix of zzjgdm
// (backed by the countByDs select in TDeviceMapper.xml).
List<DeviceStaticsVo> countByDs();
}

View File

@ -1,29 +0,0 @@
package org.dromara.system.schedule;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.json.JSONObject;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.system.domain.DeviceRedis;
import org.dromara.system.service.IDeviceRedisService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import java.util.List;
/**
 * Every 30 seconds, snapshots all "online_users:*" entries from Redis into
 * the device-redis table via {@code IDeviceRedisService#insertBatch}.
 */
@Configuration
public class DeviceRedisSchedule {

    @Autowired
    IDeviceRedisService redisService;

    /**
     * Scheduled sync: scan the online-user cache and persist it as
     * {@link DeviceRedis} rows.
     */
    @Scheduled(cron = "0/30 * * * * ?")
    public void handleDeviceRedis() {
        final List<JSONObject> snapshots = RedisUtils.searchAndGetKeysValues("online_users:*");
        final List<DeviceRedis> rows = BeanUtil.copyToList(snapshots, DeviceRedis.class);
        redisService.insertBatch(rows);
    }
}

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- Log file storage path -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- Console log output pattern (colorized: date, thread, level, logger, message) -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- Console appender -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- Enable SkyWalking log collection -->
<include resource="logback-skylog.xml" />
<!-- System operation log: root logger at INFO, console output only -->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -1,143 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.dromara.system.mapper.SysDeptMapper">
<resultMap type="org.dromara.system.domain.vo.SysDeptVo" id="SysDeptResult">
</resultMap>
<!-- Generic department query driven by a MyBatis-Plus Wrapper (ew). -->
<select id="selectDeptList" resultMap="SysDeptResult">
select
<if test="ew.getSqlSelect != null">
${ew.getSqlSelect}
</if>
<if test="ew.getSqlSelect == null">
*
</if>
from sys_dept ${ew.getCustomSqlSegment}
</select>
<!-- Counts non-deleted departments with the given id. -->
<select id="countDeptById" resultType="Long">
select count(*) from sys_dept where del_flag = '0' and dept_id = #{deptId}
</select>
<!-- Department ids bound to a role; when deptCheckStrictly is set,
parent nodes of checked departments are excluded. -->
<select id="selectDeptListByRoleId" resultType="Long">
select d.dept_id
from sys_dept d
left join sys_role_dept rd on d.dept_id = rd.dept_id
where rd.role_id = #{roleId}
<if test="deptCheckStrictly">
and d.dept_id not in (select d.parent_id from sys_dept d inner join sys_role_dept rd on d.dept_id = rd.dept_id and rd.role_id = #{roleId})
</if>
order by d.parent_id, d.order_num
</select>
<!-- Per-organisation device totals and online counts, optionally filtered
by deviceType. Unions four administrative levels: province, city
bureau, sub-bureau, detachment/organ — each joining t_device (totals,
valid = 1) with t_device_redis (online = '1') on a zzjgdm prefix of
widening length (2/4/6/8 characters). -->
<select id="deviceStatics" parameterType="String" resultMap="SysDeptResult">
select * from (
-- 安徽省
SELECT '0' dept_id,'安徽省' dept_name, '-1' parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 全省 各设备总数
(SELECT substr(zzjgdm, 1, 2) dept_id,count(*) co from (SELECT * FROM t_device
<where>valid = 1
<if test="
deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 2) HAVING substr(zzjgdm,1,2) is not null ) td
on substr(d.dept_id,1,2) = td.dept_id
LEFT JOIN
-- 全省 各设备在线数
(SELECT substr(zzjgdm, 1, 2) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 2) ) rd
on substr(d.dept_id,1,2) = rd.dept_id
WHERE d.dept_id = '340000000000'
union
-- 市局机构
SELECT d.dept_id,short_name dept_name,parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 市局 各设备总数
(SELECT substr(zzjgdm, 1, 4) dept_id,count(*) co from (SELECT * FROM t_device
<where>
valid = 1
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 4) HAVING substr(zzjgdm,1,4) is not null ) td
on substr(d.dept_id,1,4) = td.dept_id
LEFT JOIN
-- 市局 各设备在线数
(SELECT substr(zzjgdm, 1, 4) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 4) ) rd
on substr(d.dept_id,1,4) = rd.dept_id
WHERE d.parent_id = '0'
union
--分局
SELECT d.dept_id,short_name dept_name,parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 分局 各设备总数
(SELECT substr(zzjgdm, 1, 6) dept_id,count(*) co from (SELECT * FROM t_device
<where>
valid = 1
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 6) HAVING substr(zzjgdm,1,6) is not null ) td
on substr(d.dept_id,1,6) = td.dept_id
LEFT JOIN
-- 分局 各设备在线数
(SELECT substr(zzjgdm, 1, 6) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 6) ) rd
on substr(d.dept_id,1,6) = rd.dept_id
WHERE d.type = 1
union
--支队 机关
SELECT d.dept_id,short_name dept_name,parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 支队 机关 各设备总数
(SELECT substr(zzjgdm, 1, 8) dept_id,count(*) co from (SELECT * FROM t_device
<where>
valid = 1
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 8) HAVING substr(zzjgdm,1,8) is not null ) td
on substr(d.dept_id,1,8) = td.dept_id
LEFT JOIN
-- 支队 机关 各设备在线数
(SELECT substr(zzjgdm, 1, 8) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 8) ) rd
on substr(d.dept_id,1,8) = rd.dept_id
WHERE (length(d.ancestors) - length(translate(d.ancestors,',',''))+1) = 3 and d.type = 2
) a
order by a.dept_id asc
</select>
</mapper>

View File

@ -1,15 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.dromara.system.mapper.TDeviceMapper">
<resultMap id="deviceStaticsResult" type="org.dromara.system.domain.vo.DeviceStaticsVo">
</resultMap>
<!-- Device totals grouped by the 4-character city prefix of zzjgdm;
rows with a null prefix are dropped by the HAVING clause. -->
<select id="countByDs" resultMap="deviceStaticsResult">
SELECT SUBSTR(zzjgdm,1,4) zzjgdm,count(*) co from t_device GROUP BY SUBSTR(zzjgdm,1,4) HAVING SUBSTR(zzjgdm,1,4) is not null
</select>
</mapper>

View File

@ -9,7 +9,7 @@
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-consumer</artifactId>
<artifactId>wzhj-consumer</artifactId>
<dependencies>

View File

@ -0,0 +1,22 @@
package org.dromara.kafka.consumer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
/**
 * <p>description: Spring Boot entry point for the Kafka consumer service;
 * enables async execution and servlet-component scanning.</p>
 *
 * @author chenle
 * @date 2021-09-06 11:12
 */
@SpringBootApplication
@EnableAsync
@ServletComponentScan
public class KafkaConsumerApplication {

    /** Boots the Spring application context. */
    public static void main(String[] args) {
        new SpringApplication(KafkaConsumerApplication.class).run(args);
    }
}

View File

@ -27,7 +27,7 @@ public class AsyncConfig {
taskExecutor.setMaxPoolSize(20);
taskExecutor.setQueueCapacity(200);
taskExecutor.setKeepAliveSeconds(60);
taskExecutor.setThreadNamePrefix("hfapp--kafkaConsumer--");
taskExecutor.setThreadNamePrefix("wzhj--kafkaConsumer--");
taskExecutor.setWaitForTasksToCompleteOnShutdown(true);
taskExecutor.setAwaitTerminationSeconds(60);
taskExecutor.setRejectedExecutionHandler(new ThreadPoolExecutor.DiscardOldestPolicy());

View File

@ -15,7 +15,6 @@ import java.util.Date;
@Data
public class EsGpsInfo implements Serializable {
private static final long serialVersionUID = 7455495841680488351L;
/**
* 21id
* kafka21id

View File

@ -0,0 +1,206 @@
package org.dromara.kafka.consumer.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import cn.hutool.core.convert.ConvertException;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.ruansee.response.ApiResponse;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfoVO;
import org.dromara.kafka.consumer.util.KafkaAsyncUtil;
import org.dromara.system.api.domain.bo.RemoteDeviceBo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;

/**
 * Kafka consumer that ingests raw GPS/location messages and forwards them to
 * the data2es service, either directly (async Dubbo call from {@link #consumer})
 * or through the static in-memory queues, which are drained in batches by
 * DataInsertBatchHandler.
 *
 * @author chenle
 * @date 2021-09-06 16:44
 */
@Slf4j
@Configuration
public class ConsumerWorker {

    /** GPS records queued for batch persistence (drained by DataInsertBatchHandler). */
    public static LinkedBlockingDeque<RemoteGpsInfo> linkedBlockingDeque = new LinkedBlockingDeque<>(5000);

    /** Device base-data records queued for batch persistence. */
    public static LinkedBlockingDeque<RemoteDeviceBo> basedataDeque = new LinkedBlockingDeque<>(5000);

    @Autowired
    private KafkaAsyncUtil asyncUtils;

    /**
     * Consumes one raw location message, validates mandatory fields
     * (gpsTime, deviceType), flags the device online when the fix is less than
     * 30 minutes old, and hands the record to the async save utility.
     *
     * @param record the raw Kafka record; its value is a JSON string
     */
    @KafkaListener(topics = "#{'${spring.kafka.consumer.topics}'.split(',')}", properties = {
        "auto.offset.reset:latest"})
    public void consumer(ConsumerRecord<String, Object> record) {
        Object value = record.value();
        EsGpsInfo esGpsInfo = JSONUtil.toBean((String) value, EsGpsInfo.class);
        Date gpsTime = esGpsInfo.getGpsTime();
        log.info("value={}", value);
        if (Objects.isNull(gpsTime)) {
            log.error("gpsTime == null,deviceCode={}", esGpsInfo.getDeviceCode());
            return;
        }
        String deviceType = esGpsInfo.getDeviceType();
        if (StringUtils.isBlank(deviceType)) {
            log.error("deviceType is null, deviceCode={}", esGpsInfo.getDeviceCode());
            return;
        }
        // A fix younger than 30 minutes marks the device as online.
        if (DateUtil.between(gpsTime, new Date(), DateUnit.MINUTE) < 30) {
            esGpsInfo.setOnline(1);
        }
        log.info("esGpsInfo={}", esGpsInfo);
        RemoteGpsInfo gpsInfo = BeanUtil.toBean(esGpsInfo, RemoteGpsInfo.class);
        try {
            asyncUtils.saveData(gpsInfo);
        } catch (Exception e) {
            // Fixed: was e.printStackTrace(); log with device context and stack trace.
            log.error("saveData failed, deviceCode={}", esGpsInfo.getDeviceCode(), e);
        }
        // boolean offer = linkedBlockingDeque.offer(esGpsInfo);
    }

    /**
     * Parses a raw "jysb_dwxx" location message, validates deviceCode and
     * coordinates, and queues the record for batch persistence.
     * Currently not invoked anywhere in this class; kept for the queue-based
     * ingestion path.
     *
     * @param value JSON string payload from Kafka
     */
    private void luanrequest(Object value) {
        RemoteGpsInfo esGpsInfo;
        JSONObject jsonObject;
        try {
            jsonObject = JSONUtil.parseObj(((String) value));
        } catch (ConvertException e) {
            log.info("jsonObject=null:error={}", e.getMessage());
            return;
        }
        try {
            esGpsInfo = JSONUtil.toBean(jsonObject, RemoteGpsInfo.class);
        } catch (ConvertException e) {
            log.info("EsGpsInfo=null:error={}", e.getMessage());
            return;
        }
        if (Objects.isNull(esGpsInfo)) {
            log.info("esGpsInfo=null no error");
            return;
        }
        String deviceCode = esGpsInfo.getDeviceCode();
        if (StringUtils.isEmpty(deviceCode) || deviceCode.length() > 100) {
            log.info("deviceCode:{} is null or is too long ", deviceCode);
            return;
        }
        String latitude = esGpsInfo.getLat();
        if (StringUtils.isEmpty(latitude) || "0.0".equals(latitude)) {
            log.info("latitude:{} is null or is zero ", latitude);
            return;
        }
        String longitude = esGpsInfo.getLng();
        if (StringUtils.isEmpty(longitude) || "0.0".equals(longitude)) {
            log.info("longitude:{} is null or is zero ", longitude);
            return;
        }
        try {
            // gpsTime arrives as an epoch-millis string.
            esGpsInfo.setGpsTime(new Date(Long.valueOf(jsonObject.getStr("gpsTime"))));
        } catch (Exception e) {
            log.error("error_msg={}", e.getMessage());
        }
        log.info("esGpsInfo={}", esGpsInfo);
        boolean offer = linkedBlockingDeque.offer(esGpsInfo);
        // Fixed: removed the former null-check on R.ok(...) — it only logged and
        // fell through to response.getCode(), so it never prevented the NPE it
        // guarded against. R.ok is presumed non-null; confirm in common-core.
        R response = R.ok(offer);
        log.info("code={},msg={}", response.getCode(), response.getMsg());
        if (200 == response.getCode()) {
            log.info("topic=jysb_dwxx,data2es={},gpsTime={}", "success", esGpsInfo.getGpsTime());
        } else {
            log.info("topic=jysb_dwxx,data2es={}", response.getMsg());
        }
    }

    /**
     * Parses a raw "jysb_sbxx" device base-data message, validates mandatory
     * fields (deviceCode, infoSource), converts epoch-millis timestamps to
     * "yyyy-MM-dd HH:mm:ss" strings, and queues the record.
     * Currently not invoked anywhere in this class.
     *
     * @param value JSON string payload from Kafka
     */
    private void baseDataRequest(Object value) {
        RemoteDeviceBo deviceBo;
        JSONObject jsonObject;
        try {
            jsonObject = JSONUtil.parseObj(((String) value));
        } catch (ConvertException e) {
            log.info("jsonObject=null:error={}", e.getMessage());
            return;
        }
        try {
            deviceBo = JSONUtil.toBean(jsonObject, RemoteDeviceBo.class);
        } catch (ConvertException e) {
            log.info("Device=null:error={}", e.getMessage());
            return;
        }
        if (Objects.isNull(deviceBo)) {
            log.info("deviceBo=null no error");
            return;
        }
        if (StringUtils.isEmpty(deviceBo.getDeviceCode())) {
            log.info("deviceCode is null");
            return;
        }
        if (StringUtils.isEmpty(deviceBo.getInfoSource())) {
            log.info("infoSource is null");
            return;
        }
        if (!StringUtils.isEmpty(deviceBo.getCreateTime())) {
            try {
                Date createTime = new Date(Long.valueOf(jsonObject.getStr("createTime")));
                deviceBo.setCreateTime(DateUtil.format(createTime, "yyyy-MM-dd HH:mm:ss"));
            } catch (Exception e) {
                log.error("error_msg={}", e.getMessage());
            }
        }
        if (!StringUtils.isEmpty(deviceBo.getUpdateTime())) {
            try {
                Date updateTime = new Date(Long.valueOf(jsonObject.getStr("updateTime")));
                deviceBo.setUpdateTime(DateUtil.format(updateTime, "yyyy-MM-dd HH:mm:ss"));
            } catch (Exception e) {
                log.error("error_msg={}", e.getMessage());
            }
        }
        log.info("deviceBo={}", deviceBo);
        boolean offer = basedataDeque.offer(deviceBo);
        // Same fix as luanrequest: the ineffective null-check on R.ok(...) removed.
        R response = R.ok(offer);
        log.info("code={},msg={}", response.getCode(), response.getMsg());
        if (200 == response.getCode()) {
            log.info("topic=jysb_sbxx,data2es={},deviceCode={}", "success", deviceBo.getDeviceCode());
        } else {
            log.info("topic=jysb_sbxx,data2es={}", response.getMsg());
        }
    }
}

View File

@ -7,6 +7,8 @@ import org.apache.dubbo.config.annotation.DubboReference;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.dromara.system.api.RemoteDeviceService;
import org.dromara.system.api.domain.bo.RemoteDeviceBo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Configuration;
@ -31,21 +33,28 @@ public class DataInsertBatchHandler implements CommandLineRunner {
@DubboReference
private RemoteDataToEsService gpsService;
@Override
public void run(String... args) throws Exception {
ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor();
LinkedBlockingDeque linkedBlockingDeque = ConsumerWorker.linkedBlockingDeque;
LinkedBlockingDeque linkedBlockingDeque = ConsumerWorker.linkedBlockingDeque; //定位信息队列
// LinkedBlockingDeque baseDataDeque = ConsumerWorker.basedataDeque; //基础信息队列
singleThreadExecutor.execute(new Runnable() {
@Override
public void run() {
while (true) {
try {
List<RemoteGpsInfo> list = new ArrayList<>();
// List<RemoteDeviceBo> bases = new ArrayList<>();
Queues.drain(linkedBlockingDeque, list, 200, 5, TimeUnit.SECONDS);
// Queues.drain(baseDataDeque, bases, 100, 5, TimeUnit.SECONDS);
log.info("batch size={}", list.size());
if(CollectionUtil.isNotEmpty(list)) {
gpsService.saveDataBatch(list);
}
/*if(CollectionUtil.isNotEmpty(bases)) {
deviceService.batchSaveDevice(bases);
}*/
} catch (Exception e) {
log.error("缓存队列批量消费异常:{}", e.getMessage());
}

View File

@ -0,0 +1,37 @@
package org.dromara.kafka.consumer.util;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import org.apache.dubbo.config.annotation.DubboReference;
import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.stereotype.Component;
import java.text.ParseException;
import java.util.Date;

/**
 * Asynchronous bridge from the Kafka consumer to the remote data2es service.
 */
@EnableAsync
@Component
public class KafkaAsyncUtil {

    @DubboReference
    RemoteDataToEsService dataEsService;

    /**
     * Forwards a single GPS record to the data2es service via Dubbo on the
     * "taskExecutor" pool so Kafka consumption is not blocked.
     *
     * @param esGpsInfo the GPS record to persist
     * @throws Exception propagated from the remote Dubbo call
     */
    @Async(value = "taskExecutor")
    public void saveData(RemoteGpsInfo esGpsInfo) throws Exception {
        // NOTE(review): the returned R is not inspected here; failures surface
        // only through the async executor's error handling. Consider checking
        // the response code and logging failed device ids.
        dataEsService.saveData(esGpsInfo);
    }
}

View File

@ -0,0 +1,34 @@
# Tomcat
server:
port: 9114
# Spring
spring:
application:
# 应用名称
name: wzhj-consumer
profiles:
# 环境配置
active: @profiles.active@
--- # nacos 配置
spring:
cloud:
nacos:
# nacos 服务地址
server-addr: @nacos.server@
username: @nacos.username@
password: @nacos.password@
discovery:
# 注册组
group: @nacos.discovery.group@
namespace: ${spring.profiles.active}
config:
# 配置组
group: @nacos.config.group@
namespace: ${spring.profiles.active}
config:
import:
- optional:nacos:application-common.yml
- optional:nacos:datasource.yml
- optional:nacos:${spring.application.name}.yml

View File

@ -0,0 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- log output directory and base file name -->
<property name="log.path" value="logs" />
<property name="log.file" value="consumer" />
<property name="MAX_FILE_SIZE" value="10MB" />
<property name="MAX_HISTORY" value="30" />
<!-- log output pattern -->
<!-- INFO-level rolling file appender (exact-level filter: INFO only) -->
<appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- ERROR-level rolling file appender (threshold filter: ERROR and above) -->
<appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<!-- root logger: file appenders only, console output disabled -->
<root level="INFO">
<appender-ref ref="FILE_INFO" />
<appender-ref ref="FILE_ERROR" />
</root>
</configuration>

View File

@ -9,10 +9,10 @@
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-data2es</artifactId>
<artifactId>wzhj-data2es</artifactId>
<description>
stwzhj-data2es位置汇聚数据处理
wzhj-data2es位置汇聚数据处理
</description>
<dependencies>

View File

@ -68,7 +68,7 @@ public class ElasticsearchConfig {
RestClientBuilder builder = RestClient.builder(httpHost);
// 设置用户名、密码
CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
// credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password));
credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(userName, password));
// 连接延时配置
builder.setRequestConfigCallback(requestConfigBuilder -> {
requestConfigBuilder.setConnectTimeout(connectTimeOut);

View File

@ -1,5 +1,6 @@
package org.dromara.data2es.config;
import org.apache.kafka.clients.admin.AdminClientConfig;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.dromara.data2es.producer.NewProducer;
@ -11,6 +12,8 @@ import org.springframework.kafka.core.KafkaAdmin;
import org.springframework.stereotype.Component;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
/**
@ -27,8 +30,8 @@ public class KafkaConfig {
// private String kafkaServers = "140.168.2.31:21007,140.168.2.32:21007,140.168.2.33:21007";
// private String kafkaServers = "53.208.61.105:6667,53.208.61.106:6667,53.208.61.107:6667";//六安GA网
// private String kafkaServers = "34.72.62.93:9092";//六安视频网
// private String kafkaServers = "127.0.0.1:9092";//本地
private String kafkaServers = "53.207.8.71:9092,53.193.3.15:9092,53.160.0.237:9092,53.104.56.58:9092,53.128.22.61:9092";//省厅 马伟提供
private String kafkaServers = "53.238.79.33:9092,53.238.79.34:9092,53.238.79.35:9092";//本地
// private String kafkaServers = "53.238.79.4:9092,53.238.79.5:9092,53.238.79.6:9092";//省厅 马伟提供
private String groupId = "ruansiProducer";
@ -129,7 +132,7 @@ public class KafkaConfig {
//设置自定义的分区策略类默认不传key是粘性分区尽量往一个分区中发消息。如果key不为null则默认是按照key的hashcode与 partition的取余来决定哪个partition
//props.put("partitioner.class","com.kafka.myparitioner.CidPartitioner");
props.put(securityProtocol, "SASL_PLAINTEXT");
props.put("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"zkxc\" password=\"zkxcKafka07252023\";");
props.put("sasl.jaas.config", "org.apache.kafka.common.security.scram.ScramLoginModule required username=\"rsoft\" password=\"rsoft-2026\";");
props.put("sasl.mechanism", "SCRAM-SHA-256");
KafkaProducer<String, String> producer = new KafkaProducer<>(props);
// KafkaProducer producer = new KafkaProducer<>(props);
@ -139,7 +142,21 @@ public class KafkaConfig {
@Bean
public KafkaAdmin admin(KafkaProperties properties){
KafkaAdmin admin = new KafkaAdmin(properties.buildAdminProperties());
Map<String, Object> configs = new HashMap<>();
// 1. 集群地址
configs.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG,
kafkaServers);
// 2. SASL认证和命令行的client.properties完全一致
configs.put("security.protocol", "SASL_PLAINTEXT");
configs.put("sasl.mechanism", "SCRAM-SHA-256");
configs.put("sasl.jaas.config",
"org.apache.kafka.common.security.scram.ScramLoginModule required " +
"username=\"rsoft\" password=\"rsoft-2026\";");
// 3. 解决超时核心配置
configs.put(AdminClientConfig.REQUEST_TIMEOUT_MS_CONFIG, 60000); // 60s超时
configs.put(AdminClientConfig.RETRY_BACKOFF_MS_CONFIG, 2000); // 重试间隔
configs.put(AdminClientConfig.CONNECTIONS_MAX_IDLE_MS_CONFIG, 300000);
KafkaAdmin admin = new KafkaAdmin(configs);
admin.setFatalIfBrokerNotAvailable(true);
return admin;
}

View File

@ -0,0 +1,40 @@
package org.dromara.data2es.config;
import org.dromara.data2es.handler.RedisExpireListener;
import org.dromara.data2es.handler.RedisExpireRecoveryHandler;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
import org.springframework.data.redis.listener.PatternTopic;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;
@Configuration
public class RedisListenerConfig {
@Bean
RedisMessageListenerContainer listenerContainer(
RedisConnectionFactory connectionFactory,
RedisExpireRecoveryHandler recoveryHandler) {
RedisMessageListenerContainer container = new RedisMessageListenerContainer();
container.setConnectionFactory(connectionFactory);
// 添加连接监听器用于故障转移恢复
container.addMessageListener(recoveryHandler, new PatternTopic("__keyspace@*__:expired"));
return container;
}
@Bean
KeyExpirationEventMessageListener redisKeyExpirationListener(
RedisMessageListenerContainer listenerContainer,
RedisExpireRecoveryHandler recoveryHandler) {
return new RedisExpireListener(listenerContainer, recoveryHandler);
}
@Bean
RedisExpireRecoveryHandler redisExpireRecoveryHandler() {
return new RedisExpireRecoveryHandler();
}
}

View File

@ -0,0 +1,21 @@
package org.dromara.data2es.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Configuration;
@Data
@Configuration
@RefreshScope
@ConfigurationProperties(prefix = "ruansi")
public class RuansiConfiguration {
private boolean sendToThirdEnabled;
private String startUpdateTime;
private String dsPreurl;
}

View File

@ -0,0 +1,23 @@
package org.dromara.data2es.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.SchedulingConfigurer;
import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
import org.springframework.scheduling.config.ScheduledTaskRegistrar;
@Configuration
public class SchedulerConfig implements SchedulingConfigurer {
@Override
public void configureTasks(ScheduledTaskRegistrar taskRegistrar) {
ThreadPoolTaskScheduler taskScheduler = new ThreadPoolTaskScheduler();
taskScheduler.setPoolSize(5); // 设置线程池大小
taskScheduler.setThreadNamePrefix("scheduled-task-");
taskScheduler.setErrorHandler(throwable -> {
// 统一处理未捕获异常
System.err.println("定时任务异常: " + throwable.getMessage());
});
taskScheduler.initialize();
taskRegistrar.setTaskScheduler(taskScheduler);
}
}

View File

@ -32,11 +32,6 @@ public class DataToEsController extends BaseController {
public R saveGpsInfo(@RequestBody EsGpsInfoVO2 esGpsInfo ){
R apiResponse = new R<>();
try {
if(StringUtils.isBlank(esGpsInfo.getInfoSource())){
apiResponse.setCode(500);
apiResponse.setMsg("infoSource为空");
return apiResponse;
}
boolean offer = linkedBlockingDeque.offer(esGpsInfo);
apiResponse = R.ok(offer);
} catch (Exception e) {
@ -87,16 +82,15 @@ public class DataToEsController extends BaseController {
EsGpsInfoVO2 esGpsInfo = new EsGpsInfoVO2();
HashMap<String, Object> map = new HashMap<>();
esGpsInfo.setDeviceCode("34153800001320000101");
esGpsInfo.setDeviceType("05");
esGpsInfo.setInfoSource("3401");
esGpsInfo.setDeviceCode("34180201001310000071");
esGpsInfo.setDeviceType("5");
esGpsInfo.setGpsTime(new Date());
esGpsInfo.setLat("31.1" + (a + i));
esGpsInfo.setLng("117.2" + (b + i));
esGpsInfo.setZzjgdm("340100000000");
esGpsInfo.setZzjgmc("合肥市公安局");
esGpsInfo.setCarNum("霍邱看守所01");
esGpsInfo.setLat("30.68" + (a + i));
esGpsInfo.setLng("118.40" + (b + i));
esGpsInfo.setZzjgdm("341802400000");
esGpsInfo.setZzjgmc("宣州分局济川派出所");
esGpsInfo.setPoliceName("057486_郭超");
saveGpsInfo(esGpsInfo);
//gpsService.saveData(map);

View File

@ -0,0 +1,32 @@
package org.dromara.data2es.domain;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import java.util.Date;
/**
* <p>description: </p>
*
* @author chenle
* @date 2023-03-22 9:51
*/
@Data
@TableName("td_ds_qinwu")
public class DSQinwuEntity {
private int id;
private String category;
private String linkId;//外部系统id
private String imei;
private boolean majorPersonTerminal;
private boolean majorVehicleTerminal;
private String name;
private String orgId;
private String orgName;
private String type;
//时间戳
private Date updateTime;
private String policeNumber;
private String policeName;
}

View File

@ -28,7 +28,6 @@ public class EsGpsInfo implements Serializable {
@JsonFormat(pattern="yyyy-MM-dd HH:mm:ss",timezone="GMT+8")
private Date gpsTime;
//3401 ,3402 地市代码
private String infoSource;
private Integer online;

View File

@ -18,4 +18,5 @@ public class EsGpsInfoVO2 extends EsGpsInfo {
private String policeName;
private String phoneNum;
private String carNum;
private String deviceName;
}

View File

@ -0,0 +1,176 @@
package org.dromara.data2es.domain.vo;
import lombok.Data;
import java.util.List;
/**
* <p>description: </p>
*
* @author chenle
* @date 2023-03-20 11:53
*/
@Data
public class DSResponse {
/**
* msg : Success
* ret : ok
* dataStore : [{"category":"SCZD","id":"SCZD","imei":"SCZD","majorPersonTerminal":false,"majorVehicleTerminal":false,"name":"SCZD","orgId":"SCZD","orgName":"SCZD","type":"SCZD"}]
*/
private String msg;
private String ret;
private List<DataStoreBean> dataStore;
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public String getRet() {
return ret;
}
public void setRet(String ret) {
this.ret = ret;
}
public List<DataStoreBean> getDataStore() {
return dataStore;
}
public void setDataStore(List<DataStoreBean> dataStore) {
this.dataStore = dataStore;
}
public static class DataStoreBean {
/**
* category : SCZD
* id : SCZD
* imei : SCZD
* majorPersonTerminal : false
* majorVehicleTerminal : false
* name : SCZD
* orgId : SCZD
* orgName : SCZD
* type : SCZD
*/
private String category;
private String id;
private String imei;
private boolean majorPersonTerminal;
private boolean majorVehicleTerminal;
private String name;
private String personName;
private String orgId;
private String orgName;
private String type;
private String policeNumber;
//时间戳
private String updateTime;
public String getPoliceNumber() {
return policeNumber;
}
public void setPoliceNumber(String policeNumber) {
this.policeNumber = policeNumber;
}
public String getCategory() {
return category;
}
public void setCategory(String category) {
this.category = category;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getImei() {
return imei;
}
public void setImei(String imei) {
this.imei = imei;
}
public boolean isMajorPersonTerminal() {
return majorPersonTerminal;
}
public void setMajorPersonTerminal(boolean majorPersonTerminal) {
this.majorPersonTerminal = majorPersonTerminal;
}
public boolean isMajorVehicleTerminal() {
return majorVehicleTerminal;
}
public void setMajorVehicleTerminal(boolean majorVehicleTerminal) {
this.majorVehicleTerminal = majorVehicleTerminal;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getPersonName() {
return personName;
}
public void setPersonName(String personName) {
this.personName = personName;
}
public String getOrgId() {
return orgId;
}
public void setOrgId(String orgId) {
this.orgId = orgId;
}
public String getOrgName() {
return orgName;
}
public void setOrgName(String orgName) {
this.orgName = orgName;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getUpdateTime() {
return updateTime;
}
public void setUpdateTime(String updateTime) {
this.updateTime = updateTime;
}
}
}

View File

@ -0,0 +1,28 @@
package org.dromara.data2es.domain.vo;
import lombok.Data;
import org.dromara.data2es.domain.EsGpsInfo;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-10-11 15:14
*/
@Data
public class EsGpsInfoVO3 extends EsGpsInfo {
private static final long serialVersionUID = -4252583194984423318L;
private String zzjgdm;
private String zzjgmc;
private String policeNo;
private String policeName;
private String phoneNum;
private String carNum;
//勤务IdDS公司自己系统内有deviceCode 和 勤务ID 的关联可以直接使用这个id
//其他公司没有这个关联关系所以还需要上面的policeNo和policeName等信息用于展示
private String linkId;
private String typeOfDevice;
}

View File

@ -1,5 +1,6 @@
package org.dromara.data2es.dubbo;
import cn.hutool.core.bean.BeanUtil;
import lombok.RequiredArgsConstructor;
import org.apache.dubbo.config.annotation.DubboService;
import org.dromara.common.core.domain.R;
@ -21,6 +22,21 @@ public class RemoteDataToEsServiceImpl implements RemoteDataToEsService {
@Override
public R saveDataBatch(List<RemoteGpsInfo> gpsInfoList) {
return gpsService.saveDataBatch(MapstructUtils.convert(gpsInfoList, EsGpsInfoVO2.class));
return gpsService.saveDataBatch(BeanUtil.copyToList(gpsInfoList, EsGpsInfoVO2.class));
}
@Override
public R saveData(RemoteGpsInfo gpsInfo) throws Exception {
return gpsService.saveData(BeanUtil.toBean(gpsInfo, EsGpsInfoVO2.class));
}
@Override
public R updateOnlineStatusBatch(List<RemoteGpsInfo> gpsInfoList) {
return gpsService.updateOnlineStatusBatch(BeanUtil.copyToList(gpsInfoList, EsGpsInfoVO2.class));
}
@Override
public R updateOnlineStatus(RemoteGpsInfo gpsInfo) {
return null;
}
}

View File

@ -7,6 +7,7 @@ import org.dromara.data2es.controller.DataToEsController;
import org.dromara.data2es.domain.EsGpsInfo;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.dromara.data2es.service.IGpsService;
import org.dromara.data2es.service.StoreDataService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Configuration;
@ -31,6 +32,9 @@ public class DataInsertBatchHandler implements CommandLineRunner {
@Autowired
IGpsService gpsService;
@Autowired
StoreDataService storeDataService;
@Override
public void run(String... args) throws Exception {
ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor();
@ -45,6 +49,7 @@ public class DataInsertBatchHandler implements CommandLineRunner {
log.info("batch size={}", list.size());
if(CollectionUtil.isNotEmpty(list)) {
gpsService.saveDataBatch(list);
storeDataService.saveDataByPersonTypeBatch(list);
}
} catch (Exception e) {
log.error("缓存队列批量消费异常:{}", e.getMessage());

View File

@ -0,0 +1,164 @@
package org.dromara.data2es.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.json.JSONObject;
import lombok.extern.slf4j.Slf4j;
import org.dromara.common.core.utils.RedisConstants;
import org.dromara.common.core.utils.StringUtils;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.data2es.controller.DataToEsController;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.redisson.Redisson;
import org.redisson.api.RLock;
import org.redisson.api.RTopic;
import org.redisson.api.RedissonClient;
import org.redisson.connection.ConnectionListener;
import org.redisson.connection.ConnectionManager;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.connection.Message;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;
import org.springframework.stereotype.Component;
import javax.annotation.PostConstruct;
import java.net.InetSocketAddress;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
@Component
@Slf4j
public class RedisExpireListener extends KeyExpirationEventMessageListener {
private final RedisExpireRecoveryHandler recoveryHandler;
@Autowired
DataToEsController dataToEsController;
private volatile boolean active = true;
public RedisExpireListener(
RedisMessageListenerContainer listenerContainer,
RedisExpireRecoveryHandler recoveryHandler) {
super(listenerContainer);
this.recoveryHandler = recoveryHandler;
recoveryHandler.registerListener(this);
}
@Override
public void init() {
try {
super.init();
log.info("Redis过期监听器初始化成功");
} catch (Exception e) {
log.error("监听器初始化失败", e);
}
}
public void reconnect() {
if (!active) return;
try {
log.info("尝试重新注册过期事件监听器...");
// 停止当前监听
super.destroy();
// 重新初始化
super.init();
log.info("过期事件监听器重新注册成功");
} catch (Exception e) {
log.error("重新注册监听器失败", e);
}
}
@Override
public void onMessage(Message message, byte[] pattern) {
if (!active) return;
String expireKey = message.toString();
log.info("过期的Key={}", expireKey);
if (StringUtils.isNotEmpty(expireKey) &&
expireKey.startsWith(RedisConstants.ONLINE_USERS_TEN)) {
log.info("在线定位过期的Key={}", expireKey);
handleExpiredEvent(expireKey);
}
}
private void handleExpiredEvent(String expiredKey) {
RedissonClient redisson = RedisUtils.getClient();
RLock lock = redisson.getLock("LOCK:" + expiredKey);
try {
if (lock.tryLock(0, 30, TimeUnit.SECONDS)) {
// 实际业务逻辑
String[] split = expiredKey.split(":");
String deviceType = split[2];
String deviceCode = split[3];
if ("5".equals(deviceType) || "9".equals(deviceType) ||
"8".equals(deviceType) || "7".equals(deviceType)) {
return;
}
log.info("处理过期Key: {}", expiredKey);
JSONObject object = RedisUtils.getBucket(RedisConstants.ONLINE_USERS + deviceType + ":" + deviceCode);
if (Objects.isNull(object)) {
log.info("redis key={},Object=nulldeviceType={},deviceCode={}",
expiredKey, deviceType, deviceCode);
return;
}
EsGpsInfoVO2 gpsInfo = BeanUtil.toBean(object, EsGpsInfoVO2.class);
gpsInfo.setOnline(0);
dataToEsController.saveGpsInfo(gpsInfo);
log.info("处理完成: key={}", expiredKey);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
log.error("处理过期事件被中断", e);
} catch (Exception e) {
log.error("处理过期事件异常", e);
} finally {
if (lock.isHeldByCurrentThread()) {
lock.unlock();
}
}
}
@Override
public void destroy() {
active = false;
try {
super.destroy();
} catch (Exception e) {
throw new RuntimeException(e);
}
log.info("Redis过期监听器已停止");
}
// 添加连接状态监听使用Redisson事件总线
@PostConstruct
public void addSentinelConnectionListener() {
try {
RedissonClient redisson = RedisUtils.getClient();
// 订阅Redisson连接事件
RTopic connectionEvents = redisson.getTopic("__redisson_connection_event");
connectionEvents.addListener(String.class, (channel, msg) -> {
if ("CONNECTED".equals(msg)) {
log.info("Redis连接已建立: {}", msg);
// 标记需要恢复监听
recoveryHandler.markReconnected();
} else if ("DISCONNECTED".equals(msg)) {
log.warn("Redis连接断开: {}", msg);
}
});
log.info("已注册Redisson连接事件监听器");
} catch (Exception e) {
log.warn("无法添加Redisson连接事件监听器", e);
}
}
}

View File

@ -0,0 +1,36 @@
package org.dromara.data2es.handler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.connection.Message;
import org.springframework.data.redis.connection.MessageListener;
import org.springframework.stereotype.Component;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
@Component
public class RedisExpireRecoveryHandler implements MessageListener {
private static final Logger log = LoggerFactory.getLogger(RedisExpireRecoveryHandler.class);
private final AtomicBoolean reconnected = new AtomicBoolean(false);
private final AtomicReference<RedisExpireListener> listenerRef = new AtomicReference<>();
public void registerListener(RedisExpireListener listener) {
this.listenerRef.set(listener);
}
@Override
public void onMessage(Message message, byte[] pattern) {
// 检测到任何事件时,检查是否需要恢复监听
if (reconnected.compareAndSet(true, false) && listenerRef.get() != null) {
log.warn("检测到Redis事件尝试重新注册主监听器...");
listenerRef.get().reconnect();
}
}
public void markReconnected() {
reconnected.set(true);
}
}

View File

@ -0,0 +1,214 @@
package org.dromara.data2es.handler;
/*
*
* es redis kafka
* */
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONPObject;
import com.alibaba.fastjson.serializer.SerializerFeature;
import com.alibaba.fastjson2.util.JSONObject1O;
import jodd.util.StringUtil;
import org.apache.commons.lang.StringUtils;
import org.dromara.common.core.utils.RedisConstants;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.data2es.domain.EsGpsInfo;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.dromara.data2es.domain.vo.EsGpsInfoVO3;
import org.dromara.data2es.service.IGpsService;
import org.dromara.data2es.util.ConfigConstants;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
@Configuration
public class RequestHandler {

    /** Shared, thread-safe logger for this handler. */
    private static final Logger logger = LoggerFactory.getLogger(RequestHandler.class);

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    /**
     * Asynchronously forwards one GPS record to the per-device-type Kafka topic
     * ({@code KAFKA_TOPIC_SEND_PRE + "." + deviceType}), serialized with null
     * fields preserved so downstream consumers see a stable schema.
     *
     * @param esGpsInfoVO2 enriched GPS record; silently ignored when {@code null}
     */
    @Async
    public void sendToKafka(EsGpsInfoVO2 esGpsInfoVO2) {
        if (Objects.isNull(esGpsInfoVO2)) {
            return;
        }
        String deviceType = esGpsInfoVO2.getDeviceType();
        if (StringUtil.isEmpty(deviceType)) {
            // Fallback device type for records that do not carry one.
            deviceType = "6";
        }
        kafkaTemplate.send(ConfigConstants.KAFKA_TOPIC_SEND_PRE + "." + deviceType,
                JSON.toJSONString(esGpsInfoVO2,
                        SerializerFeature.WriteMapNullValue,
                        SerializerFeature.WriteNullStringAsEmpty));
    }

    /**
     * Asynchronously batch-inserts online-device entries into Redis with a
     * shared expiry.
     *
     * @param map  key → serialized device payload
     * @param time expiry applied to every key (unit defined by RedisUtils.batchInsert)
     */
    @Async
    public void redisOnlineUserBatch(Map<String, String> map, long time) {
        RedisUtils.batchInsert(map, time);
    }

    /**
     * Synchronously batch-puts entries into Redis without an expiry.
     *
     * @param map key → serialized device payload
     */
    public void batchPut(Map<String, String> map) {
        RedisUtils.batchPut(map);
    }

    /**
     * Synchronously batch-puts entries into Redis with a per-call expiry in seconds.
     *
     * @param map  key → serialized device payload
     * @param time expiry in seconds
     */
    public void batchPutWithExpire(Map<String, String> map, long time) {
        RedisUtils.batchPutWithExpire(map, time, TimeUnit.SECONDS);
    }

    /**
     * Asynchronously deletes a batch of Redis keys.
     *
     * @param deleteKeys keys to remove
     */
    @Async
    public void redisDeleteBatch(List<String> deleteKeys) {
        RedisUtils.deleteObject(deleteKeys);
    }

    /**
     * Asynchronously executes an Elasticsearch bulk request.
     * Failures are logged (per-item failure details included) instead of thrown,
     * because this runs on an async executor with no caller to propagate to.
     *
     * @param bulkRequest pre-built bulk request
     */
    @Async
    public void esRealBulkSave(BulkRequest bulkRequest) {
        try {
            BulkResponse response = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
            if (response.hasFailures()) {
                // Only build/log the (potentially large) failure message when something failed.
                logger.error("es bulk save failures: {}", response.buildFailureMessage());
            }
        } catch (IOException e) {
            // Keep the full stack trace in the log instead of printStackTrace().
            logger.error("batchInsert error={}", e.getMessage(), e);
        }
    }

    /**
     * Asynchronously records a device's latest position in Redis:
     * <ul>
     *   <li>a never-expiring "last known position" key for UI lookups,</li>
     *   <li>a GEO entry for spatial (nearby) queries,</li>
     *   <li>a short-lived key whose expiry (10 minutes minus fix staleness) is
     *       watched by RedisExpireListener to flip the device offline.</li>
     * </ul>
     *
     * @param esGpsInfoVo2 enriched GPS record; entries with no gpsTime are skipped
     */
    @Async
    public void redisOnlineUser(EsGpsInfoVO2 esGpsInfoVo2) {
        if (null == esGpsInfoVo2) {
            logger.error("redis存入对象为空");
            return;
        }
        Date gpsTime = esGpsInfoVo2.getGpsTime();
        if (Objects.isNull(gpsTime)) {
            // Without a fix timestamp we cannot compute the online window; skip.
            return;
        }
        String jsonValue = JSON.toJSONString(esGpsInfoVo2,
                SerializerFeature.WriteMapNullValue, SerializerFeature.WriteNullStringAsEmpty);
        // Remaining online window = 10 minutes minus how stale the fix already is
        // (e.g. a fix 8 minutes old only keeps the device "online" 2 more minutes).
        long betweenS = DateUtil.between(gpsTime, new Date(), DateUnit.SECOND);
        long onlineTime = 60 * 10 - betweenS;
        String deviceCode = esGpsInfoVo2.getDeviceCode();
        String deviceType = esGpsInfoVo2.getDeviceType();
        // Last known position never expires so the front end can always query it.
        RedisUtils.set(RedisConstants.ONLINE_USERS + deviceType + ":" + deviceCode,
                jsonValue, RedisConstants.REDIS_NEVER_EXPIRE);
        // Normal-flow trace, not an error (was mistakenly logged at ERROR level).
        logger.info("redis存入,deviceCode={}", deviceCode);
        // Register the position for geo/nearby queries.
        RedisUtils.geoAdd(Double.valueOf(esGpsInfoVo2.getLng()),
                Double.valueOf(esGpsInfoVo2.getLat()), deviceCode + "#" + deviceType);
        if (onlineTime > 0) {
            // Expiring key: its expiry event is consumed by RedisExpireListener
            // to mark the device offline.
            RedisUtils.set(RedisConstants.ONLINE_USERS_TEN + deviceType + ":" + deviceCode,
                    jsonValue, onlineTime);
        }
    }

    /**
     * Asynchronously records a person-bound device's latest position in Redis
     * (never-expiring last-position key, fixed 10-minute online key, GEO entry).
     *
     * @param esGpsInfo GPS record; must actually be an {@link EsGpsInfoVO3} —
     *                  other payloads (or null) are logged and dropped instead of
     *                  throwing ClassCastException as the old unguarded cast did
     */
    @Async
    public void redisOnlineUserByPerson(EsGpsInfo esGpsInfo) {
        // Guard the downcast: instanceof is also false for null.
        if (!(esGpsInfo instanceof EsGpsInfoVO3)) {
            logger.error("redisOnlineUserByPerson: unexpected payload type, record dropped");
            return;
        }
        EsGpsInfoVO3 esGpsInfoVO3 = (EsGpsInfoVO3) esGpsInfo;
        Date gpsTime = esGpsInfoVO3.getGpsTime();
        if (Objects.isNull(gpsTime)) {
            return;
        }
        String jsonValue = JSON.toJSONString(esGpsInfoVO3,
                SerializerFeature.WriteMapNullValue, SerializerFeature.WriteNullStringAsEmpty);
        // Last known position never expires so the front end can always query it.
        RedisUtils.set(RedisConstants.ONLINE_USERS + esGpsInfoVO3.getDeviceType()
                + ":" + esGpsInfoVO3.getDeviceCode(), jsonValue, RedisConstants.REDIS_NEVER_EXPIRE);
        // 10-minute key: expiry is watched to re-publish/mark the device offline.
        RedisUtils.set(RedisConstants.ONLINE_USERS_TEN + esGpsInfoVO3.getDeviceType()
                + ":" + esGpsInfoVO3.getDeviceCode(), jsonValue, 60 * 10);
        RedisUtils.geoAdd(Double.valueOf(esGpsInfoVO3.getLng()),
                Double.valueOf(esGpsInfoVO3.getLat()),
                esGpsInfoVO3.getDeviceCode() + "#" + esGpsInfoVO3.getDeviceType());
    }
}

View File

@ -0,0 +1,14 @@
package org.dromara.data2es.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.dromara.data2es.domain.DSQinwuEntity;
import org.springframework.stereotype.Repository;
/**
 * MyBatis-Plus mapper for {@code DSQinwuEntity} (DS duty-terminal records).
 * <p>
 * Inherits the standard CRUD operations from {@code BaseMapper}; no custom
 * SQL methods are declared here.
 */
@Repository
public interface DSQinwuMapper extends BaseMapper<DSQinwuEntity> {
}

View File

@ -0,0 +1,158 @@
package org.dromara.data2es.schedule;
import cn.hutool.core.date.DateUtil;
import cn.hutool.http.HttpUtil;
import cn.hutool.json.JSONUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.dromara.data2es.config.RuansiConfiguration;
import org.dromara.data2es.domain.DSQinwuEntity;
import org.dromara.data2es.domain.vo.DSResponse;
import org.dromara.data2es.service.DSQinwuService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.util.CollectionUtils;
import java.util.*;
/**
* <p>description: </p>
* DS
* @author chenle
* @date 2023-03-21 17:37
*/
@Configuration
@Slf4j
@RequiredArgsConstructor
public class BaseDataSchedule {

    // High-water mark of the last successful sync (formatted date-time string).
    // Lazily seeded from the newest row in the local table on first run.
    // NOTE(review): not volatile — safe only because @Scheduled runs this task
    // single-threaded by default; confirm if a multi-thread scheduler is configured.
    private String lastUpdateTime;

    @Autowired
    DSQinwuService dsQinwuService;

    private final RuansiConfiguration ruansiConfiguration;

    /**
     * Incremental sync of DS duty-terminal base data, every 30 seconds:
     * fetch records updated since {@link #lastUpdateTime}, upsert them locally,
     * and advance the high-water mark only when every row succeeded.
     */
    @Scheduled(cron = "0/30 * * * * ?")
    public void updateDsQw() {
        if (StringUtils.isBlank(lastUpdateTime)) {
            DSQinwuEntity qinwu = dsQinwuService.lastOne();
            if (qinwu == null || qinwu.getUpdateTime() == null) {
                // Empty local table on first run: the old code NPE'd here and the
                // job failed forever. Seed from epoch so the full set is pulled.
                lastUpdateTime = DateUtil.formatDateTime(DateUtil.date(0L));
            } else {
                lastUpdateTime = DateUtil.formatDateTime(qinwu.getUpdateTime());
            }
        }
        String suffixUrl = "/v1/terminal/updateTime-terminal";
        DSResponse dsResponse = requestList(lastUpdateTime, suffixUrl);
        if (dsResponse == null) {
            // Endpoint unreachable or response not parsable; retry next tick.
            log.warn("DS terminal endpoint returned no parsable response");
            return;
        }
        List<DSResponse.DataStoreBean> dataStores = dsResponse.getDataStore();
        if (CollectionUtils.isEmpty(dataStores)) {
            log.info("没有可更新的设备");
            return;
        }
        log.info("updateTime={},deviceSize={}", lastUpdateTime, dataStores.size());
        List<DSQinwuEntity> newDeviceList = generateEntityList(dataStores);
        if (CollectionUtils.isEmpty(newDeviceList)) {
            log.info("未查询到设备newDeviceList = null");
            return;
        }
        int count = dsQinwuService.saveOrUpdate(newDeviceList);
        log.info("更新或插入的count={}", count);
        // Advance the high-water mark only on full success; a partial failure
        // keeps lastUpdateTime unchanged so the failed rows are retried.
        if (count == newDeviceList.size()) {
            DSQinwuEntity qinwu = dsQinwuService.lastOne();
            if (qinwu != null && qinwu.getUpdateTime() != null) {
                Date updateTime = qinwu.getUpdateTime();
                lastUpdateTime = DateUtil.formatDateTime(updateTime);
                log.info("timestamp={},lastUpdateTime={}", updateTime, lastUpdateTime);
            }
        }
    }

    /**
     * Maps DS response beans to local entities, skipping rows that lack a
     * device code, name, or type, or whose update timestamp cannot be parsed.
     *
     * @param dataStores DS terminal records from the remote endpoint
     * @return entities ready for upsert (never null, possibly empty)
     */
    private List<DSQinwuEntity> generateEntityList(List<DSResponse.DataStoreBean> dataStores) {
        List<DSQinwuEntity> newDeviceList = new ArrayList<>(dataStores.size());
        for (DSResponse.DataStoreBean dataStoreBean : dataStores) {
            String deviceCode = dataStoreBean.getImei();
            String policeName = dataStoreBean.getName();
            // Device category, e.g. PDT, ZFJLY.
            String type = dataStoreBean.getType();
            if (StringUtils.isBlank(deviceCode) || StringUtils.isBlank(policeName) || StringUtils.isBlank(type)) {
                log.info("deviceCode、policeName、type有一个为空deviceCode={}", deviceCode);
                continue;
            }
            DSQinwuEntity entity = new DSQinwuEntity();
            entity.setCategory(dataStoreBean.getCategory());
            entity.setLinkId(dataStoreBean.getId());
            entity.setImei(dataStoreBean.getImei());
            entity.setMajorPersonTerminal(dataStoreBean.isMajorPersonTerminal());
            entity.setMajorVehicleTerminal(dataStoreBean.isMajorVehicleTerminal());
            entity.setOrgId(dataStoreBean.getOrgId());
            entity.setOrgName(dataStoreBean.getOrgName());
            entity.setName(dataStoreBean.getName());
            entity.setType(dataStoreBean.getType());
            entity.setPoliceNumber(dataStoreBean.getPoliceNumber());
            entity.setPoliceName(dataStoreBean.getPersonName());
            try {
                // Remote updateTime is an epoch-millis string.
                entity.setUpdateTime(DateUtil.date(Long.valueOf(dataStoreBean.getUpdateTime())));
            } catch (Exception e) {
                log.info("时间转换错误,msg={}", e.getMessage());
                continue;
            }
            newDeviceList.add(entity);
        }
        return newDeviceList;
    }

    /**
     * Debug-only generator of fake entities (currently unreferenced; kept for
     * local testing). The DS input is intentionally ignored.
     * <p>
     * Fixed: the old code built {@code new Random(100)} inside the loop, so
     * every IMEI suffix was identical; a single Random instance is used now.
     *
     * @param dataStores ignored
     * @return 99 synthetic entities
     */
    private List<DSQinwuEntity> generateEntityList2(List<DSResponse.DataStoreBean> dataStores) {
        List<DSQinwuEntity> newDeviceList = new ArrayList<>();
        Random random = new Random();
        for (int i = 1; i < 100; i++) {
            DSQinwuEntity entity = new DSQinwuEntity();
            entity.setCategory("123");
            entity.setLinkId("asdasdsasd");
            entity.setImei("12343435345" + random.nextInt());
            entity.setMajorPersonTerminal(false);
            entity.setMajorVehicleTerminal(false);
            entity.setOrgId("341100000000");
            entity.setOrgName("滁州市公安局");
            entity.setName("张三");
            entity.setType("PDT");
            try {
                entity.setUpdateTime(DateUtil.date(System.currentTimeMillis()));
            } catch (Exception e) {
                log.info("时间转换错误,msg={}", e.getMessage());
                continue;
            }
            newDeviceList.add(entity);
        }
        return newDeviceList;
    }

    /**
     * GETs the DS endpoint with an {@code updateTime} query parameter and
     * parses the JSON body into a {@link DSResponse}.
     *
     * @param updateTime formatted date-time lower bound for the incremental pull
     * @param suffixUrl  endpoint path appended to the configured base URL
     * @return parsed response, or whatever JSONUtil yields on malformed input
     */
    private DSResponse requestList(String updateTime, String suffixUrl) {
        Map<String, Object> map = new HashMap<>();
        map.put("updateTime", updateTime);
        String content = HttpUtil.get(ruansiConfiguration.getDsPreurl() + suffixUrl, map);
        return JSONUtil.toBean(content, DSResponse.class);
    }
}

Some files were not shown because too many files have changed in this diff Show More