Compare commits

...

30 Commits

Author SHA1 Message Date
luyya 6586808a50 添加高精度类型字段 2026-04-02 09:01:25 +08:00
luyya 53cc200641 融合通信 2026-03-26 09:11:24 +08:00
luyya 12c4aa5d3d 融合通信 2026-03-25 14:04:26 +08:00
luyya 3456c73183 融合通信电子围栏 2026-03-23 15:07:35 +08:00
luyya d9396f33d1 融合通信对接合肥数据 2026-03-19 11:35:54 +08:00
luyya 20c066890e 融合通信 2026-03-18 15:07:09 +08:00
luyya 145c276c3d 融合通信 2026-03-18 09:14:46 +08:00
luyya 5ab0418957 融合通信 2026-03-16 15:42:46 +08:00
luyya 43652655de 融合通信 2026-03-16 15:19:16 +08:00
luyya 8b41028448 合肥位置汇聚 2026-03-03 18:37:46 +08:00
luyya d667538721 亳州位置汇聚 2026-02-27 09:16:42 +08:00
luyya 623c35da0a 亳州位置汇聚 2026-01-28 09:32:20 +08:00
luyya 3a814fc911 亳州位置汇聚修改 2026-01-04 14:43:04 +08:00
luyya fd026067bb 亳州位置汇聚公车UDP时间转换问题 2025-11-24 17:13:17 +08:00
luyya 00498de68e 亳州位置汇聚对接DS警务通定位并添加定时下线任务 2025-11-21 17:54:48 +08:00
luyya 2e1676aa36 亳州修改 2025-10-16 11:08:22 +08:00
luyya 4022dfa7b0 亳州对接警综机构 2025-09-19 17:01:06 +08:00
luyya 3c85d5950d 亳州位置汇聚首页统计修改 2025-07-11 14:48:20 +08:00
luyya 638c54684f 亳州位置汇聚改动 2025-07-10 18:03:58 +08:00
luyya ac0080fe7f 亳州位置汇聚 2025-07-05 15:14:40 +08:00
luyya 83fac17fe1 亳州位置汇聚 2025-06-30 16:05:33 +08:00
luyya 79146e77c4 宿州记录仪基础数据定时认为修改 2025-06-30 09:18:03 +08:00
luyya c4316ecf1c 宿州consumer改名 2025-06-27 10:49:33 +08:00
luyya 688cb8383c 处理宿州历史轨迹查询不到数据问题 2025-06-12 09:14:19 +08:00
luyya b887742212 宿州修改 2025-06-09 09:12:16 +08:00
luyya 0d1218f655 宿州改动 2025-06-05 17:39:46 +08:00
luyya 0ff37e767e 添加websocket功能 2025-05-26 15:43:30 +08:00
luyya a2115294cb 宿州新版位置汇聚 2025-04-27 09:32:57 +08:00
luyya 86666b8a04 宣城新版位置汇聚 2025-03-25 11:57:14 +08:00
luyya 692c2a31bd 宣城新版位置汇聚 2025-03-03 11:00:15 +08:00
487 changed files with 17088 additions and 2557 deletions

View File

@ -28,7 +28,7 @@ datasource:
spring:
datasource:
type: com.zaxxer.hikari.HikariDataSource
type: com.alibaba.druid.pool.DruidDataSource
# 动态数据源文档 https://www.kancloud.cn/tracy5546/dynamic-datasource/content
dynamic:
# 性能分析插件(有性能损耗 不建议生产环境使用)
@ -37,18 +37,35 @@ spring:
seata: ${seata.enabled}
# 严格模式 匹配不到数据源则报错
strict: true
hikari:
# 最大连接池数量
maxPoolSize: 20
# 最小空闲线程数量
# Druid数据源配置
druid:
# 初始化连接数
initialSize: 5
# 最小空闲连接数
minIdle: 10
# 配置获取连接等待超时的时间
connectionTimeout: 30000
# 校验超时时间
validationTimeout: 5000
# 空闲连接存活最大时间默认10分钟
idleTimeout: 600000
# 此属性控制池中连接的最长生命周期值0表示无限生命周期默认30分钟
maxLifetime: 1800000
# 多久检查一次连接的活性
keepaliveTime: 30000
# 最大连接数
maxActive: 20
# 获取连接等待超时的时间
maxWait: 60000
# 配置间隔多久才进行一次检测,检测需要关闭的空闲连接,单位是毫秒
timeBetweenEvictionRunsMillis: 60000
# 配置一个连接在池中最小生存的时间,单位是毫秒
minEvictableIdleTimeMillis: 300000
# 配置一个连接在池中最大生存的时间,单位是毫秒
maxEvictableIdleTimeMillis: 900000
# 配置检测连接是否有效
validationQuery: SELECT 1
# 申请连接时执行validationQuery检测连接是否有效
testWhileIdle: true
# 申请连接时执行validationQuery检测连接是否有效
testOnBorrow: false
# 归还连接时执行validationQuery检测连接是否有效
testOnReturn: false
# 是否缓存preparedStatement也就是PSCache
poolPreparedStatements: true
# 配置监控统计拦截的filters
filters: stat,wall,slf4j
# 通过connectProperties属性来打开mergeSql功能慢SQL记录
connectionProperties: druid.stat.mergeSql=true;druid.stat.slowSqlMillis=5000
# 合并多个DruidDataSource的监控数据
useGlobalDataSourceStat: true

View File

@ -89,12 +89,12 @@
<id>prod</id>
<properties>
<profiles.active>prod</profiles.active>
<nacos.server>127.0.0.1:8848</nacos.server>
<nacos.server>53.16.17.14:8848</nacos.server>
<nacos.discovery.group>DEFAULT_GROUP</nacos.discovery.group>
<nacos.config.group>DEFAULT_GROUP</nacos.config.group>
<nacos.username>nacos</nacos.username>
<nacos.password>nacos</nacos.password>
<logstash.address>127.0.0.1:4560</logstash.address>
<nacos.password>Ycgis!2509</nacos.password>
<logstash.address>53.16.17.14:4560</logstash.address>
</properties>
</profile>
</profiles>
@ -376,7 +376,7 @@
<modules>
<module>stwzhj-auth</module>
<module>stwzhj-gateway</module>
<module>wzhj-gateway</module>
<module>stwzhj-visual</module>
<module>stwzhj-modules</module>
<module>stwzhj-api</module>

View File

@ -14,6 +14,7 @@
<module>stwzhj-api-resource</module>
<module>stwzhj-api-workflow</module>
<module>stwzhj-api-data2es</module>
<module>stwzhj-api-location</module>
</modules>
<artifactId>stwzhj-api</artifactId>

View File

@ -47,6 +47,12 @@
<version>${revision}</version>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api-location</artifactId>
<version>${revision}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

View File

@ -4,8 +4,15 @@ import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import java.util.List;
import java.util.concurrent.ExecutionException;
/**
 * Remote service contract for pushing GPS records into Elasticsearch.
 * NOTE(review): presumably implemented by the data2es module and exposed over
 * Dubbo — confirm against the provider side.
 */
public interface RemoteDataToEsService {

    /** Saves a batch of GPS records; returns the generic result wrapper {@code R}. */
    R saveDataBatch(List<RemoteGpsInfo> gpsInfoList);

    /**
     * Saves a single GPS record.
     *
     * @throws Exception declared broadly by the provider — NOTE(review): consider
     *                   narrowing to a specific exception type on the implementation.
     */
    R saveData(RemoteGpsInfo gpsInfo) throws Exception;

    /** Updates the online status for a batch of GPS records. */
    R updateOnlineStatusBatch(List<RemoteGpsInfo> gpsInfoList);

    /** Updates the online status for a single GPS record. */
    R updateOnlineStatus(RemoteGpsInfo gpsInfo);
}

View File

@ -0,0 +1,33 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api</artifactId>
<version>${revision}</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-api-location</artifactId>
<description>
stwzhj-api-location
</description>
<dependencies>
<!-- stwzhj Common Core-->
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-core</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-excel</artifactId>
</dependency>
</dependencies>
</project>

View File

@ -0,0 +1,8 @@
package org.dromara.location.api;
import java.util.List;
/**
 * Remote service for querying data-status information from Elasticsearch.
 */
public interface RemoteElasticSearchService {

    /**
     * Returns a list of data-status strings.
     * NOTE(review): "linsten" looks like a typo for "listen"; renaming would break
     * implementers and remote callers, so the name is left unchanged here.
     * NOTE(review): return-value semantics are not visible in this file — confirm
     * with the implementation.
     */
    List<String> linstenDataStatus();
}

View File

@ -1,5 +1,10 @@
package org.dromara.system.api;
import org.dromara.system.api.domain.bo.RemoteDeptBo;
import org.dromara.system.api.domain.vo.RemoteDeptVo;
import java.util.List;
/**
*
*
@ -15,4 +20,6 @@ public interface RemoteDeptService {
*/
String selectDeptNameByIds(String deptIds);
List<RemoteDeptVo> selectDept(RemoteDeptBo bo);
}

View File

@ -52,6 +52,8 @@ public interface RemoteUserService {
*/
LoginUser getUserInfoByEmail(String email, String tenantId) throws UserException;
LoginUser getUserInfoByIdCard(String idCard, String tenantId) throws UserException;
/**
* openid
*

View File

@ -0,0 +1,70 @@
package org.dromara.system.api.domain.bo;
import io.github.linpeilie.annotations.AutoMapper;
import jakarta.validation.constraints.Email;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.NotNull;
import jakarta.validation.constraints.Size;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.io.Serializable;
/**
 * Business object for the sys_dept (department) table, exchanged over remote calls.
 * Original field comments were lost in extraction; the docs below are inferred
 * from field names — confirm against the sys_dept schema.
 *
 * @author Michelle.Chung
 */
@Data
@NoArgsConstructor
public class RemoteDeptBo implements Serializable {

    /** Department id. */
    private String deptId;

    /** Parent department id. */
    private String parentId;

    /** Department name. */
    private String deptName;

    /** Department category — NOTE(review): semantics not visible here; confirm. */
    private String deptCategory;

    /** Display / sort order. */
    private Integer orderNum;

    /** Leader user id. */
    private Long leader;

    /** Contact phone number. */
    private String phone;

    /** Contact email address. */
    private String email;

    /** Status flag; original comment reads "0 1" — presumably 0 = normal, 1 = disabled (confirm). */
    private String status;

    /** Full (qualified) department name — NOTE(review): undocumented in original; verify usage. */
    private String fullName;
}

View File

@ -83,6 +83,12 @@ public class RemoteDeviceBo implements Serializable {
*/
private String remark1;
private String createTime;
private String updateTime;
private String[] zzjgdms;
/**
* 2
*/

View File

@ -0,0 +1,96 @@
package org.dromara.system.api.domain.vo;
import com.alibaba.excel.annotation.ExcelIgnoreUnannotated;
import com.alibaba.excel.annotation.ExcelProperty;
import io.github.linpeilie.annotations.AutoMapper;
import lombok.Data;
import org.dromara.common.excel.annotation.ExcelDictFormat;
import org.dromara.common.excel.convert.ExcelDictConvert;
import java.io.Serial;
import java.io.Serializable;
import java.util.Date;
/**
 * View object for the sys_dept (department) table, returned by remote queries.
 * Original field comments were lost in extraction; the docs below are inferred
 * from field names — confirm against the sys_dept schema.
 *
 * @author Michelle.Chung
 */
@Data
public class RemoteDeptVo implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /** Department id. */
    private String deptId;

    /** Parent department id. */
    private String parentId;

    /** Parent department name. */
    private String parentName;

    /** Ancestor chain — NOTE(review): presumably a delimited list of ancestor dept ids; confirm. */
    private String ancestors;

    /** Department name. */
    private String deptName;

    /** Department category — NOTE(review): semantics not visible here; confirm. */
    private String deptCategory;

    /** Display / sort order. */
    private Integer orderNum;

    /** Leader user id. */
    private Long leader;

    /** Leader display name. */
    private String leaderName;

    /** Contact phone number. */
    private String phone;

    /** Contact email address. */
    private String email;

    /** Status flag; original comment reads "0 1" — presumably 0 = normal, 1 = disabled (confirm). */
    private String status;

    /** Creation timestamp. */
    private Date createTime;

    /** Total count — NOTE(review): presumably total devices/users under this dept; confirm with caller. */
    private Integer allCount;

    /** Online count — NOTE(review): presumably online devices/users under this dept; confirm with caller. */
    private Integer onlineCount;

    /** Full (qualified) department name. */
    private String fullName;
}

View File

@ -130,6 +130,8 @@ public class LoginUser implements Serializable {
*/
private String deviceType;
private String manageDeptId;
/**
* id
*/

View File

@ -46,7 +46,7 @@ public class CaptchaController {
@GetMapping("/code")
public R<CaptchaVo> getCode() {
CaptchaVo captchaVo = new CaptchaVo();
boolean captchaEnabled = captchaProperties.getEnabled();
boolean captchaEnabled = false;
if (!captchaEnabled) {
captchaVo.setCaptchaEnabled(false);
return R.ok(captchaVo);

View File

@ -3,10 +3,12 @@ package org.dromara.auth.controller;
import cn.dev33.satoken.exception.NotLoginException;
import cn.hutool.core.codec.Base64;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.net.URLDecoder;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import me.zhyd.oauth.model.AuthResponse;
@ -19,8 +21,11 @@ import org.dromara.auth.domain.vo.LoginVo;
import org.dromara.auth.domain.vo.TenantListVo;
import org.dromara.auth.form.RegisterBody;
import org.dromara.auth.form.SocialLoginBody;
import org.dromara.auth.form.SsoLoginBody;
import org.dromara.auth.properties.DsSsoProperties;
import org.dromara.auth.service.IAuthStrategy;
import org.dromara.auth.service.SysLoginService;
import org.dromara.auth.util.RSAUtil;
import org.dromara.common.core.constant.UserConstants;
import org.dromara.common.core.domain.R;
import org.dromara.common.core.domain.model.LoginBody;
@ -49,6 +54,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
/**
* token
@ -61,6 +67,8 @@ import java.util.concurrent.TimeUnit;
public class TokenController {
private final SocialProperties socialProperties;
private final DsSsoProperties ssoProperties;
private final SysLoginService sysLoginService;
private final ScheduledExecutorService scheduledExecutorService;
@ -110,6 +118,55 @@ public class TokenController {
return R.ok(loginVo);
}
@RequestMapping("/jzLogin")
public void wjyLogin(HttpServletRequest request, HttpServletResponse response) throws Exception{
System.out.print("进入jzLogin");
String userInfo = request.getParameter("userInfo");
log.info("接收userInfo={}",userInfo);
String privateKey = "MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMSmBCx1ghaVLT/x96ERi9lo3XY/dzKFc9y5f7RtPJihU6fijX4eFSNrhJAjnP2pkJfCXLEmaPznmoUfrUhmq3yeXNzRNILQ+XAUcGkX13oMqW9qM/u5ZHlBnBwMiCZVOZ4pmaZ+hgA8jiNjhh1AqBkRpbvlfnlyC0fBv5FcXOQ3AgMBAAECgYBhfp2bHTc90v/4MBLBfF2FkIQb2CNUnO1whVdXMMFWOsWq+puD5447XVb/z2hKKwNQ+j/SZGSw3nKxhZ73wk9KPkGo9o4gjdwbgo6Qnk8sGQFpCg+5aFH1CgMy5PaM2ZcxbPOfO7tardS1D9u3Vm88688Qdk5sLLWkf6JuNP6LIQJBAPQ4w4qMOfI7Uss90A7/SGHWUjzLvsEv5eq/jXpRhJnyX5tHDCgPCq4CQKwBfUbzdapMtsf065aZCWZR51+V3g0CQQDOIeYX3TTDadzg/cC8tG/AVnxbsoY2Cy9fXmzJCfXIOxZPQIL1WF7HDveTXRxhp6CjSkzVcFJ9K1MFbl/WCf5TAkBqqawFg87/E5fKelm3Yxlq2Z8bPGU4nMHO8BBLXvUKA4hsGfCeQkuIyiUSuqfLaCN42H5wAHEwWfB4BseFwL+1AkBgqfrp4/0TSnifNcAFfiQNsUD+C1juzQ35DiG7oSUWMcdDgQAop7lCZd1pyLc/gElPac9gJB06v29eU6UzjVorAkEAymdUmrgcezAi+aMA0+QcenvPiom3jQkduvfBjOiSLSWm5oB8+mMlExl8Vm9FtKMms6zOs/LcTZzxonl//oe4Ag==";
com.alibaba.fastjson.JSONObject json = null;
if (needsUrlDecode(userInfo)){
byte[] b1 = RSAUtil.decrypt(privateKey, URLDecoder.decode(userInfo, StandardCharsets.UTF_8));
json = com.alibaba.fastjson.JSONObject.parseObject(new String(b1));
}else {
byte[] b1 = RSAUtil.decrypt(privateKey, userInfo);
json = com.alibaba.fastjson.JSONObject.parseObject(new String(b1));
}
log.info("解码后用户信息={}",json.toString());
String identityNo = json.getString("cardNo");
// String identityNo = "340321199705196996";
/* String identityNo = request.getParameter("userInfo");
String policeId = "999013";*/
String clientId = "e5cd7e4891bf95d1d19206ce24a7b32e";
String grantType = "sso";
RemoteClientVo clientVo = remoteClientService.queryByClientId(clientId);
SsoLoginBody loginBody = new SsoLoginBody();
loginBody.setClientId(clientId);
loginBody.setGrantType(grantType);
loginBody.setIdcard(identityNo);
LoginVo loginVo = IAuthStrategy.login(JsonUtils.toJsonString(loginBody), clientVo, grantType);
response.sendRedirect(ssoProperties.getRedirctUrl()+loginVo.getAccessToken()+"&clientId="+loginVo.getClientId());
}
private static final Pattern ENCODED_PATTERN = Pattern.compile("%[0-9A-Fa-f]{2}");
/**
* URL
* @param str
* @return true URLfalse
*/
public static boolean needsUrlDecode(String str) {
if (str == null || str.isEmpty()) {
return false;
}
return ENCODED_PATTERN.matcher(str).find();
}
/**
* Token
*

View File

@ -0,0 +1,13 @@
package org.dromara.auth.form;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.dromara.common.core.domain.model.LoginBody;
/**
 * Login request body for the "sso" grant type: extends the common LoginBody
 * with the national ID-card number used to look the user up.
 */
@Data
@EqualsAndHashCode(callSuper = true)
public class SsoLoginBody extends LoginBody {

    // National ID-card number (身份证号码) used to resolve the user during SSO login
    private String idcard;
}

View File

@ -0,0 +1,24 @@
package org.dromara.auth.properties;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Configuration;
/**
 * Configuration properties for the "ds.sso" single-sign-on integration.
 * Refreshable at runtime via the config center thanks to {@code @RefreshScope}.
 */
@Data
@Configuration
@RefreshScope
@ConfigurationProperties(prefix = "ds.sso")
public class DsSsoProperties {

    // SSO server base URL
    private String url;

    // SSO client id
    private String clientId;

    // SSO client secret
    private String clientSecret;

    // Front-end redirect address appended with the issued token.
    // NOTE(review): "redirctUrl" is a typo for "redirectUrl", but renaming would
    // change the bound property key (ds.sso.redirct-url) and break callers of
    // getRedirctUrl() — left as-is.
    private String redirctUrl;
}

View File

@ -0,0 +1,60 @@
package org.dromara.auth.service.impl;
import cn.dev33.satoken.stp.SaLoginModel;
import cn.dev33.satoken.stp.StpUtil;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.dubbo.config.annotation.DubboReference;
import org.dromara.auth.domain.vo.LoginVo;
import org.dromara.auth.form.SsoLoginBody;
import org.dromara.auth.service.IAuthStrategy;
import org.dromara.auth.service.SysLoginService;
import org.dromara.common.json.utils.JsonUtils;
import org.dromara.common.satoken.utils.LoginHelper;
import org.dromara.system.api.RemoteUserService;
import org.dromara.system.api.domain.vo.RemoteClientVo;
import org.dromara.system.api.model.LoginUser;
import org.springframework.stereotype.Service;
/**
 * SSO authentication strategy: logs a user in by national ID-card number
 * (no password check) and issues a Sa-Token session.
 * Registered under the bean name "sso" + IAuthStrategy.BASE_NAME so the
 * grant type "sso" dispatches here.
 */
@Slf4j
@Service("sso" + IAuthStrategy.BASE_NAME)
@RequiredArgsConstructor
public class SsoAuthStrategy implements IAuthStrategy {

    private final SysLoginService loginService;

    @DubboReference
    private RemoteUserService remoteUserService;

    /**
     * Performs the SSO login.
     *
     * @param body   JSON-serialized {@code SsoLoginBody} carrying the idcard (and optional tenantId)
     * @param client registered client metadata (client key, device type, token timeouts)
     * @return login result holding the access token, its expiry, and the client id
     */
    @Override
    public LoginVo login(String body, RemoteClientVo client) {
        SsoLoginBody loginBody = JsonUtils.parseObject(body, SsoLoginBody.class);
        // ValidatorUtils.validate(loginBody); // body validation intentionally skipped for SSO
        String tenantId = loginBody.getTenantId();
        String idcard = loginBody.getIdcard();
        // Resolve the user by ID card through the remote user service
        LoginUser loginUser = remoteUserService.getUserInfoByIdCard(idcard, tenantId);
        // loginService.checkLogin(...) deliberately omitted: caller is a trusted SSO peer,
        // so no password/lock check is performed here
        loginUser.setClientKey(client.getClientKey());
        loginUser.setDeviceType(client.getDeviceType());
        SaLoginModel model = new SaLoginModel();
        model.setDevice(client.getDeviceType());
        // Per-client token lifetimes; when unset, Sa-Token falls back to the global yml config
        // (e.g. back-office users 30 min, app users 1 day)
        model.setTimeout(client.getTimeout());
        model.setActiveTimeout(client.getActiveTimeout());
        model.setExtra(LoginHelper.CLIENT_KEY, client.getClientId());
        // Generate the token
        LoginHelper.login(loginUser, model);
        LoginVo loginVo = new LoginVo();
        loginVo.setAccessToken(StpUtil.getTokenValue());
        loginVo.setExpireIn(StpUtil.getTokenTimeout());
        loginVo.setClientId(client.getClientId());
        return loginVo;
    }
}

View File

@ -0,0 +1,215 @@
package org.dromara.auth.util;
import cn.hutool.core.net.URLDecoder;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.codec.binary.Base64;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import java.io.ByteArrayOutputStream;
import java.nio.charset.StandardCharsets;
import java.security.*;
import java.security.interfaces.RSAPrivateKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.regex.Pattern;
/**
 * RSA helper used by the SSO login flow: reconstructs PKCS#8 private keys from
 * Base64 strings, decrypts Base64 ciphertext (block-by-block via the
 * BouncyCastle provider), and offers Base64 encode/decode plus a heuristic for
 * detecting URL-encoded input.
 *
 * <p>NOTE(review): {@code ENCODED_PATTERN}/{@code needsUrlDecode} are duplicated
 * in TokenController — consider making TokenController call this class instead.</p>
 *
 * @author kfchen
 * @version 1.0.1
 * @date 2019-09-12
 */
public class RSAUtil {

    /** JCA algorithm name used for key reconstruction and ciphers. */
    public static final String KEY_ALGORITHM = "RSA";

    /** Lazily-created, cached BouncyCastle provider (see {@link #getBcpInstance()}). */
    private static org.bouncycastle.jce.provider.BouncyCastleProvider bouncyCastleProvider = null;

    /**
     * Returns the shared BouncyCastleProvider, creating it on first use.
     * Synchronized so concurrent first calls cannot create two instances.
     */
    public static synchronized org.bouncycastle.jce.provider.BouncyCastleProvider getBcpInstance() {
        if (bouncyCastleProvider == null) {
            bouncyCastleProvider = new org.bouncycastle.jce.provider.BouncyCastleProvider();
        }
        return bouncyCastleProvider;
    }

    /**
     * Decrypts a single RSA block produced by the front-end JSEncrypt library,
     * using the default JCA provider.
     *
     * @param privateKeyStr Base64-encoded PKCS#8 private key
     * @param data          Base64-encoded ciphertext
     * @return decrypted plaintext bytes
     * @throws Exception when the key is missing/illegal or the ciphertext is corrupt
     */
    public static byte[] decryptForJSEncrypt(String privateKeyStr, String data)
        throws Exception {
        RSAPrivateKey privateKey = loadPrivateKeyByStr(privateKeyStr);
        if (privateKey == null) {
            throw new Exception("解密私钥为空, 请设置");
        }
        try {
            // Default-provider RSA cipher (platform default padding)
            Cipher cipher = Cipher.getInstance("RSA");
            byte[] cipherData = Base64.decodeBase64(data.getBytes());
            cipher.init(Cipher.DECRYPT_MODE, privateKey);
            return cipher.doFinal(cipherData);
        } catch (NoSuchAlgorithmException e) {
            throw new Exception("无此解密算法", e);
        } catch (NoSuchPaddingException e) {
            // fixed: previously printStackTrace() + return null, silently swallowing the
            // failure while every sibling branch threw — now fails loudly and consistently
            throw new Exception("无此填充方式", e);
        } catch (InvalidKeyException e) {
            throw new Exception("解密私钥非法,请检查", e);
        } catch (IllegalBlockSizeException e) {
            throw new Exception("密文长度非法", e);
        } catch (BadPaddingException e) {
            throw new Exception("密文数据已损坏", e);
        }
    }

    /**
     * Reconstructs an RSA private key from a Base64-encoded PKCS#8 string.
     *
     * @throws Exception when the algorithm is unavailable, the key spec is invalid,
     *                   or the input is null (cause preserved)
     */
    public static RSAPrivateKey loadPrivateKeyByStr(String privateKeyStr)
        throws Exception {
        try {
            byte[] buffer = Base64.decodeBase64(privateKeyStr);
            PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(buffer);
            KeyFactory keyFactory = KeyFactory.getInstance("RSA");
            return (RSAPrivateKey) keyFactory.generatePrivate(keySpec);
        } catch (NoSuchAlgorithmException e) {
            throw new Exception("无此算法", e);
        } catch (InvalidKeySpecException e) {
            throw new Exception("私钥非法", e);
        } catch (NullPointerException e) {
            throw new Exception("私钥数据为空", e);
        }
    }

    /**
     * Decrypts Base64 ciphertext with the given Base64 PKCS#8 private key,
     * processing the input one RSA block at a time via the BouncyCastle provider.
     *
     * @param privateKey Base64-encoded PKCS#8 private key
     * @param data       Base64-encoded ciphertext (may span multiple RSA blocks)
     * @return concatenated plaintext bytes
     * @throws Exception wrapping the underlying crypto failure (cause preserved)
     */
    public static byte[] decrypt(String privateKey, String data) throws Exception {
        try {
            byte[] raw = decryptBASE64(data);
            // 对密钥解密 — decode the Base64 key material
            byte[] keyBytes = decryptBASE64(privateKey);
            // 取得私钥 — rebuild the private key from PKCS#8 bytes
            PKCS8EncodedKeySpec pkcs8KeySpec = new PKCS8EncodedKeySpec(keyBytes);
            KeyFactory keyFactory = KeyFactory.getInstance(KEY_ALGORITHM);
            PrivateKey pk = keyFactory.generatePrivate(pkcs8KeySpec);
            Cipher cipher = Cipher.getInstance("RSA", getBcpInstance());
            // fixed: DECRYPT_MODE is a static constant — access via the class,
            // not the instance (was: cipher.init(cipher.DECRYPT_MODE, pk))
            cipher.init(Cipher.DECRYPT_MODE, pk);
            int blockSize = cipher.getBlockSize();
            ByteArrayOutputStream bout = new ByteArrayOutputStream(64);
            int j = 0;
            // Decrypt block-by-block and concatenate the plaintext
            while (raw.length - j * blockSize > 0) {
                bout.write(cipher.doFinal(raw, j * blockSize, blockSize));
                j++;
            }
            return bout.toByteArray();
        } catch (Exception e) {
            // fixed: preserve the original cause instead of rethrowing message-only
            throw new Exception(e.getMessage(), e);
        }
    }

    /**
     * Encrypts data with a Base64-encoded PKCS#8 private key (single RSA block,
     * default provider).
     *
     * @param data plaintext bytes to encrypt
     * @param key  Base64-encoded PKCS#8 private key
     * @return ciphertext bytes
     * @throws Exception on key reconstruction or encryption failure
     */
    public static byte[] encryptByPrivateKey(byte[] data, String key) throws Exception {
        // 对密钥解密 — decode the Base64 key material
        byte[] keyBytes = decryptBASE64(key);
        // 取得私钥 — rebuild the private key
        PKCS8EncodedKeySpec pkcs8KeySpec = new PKCS8EncodedKeySpec(keyBytes);
        KeyFactory keyFactory = KeyFactory.getInstance(KEY_ALGORITHM);
        Key privateKey = keyFactory.generatePrivate(pkcs8KeySpec);
        // 对数据加密 — encrypt with the private key
        Cipher cipher = Cipher.getInstance(keyFactory.getAlgorithm());
        cipher.init(Cipher.ENCRYPT_MODE, privateKey);
        return cipher.doFinal(data);
    }

    /** Decodes a Base64 string to bytes (Apache Commons Codec, lenient decoder). */
    public static byte[] decryptBASE64(String key) {
        return Base64.decodeBase64(key);
    }

    /** Encodes bytes to an unchunked Base64 string. */
    public static String encryptBASE64(byte[] bytes) {
        return Base64.encodeBase64String(bytes);
    }

    // Precompiled pattern: '%' followed by two hex digits (a URL-encoded byte)
    private static final Pattern ENCODED_PATTERN = Pattern.compile("%[0-9A-Fa-f]{2}");

    /**
     * Heuristic: does the string contain URL-encoded sequences and therefore
     * need URL-decoding before RSA decryption?
     *
     * @param str candidate string (null/empty returns false)
     * @return true if a %XX sequence is found
     */
    public static boolean needsUrlDecode(String str) {
        if (str == null || str.isEmpty()) {
            return false;
        }
        return ENCODED_PATTERN.matcher(str).find();
    }

    /**
     * Ad-hoc manual test harness.
     *
     * <p>SECURITY NOTE(review): a real RSA private key is hard-coded below (and
     * again in TokenController). Committed private keys must be treated as
     * compromised — rotate them and load from secure configuration instead.</p>
     */
    public static void main(String[] args) throws Exception {
        String publicKey = "MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQC7V+wyxy6D+lBNHfz1eSF7NJ3QSpAJ/9P+4uDv8w1MKm0lzQKmbNJEHaKr3zzQzlRrWdo675DuqVtCtMVCR1oha5ONEaxo8uYazTSOlLRVDEjL/JDcIQIL+1Zn7p/pu3UusmHVxQQTFftxK7MS6uWd/TeNr5sYmseT27922rNlVQIDAQAB";//RSACoder.getPublicKey(keyMap);
        String privateKey = "MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMSmBCx1ghaVLT/x96ERi9lo3XY/dzKFc9y5f7RtPJihU6fijX4eFSNrhJAjnP2pkJfCXLEmaPznmoUfrUhmq3yeXNzRNILQ+XAUcGkX13oMqW9qM/u5ZHlBnBwMiCZVOZ4pmaZ+hgA8jiNjhh1AqBkRpbvlfnlyC0fBv5FcXOQ3AgMBAAECgYBhfp2bHTc90v/4MBLBfF2FkIQb2CNUnO1whVdXMMFWOsWq+puD5447XVb/z2hKKwNQ+j/SZGSw3nKxhZ73wk9KPkGo9o4gjdwbgo6Qnk8sGQFpCg+5aFH1CgMy5PaM2ZcxbPOfO7tardS1D9u3Vm88688Qdk5sLLWkf6JuNP6LIQJBAPQ4w4qMOfI7Uss90A7/SGHWUjzLvsEv5eq/jXpRhJnyX5tHDCgPCq4CQKwBfUbzdapMtsf065aZCWZR51+V3g0CQQDOIeYX3TTDadzg/cC8tG/AVnxbsoY2Cy9fXmzJCfXIOxZPQIL1WF7HDveTXRxhp6CjSkzVcFJ9K1MFbl/WCf5TAkBqqawFg87/E5fKelm3Yxlq2Z8bPGU4nMHO8BBLXvUKA4hsGfCeQkuIyiUSuqfLaCN42H5wAHEwWfB4BseFwL+1AkBgqfrp4/0TSnifNcAFfiQNsUD+C1juzQ35DiG7oSUWMcdDgQAop7lCZd1pyLc/gElPac9gJB06v29eU6UzjVorAkEAymdUmrgcezAi+aMA0+QcenvPiom3jQkduvfBjOiSLSWm5oB8+mMlExl8Vm9FtKMms6zOs/LcTZzxonl//oe4Ag==";
        // Sample ciphertext of "abcd#123456" (private-key decryption demo)
        String s1 = "pa+6STSQ5ZK94/xMzLE4VCFnbYgan1oUADGv0N07fu1QXfzuVoLLDPyPbduFQwxVB1ZFbkrJIhB56ZtkdqN0A12bNQ13MylyfJfL3NJTUJYukINmAOB8WgB8t9ZIyH2EZRK6YbtGgy2MS6YsisJ93G4ofrEHkv4r3nnl++9ZUzE=";
        String userInfo = "iu6HWB0lqozQ5UQxvtZDy2CdlMSOaJ4fXHduRLtQpnzj/m5pAbaOyWZ0rUe+sqIc887XgdTjNODEeFtM6adgBmS6DZmzAYIt+6I/o5Me7HUVl/EpMwO1PlT98lz/mzOXdPHGZTv/6DLXEDxR3fr17agP6oXq/YbrPsClJ1e5K/qpxNxCqgEv03Kl7gM27ZHBs0PCkUyzv8Wj3yES28tTYpJyKKK4LZvEt1S7c+g9WT+GgjO9tLEh4lUxyVKmBEAJfKcrCQzcu8e3E8yRjNdtqyk3/XTF/kagoSTHFqt3EHxdQBXM8XM2xzkZNcdohaWD7JuaiWXHsuLCtYVO5dYY+Q==";
        JSONObject json = null;
        if (needsUrlDecode(userInfo)){
            byte[] b1 = RSAUtil.decrypt(privateKey, URLDecoder.decode(userInfo, StandardCharsets.UTF_8));
            json = JSONObject.parseObject(new String(b1));
        }else {
            byte[] b1 = RSAUtil.decrypt(privateKey, userInfo);
            json = JSONObject.parseObject(new String(b1));
        }
        byte[] b1 = decrypt(privateKey, userInfo);
        System.out.println(new String(b1));
        // Private-key encryption demo for "abcd#123456"
        String s2 = "abcd#123456";
        byte[] b2 = encryptByPrivateKey(s2.getBytes(),privateKey);
        // Base64-encode the ciphertext before sending as a request parameter
        String s = encryptBASE64(b2);
        // System.out.println(s);
    }
}

View File

@ -6,7 +6,7 @@ server:
spring:
application:
# 应用名称
name: stwzhj-auth
name: wzhj-auth
profiles:
# 环境配置
active: @profiles.active@

View File

@ -1,28 +1,49 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}"/>
<property name="log.path" value="logs" />
<property name="log.file" value="auth" />
<property name="MAX_FILE_SIZE" value="10MB" />
<property name="MAX_HISTORY" value="30" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<!-- INFO日志Appender -->
<appender name="FILE_INFO" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>INFO</level>
<onMatch>ACCEPT</onMatch>
<onMismatch>DENY</onMismatch>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/info/info.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<include resource="logback-common.xml" />
<!-- ERROR日志Appender -->
<appender name="FILE_ERROR" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.${log.file}.log</file>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>ERROR</level>
</filter>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/error/error.${log.file}.%d{yyyy-MM-dd}.%i.log.gz</fileNamePattern>
<maxFileSize>${MAX_FILE_SIZE}</maxFileSize>
<maxHistory>${MAX_HISTORY}</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>
<!-- 根Logger配置禁用控制台输出 -->
<root level="INFO">
<appender-ref ref="FILE_INFO" />
<appender-ref ref="FILE_ERROR" />
</root>
</configuration>

View File

@ -21,8 +21,12 @@ public class RedisConstants {
public static final long REDIS_ONLINE_USER_NEVER_EXPIRE = -1;
public static final long REDIS_NEVER_EXPIRE = 0L;
public static final long FIVE_MINUTES_REDIS_ONLINE_USER_EXPIRE_TIME = 60 * 5;
public static final String ONLINE_USERS_TEN = "ten:online_users:";
public static String getUserTokenKey(String token) {
return CCL_CODING_SSO_TOKEN + token;

View File

@ -40,6 +40,10 @@
<groupId>org.apache.dubbo</groupId>
<artifactId>dubbo-metadata-report-redis</artifactId>
<exclusions>
<exclusion>
<groupId>io.lettuce</groupId>
<artifactId>lettuce-core</artifactId>
</exclusion>
<exclusion>
<groupId>redis.clients</groupId>
<artifactId>jedis</artifactId>
@ -51,6 +55,11 @@
<artifactId>jedis</artifactId>
<version>5.1.0</version>
</dependency>
<!-- Lettuce -->
<dependency>
<groupId>io.lettuce</groupId>
<artifactId>lettuce-core</artifactId>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>

View File

@ -49,10 +49,15 @@ import static org.apache.dubbo.metadata.report.support.Constants.DEFAULT_METADAT
public class RedisMetadataReport extends AbstractMetadataReport {
private static final String REDIS_DATABASE_KEY = "database";
private static final String SENTINEL_KEY = "sentinel";
private static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(RedisMetadataReport.class);
// protected , for test
protected JedisPool pool;
protected JedisSentinelPool sentinelPool;
private Set<HostAndPort> jedisClusterNodes;
private int timeout;
private String password;
@ -75,6 +80,14 @@ public class RedisMetadataReport extends AbstractMetadataReport {
for (URL tmpUrl : urls) {
jedisClusterNodes.add(new HostAndPort(tmpUrl.getHost(), tmpUrl.getPort()));
}
} else if (url.getParameter(SENTINEL_KEY,false)) {
Set<String> sentinels = new HashSet<>();
List<URL> urls = url.getBackupUrls();
for (URL tmpUrl : urls) {
sentinels.add(tmpUrl.getHost()+":"+ tmpUrl.getPort());
}
int database = url.getParameter(REDIS_DATABASE_KEY, 0);
sentinelPool = new JedisSentinelPool("mymaster",sentinels ,new GenericObjectPoolConfig<>(), timeout, password, database);
} else {
int database = url.getParameter(REDIS_DATABASE_KEY, 0);
pool = new JedisPool(new JedisPoolConfig(), url.getHost(), url.getPort(), timeout, password, database);
@ -128,11 +141,25 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private void storeMetadata(BaseMetadataIdentifier metadataIdentifier, String v) {
if (pool != null) {
storeMetadataStandalone(metadataIdentifier, v);
}else if(sentinelPool != null) {
storeMetadataInSentinel(metadataIdentifier, v);
} else {
storeMetadataInCluster(metadataIdentifier, v);
}
}
private void storeMetadataInSentinel(BaseMetadataIdentifier metadataIdentifier, String v) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.set(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY), v, jedisParams);
} catch (Throwable e) {
String msg =
"Failed to put " + metadataIdentifier + " to redis cluster " + v + ", cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
}
private void storeMetadataInCluster(BaseMetadataIdentifier metadataIdentifier, String v) {
try (JedisCluster jedisCluster =
new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {
@ -158,11 +185,24 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private void deleteMetadata(BaseMetadataIdentifier metadataIdentifier) {
if (pool != null) {
deleteMetadataStandalone(metadataIdentifier);
}else if(sentinelPool != null) {
deleteMetadataSentinel(metadataIdentifier);
} else {
deleteMetadataInCluster(metadataIdentifier);
}
}
private void deleteMetadataSentinel(BaseMetadataIdentifier metadataIdentifier) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.del(metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY));
} catch (Throwable e) {
String msg = "Failed to delete " + metadataIdentifier + " from redis , cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
}
private void deleteMetadataInCluster(BaseMetadataIdentifier metadataIdentifier) {
try (JedisCluster jedisCluster =
new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {
@ -187,11 +227,24 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private String getMetadata(BaseMetadataIdentifier metadataIdentifier) {
if (pool != null) {
return getMetadataStandalone(metadataIdentifier);
}else if(sentinelPool != null) {
return getMetadataSentinel(metadataIdentifier);
} else {
return getMetadataInCluster(metadataIdentifier);
}
}
/**
 * Reads the metadata value for the identifier's unique key via the
 * sentinel pool; wraps any failure in {@link RpcException}.
 */
private String getMetadataSentinel(BaseMetadataIdentifier metadataIdentifier) {
    String uniqueKey = metadataIdentifier.getUniqueKey(KeyTypeEnum.UNIQUE_KEY);
    try (Jedis connection = sentinelPool.getResource()) {
        return connection.get(uniqueKey);
    } catch (Throwable t) {
        String message = "Failed to get " + metadataIdentifier + " from redis , cause: " + t.getMessage();
        logger.error(TRANSPORT_FAILED_RESPONSE, "", "", message, t);
        throw new RpcException(message, t);
    }
}
private String getMetadataInCluster(BaseMetadataIdentifier metadataIdentifier) {
try (JedisCluster jedisCluster =
new JedisCluster(jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {
@ -243,6 +296,8 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private boolean storeMapping(String key, String field, String value, String ticket) {
if (pool != null) {
return storeMappingStandalone(key, field, value, ticket);
}else if(sentinelPool != null) {
return storeMappingSentinel(key, field, value, ticket);
} else {
return storeMappingInCluster(key, field, value, ticket);
}
@ -278,6 +333,33 @@ public class RedisMetadataReport extends AbstractMetadataReport {
return false;
}
/**
 * Stores a mapping entry (hash field -> value) through the sentinel pool,
 * using WATCH/MULTI/EXEC to implement compare-and-set on the ticket: the
 * write proceeds only when the current field value is absent, the ticket
 * is null, or the current value equals the caller's ticket. On a
 * successful commit the field name is published on the pub/sub key so
 * subscribers learn about the mapping change.
 *
 * @return true if the CAS write committed, false otherwise
 */
private boolean storeMappingSentinel(String key, String field, String value, String ticket) {
    try (Jedis jedisSentinel = sentinelPool.getResource()) {
        jedisSentinel.watch(key);
        String oldValue = jedisSentinel.hget(key, field);
        if (null == oldValue || null == ticket || oldValue.equals(ticket)) {
            Transaction transaction = jedisSentinel.multi();
            transaction.hset(key, field, value);
            // EXEC returns null when the watched key was modified concurrently.
            List<Object> result = transaction.exec();
            if (null != result) {
                jedisSentinel.publish(buildPubSubKey(), field);
                return true;
            }
        }
        // Release the WATCH when no transaction committed (CAS lost or skipped).
        jedisSentinel.unwatch();
    } catch (Throwable e) {
        String msg = "Failed to put " + key + ":" + field + " to redis " + value + ", cause: " + e.getMessage();
        logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
        throw new RpcException(msg, e);
    }
    return false;
}
/**
* use 'watch' to implement cas.
* Find information about slot distribution by key.
@ -339,6 +421,8 @@ public class RedisMetadataReport extends AbstractMetadataReport {
private String getMappingData(String key, String field) {
if (pool != null) {
return getMappingDataStandalone(key, field);
}else if(sentinelPool != null) {
return getMappingDataSentinel(key, field);
} else {
return getMappingDataInCluster(key, field);
}
@ -355,6 +439,17 @@ public class RedisMetadataReport extends AbstractMetadataReport {
}
}
/**
 * Reads one hash field from Redis via the sentinel pool; failures are
 * logged and surfaced as {@link RpcException}.
 */
private String getMappingDataSentinel(String key, String field) {
    try (Jedis connection = sentinelPool.getResource()) {
        return connection.hget(key, field);
    } catch (Throwable t) {
        String message = "Failed to get " + key + ":" + field + " from redis , cause: " + t.getMessage();
        logger.error(TRANSPORT_FAILED_RESPONSE, "", "", message, t);
        throw new RpcException(message, t);
    }
}
private String getMappingDataStandalone(String key, String field) {
try (Jedis jedis = pool.getResource()) {
return jedis.hget(key, field);
@ -502,6 +597,14 @@ public class RedisMetadataReport extends AbstractMetadataReport {
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
} else if (sentinelPool != null) {
try (Jedis jedisSentinel = sentinelPool.getResource()) {
jedisSentinel.subscribe(notifySub, path);
} catch (Throwable e) {
String msg = "Failed to subscribe " + path + ", cause: " + e.getMessage();
logger.error(TRANSPORT_FAILED_RESPONSE, "", "", msg, e);
throw new RpcException(msg, e);
}
} else {
try (JedisCluster jedisCluster = new JedisCluster(
jedisClusterNodes, timeout, timeout, 2, password, new GenericObjectPoolConfig<>())) {

View File

@ -19,6 +19,18 @@ dubbo:
password: ${spring.cloud.nacos.password}
parameters:
namespace: ${spring.profiles.active}
# metadata-report:
# address: redis://${spring.data.redis.host}:${spring.data.redis.port}
# group: DUBBO_GROUP
# username: dubbo
# password: Ycgis@2509  # NOTE(review): plaintext credential committed to version control — rotate it and load it from a secret store or environment variable instead
# # 集群开关
# sentinel: false
# parameters:
# namespace: ${spring.profiles.active}
# database: ${spring.data.redis.database}
# timeout: ${spring.data.redis.timeout}
# backup: 53.176.146.98:26380,53.176.146.99:26380,53.176.146.100:26380
metadata-report:
address: redis://${spring.data.redis.host}:${spring.data.redis.port}
group: DUBBO_GROUP
@ -43,3 +55,12 @@ dubbo:
retries: 0
# 初始化检查
check: false
logging:
level:
# 设置 Dubbo 核心包的日志级别为 DEBUG
org.apache.dubbo: DEBUG
# 如果需要更细粒度的调试,可指定元数据报告模块
org.apache.dubbo.metadata: DEBUG
# Redis 客户端日志(可选)
io.lettuce.core: WARN # 避免 Redis 连接日志过多

View File

@ -33,13 +33,11 @@ public class BaseEntity implements Serializable {
/**
*
*/
@TableField(fill = FieldFill.INSERT)
private String createDept;
/**
*
*/
@TableField(fill = FieldFill.INSERT)
private Long createBy;
/**
@ -51,7 +49,6 @@ public class BaseEntity implements Serializable {
/**
*
*/
@TableField(fill = FieldFill.INSERT_UPDATE)
private Long updateBy;
/**

View File

@ -38,12 +38,12 @@ public enum DataScopeType {
/**
*
*/
DEPT("3", " #{#deptName} = #{#user.deptId} ", " 1 = 0 "),
DEPT("3", " #{#deptName} = #{#user.manageDeptId} ", " 1 = 0 "),
/**
*
*/
DEPT_AND_CHILD("4", " #{#deptName} IN ( #{@sdss.getDeptAndChild( #user.deptId )} )", " 1 = 0 "),
DEPT_AND_CHILD("4", " #{#deptName} IN ( #{@sdss.getDeptAndChild( #user.manageDeptId )} )", " 1 = 0 "),
/**
*

View File

@ -25,6 +25,7 @@ import org.springframework.core.task.VirtualThreadTaskExecutor;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Objects;
import java.util.TimeZone;
/**
@ -94,6 +95,22 @@ public class RedisConfiguration {
.setReadMode(clusterServersConfig.getReadMode())
.setSubscriptionMode(clusterServersConfig.getSubscriptionMode());
}
// 哨兵模式
RedissonProperties.Sentinel sentinel = redissonProperties.getSentinel();
if (Objects.nonNull(sentinel)) {
config.useSentinelServers()
.setNameMapper(new KeyPrefixHandler(redissonProperties.getKeyPrefix()))
.setTimeout(sentinel.getTimeout())
.setClientName(sentinel.getClientName())
.setIdleConnectionTimeout(sentinel.getIdleConnectionTimeout())
.setSubscriptionConnectionPoolSize(sentinel.getSubscriptionConnectionPoolSize())
.setMasterConnectionMinimumIdleSize(sentinel.getMasterConnectionMinimumIdleSize())
.setMasterConnectionPoolSize(sentinel.getMasterConnectionPoolSize())
.setSlaveConnectionMinimumIdleSize(sentinel.getSlaveConnectionMinimumIdleSize())
.setSlaveConnectionPoolSize(sentinel.getSlaveConnectionPoolSize())
.setReadMode(sentinel.getReadMode())
.setSubscriptionMode(sentinel.getSubscriptionMode());
}
log.info("初始化 redis 配置");
};
}

View File

@ -40,6 +40,8 @@ public class RedissonProperties {
*/
private ClusterServersConfig clusterServersConfig;
private Sentinel sentinel;
@Data
@NoArgsConstructor
public static class SingleServerConfig {
@ -132,4 +134,60 @@ public class RedissonProperties {
}
@Data
@NoArgsConstructor
public static class Sentinel {
    /**
     * Client connection name reported to the Redis server.
     */
    private String clientName;
    /**
     * Minimum number of idle connections kept for the master node.
     */
    private int masterConnectionMinimumIdleSize;
    /**
     * Maximum connection pool size for the master node.
     */
    private int masterConnectionPoolSize;
    /**
     * Minimum number of idle connections kept per slave node.
     */
    private int slaveConnectionMinimumIdleSize;
    /**
     * Maximum connection pool size per slave node.
     */
    private int slaveConnectionPoolSize;
    /**
     * Idle connection timeout (milliseconds, per Redisson convention).
     */
    private int idleConnectionTimeout;
    /**
     * Command response timeout (milliseconds, per Redisson convention).
     */
    private int timeout;
    /**
     * Connection pool size used for pub/sub subscriptions.
     */
    private int subscriptionConnectionPoolSize;
    /**
     * Where read commands are routed (master, slave, or both).
     */
    private ReadMode readMode;
    /**
     * Which node type handles subscription (pub/sub) operations.
     */
    private SubscriptionMode subscriptionMode;
}
}

View File

@ -1,9 +1,11 @@
package org.dromara.common.redis.utils;
import cn.hutool.core.date.DateUnit;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.dromara.common.core.utils.RedisConstants;
import org.dromara.common.core.utils.SpringUtils;
import org.redisson.api.*;
import org.springframework.dao.DataAccessException;
@ -345,6 +347,29 @@ public class RedisUtils {
return rSet.add(data);
}
/**
 * Stores a plain string value under the given key.
 *
 * @param key  cache key
 * @param data value to store
 * @param time time-to-live in seconds; a non-positive value stores the
 *             value without an expiry
 * @param <T>  unused; retained so existing call sites that pass an
 *             explicit type argument keep compiling
 */
public static <T> void set(final String key, String data, long time) {
    // Look the bucket up once instead of once per branch; removed the
    // commented-out legacy RSet implementation that was dead code.
    RBucket<String> bucket = CLIENT.getBucket(key);
    if (time > 0) {
        bucket.set(data, time, TimeUnit.SECONDS);
    } else {
        bucket.set(data);
    }
}
/**
 * Deletes the bucket stored under the given key; a no-op when the key
 * does not exist.
 *
 * @param key cache key to remove
 * @param <T> unused; retained for source compatibility with existing callers
 */
public static <T> void del(final String key) {
    // Removed the commented-out legacy RSet implementation (dead code).
    CLIENT.getBucket(key).delete();
}
/**
* Set
* <p>
@ -573,6 +598,73 @@ public class RedisUtils {
System.out.println("redis:"+list);
}
/**
* RMap
*
* @param data
* @param timeout
* @param timeUnit
*/
/**
 * Writes all entries into the Redis hash "myMap" in one batched round
 * trip, then applies the given TTL to the whole hash.
 *
 * NOTE(review): the hash name "myMap" is hard-coded here and in the
 * sibling map helpers — confirm this is intentional rather than a
 * leftover sample key. The expiry applies to the entire hash, not to
 * individual entries.
 *
 * @param data     entries to store (field -> value)
 * @param timeout  time-to-live for the hash
 * @param timeUnit unit of {@code timeout}
 */
public static void batchPutWithExpire(Map<String, String> data, long timeout, TimeUnit timeUnit) {
    // Create an RBatch so all puts go out in a single pipelined request.
    RBatch batch = CLIENT.createBatch();
    // Async view of the target hash within the batch.
    RMapAsync<Object, Object> mapAsync = batch.getMap("myMap");
    // Queue every entry into the batch.
    for (Map.Entry<String, String> entry : data.entrySet()) {
        mapAsync.putAsync(entry.getKey(), entry.getValue());
    }
    // Flush the batch to Redis.
    batch.execute();
    // Set the TTL on the hash after the writes have been applied.
    RMap<Object, Object> mapSync = CLIENT.getMap("myMap");
    mapSync.expire(timeout, timeUnit);
}
/**
*
*
* @param data
*/
/**
 * Batch-writes every entry of {@code data} into the Redis hash "myMap"
 * using a single pipelined round trip.
 *
 * @param data entries to store (field -> value)
 */
public static void batchPut(Map<String, String> data) {
    RBatch pipeline = CLIENT.createBatch();
    RMapAsync<Object, Object> asyncMap = pipeline.getMap("myMap");
    data.forEach(asyncMap::putAsync);
    pipeline.execute();
}
/**
* key
*
* @param key key
* @return
*/
/**
 * Looks up one field of the Redis hash "myMap" and parses it as JSON.
 *
 * @param key hash field to read
 * @return the parsed JSON object, or null when the field is absent
 */
public static JSONObject getData(String key) {
    Object raw = CLIENT.getMap("myMap").get(key);
    return raw == null ? null : JSONUtil.parseObj(raw.toString());
}
/*
*
* */
@ -596,12 +688,42 @@ public class RedisUtils {
return list;
}
/**
* Redis keys key
*
* @param pattern "user:*"
* @return key value
*/
/**
 * Scans the Redis keyspace for keys matching {@code pattern} (e.g.
 * "user:*"), then uses each matched key name as a field of the hash
 * "myMap" and collects the fields that hold a JSON value.
 *
 * NOTE(review): keys from {@code getKeysByPattern} live in the top-level
 * keyspace, while the lookup below reads hash fields of "myMap" — these
 * only line up if the writers mirror hash fields as standalone keys.
 * Confirm against the code that produces this data.
 *
 * @param pattern key pattern such as "user:*"
 * @return parsed JSON values for the matched entries
 */
public static List<JSONObject> getMatchingKeysAndValues(String pattern) {
    RKeys rKeys = CLIENT.getKeys();
    Iterable<String> keysIterable = rKeys.getKeysByPattern(pattern); // matched key names
    RMap<String, String> map = CLIENT.getMap("myMap");
    List<JSONObject> list = new ArrayList<>();
    for (String key : keysIterable) {
        String value = map.get(key); // read the hash field named after the key
        if (null != value){
            JSONObject jsonObject = JSONUtil.parseObj(value);
            list.add(jsonObject);
        }
    }
    return list;
}
/*
* keyRBucket
* */
/**
 * Reads the bucket stored under {@code key} and parses its content as a
 * JSON object.
 *
 * @param key bucket key
 * @return the parsed JSON object, or null when the bucket is empty
 */
public static JSONObject getBucket(String key){
    Object stored = CLIENT.getBucket(key).get();
    if (stored == null) {
        return null;
    }
    return JSONUtil.parseObj(stored.toString());
}
@ -623,12 +745,18 @@ public class RedisUtils {
/*
* GEO
* */
/*public static void batchGeoAdd(Map<String, GeoEntry> entryMap){
RGeo<RMap<String, String>> geo = CLIENT.getGeo("myGeo");
Map<String, GeoEntry> entries = new HashMap<>();
entries.put("place1", new GeoEntry(13.361389, 38.115556, "Palermo"));
entries.put("place2", new GeoEntry(15.087269, 37.502669, "Catania"));
geo.p(entries);
}*/
/**
 * Adds (or updates) a member's coordinates in the online-users GEO set.
 *
 * @param lng    longitude
 * @param lat    latitude
 * @param member member identifier
 * @return count reported by the GEO add operation
 */
public static long geoAdd(Double lng,Double lat,String member){
    RGeo<String> onlineUsers = CLIENT.getGeo(RedisConstants.ONLINE_USERS_GEO);
    return onlineUsers.add(lng, lat, member);
}
// 查询半径周边 米内的成员
/**
 * Finds members of the online-users GEO set within {@code distance}
 * meters of the given center point.
 *
 * @param centerLon center longitude
 * @param centerLat center latitude
 * @param distance  search radius in meters
 * @return member identifiers inside the radius
 */
public static List<String> nearByXYReadonly(double centerLon,double centerLat,double distance){
    RGeo<String> onlineUsers = CLIENT.getGeo(RedisConstants.ONLINE_USERS_GEO);
    return onlineUsers.radius(centerLon, centerLat, distance, GeoUnit.METERS);
}
}

View File

@ -36,6 +36,8 @@ public class LoginHelper {
public static final String USER_NAME_KEY = "userName";
public static final String DEPT_KEY = "deptId";
public static final String DEPT_NAME_KEY = "deptName";
public static final String MANAGE_DEPT__KEY = "manageDeptId";
public static final String DEPT_CATEGORY_KEY = "deptCategory";
public static final String CLIENT_KEY = "clientid";
@ -53,6 +55,7 @@ public class LoginHelper {
.setExtra(USER_KEY, loginUser.getUserId())
.setExtra(USER_NAME_KEY, loginUser.getUsername())
.setExtra(DEPT_KEY, loginUser.getDeptId())
.setExtra(MANAGE_DEPT__KEY,loginUser.getManageDeptId())
.setExtra(DEPT_NAME_KEY, loginUser.getDeptName())
.setExtra(DEPT_CATEGORY_KEY, loginUser.getDeptCategory())
);

View File

@ -9,6 +9,7 @@ import org.springframework.boot.autoconfigure.AutoConfiguration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.config.annotation.EnableWebSocket;
import org.springframework.web.socket.config.annotation.WebSocketConfigurer;
@ -19,7 +20,7 @@ import org.springframework.web.socket.server.HandshakeInterceptor;
*
* @author zendwang
*/
@AutoConfiguration
@Configuration
@ConditionalOnProperty(value = "websocket.enabled", havingValue = "true")
@EnableConfigurationProperties(WebSocketProperties.class)
@EnableWebSocket

View File

@ -56,6 +56,11 @@ public class PlusWebSocketHandler extends AbstractWebSocketHandler {
WebSocketUtils.publishMessage(webSocketMessageDto);
}
/**
 * Handles an inbound text frame by echoing it back over the sender's
 * session. (The original comment claimed a login-user lookup happened
 * here; the code does no such lookup.)
 */
protected void handleStringMessage(WebSocketSession session, String message) throws Exception {
    // Echo the received payload straight back on the same session.
    WebSocketUtils.sendMessage(session,message);
}
/**
*
*

View File

@ -2,6 +2,7 @@ package org.dromara.common.websocket.holder;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.WebSocketSession;
import java.util.Map;
@ -25,6 +26,7 @@ public class WebSocketSessionHolder {
* @param session WebSocket
*/
/**
 * Registers a WebSocket session under the given user key, first evicting
 * any session previously registered for the same key so a user never
 * holds two tracked sessions at once.
 *
 * @param sessionKey user identifier the session belongs to
 * @param session    WebSocket session to register
 */
public static void addSession(Long sessionKey, WebSocketSession session) {
    // Evict any stale session before storing the new one.
    removeSession(sessionKey);
    USER_SESSION_MAP.put(sessionKey, session);
}
@ -34,8 +36,10 @@ public class WebSocketSessionHolder {
* @param sessionKey
*/
public static void removeSession(Long sessionKey) {
if (USER_SESSION_MAP.containsKey(sessionKey)) {
USER_SESSION_MAP.remove(sessionKey);
WebSocketSession session = USER_SESSION_MAP.remove(sessionKey);
try {
session.close(CloseStatus.BAD_DATA);
} catch (Exception ignored) {
}
}

View File

@ -1,114 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}"/>
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<property name="log.pattern" value="%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<!-- 控制台输出 -->
<appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/console.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大 1天 -->
<maxHistory>1</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
</filter>
</appender>
<!-- 系统日志输出 -->
<appender name="file_info" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/info.log</file>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
<!-- 匹配时的操作:接收(记录) -->
<onMatch>ACCEPT</onMatch>
<!-- 不匹配时的操作:拒绝(不记录) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<appender name="file_error" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/error.log</file>
<!-- 循环政策:基于时间创建日志文件 -->
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大的历史 60天 -->
<maxHistory>60</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${log.pattern}</pattern>
</encoder>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!-- 过滤的级别 -->
<level>ERROR</level>
<!-- 匹配时的操作:接收(记录) -->
<onMatch>ACCEPT</onMatch>
<!-- 不匹配时的操作:拒绝(不记录) -->
<onMismatch>DENY</onMismatch>
</filter>
</appender>
<!-- info异步输出 -->
<appender name="async_info" class="ch.qos.logback.classic.AsyncAppender">
<!-- 不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
<queueSize>512</queueSize>
<!-- 添加附加的appender,最多只能添加一个 -->
<appender-ref ref="file_info"/>
</appender>
<!-- error异步输出 -->
<appender name="async_error" class="ch.qos.logback.classic.AsyncAppender">
<!-- 不丢失日志.默认的,如果队列的80%已满,则会丢弃TRACT、DEBUG、INFO级别的日志 -->
<discardingThreshold>0</discardingThreshold>
<!-- 更改默认的队列的深度,该值会影响性能.默认值为256 -->
<queueSize>512</queueSize>
<!-- 添加附加的appender,最多只能添加一个 -->
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>
<appender-ref ref="async_info"/>
<appender-ref ref="async_error"/>
<appender-ref ref="file_console"/>
</root>
</configuration>

View File

@ -9,13 +9,20 @@
<modelVersion>4.0.0</modelVersion>
<modules>
<module>stwzhj-system</module>
<module>wzhj-system</module>
<module>stwzhj-gen</module>
<module>stwzhj-job</module>
<module>stwzhj-resource</module>
<module>stwzhj-workflow</module>
<module>stwzhj-data2es</module>
<module>wzhj-data2es</module>
<module>stwzhj-baseToSt</module>
<module>wzhj-consumer</module>
<module>wzhj-location</module>
<module>stwzhj-dataToGas</module>
<module>stwzhj-kafkaToWebsocket</module>
<module>wzhj-websocket</module>
<module>wzhj-extract</module>
<module>wzhj-udp</module>
</modules>
<artifactId>stwzhj-modules</artifactId>

View File

@ -1,34 +0,0 @@
package org.dromara.kafka.consumer;
import com.ruansee.redis.JedisConfig;
import com.ruansee.redis.RedisConfig;
import com.ruansee.redis.RedisUtil;
import com.ruansee.redis.RedissionLockUtil;
import org.dromara.kafka.consumer.config.KafkaPropertiesConfig;
import org.redisson.spring.starter.RedissonAutoConfiguration;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.data.redis.RedisAutoConfiguration;
import org.springframework.boot.autoconfigure.data.redis.RedisReactiveAutoConfiguration;
import org.springframework.boot.autoconfigure.data.redis.RedisRepositoriesAutoConfiguration;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.FilterType;
import org.springframework.scheduling.annotation.EnableAsync;
/**
 * Spring Boot entry point for the Kafka consumer service. Enables async
 * execution, binds {@link KafkaPropertiesConfig} from external
 * configuration, and scans for servlet components.
 *
 * @author chenle
 * @date 2021-09-06 11:12
 */
@SpringBootApplication
@EnableAsync
@EnableConfigurationProperties({KafkaPropertiesConfig.class})
@ServletComponentScan
public class KafkaConsumerApplication {
    /** Launches the Spring application context. */
    public static void main(String[] args){
        SpringApplication.run(KafkaConsumerApplication.class,args);
    }
}

View File

@ -1,136 +0,0 @@
package org.dromara.kafka.consumer.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
/**
 * Singleton holder for Kafka-related .properties files loaded from
 * src/main/resources under the working directory. Lookups fall through
 * server -> producer -> consumer -> client properties.
 */
public final class KafkaProperties
{
    private static final Logger LOG = LoggerFactory.getLogger(KafkaProperties.class);

    // Topic name; in security mode the current user needs access rights
    // granted by an administrator.
    public final static String TOPIC = "t_gps_realtime";

    private static Properties serverProps = new Properties();
    private static Properties producerProps = new Properties();
    private static Properties consumerProps = new Properties();
    private static Properties clientProps = new Properties();

    private static KafkaProperties instance = null;

    /**
     * Loads the four well-known .properties files, skipping any that do
     * not exist on disk.
     */
    private KafkaProperties()
    {
        String filePath = System.getProperty("user.dir") + File.separator + "src" + File.separator + "main"
            + File.separator + "resources" + File.separator;
        loadIfPresent(producerProps, filePath + "producer.properties");
        // Bug fix: the original checked producer.properties for existence
        // before loading consumer.properties, so consumer settings were
        // silently skipped whenever producer.properties was absent.
        loadIfPresent(consumerProps, filePath + "consumer.properties");
        loadIfPresent(serverProps, filePath + "server.properties");
        loadIfPresent(clientProps, filePath + "client.properties");
    }

    /**
     * Loads {@code path} into {@code target} when the file exists. Uses
     * try-with-resources so the stream is always closed (the original
     * leaked every FileInputStream it opened).
     */
    private static void loadIfPresent(Properties target, String path)
    {
        File file = new File(path);
        if (!file.exists())
        {
            return;
        }
        try (FileInputStream in = new FileInputStream(file))
        {
            target.load(in);
        }
        catch (IOException e)
        {
            LOG.info("The Exception occured.", e);
        }
    }

    /**
     * @return the lazily created singleton instance
     */
    public synchronized static KafkaProperties getInstance()
    {
        if (null == instance)
        {
            instance = new KafkaProperties();
        }
        return instance;
    }

    /**
     * Resolves a property value across all loaded files, falling back to
     * {@code defValue} when the key is null or unknown.
     *
     * @param key      properties key to look up
     * @param defValue value returned when the key cannot be resolved
     * @return the resolved value or {@code defValue}
     */
    public String getValues(String key, String defValue)
    {
        String rtValue = null;
        if (null == key)
        {
            LOG.error("key is null");
        }
        else
        {
            rtValue = getPropertiesValue(key);
        }
        if (null == rtValue)
        {
            LOG.warn("KafkaProperties.getValues return null, key is " + key);
            rtValue = defValue;
        }
        LOG.info("KafkaProperties.getValues: key is " + key + "; Value is " + rtValue);
        return rtValue;
    }

    /**
     * Looks the key up in server, then producer, then consumer, then
     * client properties — first hit wins.
     */
    private String getPropertiesValue(String key)
    {
        String rtValue = serverProps.getProperty(key);
        if (null == rtValue)
        {
            rtValue = producerProps.getProperty(key);
        }
        if (null == rtValue)
        {
            rtValue = consumerProps.getProperty(key);
        }
        if (null == rtValue)
        {
            rtValue = clientProps.getProperty(key);
        }
        return rtValue;
    }
}

View File

@ -1,35 +0,0 @@
package org.dromara.kafka.consumer.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Profile;
/**
 * Configuration holder bound from the "mykafka" prefix; only active in
 * the "dev" profile. Exposes the broker URL and nested consumer settings.
 *
 * @author chenle
 * @date 2021-09-06 15:13
 */
@ConfigurationProperties(prefix = "mykafka")
@Profile(value = "dev")
public class KafkaPropertiesConfig {

    /** Kafka broker bootstrap URL. */
    private String serverUrl;

    /** Nested consumer settings; initialized so it is never null. */
    private MyConsumerProperties consumerProperties = new MyConsumerProperties();

    public String getServerUrl() {
        return serverUrl;
    }

    public void setServerUrl(String serverUrl) {
        this.serverUrl = serverUrl;
    }

    public MyConsumerProperties getConsumerProperties() {
        return consumerProperties;
    }

    public void setConsumerProperties(MyConsumerProperties consumerProperties) {
        this.consumerProperties = consumerProperties;
    }
}

View File

@ -1,28 +0,0 @@
package org.dromara.kafka.consumer.config;
/**
 * Mutable holder for consumer-side Kafka settings bound from
 * configuration: the client id and the consumer group id.
 *
 * @author chenle
 * @date 2021-09-07 14:54
 */
public class MyConsumerProperties {

    /** Kafka client identifier; null until configured. */
    private String clientId;

    /** Consumer group id; defaults to "222". */
    private String groupId = "222";

    public String getGroupId() {
        return groupId;
    }

    public void setGroupId(String groupId) {
        this.groupId = groupId;
    }

    public String getClientId() {
        return clientId;
    }

    public void setClientId(String clientId) {
        this.clientId = clientId;
    }
}

View File

@ -1,159 +0,0 @@
package org.dromara.kafka.consumer.config;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.dromara.kafka.consumer.handler.KafkaSecurityUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Collections;
import java.util.Properties;
/**
 * Thread-based Kafka consumer that polls one batch from the configured
 * topic and logs each record.
 *
 * NOTE(review): this class has several suspect behaviors flagged inline —
 * the overridden start() runs doWork() on the caller's thread instead of
 * spawning one, and shutdown() interrupts the calling thread, not the
 * consumer thread. Confirm intent before reuse.
 */
public class NewConsumer extends Thread{
    private static final Logger LOG = LoggerFactory.getLogger(NewConsumer.class);
    private final KafkaConsumer<Integer, String> consumer;
    private final String topic;
    // Maximum wait time for one poll request, in milliseconds.
    private final int waitTime = 10000;
    // Broker connection address property name.
    private final String bootstrapServers = "bootstrap.servers";
    // Group id property name.
    private final String groupId = "group.id";
    // Deserializer class for record values.
    private final String valueDeserializer = "value.deserializer";
    // Deserializer class for record keys.
    private final String keyDeserializer = "key.deserializer";
    // Protocol type: SASL_PLAINTEXT or PLAINTEXT.
    private final String securityProtocol = "security.protocol";
    // Kerberos service name property.
    private final String saslKerberosServiceName = "sasl.kerberos.service.name";
    // Kerberos domain name property.
    private final String kerberosDomainName = "kerberos.domain.name";
    // Whether offsets are committed automatically.
    private final String enableAutoCommit = "enable.auto.commit";
    // Interval between automatic offset commits.
    private final String autoCommitIntervalMs = "auto.commit.interval.ms";
    // Consumer session timeout.
    private final String sessionTimeoutMs = "session.timeout.ms";
    /**
     * Keytab file name used for security mode.
     */
    private static final String USER_KEYTAB_FILE = "user.keytab";
    /**
     * Principal used for security mode.
     */
    private static final String USER_PRINCIPAL = "aqdsj_ruansi";
    /**
     * Builds a KafkaConsumer configured from KafkaProperties with the
     * hard-coded defaults below, and records the topic to subscribe to.
     *
     * @param topic topic name to consume
     */
    public NewConsumer(String topic) {
        Properties props = new Properties();
        KafkaProperties kafkaProc = KafkaProperties.getInstance();
        // Broker connection address.
        props.put(bootstrapServers,
            kafkaProc.getValues(bootstrapServers, "localhost:21007"));
        // Group id.
        props.put(groupId, "DemoConsumer");
        // Commit offsets automatically.
        props.put(enableAutoCommit, "true");
        // Auto-commit interval.
        props.put(autoCommitIntervalMs, "1000");
        // Session timeout.
        props.put(sessionTimeoutMs, "30000");
        // Key deserializer.
        props.put(keyDeserializer,
            "org.apache.kafka.common.serialization.IntegerDeserializer");
        // Value deserializer.
        props.put(valueDeserializer,
            "org.apache.kafka.common.serialization.StringDeserializer");
        // Security protocol type.
        props.put(securityProtocol, kafkaProc.getValues(securityProtocol, "SASL_PLAINTEXT"));
        // Kerberos service name.
        props.put(saslKerberosServiceName, "kafka");
        // Kerberos domain name.
        props.put(kerberosDomainName, kafkaProc.getValues(kerberosDomainName, "hadoop.hadoop.com"));
        consumer = new KafkaConsumer<Integer, String>(props);
        this.topic = topic;
    }
    /**
     * Subscribes to the topic, performs a single poll, and logs every
     * record received in that batch.
     */
    public void doWork()
    {
        // Subscribe to the configured topic.
        consumer.subscribe(Collections.singletonList(this.topic));
        // One poll request; poll(long) is the legacy blocking variant.
        ConsumerRecords<Integer, String> records = consumer.poll(waitTime);
        // Log each record in the batch.
        for (ConsumerRecord<Integer, String> record : records)
        {
            LOG.info("[NewConsumerExample], Received message: (" + record.key() + ", " + record.value()
                + ") at offset " + record.offset());
        }
    }
    /**
     * Demo entry point: performs security preparation when running in
     * security mode, starts a consumer, waits 60s, then shuts it down.
     */
    public static void main(String[] args)
    {
        if (KafkaSecurityUtil.isSecurityModel())
        {
            try
            {
                LOG.info("Securitymode start.");
                // NOTE: in security mode, replace with the applied machine account.
                KafkaSecurityUtil.securityPrepare();
            }
            catch (IOException e)
            {
                LOG.error("Security prepare failure.");
                LOG.error("The IOException occured : {}.", e);
                return;
            }
            LOG.info("Security prepare success.");
        }
        NewConsumer consumerThread = new NewConsumer(KafkaProperties.TOPIC);
        consumerThread.start();
        // Wait 60s before closing the consumer; adjust as needed.
        try
        {
            Thread.sleep(60000);
        }
        catch (InterruptedException e)
        {
            LOG.info("The InterruptedException occured : {}.", e);
        }
        finally
        {
            consumerThread.shutdown();
            consumerThread.consumer.close();
        }
    }
    // NOTE(review): this override runs doWork() synchronously on the
    // calling thread and never starts a new thread (no super.start(),
    // no run() override) — almost certainly a bug; confirm intent.
    @Override
    public synchronized void start() {
        doWork();
    }
    // NOTE(review): interrupts the *current* thread, not the consumer
    // thread — combined with the start() override above, this is a no-op
    // in practice; confirm intent.
    private void shutdown(){
        Thread.currentThread().interrupt();
    }
}

View File

@ -1,234 +0,0 @@
package org.dromara.kafka.consumer.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import cn.hutool.core.convert.ConvertException;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.ruansee.response.ApiResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfoVO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;
/**
 * Runnable that validates one Kafka GPS record and hands it to the
 * downstream pipeline. Records from "our own" cities (4-digit cityCode)
 * go through luanrequest(); other cities send a longer cityCode that is
 * truncated to 4 digits and handled by normalRequest().
 *
 * @author chenle
 * @date 2021-09-06 16:44
 */
public class ConsumerWorker implements Runnable {
    // The single Kafka record this worker processes.
    private ConsumerRecord<String, Object> record;
    private Logger logger = LoggerFactory.getLogger(ConsumerWorker.class);
    // Shared hand-off queue to the persistence side; bounded at 5000.
    // NOTE(review): raw type — element type is RemoteGpsInfo in practice.
    public static LinkedBlockingDeque linkedBlockingDeque = new LinkedBlockingDeque<>(5000);
    // City code carried with the record; >4 chars marks an external city.
    private String cityCode ;
    ConsumerWorker(ConsumerRecord<String, Object> record, String cityCode) {
        this.record = record;
        this.cityCode = cityCode;
    }
    @Override
    public void run() {
        // External cities deliberately send a cityCode longer than 4 chars;
        // truncate to the 4-digit prefix and use the generic path.
        if(cityCode.length() > 4){
            cityCode = cityCode.substring(0,4);
            normalRequest();
        }else {
            // Cities developed in-house (Lu'an, Anqing, ...) use this path.
            luanrequest();
            // luanrequestBatch();
        }
    }
    /*
     * Batch variant of luanrequest(): parses the record value as a JSON
     * array, validates each element, and collects the survivors.
     * Currently unused (call site commented out in run()).
     * */
    private void luanrequestBatch() {
        Object value = record.value();
        String topic = record.topic();
        List<EsGpsInfo> list = new ArrayList<>();
        logger.info("offset={},topic={},value={}", record.offset(), topic,value);
        List<JSONObject> jsonObjects = JSON.parseArray((String) value, JSONObject.class);
        for (JSONObject jsonObject : jsonObjects) {
            EsGpsInfo esGpsInfo;
            try {
                esGpsInfo = JSONUtil.toBean(jsonObject, EsGpsInfo.class);
            }catch (ConvertException e){
                logger.info("EsGpsInfo=null:error={}",e.getMessage());
                return;
            }
            if(Objects.isNull(esGpsInfo)){
                logger.info("esGpsInfo=null no error");
                return;
            }
            // Reject missing or over-long device codes.
            String deviceCode = esGpsInfo.getDeviceCode();
            if(StringUtils.isEmpty(deviceCode) || deviceCode.length() > 100){
                logger.info("deviceCode:{} is null or is too long ",deviceCode);
                return;
            }
            // Reject missing/zero coordinates.
            String latitude = esGpsInfo.getLat();
            if(StringUtils.isEmpty(latitude) || "0.0".equals(latitude)){
                logger.info("latitude:{} is null or is zero ",latitude);
                return;
            }
            String longitude = esGpsInfo.getLng();
            if(StringUtils.isEmpty(longitude) || "0.0".equals(longitude)){
                logger.info("longitude:{} is null or is zero ",longitude);
                return;
            }
            esGpsInfo.setInfoSource(cityCode);
            // gpsTime arrives as an epoch-millis string — TODO confirm units.
            esGpsInfo.setGpsTime(new Date(Long.valueOf(jsonObject.getStr("gpsTime"))));
            list.add(esGpsInfo);
        }
        // dataToEsService.saveGpsInfoBatch(list);
    }
    /**
     * Parses a single in-house record (JSON object), validates it, and
     * offers it to the shared hand-off queue.
     */
    private void luanrequest() {
        Object value = record.value();
        String topic = record.topic();
        logger.info("offset={},topic={},value={}", record.offset(), topic,value);
        RemoteGpsInfo esGpsInfo;
        JSONObject jsonObject;
        try {
            jsonObject = JSONUtil.parseObj(((String) value));
        }catch (ConvertException e){
            logger.info("jsonObject=null:error={}",e.getMessage());
            return;
        }
        try {
            esGpsInfo = JSONUtil.toBean(jsonObject, RemoteGpsInfo.class);
        }catch (ConvertException e){
            logger.info("EsGpsInfo=null:error={}",e.getMessage());
            return;
        }
        if(Objects.isNull(esGpsInfo)){
            logger.info("esGpsInfo=null no error");
            return;
        }
        // Reject missing or over-long device codes.
        String deviceCode = esGpsInfo.getDeviceCode();
        if(StringUtils.isEmpty(deviceCode) || deviceCode.length() > 100){
            logger.info("deviceCode:{} is null or is too long ",deviceCode);
            return;
        }
        // Reject missing/zero coordinates.
        String latitude = esGpsInfo.getLat();
        if(StringUtils.isEmpty(latitude) || "0.0".equals(latitude)){
            logger.info("latitude:{} is null or is zero ",latitude);
            return;
        }
        String longitude = esGpsInfo.getLng();
        if(StringUtils.isEmpty(longitude) || "0.0".equals(longitude)){
            logger.info("longitude:{} is null or is zero ",longitude);
            return;
        }
        esGpsInfo.setInfoSource(cityCode);
        // gpsTime arrives as an epoch-millis string — TODO confirm units.
        try {
            esGpsInfo.setGpsTime(new Date(Long.valueOf(jsonObject.getStr("gpsTime"))));
        }catch (Exception e){
            logger.error("error_msg={}",e.getMessage());
        }
        logger.info("esGpsInfo={}",esGpsInfo);
        // Non-blocking offer: drops the record when the queue is full.
        boolean offer = linkedBlockingDeque.offer(esGpsInfo);
        R response = R.ok(offer);
        // NOTE(review): R.ok(...) never returns null, so this null check
        // and the non-200 branch below look unreachable — confirm.
        if(Objects.isNull(response)){
            logger.info("response == null");
        }
        logger.info("code={},msg={}",response.getCode(),response.getMsg());
        if(200 == response.getCode()){
            logger.info("topic={},data2es={},gpsTime={}",topic,"success",esGpsInfo.getGpsTime());
        }else{
            logger.info("topic={},data2es={}",topic,response.getMsg());
        }
    }
    /**
     * Parses an external-city record (EsGpsInfoVO shape), validates the
     * timestamp/device/coordinates, maps it onto RemoteGpsInfo, and
     * offers it to the shared hand-off queue.
     */
    private void normalRequest() {
        Object value = record.value();
        String topic = record.topic();
        logger.info("offset={},topic={},value={}", record.offset(), topic,value);
        RemoteGpsInfo esGpsInfo = new RemoteGpsInfo();
        EsGpsInfoVO esGpsInfoVO;
        try {
            esGpsInfoVO = JSONUtil.toBean(((String) value), EsGpsInfoVO.class);
        }catch (ConvertException e){
            logger.info("esGpsInfoVO=null:error={}",e.getMessage());
            return;
        }
        if(Objects.isNull(esGpsInfoVO)){
            logger.info("esGpsInfoVO=null no error");
            return;
        }
        // Validate the timestamp format; the parsed value itself is unused.
        try {
            DateTime parse = DateUtil.parse(esGpsInfoVO.getGpsTime(), "yyyy-MM-dd HH:mm:ss");
        }catch (Exception e){
            logger.info("gpsTime:{} format error", esGpsInfoVO.getGpsTime());
            return;
        }
        // Reject missing or over-long device codes.
        String deviceCode = esGpsInfoVO.getDeviceCode();
        if(StringUtils.isEmpty(deviceCode) || deviceCode.length() > 100){
            logger.info("deviceCode:{} is null or is too long ",deviceCode);
            return;
        }
        // Reject missing/zero coordinates.
        String latitude = esGpsInfoVO.getLatitude();
        if(StringUtils.isEmpty(latitude) || "0.0".equals(latitude)){
            logger.info("latitude:{} is null or is zero ",latitude);
            return;
        }
        String longitude = esGpsInfoVO.getLongitude();
        if(StringUtils.isEmpty(longitude) || "0.0".equals(longitude)){
            logger.info("longitude:{} is null or is zero ",longitude);
            return;
        }
        // Copy matching fields, then map the differently named ones by hand.
        BeanUtil.copyProperties(esGpsInfoVO,esGpsInfo,new CopyOptions());
        esGpsInfo.setLat(latitude);
        esGpsInfo.setLng(esGpsInfoVO.getLongitude());
        esGpsInfo.setOrientation(esGpsInfoVO.getDirection());
        esGpsInfo.setInfoSource(cityCode);
        // Non-blocking offer: drops the record when the queue is full.
        boolean offer = linkedBlockingDeque.offer(esGpsInfo);
        R response = R.ok(offer);
        if(200 == response.getCode()){
            logger.info("topic={},data2es={}",topic,"success");
        }else{
            logger.error("topic={},data2es={}",topic,"fail");
        }
    }
}

View File

@ -1,98 +0,0 @@
package org.dromara.kafka.consumer.handler;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.PartitionInfo;
import org.apache.kafka.common.TopicPartition;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.MessageListener;
import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ThreadPoolExecutor;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-09-06 16:39
*/
/**
 * Long-running worker that subscribes to the configured topics, skips any
 * backlog by seeking to the latest offsets, and hands each polled record to
 * the task pool as a ConsumerWorker.
 */
public class KafkaConsumerRunnable implements Runnable {
    /** Consumer configuration; also carries the topic list under the extra key "topics". */
    private final Map<String, Object> props;
    /** Pool that executes one ConsumerWorker per polled record. */
    private final ThreadPoolExecutor taskExecutor;
    /** City code stamped on every record handed to the workers. */
    private final String cityCode;
    private final Logger logger = LoggerFactory.getLogger(KafkaConsumerRunnable.class);

    public KafkaConsumerRunnable(Map props, ThreadPoolExecutor taskExecutor,
                                 String cityCode) {
        this.props = props;
        this.taskExecutor = taskExecutor;
        this.cityCode = cityCode;
    }

    // NOTE(review): the three factory helpers below are never called inside
    // this class; kept for reference, candidates for removal.
    private DefaultKafkaConsumerFactory<String, String> buildConsumerFactory() {
        return new DefaultKafkaConsumerFactory<>(props);
    }

    private ContainerProperties containerProperties(String[] topic, MessageListener<String, Object> messageListener) {
        ContainerProperties containerProperties = new ContainerProperties(topic);
        containerProperties.setMessageListener(messageListener);
        return containerProperties;
    }

    private KafkaListenerContainerFactory buildListenerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(buildConsumerFactory());
        factory.setConcurrency(4);
        factory.setBatchListener(true);
        factory.getContainerProperties().setPollTimeout(3000);
        return factory;
    }

    @Override
    public void run() {
        KafkaConsumer<String, Object> consumer = new KafkaConsumer<>(props);
        @SuppressWarnings("unchecked")
        List<String> topics = (List<String>) props.get("topics");
        consumer.subscribe(topics);
        // First poll makes the subscription/assignment effective before we seek;
        // poll(Duration.ZERO) replaces the deprecated poll(0).
        consumer.poll(Duration.ZERO);
        List<TopicPartition> topicPartitions = new ArrayList<>();
        Map<String, List<PartitionInfo>> partitionsByTopic = consumer.listTopics();
        for (String topic : topics) {
            List<PartitionInfo> partitionInfos = partitionsByTopic.get(topic);
            if (partitionInfos == null) {
                // Topic not known to the broker yet; original code would NPE here.
                logger.info("no partitions found for topic={}", topic);
                continue;
            }
            for (PartitionInfo partitionInfo : partitionInfos) {
                topicPartitions.add(new TopicPartition(topic, partitionInfo.partition()));
            }
        }
        // Skip the backlog: start consuming from the latest offset of every
        // assigned partition (empty list would mean "all assigned partitions").
        consumer.seekToEnd(topicPartitions);
        while (true) {
            ConsumerRecords<String, Object> records = consumer.poll(Duration.ofMillis(100));
            for (ConsumerRecord<String, Object> record : records) {
                taskExecutor.submit(new ConsumerWorker(record, cityCode));
            }
        }
    }
}

View File

@ -1,108 +0,0 @@
package org.dromara.kafka.consumer.handler;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-10-28 14:48
*/
/**
 * Helpers for detecting and preparing Kerberos-secured kafka client mode:
 * reads the "kafkaSecurityMode" classpath marker and, when enabled, wires the
 * krb5/JAAS system properties via LoginUtil.
 */
public class KafkaSecurityUtil {
    static Logger logger = LoggerFactory.getLogger(KafkaSecurityUtil.class);

    /** Ad-hoc smoke test for date parsing; not used at runtime. */
    public static void main(String[] args) {
        EsGpsInfo esGpsInfo = new EsGpsInfo();
        String realtime = "2021/11/04 12:00:11";
        DateTime dateTime = DateUtil.parse(realtime);
        esGpsInfo.setGpsTime(dateTime.toJdkDate());
        logger.info("esGpsInfo:{},deviceType={},gpsTime={}", esGpsInfo.toString(),
            esGpsInfo.getDeviceType(), dateTime.toJdkDate().toString());
    }

    /** Keytab file name (the actual path used below is hard-coded). */
    private static final String USER_KEYTAB_FILE = "user.keytab";
    /** Kerberos principal used for the kafka client login. */
    private static final String USER_PRINCIPAL = "aqdsj_ruansi@HADOOP.COM";

    /**
     * Points the JVM at the hard-coded krb5.conf/keytab under /gpsstore and
     * generates the JAAS login configuration.
     *
     * @throws IOException if LoginUtil fails to apply the configuration
     */
    public static void securityPrepare() throws IOException {
        logger.error("进入了---securityPrepare");
        String krbFile = "/gpsstore/krb5.conf";
        String userKeyTableFile = "/gpsstore/user.keytab";
        // Normalize windows path separators for the generated jaas content.
        userKeyTableFile = userKeyTableFile.replace("\\", "\\\\");
        krbFile = krbFile.replace("\\", "\\\\");
        LoginUtil.setKrb5Config(krbFile);
        LoginUtil.setZookeeperServerPrincipal("zookeeper/hadoop.hadoop.com");
        logger.error("userKeyTableFile路径---{}", userKeyTableFile);
        LoginUtil.setJaasFile(USER_PRINCIPAL, userKeyTableFile);
    }

    /**
     * Reads the "kafkaSecurityMode" classpath resource and reports whether
     * kafka.client.security.mode=yes. Missing resource or any error means
     * non-secure mode (best-effort behavior preserved).
     */
    public static Boolean isSecurityModel() {
        Boolean isSecurity = false;
        // Fix: close the classpath stream and guard against it being absent.
        try (InputStream inputStream = Thread.currentThread()
                .getContextClassLoader().getResourceAsStream("kafkaSecurityMode")) {
            if (inputStream == null) {
                return isSecurity;
            }
            Properties securityProps = new Properties();
            securityProps.load(inputStream);
            if ("yes".equalsIgnoreCase(securityProps.getProperty("kafka.client.security.mode"))) {
                isSecurity = true;
            }
        } catch (Exception e) {
            // Fix: pass the Throwable as the trailing argument (no placeholder)
            // so SLF4J logs the full stack trace instead of just toString().
            logger.info("The Exception occured.", e);
        }
        return isSecurity;
    }

    /*
     * True if the given file exists on disk.
     */
    private static boolean isFileExists(String fileName) {
        File file = new File(fileName);
        return file.exists();
    }
}

View File

@ -1,215 +0,0 @@
package org.dromara.kafka.consumer.handler;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-10-28 15:40
*/
/**
 * Prepares JVM system properties and the JAAS configuration file required for
 * a Kerberos-secured Kafka/Zookeeper client login.
 */
public class LoginUtil
{
    /**
     * JAAS login-context sections that appear in the generated jaas.conf.
     */
    public enum Module
    {
        STORM("StormClient"), KAFKA("KafkaClient"), ZOOKEEPER("Client");
        private String name;
        private Module(String name)
        {
            this.name = name;
        }
        public String getName()
        {
            return name;
        }
    }
    /**
     * line operator string
     */
    private static final String LINE_SEPARATOR = System.getProperty("line.separator");
    /**
     * jaas file postfix
     */
    private static final String JAAS_POSTFIX = ".jaas.conf";
    /**
     * is IBM jdk or not
     */
    private static final boolean IS_IBM_JDK = System.getProperty("java.vendor").contains("IBM");
    /**
     * IBM jdk login module
     */
    private static final String IBM_LOGIN_MODULE = "com.ibm.security.auth.module.Krb5LoginModule required";
    /**
     * oracle jdk login module
     */
    private static final String SUN_LOGIN_MODULE = "com.sun.security.auth.module.Krb5LoginModule required";
    /**
     * Zookeeper quorum principal.
     */
    public static final String ZOOKEEPER_AUTH_PRINCIPAL = "zookeeper.server.principal";
    /**
     * java security krb5 file path
     */
    public static final String JAVA_SECURITY_KRB5_CONF = "java.security.krb5.conf";
    /**
     * java security login file path
     */
    public static final String JAVA_SECURITY_LOGIN_CONF = "java.security.auth.login.config";
    /**
     * Generates a jaas.conf in the JVM temp directory for the given principal
     * and keytab, and points java.security.auth.login.config at it.
     *
     * @param principal kerberos principal to log in as
     * @param keytabPath path of the keytab file backing the principal
     * @throws IOException if the jaas file cannot be (re)created
     */
    public static void setJaasFile(String principal, String keytabPath)
        throws IOException
    {
        String jaasPath =
            new File(System.getProperty("java.io.tmpdir")) + File.separator + System.getProperty("user.name")
                + JAAS_POSTFIX;
        // Normalize windows path separators.
        jaasPath = jaasPath.replace("\\", "\\\\");
        // Remove any stale jaas file before writing a fresh one.
        deleteJaasFile(jaasPath);
        writeJaasFile(jaasPath, principal, keytabPath);
        System.setProperty(JAVA_SECURITY_LOGIN_CONF, jaasPath);
    }
    /**
     * Sets the zookeeper server principal system property and verifies the
     * value actually took effect.
     *
     * @param zkServerPrincipal zookeeper quorum principal
     * @throws IOException if the property could not be set as requested
     */
    public static void setZookeeperServerPrincipal(String zkServerPrincipal)
        throws IOException
    {
        System.setProperty(ZOOKEEPER_AUTH_PRINCIPAL, zkServerPrincipal);
        String ret = System.getProperty(ZOOKEEPER_AUTH_PRINCIPAL);
        if (ret == null)
        {
            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is null.");
        }
        if (!ret.equals(zkServerPrincipal))
        {
            throw new IOException(ZOOKEEPER_AUTH_PRINCIPAL + " is " + ret + " is not " + zkServerPrincipal + ".");
        }
    }
    /**
     * Points java.security.krb5.conf at the given krb5 configuration file and
     * verifies the property took effect.
     *
     * @param krb5ConfFile path of the krb5.conf file
     * @throws IOException if the property could not be set as requested
     */
    public static void setKrb5Config(String krb5ConfFile)
        throws IOException
    {
        System.setProperty(JAVA_SECURITY_KRB5_CONF, krb5ConfFile);
        String ret = System.getProperty(JAVA_SECURITY_KRB5_CONF);
        if (ret == null)
        {
            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is null.");
        }
        if (!ret.equals(krb5ConfFile))
        {
            throw new IOException(JAVA_SECURITY_KRB5_CONF + " is " + ret + " is not " + krb5ConfFile + ".");
        }
    }
    /**
     * Writes the jaas configuration content for all modules to the given path.
     *
     * @throws IOException if the file cannot be written
     */
    private static void writeJaasFile(String jaasPath, String principal, String keytabPath)
        throws IOException
    {
        FileWriter writer = new FileWriter(new File(jaasPath));
        try
        {
            writer.write(getJaasConfContext(principal, keytabPath));
            writer.flush();
        }
        catch (IOException e)
        {
            throw new IOException("Failed to create jaas.conf File");
        }
        finally
        {
            writer.close();
        }
    }
    // Deletes an existing jaas file, failing loudly if it cannot be removed.
    private static void deleteJaasFile(String jaasPath)
        throws IOException
    {
        File jaasFile = new File(jaasPath);
        if (jaasFile.exists())
        {
            if (!jaasFile.delete())
            {
                throw new IOException("Failed to delete exists jaas file.");
            }
        }
    }
    // Concatenates one login section per Module value.
    private static String getJaasConfContext(String principal, String keytabPath)
    {
        Module[] allModule = Module.values();
        StringBuilder builder = new StringBuilder();
        for (Module modlue : allModule)
        {
            builder.append(getModuleContext(principal, keytabPath, modlue));
        }
        return builder.toString();
    }
    // Renders a single jaas login section; the IBM JDK uses a different
    // Krb5LoginModule class and option names than the Sun/Oracle JDK.
    private static String getModuleContext(String userPrincipal, String keyTabPath, Module module)
    {
        StringBuilder builder = new StringBuilder();
        if (IS_IBM_JDK)
        {
            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
            builder.append(IBM_LOGIN_MODULE).append(LINE_SEPARATOR);
            builder.append("credsType=both").append(LINE_SEPARATOR);
            builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR);
            builder.append("useKeytab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR);
            builder.append("debug=true;").append(LINE_SEPARATOR);
            builder.append("};").append(LINE_SEPARATOR);
        }
        else
        {
            builder.append(module.getName()).append(" {").append(LINE_SEPARATOR);
            builder.append(SUN_LOGIN_MODULE).append(LINE_SEPARATOR);
            builder.append("useKeyTab=true").append(LINE_SEPARATOR);
            builder.append("keyTab=\"" + keyTabPath + "\"").append(LINE_SEPARATOR);
            builder.append("principal=\"" + userPrincipal + "\"").append(LINE_SEPARATOR);
            builder.append("useTicketCache=false").append(LINE_SEPARATOR);
            builder.append("storeKey=true").append(LINE_SEPARATOR);
            builder.append("debug=true;").append(LINE_SEPARATOR);
            builder.append("};").append(LINE_SEPARATOR);
        }
        return builder.toString();
    }
}

View File

@ -1,130 +0,0 @@
package org.dromara.kafka.consumer.handler;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.dromara.kafka.consumer.config.KafkaPropertiesConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-09-06 11:15
*/
@Component
public class RealConsumer implements CommandLineRunner {
    // NOTE(review): these fields are unconditionally overwritten in run(); the
    // command-line override block is commented out — confirm whether args-based
    // configuration should be restored.
    private String kafkaServers;
    private String groupId;
    private String topics;
    private String cityCode = "3400";
    @Autowired
    KafkaPropertiesConfig kafkaPropertiesConfig;
    @Autowired
    ThreadPoolExecutor dtpExecutor2;
    private Logger logger = LoggerFactory.getLogger(RealConsumer.class);
    /**
     * Boots a single background kafka consumer thread on application startup,
     * performing kerberos security preparation first when the classpath marker
     * says secure mode is enabled.
     */
    @Override
    public void run(String... args) throws Exception {
        kafkaServers = "127.0.0.1:9092";
        topics = "topic.send.2,topic.send.3,topic.send.4,topic.send.5,topic.send.8";
        groupId = "group_ruansi_xuancheng";
        cityCode = "3418";
        if(args.length > 0){
            /*kafkaServers = args[0];
            topics = args[1];
            groupId = args[2];
            cityCode = args[3];*/
        }
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        Map kafkaProp = getKafkaProp();
        if (KafkaSecurityUtil.isSecurityModel())
        {
            try
            {
                logger.info("Securitymode start.");
                // Security auth requires the manually requested machine account.
                // Protocol: SASL_PLAINTEXT or PLAINTEXT
                kafkaProp.put("security.protocol","SASL_PLAINTEXT");
                // Kerberos service name
                kafkaProp.put("sasl.kerberos.service.name","kafka");
                // Kerberos domain name
                kafkaProp.put("kerberos.domain.name","hadoop.hadoop.com");
                KafkaSecurityUtil.securityPrepare();
            }
            catch (IOException e)
            {
                logger.error("Security prepare failure.");
                logger.error("The IOException occured.", e);
                return;
            }
            logger.info("Security prepare success.");
        }
        KafkaConsumerRunnable runnable = new KafkaConsumerRunnable(kafkaProp,dtpExecutor2,cityCode);
        executorService.execute(runnable);
    }
    /**
     * Builds the kafka consumer configuration; the topic list is carried under
     * the extra key "topics" for KafkaConsumerRunnable to read back.
     *
     * @return consumer property map
     */
    private Map<String, Object> getKafkaProp() {
        // Properties map = new Properties();
        Map<String, Object> map = new HashMap<>();
        map.put("bootstrap.servers",kafkaServers);
        map.put("group.id",groupId);
        map.put("enable.auto.commit", "true");
        map.put("auto.commit.interval.ms", "1000");
        map.put("session.timeout.ms", "30000");
        map.put("key.deserializer", StringDeserializer.class);
        map.put("value.deserializer", StringDeserializer.class);
        map.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG,5);
        // map.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG,1000 * 5);
        // map.put("ack.mode", "manual_immediate");
        // Security-mode properties are added in run() when needed.
        String[] split = topics.split(",");
        List list = CollectionUtils.arrayToList(split);
        map.put("topics", list);
        return map;
    }
}

View File

@ -1,32 +0,0 @@
# Tomcat
server:
port: 9214
# Spring
spring:
application:
# 应用名称
name: stwzhj-consumer
profiles:
# 环境配置
active: @profiles.active@
--- # nacos 配置
spring:
cloud:
nacos:
# nacos 服务地址
server-addr: @nacos.server@
username: @nacos.username@
password: @nacos.password@
discovery:
# 注册组
group: @nacos.discovery.group@
namespace: ${spring.profiles.active}
config:
# 配置组
group: @nacos.config.group@
namespace: ${spring.profiles.active}
config:
import:
- optional:nacos:application-common.yml

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -1,24 +0,0 @@
package org.dromara.data2es.config;
import org.dromara.data2es.handler.RedisExpireListener;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;
@Configuration
public class RedisListenerConfig {

    /** Container that dispatches redis pub/sub messages to registered listeners. */
    @Bean
    RedisMessageListenerContainer listenerContainer(RedisConnectionFactory connectionFactory) {
        RedisMessageListenerContainer container = new RedisMessageListenerContainer();
        container.setConnectionFactory(connectionFactory);
        return container;
    }

    /** Registers the key-expiration listener on the message listener container. */
    @Bean
    KeyExpirationEventMessageListener redisKeyExpirationListener(RedisMessageListenerContainer listenerContainer) {
        return new RedisExpireListener(listenerContainer);
    }
}

View File

@ -1,70 +0,0 @@
package org.dromara.data2es.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.dromara.common.core.utils.RedisConstants;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.data2es.controller.DataToEsController;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.redis.connection.Message;
import org.springframework.data.redis.connection.MessageListener;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
import org.springframework.data.redis.listener.RedisMessageListenerContainer;
import org.springframework.stereotype.Component;
import java.util.Date;
import java.util.Objects;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-11-08 16:40
*/
@Component
public class RedisExpireListener extends KeyExpirationEventMessageListener {
    @Autowired
    DataToEsController dataToEsController;
    Logger logger = LoggerFactory.getLogger(RedisExpireListener.class);

    /**
     * Creates new {@link MessageListener} for {@code __keyevent@*__:expired} messages.
     *
     * @param listenerContainer must not be {@literal null}.
     */
    public RedisExpireListener(RedisMessageListenerContainer listenerContainer) {
        super(listenerContainer);
    }

    /**
     * On expiry of an "online device" key (layout: prefix:orgCode:deviceType:deviceCode),
     * re-writes the matching cached GPS record with online=0 so the device shows
     * as offline.
     */
    @Override
    public void onMessage(Message message, byte[] pattern) {
        String expireKey = message.toString();
        if (StringUtils.isNotEmpty(expireKey) &&
            expireKey.startsWith(RedisConstants.ORG_CODE_PRE)) {
            String[] split = expireKey.split(":");
            // Fix: guard against keys that do not follow the 4-part layout
            // (original code would throw ArrayIndexOutOfBoundsException).
            if (split.length < 4) {
                logger.warn("unexpected expired key layout:{}", expireKey);
                return;
            }
            String zzjgdm = split[1];
            String deviceType = split[2];
            String deviceCode = split[3];
            if (StringUtils.isNotEmpty(zzjgdm)) {
                JSONObject object = RedisUtils.getBucket(RedisConstants.ONLINE_USERS + zzjgdm + ":"
                    + deviceType + ":" + deviceCode);
                // Fix: the cached record may already be gone; skip instead of
                // converting null.
                if (Objects.isNull(object)) {
                    logger.info("no cached gps info for expired key:{}", expireKey);
                    return;
                }
                EsGpsInfoVO2 gpsInfo = BeanUtil.toBean(object, EsGpsInfoVO2.class);
                gpsInfo.setGpsTime(new Date());
                gpsInfo.setOnline(0);
                dataToEsController.saveGpsInfo(gpsInfo);
            }
        }
        logger.info("redis key expired:key={}", expireKey);
    }
}

View File

@ -1,120 +0,0 @@
package org.dromara.data2es.handler;
/*
*
* es redis kafka
* */
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONPObject;
import com.alibaba.fastjson2.util.JSONObject1O;
import jodd.util.StringUtil;
import org.apache.commons.lang.StringUtils;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.data2es.domain.EsGpsInfo;
import org.dromara.data2es.domain.EsGpsInfoVO2;
import org.dromara.data2es.service.IGpsService;
import org.dromara.data2es.util.ConfigConstants;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.scheduling.annotation.Async;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CompletableFuture;
/**
 * Async fan-out helpers: pushes GPS records to kafka topics, maintains the
 * redis online-user cache, and bulk-writes documents to elasticsearch.
 */
@Configuration
public class RequestHandler {
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;
    @Autowired
    private RestHighLevelClient restHighLevelClient;
    private Logger logger = LoggerFactory.getLogger(RequestHandler.class);

    /**
     * Fans a GPS record out to two kafka topics: one keyed by device type and
     * one keyed by source city + device type, so downstream consumers can
     * subscribe at either granularity. Missing fields fall back to "99" / "other".
     */
    @Async
    public void sendToKafka(EsGpsInfoVO2 esGpsInfoVO2) {
        if (Objects.isNull(esGpsInfoVO2)) {
            return;
        }
        String deviceType = esGpsInfoVO2.getDeviceType();
        if (StringUtil.isEmpty(deviceType)) {
            deviceType = "99";
        }
        String infoSource = esGpsInfoVO2.getInfoSource();
        if (StringUtils.isEmpty(infoSource)) {
            infoSource = "other";
        }
        // Serialize once instead of twice for the two sends.
        String payload = JSON.toJSONString(esGpsInfoVO2);
        kafkaTemplate.send(ConfigConstants.KAFKA_TOPIC_SEND_PRE + "." + deviceType, payload);
        // Per-city topic so a city can consume only its own devices.
        kafkaTemplate.send(ConfigConstants.KAFKA_TOPIC_SEND_PRE + "." + infoSource + "." + deviceType, payload);
    }

    /**
     * Batch-refreshes the online-user keys in redis with the given TTL.
     *
     * @param map key/value pairs to upsert
     * @param time expiry in the unit RedisUtils.batchInsert expects
     */
    @Async
    public void redisOnlineUserBatch(Map<String, String> map, long time) {
        RedisUtils.batchInsert(map, time);
    }

    /** Deletes the given redis keys in one async batch. */
    @Async
    public void redisDeleteBatch(List<String> deleteKeys) {
        RedisUtils.deleteObject(deleteKeys);
    }

    /**
     * Bulk-writes GPS documents to elasticsearch; failures are logged, not
     * retried (best-effort behavior preserved).
     *
     * @param bulkRequest prepared ES bulk request
     */
    @Async
    public void esRealBulkSave(BulkRequest bulkRequest) {
        try {
            BulkResponse response = restHighLevelClient.bulk(bulkRequest, RequestOptions.DEFAULT);
            // Fix: only build and log the failure message when something failed;
            // the original logged it unconditionally under a misleading "b={}".
            if (response.hasFailures()) {
                logger.error("es bulk failures={}", response.buildFailureMessage());
            }
        } catch (IOException e) {
            // Fix: log with the Throwable instead of printStackTrace().
            logger.error("batchInsert error", e);
        }
    }
}

View File

@ -1,4 +0,0 @@
package org.dromara.data2es.mapper;
// NOTE(review): empty placeholder — presumably a data-access mapper for the
// device table; no mapping methods defined yet.
public class TDeviceMapper {
}

View File

@ -1,13 +0,0 @@
package org.dromara.data2es.schedule;
/**
 * <p>description: empty placeholder for a scheduled task — presumably intended
 * to maintain online-user state in redis; no jobs defined yet.</p>
 *
 * @author chenle
 * @date 2021-05-18 18:23
 */
public class RedisOnlineUserSchedule {
}

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -0,0 +1 @@
package org.dromara.data2gs.service;

View File

@ -0,0 +1,139 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-modules</artifactId>
<version>${revision}</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-kafkaToWebsocket</artifactId>
<description>
stwzhj-kafkaToWebsocket kafka消息发送到Websocket
</description>
<dependencies>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-nacos</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-sentinel</artifactId>
</dependency>
<!-- RuoYi Common Log -->
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-log</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-dict</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-doc</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-web</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-dubbo</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-seata</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-idempotent</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-tenant</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-security</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-translation</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-sensitive</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-common-encrypt</artifactId>
</dependency>
<!-- RuoYi Api System -->
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api-system</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api-resource</artifactId>
</dependency>
<dependency>
<groupId>org.dromara</groupId>
<artifactId>stwzhj-api-location</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-websocket</artifactId>
</dependency>
<!-- kafka -->
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
</dependencies>
<build>
<finalName>${project.artifactId}</finalName>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
<version>${spring-boot.version}</version>
<executions>
<execution>
<goals>
<goal>repackage</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,26 @@
package org.dromara.kafka2Websocket;
import org.apache.dubbo.config.spring.context.annotation.EnableDubbo;
import org.dromara.kafka2Websocket.config.KafkaProperties;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.metrics.buffering.BufferingApplicationStartup;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.scheduling.annotation.EnableScheduling;
@EnableDubbo
@EnableScheduling
@SpringBootApplication
@EnableConfigurationProperties(KafkaProperties.class)
public class KafkaToSocketApplication {
    /**
     * Entry point for the kafka-to-websocket bridge service. Startup steps are
     * buffered so application-startup timings can be inspected.
     */
    public static void main(String[] args) {
        SpringApplication bootstrap = new SpringApplication(KafkaToSocketApplication.class);
        bootstrap.setApplicationStartup(new BufferingApplicationStartup(2048));
        bootstrap.run(args);
        System.out.println("(♥◠‿◠)ノ゙ Socket启动成功 ლ(´ڡ`ლ)゙ ");
    }
}

View File

@ -0,0 +1,66 @@
package org.dromara.kafka2Websocket.config;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.listener.ConsumerAwareListenerErrorHandler;
import org.springframework.kafka.listener.ContainerProperties;
import java.util.HashMap;
import java.util.Map;
// Kafka consumer configuration for the websocket bridge.
@Configuration
@Slf4j
public class KafkaConfig {
    /** Builds the consumer factory from the externalized app.kafka properties. */
    @Bean
    public ConsumerFactory<String, String> consumerFactory(KafkaProperties properties) {
        Map<String, Object> config = new HashMap<>();
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, properties.getBootstrapServers());
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        // NOTE(review): auto-commit is disabled here but the listener factory
        // below keeps the default (container-managed) ack mode, since the
        // MANUAL setting is commented out — confirm the intended offset strategy.
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
        config.put(ConsumerConfig.GROUP_ID_CONFIG,properties.getGroupId());
        return new DefaultKafkaConsumerFactory<>(config);
    }
    /** Batch listener container factory with 3 concurrent consumers. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory(
        ConsumerFactory<String, String> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
            new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.setBatchListener(true);
        factory.setConcurrency(3);
        // Core setting: manual commit mode (currently disabled).
        // factory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        return factory;
    }
    /**
     * Error handler invoked when a listener throws; logs and swallows the error.
     *
     * @return the consumer-aware error handler bean
     */
    @Bean
    public ConsumerAwareListenerErrorHandler consumerAwareListenerErrorHandler() {
        return (message, exception, consumer) -> {
            log.error("消息{} , 异常原因{}", message, exception.getMessage());
            log.error("consumerAwareListenerErrorHandler called");
            return null;
        };
    }
}

View File

@ -0,0 +1,17 @@
package org.dromara.kafka2Websocket.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
import java.util.List;
// Externalized kafka settings bound from the app.kafka.* configuration keys.
@Data
@ConfigurationProperties(prefix = "app.kafka")
public class KafkaProperties {
    // Broker bootstrap servers, e.g. "host1:9092,host2:9092".
    private String bootstrapServers;
    // Topics the bridge subscribes to.
    private List<String> topics;
    // Consumer group id.
    private String groupId;
    // Listener container concurrency; defaults to 3.
    private int concurrency = 3;
}

View File

@ -0,0 +1,58 @@
package org.dromara.kafka2Websocket.config;
import cn.hutool.core.util.StrUtil;
import jakarta.websocket.OnOpen;
import org.dromara.kafka2Websocket.dto.SharedState;
import org.dromara.kafka2Websocket.handle.KafkaWebSocketHandler;
import org.dromara.kafka2Websocket.interceptor.AuthInterceptor;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.config.annotation.EnableWebSocket;
import org.springframework.web.socket.config.annotation.WebSocketConfigurer;
import org.springframework.web.socket.server.HandshakeInterceptor;
// 3. WebSocket配置WebSocketConfig.java
/**
 * Wires the WebSocket endpoint: maps the Kafka-backed handler onto
 * "ws/websocket", attaches the handshake interceptor, and allows any origin.
 */
@Configuration
@EnableWebSocket
public class WebSocketConfig {

    /**
     * Registers the handler and interceptor on the "ws/websocket" path with
     * unrestricted cross-origin access.
     *
     * @param handshakeInterceptor interceptor run during the handshake
     * @param webSocketHandler     handler receiving the established sessions
     * @return the registration callback Spring invokes at startup
     */
    @Bean
    public WebSocketConfigurer webSocketConfigurer(HandshakeInterceptor handshakeInterceptor,
                                                   WebSocketHandler webSocketHandler) {
        return registry -> {
            registry.addHandler(webSocketHandler, "ws/websocket")
                    .addInterceptors(handshakeInterceptor)
                    .setAllowedOrigins("*");
        };
    }

    /** Handshake-time authentication interceptor. */
    @Bean
    public HandshakeInterceptor handshakeInterceptor() {
        return new AuthInterceptor();
    }

    /** Handler that pushes Kafka records to connected clients. */
    @Bean
    public WebSocketHandler webSocketHandler() {
        return new KafkaWebSocketHandler();
    }
}

View File

@ -0,0 +1,26 @@
package org.dromara.kafka2Websocket.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* WebSocket
*
* @author zendwang
*/
/**
 * WebSocket endpoint properties (enable switch, path, allowed origins).
 * NOTE(review): the @ConfigurationProperties binding is commented out, so this
 * class is currently not populated from configuration — confirm whether it is
 * still needed.
 *
 * @author zendwang
 */
//@ConfigurationProperties("websocket")
@Data
public class WebSocketProperties {
    // Whether the WebSocket endpoint is enabled
    private Boolean enabled;
    /**
     * Endpoint path.
     */
    private String path;
    /**
     * Allowed cross-origin request sources.
     */
    private String allowedOrigins;
}

View File

@ -0,0 +1,113 @@
package org.dromara.kafka2Websocket.consumer;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.dromara.common.satoken.utils.LoginHelper;
import org.dromara.kafka2Websocket.config.KafkaProperties;
import org.dromara.kafka2Websocket.handle.KafkaWebSocketHandler;
import org.dromara.system.api.model.LoginUser;
import org.springframework.context.SmartLifecycle;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.KafkaTemplate;
import java.time.Duration;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
//@Configuration
//@Slf4j
/**
 * Manually managed Kafka consumer whose lifecycle follows the Spring context
 * (SmartLifecycle). Polls the configured topics on a dedicated thread,
 * broadcasts each record to connected WebSocket sessions, and forwards records
 * that fail to broadcast to a "dlq-&lt;topic&gt;" dead-letter topic.
 *
 * NOTE(review): @Configuration is commented out above, so this class is not
 * currently registered as a bean — confirm whether it is still in use.
 */
public class KafkaConsumerManager implements SmartLifecycle {

    // Lifecycle flag; also terminates the poll loop on stop().
    private final AtomicBoolean running = new AtomicBoolean(false);
    private final KafkaProperties properties;
    private final KafkaWebSocketHandler webSocketHandler;
    private final KafkaTemplate<String, String> kafkaTemplate;
    private KafkaConsumer<String, String> consumer;
    private Thread consumerThread;

    public KafkaConsumerManager(KafkaProperties properties,
                                KafkaWebSocketHandler webSocketHandler,
                                KafkaTemplate<String, String> kafkaTemplate) {
        this.properties = properties;
        this.webSocketHandler = webSocketHandler;
        this.kafkaTemplate = kafkaTemplate;
    }

    /** Starts the consumer thread exactly once (guarded by compareAndSet). */
    @Override
    public void start() {
        if (running.compareAndSet(false, true)) {
            initializeConsumer();
            consumerThread = new Thread(this::consumeMessages);
            consumerThread.start();
        }
    }

    /** Builds the KafkaConsumer from KafkaProperties and subscribes to all configured topics. */
    private void initializeConsumer() {
        Properties props = new Properties();
        props.put("bootstrap.servers", properties.getBootstrapServers());
        props.put("group.id", properties.getGroupId());
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Offsets are committed explicitly after each processed batch.
        props.put("enable.auto.commit", "false");
        props.put("auto.offset.reset", "earliest");
        consumer = new KafkaConsumer<>(props);
        consumer.subscribe(properties.getTopics());
    }

    /**
     * Poll loop run on the dedicated thread. Exits when stop() clears the
     * running flag; stop() also calls consumer.wakeup(), which aborts a
     * blocked poll() with an exception that lands in the catch below.
     */
    private void consumeMessages() {
        try {
            // Fix: honour the lifecycle flag instead of looping forever.
            while (running.get()) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000));
                processRecords(records);
                consumer.commitSync();
            }
        } catch (Exception e) {
            // Fix: do not swallow silently. A wakeup during shutdown
            // (running == false) is expected; anything else is surfaced.
            if (running.get()) {
                e.printStackTrace();
            }
        } finally {
            consumer.close();
        }
    }

    /**
     * Broadcasts each record to WebSocket clients scoped by the login user's
     * managing department; failed records go to the dead-letter topic.
     * NOTE(review): LoginHelper.getLoginUser() is invoked on the consumer
     * thread, where no request/login context is bound — confirm this resolves.
     */
    private void processRecords(ConsumerRecords<String, String> records) {
        LoginUser user = LoginHelper.getLoginUser();
        records.forEach(record -> {
            try {
                // String formatted = formatMessage(record.topic(), record.value());
                webSocketHandler.broadcast(record.value(), user.getManageDeptId());
            } catch (Exception e) {
                handleFailedMessage(record);
            }
        });
    }

    /** Wraps a payload in a small JSON envelope carrying its topic (currently unused). */
    private String formatMessage(String topic, String payload) {
        return String.format("{\"topic\":\"%s\",\"data\":%s}", topic, payload);
    }

    /** Routes a record that could not be broadcast to "dlq-" + its source topic. */
    private void handleFailedMessage(ConsumerRecord<String, String> record) {
        kafkaTemplate.send("dlq-" + record.topic(), record.value());
    }

    /** Wakes the blocked poll and waits up to 5s for the consumer thread to finish. */
    @Override
    public void stop() {
        if (running.compareAndSet(true, false)) {
            if (consumer != null) consumer.wakeup();
            if (consumerThread != null) {
                try {
                    consumerThread.join(5000);
                } catch (InterruptedException e) {
                    // Re-assert the interrupt flag for callers further up.
                    Thread.currentThread().interrupt();
                }
            }
        }
    }

    @Override
    public boolean isRunning() {
        return running.get();
    }
}

View File

@ -0,0 +1,39 @@
package org.dromara.kafka2Websocket.consumer;
import lombok.extern.slf4j.Slf4j;
import org.dromara.kafka2Websocket.dto.SharedState;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
@Slf4j
public class KafkaConsumerService {
@Autowired
private SharedState state;
@Autowired
private MessageBufferManager bufferManager;
@KafkaListener(topics = "${spring.kafka.topics}")
public void processMessage(List<String> records) {
log.error("flag的值={}",state.getFlag());
try {
records.stream().forEach(record -> {
if (state.getFlag()){
bufferManager.bufferMessage("3413",record);
}
});
} catch (Exception e){
e.printStackTrace();
}
}
}

View File

@ -0,0 +1,71 @@
package org.dromara.kafka2Websocket.consumer;
import com.alibaba.nacos.shaded.com.google.gson.Gson;
import jakarta.websocket.Session;
import org.dromara.kafka2Websocket.holder.WebSocketSessionHolder;
import org.dromara.kafka2Websocket.utils.SocketUtils;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import org.springframework.web.socket.WebSocketSession;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
/**
 * Per-user message buffer: queues incoming payloads and flushes them to
 * WebSocket sessions either when a queue reaches BATCH_SIZE or on the
 * BUFFER_TIME_MS scheduled window, merged into a single JSON batch envelope.
 */
@Component
public class MessageBufferManager {

    // Batch threshold and flush window.
    private static final int BATCH_SIZE = 500;
    private static final long BUFFER_TIME_MS = 100;

    // Fix: Gson is thread-safe and stateless here — build it once instead of
    // allocating a new instance on every merge call.
    private static final Gson GSON = new Gson();

    // userId -> pending messages (thread-safe queue per user)
    private final ConcurrentHashMap<String, LinkedBlockingQueue<String>> userMessageQueues = new ConcurrentHashMap<>();

    /**
     * Enqueues one message for the given user and flushes immediately when the
     * queue reaches the batch threshold.
     */
    public void bufferMessage(String userId, String message) {
        LinkedBlockingQueue<String> queue = userMessageQueues.computeIfAbsent(userId, k -> new LinkedBlockingQueue<>());
        queue.offer(message);
        if (queue.size() >= BATCH_SIZE) {
            sendBufferedMessages(userId);
        }
    }

    /** Scheduled flush: drains every user's buffer on the fixed time window. */
    @Scheduled(fixedRate = BUFFER_TIME_MS)
    @Async
    public void flushBufferedMessages() {
        userMessageQueues.keySet().forEach(this::sendBufferedMessages);
    }

    /**
     * Drains one user's queue and sends the merged batch.
     * NOTE(review): the merged batch is sent to every online session key, not
     * only to {@code userId} — confirm whether broadcast is the intent.
     */
    private void sendBufferedMessages(String userId) {
        LinkedBlockingQueue<String> queue = userMessageQueues.get(userId);
        if (queue == null || queue.isEmpty()) return;
        List<String> messages = new ArrayList<>();
        queue.drainTo(messages); // atomic: takes all pending messages and empties the queue
        if (!messages.isEmpty()) {
            String mergedMessage = mergeMessages(messages);
            Map<String, CopyOnWriteArraySet<WebSocketSession>> allOnlineSessions = WebSocketSessionHolder.getAllOnlineSessions();
            allOnlineSessions.forEach((key, value) -> {
                SocketUtils.sendMessage(key, mergedMessage);
            });
            /*WebSocketSessionHolder.getSessions(userId).forEach(session -> {
                if (session.isOpen()) {
                    SocketUtils.sendMessage(session,mergedMessage);
                }
            });*/
        }
    }

    /** Merges payloads into a JSON envelope: {"type":"batch", "data":[...]} . */
    private String mergeMessages(List<String> messages) {
        return "{\"type\":\"batch\", \"data\":" + GSON.toJson(messages) + "}";
    }
}

View File

@ -0,0 +1,33 @@
package org.dromara.kafka2Websocket.dto;
import org.springframework.stereotype.Component;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Shared on/off flag coordinating the WebSocket handler (which sets it when
 * clients connect/disconnect) and the Kafka listener (which reads it).
 */
@Component
public class SharedState {

    /** Backing atomic flag; starts as false (no clients online). */
    private final AtomicBoolean flag = new AtomicBoolean(false);

    /** Atomically overwrites the flag value. */
    public void setFlag(boolean value) {
        this.flag.set(value);
    }

    /** Returns the current flag value. */
    public boolean getFlag() {
        return this.flag.get();
    }

    /** Flips false -> true; true only for the caller that performed the switch. */
    public boolean tryActivate() {
        return this.flag.compareAndSet(false, true);
    }

    /** Flips true -> false; true only for the caller that performed the switch. */
    public boolean tryActivateFalse() {
        return this.flag.compareAndSet(true, false);
    }
}

View File

@ -0,0 +1,29 @@
package org.dromara.kafka2Websocket.dto;
import lombok.Data;
import java.io.Serial;
import java.io.Serializable;
import java.util.List;
/**
* dto
*
* @author zendwang
*/
/**
 * Payload published over the Redis "global:websocket" channel to fan WebSocket
 * messages out across service instances (see SocketUtils.publishMessage).
 *
 * @author zendwang
 */
@Data
public class WebSocketMessageDto implements Serializable {

    @Serial
    private static final long serialVersionUID = 1L;

    /**
     * Session keys the message should be delivered to.
     */
    private List<Long> sessionKeys;

    /**
     * Message body.
     */
    private String message;
}

View File

@ -0,0 +1,113 @@
package org.dromara.kafka2Websocket.handle;
import cn.dev33.satoken.session.SaSession;
import cn.dev33.satoken.stp.StpUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import jakarta.annotation.PreDestroy;
import lombok.extern.slf4j.Slf4j;
import org.dromara.common.satoken.utils.LoginHelper;
import org.dromara.kafka2Websocket.dto.SharedState;
import org.dromara.kafka2Websocket.holder.WebSocketSessionHolder;
import org.dromara.system.api.model.LoginUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.TextWebSocketHandler;
import java.io.IOException;
import java.util.Collections;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import static org.dromara.common.satoken.utils.LoginHelper.LOGIN_USER_KEY;
// 4. WebSocket处理器WebSocketHandler.java
@Configuration
@Slf4j
public class KafkaWebSocketHandler extends TextWebSocketHandler {
private final ConcurrentHashMap<String, WebSocketSession> sessions = new ConcurrentHashMap<>();
private final AtomicInteger connectionCount = new AtomicInteger(0);
@Autowired
private SharedState state;
@Override
public void afterConnectionEstablished(WebSocketSession session) {
// session.getUri().getQuery();
// LoginUser loginUser = (LoginUser) session.getAttributes().get(LOGIN_USER_KEY);
sessions.put(session.getId(), session);
WebSocketSessionHolder.addSession(session.getId(),session);
connectionCount.incrementAndGet();
if(connectionCount.get() >0){ //有在线用户时
state.setFlag(true);
log.info("resume over ");
}
log.info("连接建立: {} 当前连接数: {}", session.getId(), sessions.size());
}
@Override
public void afterConnectionClosed(WebSocketSession session, CloseStatus status) {
sessions.remove(session.getId());
WebSocketSessionHolder.removeSession(session.getId());
connectionCount.decrementAndGet();
if(connectionCount.get() ==0){
// 暂停监听
state.setFlag(false);
log.info("pause");
}
log.info("连接关闭: {} 剩余连接数: {}", session.getId(), sessions.size());
}
public void broadcast(String message,String deptId) {
sessions.values().parallelStream()
.filter(WebSocketSession::isOpen)
.forEach(session -> sendMessage(session, deptId, message));
}
private void sendMessage(WebSocketSession session, String deptId, String message) {
try {
synchronized (session) { // 保证线程安全
JSONObject job = JSON.parseObject(message);
String zzjgdm = job.getString("zzjgdm");
if (deptId.endsWith("00000000")){ //如果管理机构是市局 不过滤
session.sendMessage(new TextMessage(message));
} else if (zzjgdm.substring(0,6).equals(deptId.substring(0,6))) { // 分局登录 分局接收
session.sendMessage(new TextMessage(message));
}else if (zzjgdm.substring(0,8).equals(deptId.substring(0,8))) { // 支队登录 支队接收
session.sendMessage(new TextMessage(message));
}
}
} catch (IOException e) {
// 处理发送失败
}
}
public boolean hasConnections() {
return connectionCount.get() > 0;
}
@PreDestroy
public void cleanUp() {
sessions.values().forEach(session -> {
try {
if (session.isOpen()) session.close();
} catch (IOException e) {
// 处理关闭异常
}
});
}
}

View File

@ -0,0 +1,116 @@
package org.dromara.kafka2Websocket.holder;
import jakarta.websocket.Session;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.WebSocketSession;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
/**
* WebSocketSession 线
*
* @author zendwang
*/
/**
 * Thread-safe static holder for WebSocketSession instances, keyed by session
 * key (String). Keeps a flat key->session map plus a key->session-set map
 * supporting multiple sessions per key.
 *
 * @author zendwang
 */
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class WebSocketSessionHolder {

    // Flat key -> single session map (String keys).
    private static final Map<String, WebSocketSession> USER_SESSION_MAP = new ConcurrentHashMap<>();

    // key -> session set (supports multiple devices per key)
    private static final ConcurrentHashMap<String, CopyOnWriteArraySet<WebSocketSession>> userSessions = new ConcurrentHashMap<>();

    /**
     * Registers a session, evicting (and closing) any session previously held
     * under the same key.
     *
     * @param sessionKey session key
     * @param session    the WebSocket session to store
     */
    public static void addSession(String sessionKey, WebSocketSession session) {
        removeSession(sessionKey);
        userSessions.computeIfAbsent(sessionKey, k -> new CopyOnWriteArraySet<>()).add(session);
        USER_SESSION_MAP.put(sessionKey, session);
    }

    /**
     * Removes and closes the session held under the key, if any.
     *
     * @param sessionKey session key
     */
    public static void removeSession(String sessionKey) {
        WebSocketSession session = USER_SESSION_MAP.remove(sessionKey);
        CopyOnWriteArraySet<WebSocketSession> sessions = userSessions.get(sessionKey);
        if (sessions != null) {
            sessions.remove(session);
            if (sessions.isEmpty()) {
                userSessions.remove(sessionKey);
            }
        }
        // Fix: guard against null instead of relying on the catch block to
        // swallow a NullPointerException when the key was not registered.
        if (session != null) {
            try {
                session.close(CloseStatus.BAD_DATA);
            } catch (Exception ignored) {
                // best-effort close
            }
        }
    }

    /**
     * Looks up a session by Long key.
     * NOTE(review): USER_SESSION_MAP is keyed by String, so a Long key can
     * never match — this always returns null. Callers (SocketUtils) pass Long
     * keys; confirm the intended key type.
     *
     * @param sessionKey session key
     * @return the session, or null if absent (currently always null)
     */
    public static WebSocketSession getSessions(Long sessionKey) {
        return USER_SESSION_MAP.get(sessionKey);
    }

    /**
     * Returns the live sessions for a key, pruning closed ones in place.
     */
    public static CopyOnWriteArraySet<WebSocketSession> getSessions(String userId) {
        CopyOnWriteArraySet<WebSocketSession> sessions = userSessions.get(userId);
        if (sessions == null) return new CopyOnWriteArraySet<>();
        sessions.removeIf(session -> !session.isOpen());
        return sessions;
    }

    /**
     * All registered session keys.
     *
     * @return key set of the flat session map
     */
    public static Set<String> getSessionsAll() {
        return USER_SESSION_MAP.keySet();
    }

    /**
     * Whether a session exists for the key.
     * NOTE(review): same Long-vs-String key mismatch as getSessions(Long) —
     * this always returns false.
     *
     * @param sessionKey session key
     * @return true if present, false otherwise (currently always false)
     */
    public static Boolean existSession(Long sessionKey) {
        return USER_SESSION_MAP.containsKey(sessionKey);
    }

    /** Keys that still have at least one open session. */
    public static List<String> getAllOnlineUserIds() {
        List<String> userIds = new ArrayList<>(userSessions.keySet());
        userIds.removeIf(userId -> getSessions(userId).isEmpty());
        return Collections.unmodifiableList(userIds);
    }

    /** Snapshot of all keys with their live sessions (closed sessions pruned). */
    public static Map<String, CopyOnWriteArraySet<WebSocketSession>> getAllOnlineSessions() {
        Map<String, CopyOnWriteArraySet<WebSocketSession>> copy = new ConcurrentHashMap<>();
        userSessions.forEach((userId, sessions) -> {
            CopyOnWriteArraySet<WebSocketSession> activeSessions = getSessions(userId);
            if (!activeSessions.isEmpty()) {
                copy.put(userId, activeSessions);
            }
        });
        return Collections.unmodifiableMap(copy);
    }
}

View File

@ -0,0 +1,42 @@
package org.dromara.kafka2Websocket.interceptor;
import cn.dev33.satoken.exception.NotLoginException;
import cn.hutool.http.server.HttpServerRequest;
import com.sun.net.httpserver.HttpExchange;
import lombok.extern.slf4j.Slf4j;
import org.dromara.common.satoken.utils.LoginHelper;
import org.dromara.system.api.model.LoginUser;
import org.springframework.http.server.ServerHttpRequest;
import org.springframework.http.server.ServerHttpResponse;
import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.server.HandshakeInterceptor;
import java.util.Map;
// 7. 认证拦截器AuthInterceptor.java
// 7. Handshake authentication interceptor (AuthInterceptor.java)
@Slf4j
public class AuthInterceptor implements HandshakeInterceptor {

    /**
     * Runs before the WebSocket handshake.
     * NOTE(review): authentication is currently disabled — the LoginHelper
     * lookup is commented out, so every handshake is accepted.
     *
     * @return true to continue the handshake, false to reject it
     */
    @Override
    public boolean beforeHandshake(ServerHttpRequest request,
                                   ServerHttpResponse response,
                                   WebSocketHandler wsHandler,
                                   Map<String, Object> attributes) {
        try {
            // LoginUser loginUser = LoginHelper.getLoginUser();
            // attributes.put("loginUser", loginUser);
            return true;
        } catch (NotLoginException e) {
            log.error("WebSocket 认证失败'{}',无法访问系统资源", e.getMessage());
            return false;
        }
    }

    /** No-op: nothing to do after the handshake completes. */
    @Override
    public void afterHandshake(ServerHttpRequest request,
                               ServerHttpResponse response,
                               WebSocketHandler wsHandler,
                               Exception exception) {}
}

View File

@ -0,0 +1,153 @@
package org.dromara.kafka2Websocket.utils;
import cn.hutool.core.collection.CollUtil;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.kafka2Websocket.dto.WebSocketMessageDto;
import org.dromara.kafka2Websocket.holder.WebSocketSessionHolder;
import org.springframework.web.socket.PongMessage;
import org.springframework.web.socket.TextMessage;
import org.springframework.web.socket.WebSocketMessage;
import org.springframework.web.socket.WebSocketSession;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Consumer;
/**
* WebSocket
*
* @author zendwang
*/
/**
 * Static WebSocket send helpers: direct sends to local sessions plus Redis
 * pub/sub fan-out to sessions held by other service instances.
 *
 * @author zendwang
 */
@Slf4j
@NoArgsConstructor(access = AccessLevel.PRIVATE)
public class SocketUtils {

    /**
     * Sends a text message to the session registered under the key.
     * NOTE(review): WebSocketSessionHolder.getSessions(Long) looks up a
     * String-keyed map with a Long, so it always returns null and the send is
     * skipped — confirm the intended key type.
     *
     * @param sessionKey session key
     * @param message    text to send
     */
    public static void sendMessage(Long sessionKey, String message) {
        WebSocketSession session = WebSocketSessionHolder.getSessions(sessionKey);
        sendMessage(session, message);
    }

    /**
     * Subscribes to the Redis "global:websocket" channel.
     *
     * @param consumer callback invoked for each published WebSocketMessageDto
     */
    public static void subscribeMessage(Consumer<WebSocketMessageDto> consumer) {
        RedisUtils.subscribe("global:websocket", WebSocketMessageDto.class, consumer);
    }

    /**
     * Delivers a message: local sessions are sent to directly, remaining
     * session keys are published over Redis for other instances to pick up.
     *
     * @param webSocketMessage message plus target session keys
     */
    public static void publishMessage(WebSocketMessageDto webSocketMessage) {
        List<Long> unsentSessionKeys = new ArrayList<>();
        // Sessions on this instance: send directly.
        for (Long sessionKey : webSocketMessage.getSessionKeys()) {
            if (WebSocketSessionHolder.existSession(sessionKey)) {
                SocketUtils.sendMessage(sessionKey, webSocketMessage.getMessage());
                continue;
            }
            unsentSessionKeys.add(sessionKey);
        }
        // Sessions elsewhere: publish for other instances.
        if (CollUtil.isNotEmpty(unsentSessionKeys)) {
            WebSocketMessageDto broadcastMessage = new WebSocketMessageDto();
            broadcastMessage.setMessage(webSocketMessage.getMessage());
            broadcastMessage.setSessionKeys(unsentSessionKeys);
            RedisUtils.publish("global:websocket", broadcastMessage, consumer -> {
                log.info("WebSocket发送主题订阅消息topic:{} session keys:{} message:{}",
                    "global:websocket", unsentSessionKeys, webSocketMessage.getMessage());
            });
        }
    }

    /**
     * Publishes a message to all instances (broadcast, no target keys).
     *
     * @param message text to broadcast
     */
    public static void publishAll(String message) {
        WebSocketMessageDto broadcastMessage = new WebSocketMessageDto();
        broadcastMessage.setMessage(message);
        RedisUtils.publish("global:websocket", broadcastMessage, consumer -> {
            log.info("WebSocket发送主题订阅消息topic:{} message:{}", "global:websocket", message);
        });
    }

    /**
     * Sends a Pong frame on the session (keep-alive reply).
     *
     * @param session target WebSocket session
     */
    public static void sendPongMessage(WebSocketSession session) {
        sendMessage(session, new PongMessage());
    }

    /**
     * Sends a text message on a specific session.
     *
     * @param session target WebSocket session
     * @param message text to send
     */
    public static void sendMessage(WebSocketSession session, String message) {
        sendMessage(session, new TextMessage(message));
    }

    /** Sends a text message to every session registered under the key. */
    public static void sendMessage(String sid, String message) {
        WebSocketSessionHolder.getSessions(sid).forEach(session ->{
            sendMessage(session, new TextMessage(message));
        });
    }

    /**
     * Core send: skips closed/null sessions, logs IO failures.
     * NOTE(review): "synchronized static" locks the whole class for every send
     * — consider per-session locking if throughput matters.
     *
     * @param session target WebSocket session
     * @param message frame to send
     */
    private synchronized static void sendMessage(WebSocketSession session, WebSocketMessage<?> message) {
        if (session == null || !session.isOpen()) {
            log.warn("[send] session会话已经关闭");
        } else {
            try {
                session.sendMessage(message);
            } catch (IOException e) {
                log.error("[send] session({}) 发送消息({}) 异常", session, message, e);
            }
        }
    }

    /**
     * NOTE(review): byte-for-byte duplicate of sendMessage(WebSocketSession,
     * WebSocketMessage) above and never called — candidate for removal.
     *
     * @param session target WebSocket session
     * @param message frame to send
     */
    private synchronized static void sendAllMessage(WebSocketSession session, WebSocketMessage<?> message) {
        if (session == null || !session.isOpen()) {
            log.warn("[send] session会话已经关闭");
        } else {
            try {
                session.sendMessage(message);
            } catch (IOException e) {
                log.error("[send] session({}) 发送消息({}) 异常", session, message, e);
            }
        }
    }
}

View File

@ -0,0 +1,34 @@
# Tomcat
server:
port: 9216
# Spring
spring:
application:
# 应用名称
name: wzhj-kafka2websocket
profiles:
# 环境配置
active: @profiles.active@
--- # nacos 配置
spring:
cloud:
nacos:
# nacos 服务地址
server-addr: @nacos.server@
username: @nacos.username@
password: @nacos.password@
discovery:
# 注册组
group: @nacos.discovery.group@
namespace: ${spring.profiles.active}
config:
# 配置组
group: @nacos.config.group@
namespace: ${spring.profiles.active}
config:
import:
- optional:nacos:application-common.yml
- optional:nacos:datasource.yml
- optional:nacos:${spring.application.name}.yml

View File

@ -0,0 +1,86 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- &lt;!&ndash; 控制台输出 &ndash;&gt;
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>-->
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error-->
<level>ERROR</level>
<!--匹配到就禁止-->
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}${LOG_FILE}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${LOG_PATH}info/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- 保留20天 -->
</rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${LOG_PATH}error.${LOG_FILE}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${LOG_PATH}error/${LOG_FILE}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!--只保留最近90天的日志-->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
</appender>
<root level="INFO">
<appender-ref ref="CONSOLE"/>
<appender-ref ref="FILE"/>
<appender-ref ref="ERRORFILE"/>
</root>
</configuration>

View File

@ -1 +0,0 @@
package org.dromara.location;

View File

@ -1,155 +0,0 @@
package org.dromara.location.service.impl;
import cn.hutool.core.date.DateField;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import lombok.RequiredArgsConstructor;
import org.dromara.location.service.ISearchService;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@RequiredArgsConstructor
@Service
public class SearchServiceImpl implements ISearchService {

    @Autowired
    private RestHighLevelClient restHighLevelClient;

    /**
     * Scroll-searches the daily "gpsinfoYYYYMMDD" indices for one device's
     * track points in [startTime, endTime] (format "yyyy-MM-dd HH:mm:ss"),
     * ordered by gpsTime ascending, 5000 hits per scroll page.
     *
     * @param deviceCode device identifier (exact term match)
     * @param startTime  range start, "yyyy-MM-dd HH:mm:ss"
     * @param endTime    range end, "yyyy-MM-dd HH:mm:ss"
     * @param deviceType device type (exact term match)
     * @return list of hit sources; empty on failure (exceptions are swallowed)
     */
    @Override
    public List<Map> searchCar(String deviceCode, String startTime, String endTime,String deviceType) throws RuntimeException{
        List<Map> sourceList = new ArrayList<Map>();
        List<String> esIndexByTime = findEsIndexByTime(startTime, endTime);
        // Created per call — SimpleDateFormat is not thread-safe as a shared field.
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        DateTime startDate = DateUtil.parse(startTime, "yyyy-MM-dd HH:mm:ss");
        DateTime endDate = DateUtil.parse(endTime, "yyyy-MM-dd HH:mm:ss");
        BoolQueryBuilder boolBuilder = QueryBuilders.boolQuery();
        // First term filter: device code
        TermQueryBuilder termTerminalBuilder1 = QueryBuilders.termQuery("deviceCode", deviceCode);
        // Second term filter: device type
        TermQueryBuilder termTerminalBuilder2 = QueryBuilders.termQuery("deviceType", deviceType);
        boolBuilder.must(termTerminalBuilder1);
        boolBuilder.must(termTerminalBuilder2);
        System.out.print(format.format(startDate));
        boolBuilder.must(QueryBuilders.rangeQuery("gpsTime")
            .gte(format.format(startDate))
            .lte(format.format(endDate)) );
        Scroll scroll = new Scroll(TimeValue.timeValueMinutes(1L));
        SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
        sourceBuilder.query(boolBuilder).sort("gpsTime",SortOrder.ASC).size(5000);
        SearchRequest rq = new SearchRequest();
        // boolBuilder.withIndicesOptions(IndicesOptions.lenientExpandOpen());
        rq.scroll(scroll);
        rq.source(sourceBuilder);
        rq.indices(esIndexByTime.toArray(new String[0]));
        // Ignore errors caused by missing daily indices
        rq.indicesOptions(IndicesOptions.lenientExpandOpen());
        // SearchResponse rp = null;
        try {
            SearchResponse rp = restHighLevelClient.search(rq,RequestOptions.DEFAULT);
            SearchHit[] searchHits = rp.getHits().getHits();
            for (SearchHit searchHit : searchHits) {
                Map sourceAsMap = searchHit.getSourceAsMap();
                sourceList.add(sourceAsMap);
            }
            // Walk the scroll until no more hits are returned
            String scrollId = rp.getScrollId();
            while (searchHits != null && searchHits.length > 0) {
                SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId);
                scrollRequest.scroll(scroll);
                try {
                    rp = restHighLevelClient.scroll(scrollRequest,RequestOptions.DEFAULT);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                scrollId = rp.getScrollId();
                searchHits = rp.getHits().getHits();
                if (searchHits != null && searchHits.length > 0) {
                    for (SearchHit searchHit : searchHits) {
                        Map sourceAsMap = searchHit.getSourceAsMap();
                        sourceList.add(sourceAsMap);
                    }
                }
            }
            // Release the scroll context
            ClearScrollRequest clearScrollRequest = new ClearScrollRequest();
            clearScrollRequest.addScrollId(scrollId); // setScrollIds() can batch multiple scroll ids
            ClearScrollResponse clearScrollResponse = null;
            try {
                clearScrollResponse = restHighLevelClient.clearScroll(clearScrollRequest, RequestOptions.DEFAULT);
            } catch (IOException e) {
                e.printStackTrace();
            }
            // NOTE(review): if clearScroll threw above, clearScrollResponse is
            // null and this line NPEs (swallowed by the outer catch).
            boolean succeeded = clearScrollResponse.isSucceeded();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return sourceList;
    }

    /**
     * Expands [startTime, endTime] into daily index names "gpsinfoYYYYMMDD".
     * NOTE(review): capped at the first 7 days of the range — confirm longer
     * ranges are intentionally truncated.
     */
    private List<String> findEsIndexByTime(String startTime, String endTime) {
        startTime = startTime.substring(0, 10).replaceAll("-",""); // yyyyMMdd
        endTime = endTime.substring(0, 10).replaceAll("-","");
        Date start = DateUtil.parse(startTime, "yyyyMMdd");
        Date end = DateUtil.parse(endTime, "yyyyMMdd");
        List<DateTime> dateTimes = DateUtil.rangeToList(start, end, DateField.DAY_OF_YEAR);
        List<String> list = new ArrayList<>();
        String hash;
        int len = dateTimes.size() < 7 ? dateTimes.size() : 7;
        for (int i = 0; i < len; i++) {
            hash = dateTimes.get(i).toString();
            hash = hash.substring(0, 10).replaceAll("-","");
            list.add("gpsinfo" + hash);
        }
        return list;
    }

    /**
     * Utility: wraps a BiConsumer so it receives an incrementing index
     * alongside each element (for use with forEach).
     */
    public static <T> Consumer<T> consumerWithIndex(BiConsumer<T, Integer> consumer) {
        class Obj {
            int i;
        }
        Obj obj = new Obj();
        return t -> {
            int index = obj.i++;
            consumer.accept(t, index);
        };
    }
}

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -6,7 +6,7 @@ server:
spring:
application:
# 应用名称
name: ruoyi-resource
name: wzhj-resource
profiles:
# 环境配置
active: @profiles.active@

View File

@ -1,28 +1,106 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}"/>
<property name="log.path" value="logs/${project.artifactId}" />
<property name="log.file" value="${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<appender name="file_console" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${log.path}/console.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
<!-- 日志文件名格式 -->
<fileNamePattern>${log.path}/console.%d{yyyy-MM-dd}.log</fileNamePattern>
<!-- 日志最大 1天 -->
<maxHistory>1</maxHistory>
</rollingPolicy>
<encoder>
<pattern>${console.log.pattern}</pattern>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<!-- 过滤的级别 -->
<level>INFO</level>
</filter>
</appender>
<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Info 级别的日志,只是过滤 info 还是会输出 Error 日志,因为 Error 的级别高,
所以我们使用下面的策略,可以避免输出 Error 的日志-->
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<!--过滤 Error-->
<level>ERROR</level>
<!--匹配到就禁止-->
<onMatch>DENY</onMatch>
<!--没有匹配到就允许-->
<onMismatch>ACCEPT</onMismatch>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${log.path}/${log.file}</File>
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<fileNamePattern>${log.path}/info/${log.file}.%d{yyyy-MM-dd}.%i.gz</fileNamePattern>
<maxFileSize>50MB</maxFileSize>
<maxHistory>20</maxHistory> <!-- keep the last 20 days of rolled files -->
</rollingPolicy>
</appender>
<!--error log-->
<appender name="ERRORFILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<!--如果只是想要 Error 级别的日志,那么需要过滤一下,默认是 info 级别的ThresholdFilter-->
<filter class="ch.qos.logback.classic.filter.ThresholdFilter">
<level>Error</level>
</filter>
<!--日志名称如果没有File 属性那么只会使用FileNamePattern的文件路径规则
如果同时有<File><FileNamePattern>,那么当天日志是<File>,明天会自动把今天
的日志改名为今天的日期。即,<File> 的日志都是当天的。
-->
<File>${log.path}/error.${log.file}</File>
<!--滚动策略,按照时间滚动 TimeBasedRollingPolicy-->
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<!--文件路径,定义了日志的切分方式——把每一天的日志归档到一个文件中,以防止日志填满整个磁盘空间-->
<FileNamePattern>${log.path}/error/${log.file}.%d{yyyy-MM-dd}.%i.gz</FileNamePattern>
<!-- keep only the most recent 180 days of logs -->
<maxFileSize>50MB</maxFileSize>
<maxHistory>180</maxHistory>
<!--用来指定日志文件的上限大小,那么到了这个值,就会删除旧的日志-->
<!--<totalSizeCap>1GB</totalSizeCap>-->
</rollingPolicy>
<!--日志输出编码格式化-->
<encoder>
<charset>UTF-8</charset>
<pattern>%date [%level] [%thread] %logger{60} [%file : %line] %msg%n</pattern>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<root level="INFO">
<appender-ref ref="console"/>
<appender-ref ref="FILE"/>
<appender-ref ref="ERRORFILE"/>
<appender-ref ref="file_console"/>
</root>
</configuration>

View File

@ -1,148 +0,0 @@
package org.dromara.system.controller.system;
import jdk.dynalink.linker.LinkerServices;
import lombok.RequiredArgsConstructor;
import org.dromara.common.core.domain.R;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.common.web.core.BaseController;
import org.dromara.system.domain.DeviceRedis;
import org.dromara.system.domain.bo.TDeviceBo;
import org.dromara.system.domain.vo.DeviceStaticsVo;
import org.dromara.system.domain.vo.SysDeptVo;
import org.dromara.system.domain.vo.SysDictDataVo;
import org.dromara.system.service.*;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RequiredArgsConstructor
@RestController
public class IndexStaticsController extends BaseController {

    private final ISysDeptService deptService;
    private final ITDeviceService deviceService;
    private final ISysDictTypeService dictTypeService;
    private final IDeviceRedisService redisService;

    /**
     * Top panel: device statistics for the given organisation code (zzjgdm).
     */
    @GetMapping("/topPan")
    public R topPan(String zzjgdm) {
        DeviceRedis redis = new DeviceRedis();
        redis.setZzjgdm(zzjgdm);
        return R.ok(redisService.countByCondition(redis));
    }

    /**
     * Online bar chart: merges per-city device totals with live online counts
     * taken from Redis; departments with no statistics row are emitted with
     * zero counts so every city appears in the chart.
     */
    @PostMapping("/onLineBar")
    public R onLineBar() {
        List<SysDeptVo> deptVoList = deptService.getDsList();
        List<DeviceStaticsVo> staticsVoList = deviceService.countByDs();
        List<DeviceStaticsVo> list = new ArrayList<>(); // merged statistics result
        for (SysDeptVo deptVo : deptVoList) {
            boolean matched = false; // whether a statistics row exists for this department
            for (DeviceStaticsVo staticsVo : staticsVoList) {
                // countByDs groups by a 4-char zzjgdm prefix; pad with 8 zeros
                // to compare against the full 12-char dept id.
                String deptId = staticsVo.getZzjgdm() + "00000000";
                if (deptId.equals(deptVo.getDeptId())) {
                    staticsVo.setZzjgdm(deptId);
                    staticsVo.setZzjgmc(deptVo.getDeptName().replaceAll("公安局", ""));
                    int onlineCo = RedisUtils.searchKeys("org_code:" + staticsVo.getZzjgdm() + "*");
                    staticsVo.setOnlineCo(onlineCo);
                    list.add(staticsVo);
                    matched = true;
                    break;
                }
            }
            if (!matched) {
                DeviceStaticsVo staticsVo = new DeviceStaticsVo();
                staticsVo.setZzjgdm(deptVo.getDeptId());
                staticsVo.setZzjgmc(deptVo.getDeptName().replaceAll("公安局", ""));
                staticsVo.setCo(0);
                staticsVo.setOnlineCo(0);
                list.add(staticsVo);
            }
        }
        return R.ok(list);
    }

    /**
     * Total and online device counts, optionally filtered by city code.
     */
    @GetMapping("/dsOnlineCount")
    public R dsOnlineCount(String code) {
        TDeviceBo bo = new TDeviceBo();
        bo.setInfoSource(code);
        Long co = deviceService.countByCondition(bo);
        int onlineCo;
        if (null == code || "".equals(code)) {
            onlineCo = RedisUtils.searchKeys("org_code:*");
        } else {
            onlineCo = RedisUtils.searchKeys("org_code:" + code + "*");
        }
        Map<String, Object> map = new HashMap<>();
        map.put("co", co);
        map.put("onlineCo", onlineCo);
        return R.ok(map);
    }

    /**
     * Device totals per device type (dictionary zd_device_type), optionally
     * filtered by info-source code.
     */
    @GetMapping("/deviceCount")
    public R deviceCount(String code) {
        List<SysDictDataVo> dataList = dictTypeService.selectDictDataByType("zd_device_type");
        List<Map<String, Object>> list = new ArrayList<>();
        for (SysDictDataVo data : dataList) {
            Map<String, Object> map = new HashMap<>();
            TDeviceBo deviceInfo = new TDeviceBo();
            deviceInfo.setDeviceType(data.getDictValue());
            deviceInfo.setInfoSource(code);
            Long co = deviceService.countByCondition(deviceInfo);
            map.put("name", data.getDictLabel());
            map.put("all", co);
            list.add(map);
        }
        return R.ok(list);
    }

    /**
     * All known device types from the zd_device_type dictionary.
     */
    @PostMapping("/getDeviceType")
    public R getDeviceType() {
        List<SysDictDataVo> dataList = dictTypeService.selectDictDataByType("zd_device_type");
        return R.ok(dataList);
    }

    /**
     * Per-city totals for a single device type.
     */
    @GetMapping("/countByType")
    public R countByType(String type) {
        List<Map<String, Object>> list = new ArrayList<>();
        List<SysDeptVo> deptVoList = deptService.getDsList();
        for (SysDeptVo deptVo : deptVoList) {
            Map<String, Object> map = new HashMap<>();
            TDeviceBo deviceInfo = new TDeviceBo();
            deviceInfo.setDeviceType(type);
            // First 4 chars of the dept id identify the city-level info source.
            deviceInfo.setInfoSource(deptVo.getDeptId().substring(0, 4));
            Long co = deviceService.countByCondition(deviceInfo);
            map.put("name", deptVo.getDeptName().replaceAll("公安局", ""));
            map.put("all", co);
            list.add(map);
        }
        return R.ok(list);
    }
}

View File

@ -1,20 +0,0 @@
package org.dromara.system.mapper;
import org.dromara.system.domain.TDevice;
import org.dromara.system.domain.vo.DeviceStaticsVo;
import org.dromara.system.domain.vo.TDeviceVo;
import org.dromara.common.mybatis.core.mapper.BaseMapperPlus;
import java.util.List;
/**
 * MyBatis mapper for the t_device table.
 *
 * @author luuy
 * @date 2024-11-19
 */
public interface TDeviceMapper extends BaseMapperPlus<TDevice, TDeviceVo> {

    /**
     * Counts devices grouped by the first four characters of zzjgdm
     * (the city-level organisation-code prefix); null prefixes are excluded.
     *
     * @return one statistics row per prefix with its device count
     */
    List<DeviceStaticsVo> countByDs();
}

View File

@ -1,29 +0,0 @@
package org.dromara.system.schedule;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.json.JSONObject;
import org.dromara.common.redis.utils.RedisUtils;
import org.dromara.system.domain.DeviceRedis;
import org.dromara.system.service.IDeviceRedisService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.Scheduled;
import java.util.List;
// Scheduled job that mirrors Redis "online_users:*" entries into the
// t_device_redis store via IDeviceRedisService.
@Configuration
public class DeviceRedisSchedule {

    @Autowired
    IDeviceRedisService redisService;

    /**
     * Every 30 seconds: reads all values under the "online_users:*" key
     * pattern from Redis, converts them to DeviceRedis beans and hands the
     * whole batch to the service layer for insertion.
     */
    @Scheduled(cron = "0/30 * * * * ?")
    public void handleDeviceRedis(){
        List<JSONObject> jlist = RedisUtils.searchAndGetKeysValues("online_users:*");
        redisService.insertBatch(BeanUtil.copyToList(jlist, DeviceRedis.class));
    }
}

View File

@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
<!-- 日志存放路径 -->
<property name="log.path" value="logs/${project.artifactId}" />
<!-- 日志输出格式 -->
<property name="console.log.pattern"
value="%red(%d{yyyy-MM-dd HH:mm:ss}) %green([%thread]) %highlight(%-5level) %boldMagenta(%logger{36}%n) - %msg%n"/>
<!-- 控制台输出 -->
<appender name="console" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>${console.log.pattern}</pattern>
<charset>utf-8</charset>
</encoder>
</appender>
<include resource="logback-common.xml" />
<include resource="logback-logstash.xml" />
<!-- 开启 skywalking 日志收集 -->
<include resource="logback-skylog.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console" />
</root>
</configuration>

View File

@ -1,143 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.dromara.system.mapper.SysDeptMapper">
<resultMap type="org.dromara.system.domain.vo.SysDeptVo" id="SysDeptResult">
</resultMap>
<select id="selectDeptList" resultMap="SysDeptResult">
select
<if test="ew.getSqlSelect != null">
${ew.getSqlSelect}
</if>
<if test="ew.getSqlSelect == null">
*
</if>
from sys_dept ${ew.getCustomSqlSegment}
</select>
<select id="countDeptById" resultType="Long">
select count(*) from sys_dept where del_flag = '0' and dept_id = #{deptId}
</select>
<select id="selectDeptListByRoleId" resultType="Long">
select d.dept_id
from sys_dept d
left join sys_role_dept rd on d.dept_id = rd.dept_id
where rd.role_id = #{roleId}
<if test="deptCheckStrictly">
and d.dept_id not in (select d.parent_id from sys_dept d inner join sys_role_dept rd on d.dept_id = rd.dept_id and rd.role_id = #{roleId})
</if>
order by d.parent_id, d.order_num
</select>
<!-- 各机构设备在线总数 参数deviceType -->
<select id="deviceStatics" parameterType="String" resultMap="SysDeptResult">
select * from (
-- 安徽省
SELECT '0' dept_id,'安徽省' dept_name, '-1' parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 全省 各设备总数
(SELECT substr(zzjgdm, 1, 2) dept_id,count(*) co from (SELECT * FROM t_device
<where>valid = 1
<if test="
deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 2) HAVING substr(zzjgdm,1,2) is not null ) td
on substr(d.dept_id,1,2) = td.dept_id
LEFT JOIN
-- 全省 各设备在线数
(SELECT substr(zzjgdm, 1, 2) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 2) ) rd
on substr(d.dept_id,1,2) = rd.dept_id
WHERE d.dept_id = '340000000000'
union
-- 市局机构
SELECT d.dept_id,short_name dept_name,parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 市局 各设备总数
(SELECT substr(zzjgdm, 1, 4) dept_id,count(*) co from (SELECT * FROM t_device
<where>
valid = 1
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 4) HAVING substr(zzjgdm,1,4) is not null ) td
on substr(d.dept_id,1,4) = td.dept_id
LEFT JOIN
-- 市局 各设备在线数
(SELECT substr(zzjgdm, 1, 4) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 4) ) rd
on substr(d.dept_id,1,4) = rd.dept_id
WHERE d.parent_id = '0'
union
--分局
SELECT d.dept_id,short_name dept_name,parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 分局 各设备总数
(SELECT substr(zzjgdm, 1, 6) dept_id,count(*) co from (SELECT * FROM t_device
<where>
valid = 1
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 6) HAVING substr(zzjgdm,1,6) is not null ) td
on substr(d.dept_id,1,6) = td.dept_id
LEFT JOIN
-- 分局 各设备在线数
(SELECT substr(zzjgdm, 1, 6) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 6) ) rd
on substr(d.dept_id,1,6) = rd.dept_id
WHERE d.type = 1
union
--支队 机关
SELECT d.dept_id,short_name dept_name,parent_id,COALESCE(td.co,0) co,COALESCE(rd.online,0) online FROM
sys_dept d
LEFT JOIN
-- 支队 机关 各设备总数
(SELECT substr(zzjgdm, 1, 8) dept_id,count(*) co from (SELECT * FROM t_device
<where>
valid = 1
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 8) HAVING substr(zzjgdm,1,8) is not null ) td
on substr(d.dept_id,1,8) = td.dept_id
LEFT JOIN
-- 支队 机关 各设备在线数
(SELECT substr(zzjgdm, 1, 8) dept_id,count(*) online from (SELECT * FROM t_device_redis
<where>
online = '1'
<if test="deviceType != null and deviceType != ''"> and device_type = #{deviceType}</if>
</where>
) r
GROUP BY substr(zzjgdm,1, 8) ) rd
on substr(d.dept_id,1,8) = rd.dept_id
WHERE (length(d.ancestors) - length(translate(d.ancestors,',',''))+1) = 3 and d.type = 2
) a
order by a.dept_id asc
</select>
</mapper>

View File

@ -1,73 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.dromara.system.mapper.SysUserMapper">
<resultMap type="org.dromara.system.domain.vo.SysUserVo" id="SysUserResult">
<id property="userId" column="user_id"/>
</resultMap>
<resultMap type="org.dromara.system.domain.vo.SysUserExportVo" id="SysUserExportResult">
<id property="userId" column="user_id"/>
</resultMap>
<select id="selectPageUserList" resultMap="SysUserResult">
select
<if test="ew.getSqlSelect != null">
${ew.getSqlSelect}
</if>
<if test="ew.getSqlSelect == null">
u.user_id, u.dept_id, u.nick_name, u.user_name, u.user_type, u.email, u.avatar, u.phonenumber, u.sex,
u.status, u.del_flag, u.login_ip, u.login_date, u.create_by, u.create_time, u.remark
</if>
from sys_user u
${ew.getCustomSqlSegment}
</select>
<select id="selectUserList" resultMap="SysUserResult">
select
<if test="ew.getSqlSelect != null">
${ew.getSqlSelect}
</if>
<if test="ew.getSqlSelect == null">
u.user_id, u.dept_id, u.nick_name, u.user_name, u.user_type, u.email, u.avatar, u.phonenumber, u.sex,
u.status, u.del_flag, u.login_ip, u.login_date, u.create_by, u.create_time, u.remark
</if>
from sys_user u
${ew.getCustomSqlSegment}
</select>
<select id="selectUserExportList" resultMap="SysUserExportResult">
select u.user_id, u.dept_id, u.nick_name, u.user_name, u.user_type, u.email, u.avatar, u.phonenumber, u.sex,
u.status, u.del_flag, u.login_ip, u.login_date, u.create_by, u.create_time, u.remark,
d.dept_name, d.leader, u1.user_name as leaderName
from sys_user u
left join sys_dept d on u.dept_id = d.dept_id
left join sys_user u1 on u1.user_id = d.leader
${ew.getCustomSqlSegment}
</select>
<select id="selectAllocatedList" resultMap="SysUserResult">
select distinct u.user_id, u.dept_id, u.user_name, u.user_type, u.nick_name, u.email, u.phonenumber, u.status, u.create_time
from sys_user u
left join sys_dept d on u.dept_id = d.dept_id
left join sys_user_role sur on u.user_id = sur.user_id
left join sys_role r on r.role_id = sur.role_id
${ew.getCustomSqlSegment}
</select>
<select id="selectUnallocatedList" resultMap="SysUserResult">
select distinct u.user_id, u.dept_id, u.user_name, u.user_type, u.nick_name, u.email, u.phonenumber, u.status, u.create_time
from sys_user u
left join sys_dept d on u.dept_id = d.dept_id
left join sys_user_role sur on u.user_id = sur.user_id
left join sys_role r on r.role_id = sur.role_id
${ew.getCustomSqlSegment}
</select>
<select id="countUserById" resultType="Long">
select count(*) from sys_user where del_flag = '0' and user_id = #{userId}
</select>
</mapper>

View File

@ -1,15 +0,0 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE mapper
PUBLIC "-//mybatis.org//DTD Mapper 3.0//EN"
"http://mybatis.org/dtd/mybatis-3-mapper.dtd">
<mapper namespace="org.dromara.system.mapper.TDeviceMapper">
<resultMap id="deviceStaticsResult" type="org.dromara.system.domain.vo.DeviceStaticsVo">
</resultMap>
<select id="countByDs" resultMap="deviceStaticsResult">
SELECT SUBSTR(zzjgdm,1,4) zzjgdm,count(*) co from t_device GROUP BY SUBSTR(zzjgdm,1,4) HAVING SUBSTR(zzjgdm,1,4) is not null
</select>
</mapper>

View File

@ -9,7 +9,7 @@
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>stwzhj-consumer</artifactId>
<artifactId>wzhj-consumer</artifactId>
<dependencies>

View File

@ -0,0 +1,22 @@
package org.dromara.kafka.consumer;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.servlet.ServletComponentScan;
import org.springframework.scheduling.annotation.EnableAsync;
/**
 * Spring Boot entry point for the Kafka consumer service; enables async
 * execution and servlet-component scanning.
 *
 * @author chenle
 * @date 2021-09-06 11:12
 */
@SpringBootApplication
@EnableAsync
@ServletComponentScan
public class KafkaConsumerApplication {

    /** Boots the Spring application context. */
    public static void main(String[] args){
        SpringApplication.run(KafkaConsumerApplication.class,args);
    }
}

View File

@ -0,0 +1,211 @@
package org.dromara.kafka.consumer.handler;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.bean.copier.CopyOptions;
import cn.hutool.core.convert.ConvertException;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUnit;
import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.ruansee.response.ApiResponse;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.dubbo.config.annotation.DubboReference;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.dromara.common.core.domain.R;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfoVO;
import org.dromara.system.api.domain.bo.RemoteDeviceBo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import java.text.DateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.LinkedBlockingDeque;
/**
* <p>description: </p>
*
* @author chenle
* @date 2021-09-06 16:44
*/
@Slf4j
@Configuration
public class ConsumerWorker {
private Logger logger = LoggerFactory.getLogger(ConsumerWorker.class);
public static LinkedBlockingDeque linkedBlockingDeque = new LinkedBlockingDeque<>(5000);
public static LinkedBlockingDeque basedataDeque = new LinkedBlockingDeque<>(5000);
@DubboReference
private RemoteDataToEsService gpsService;
@KafkaListener(topics = "#{'${spring.kafka.consumer.topics}'.split(',')}",properties = {
"auto.offset.reset:latest"})
public void consumer(ConsumerRecord<String,Object> record) {
Object value = record.value();
EsGpsInfo esGpsInfo = JSONUtil.toBean((String) value, EsGpsInfo.class);
Date gpsTime = esGpsInfo.getGpsTime();
// log.info("value={}",value);
if(Objects.isNull(gpsTime)){
log.error("gpsTime == null,deviceCode={}",esGpsInfo.getDeviceCode());
return;
}
String deviceType = esGpsInfo.getDeviceType();
if(StringUtils.isBlank(deviceType)){
log.error("deviceType is null, deviceCode={}",esGpsInfo.getDeviceCode());
return;
}
if(DateUtil.between(gpsTime,new Date(), DateUnit.MINUTE) < 30){
esGpsInfo.setOnline(1);
}
logger.info("esGpsInfo={}",esGpsInfo);
try {
R r = gpsService.saveData(BeanUtil.toBean(esGpsInfo, RemoteGpsInfo.class) );
if(Objects.isNull(r)){
logger.error("response == null");
}else {
logger.info(r.getMsg());
}
} catch (Exception e) {
e.printStackTrace();
}
// boolean offer = linkedBlockingDeque.offer(esGpsInfo);
}
private void luanrequest(Object value) {
RemoteGpsInfo esGpsInfo;
JSONObject jsonObject;
try {
jsonObject = JSONUtil.parseObj(((String) value));
}catch (ConvertException e){
logger.info("jsonObject=null:error={}",e.getMessage());
return;
}
try {
esGpsInfo = JSONUtil.toBean(jsonObject, RemoteGpsInfo.class);
}catch (ConvertException e){
logger.info("EsGpsInfo=null:error={}",e.getMessage());
return;
}
if(Objects.isNull(esGpsInfo)){
logger.info("esGpsInfo=null no error");
return;
}
String deviceCode = esGpsInfo.getDeviceCode();
if(StringUtils.isEmpty(deviceCode) || deviceCode.length() > 100){
logger.info("deviceCode:{} is null or is too long ",deviceCode);
return;
}
String latitude = esGpsInfo.getLat();
if(StringUtils.isEmpty(latitude) || "0.0".equals(latitude)){
logger.info("latitude:{} is null or is zero ",latitude);
return;
}
String longitude = esGpsInfo.getLng();
if(StringUtils.isEmpty(longitude) || "0.0".equals(longitude)){
logger.info("longitude:{} is null or is zero ",longitude);
return;
}
try {
esGpsInfo.setGpsTime(new Date(Long.valueOf(jsonObject.getStr("gpsTime"))));
}catch (Exception e){
logger.error("error_msg={}",e.getMessage());
}
logger.info("esGpsInfo={}",esGpsInfo);
boolean offer = linkedBlockingDeque.offer(esGpsInfo);
R response = R.ok(offer);
if(Objects.isNull(response)){
logger.info("response == null");
}
logger.info("code={},msg={}",response.getCode(),response.getMsg());
if(200 == response.getCode()){
logger.info("topic=jysb_dwxx,data2es={},gpsTime={}","success",esGpsInfo.getGpsTime());
}else{
logger.info("topic=jysb_dwxx,data2es={}",response.getMsg());
}
}
/*
*
* */
private void baseDataRequest(Object value){
RemoteDeviceBo deviceBo;
JSONObject jsonObject;
try {
jsonObject = JSONUtil.parseObj(((String) value));
}catch (ConvertException e){
logger.info("jsonObject=null:error={}",e.getMessage());
return;
}
try {
deviceBo = JSONUtil.toBean(jsonObject, RemoteDeviceBo.class);
}catch (ConvertException e){
logger.info("Device=null:error={}",e.getMessage());
return;
}
if(Objects.isNull(deviceBo)){
logger.info("deviceBo=null no error");
return;
}
if (StringUtils.isEmpty(deviceBo.getDeviceCode())){
logger.info("deviceCode is null");
return;
}
if (StringUtils.isEmpty(deviceBo.getInfoSource())){
logger.info("infoSource is null");
return;
}
if (!StringUtils.isEmpty(deviceBo.getCreateTime())){
try {
Date createTime = new Date(Long.valueOf(jsonObject.getStr("createTime")));
deviceBo.setCreateTime(DateUtil.format(createTime, "yyyy-MM-dd HH:mm:ss"));
}catch (Exception e){
logger.error("error_msg={}",e.getMessage());
}
}
if (!StringUtils.isEmpty(deviceBo.getUpdateTime())){
try {
Date updateTime = new Date(Long.valueOf(jsonObject.getStr("updateTime")));
deviceBo.setUpdateTime(DateUtil.format(updateTime, "yyyy-MM-dd HH:mm:ss"));
}catch (Exception e){
logger.error("error_msg={}",e.getMessage());
}
}
logger.info("deviceBo={}",deviceBo);
boolean offer = basedataDeque.offer(deviceBo);
R response = R.ok(offer);
if(Objects.isNull(response)){
logger.info("response == null");
}
logger.info("code={},msg={}",response.getCode(),response.getMsg());
if(200 == response.getCode()){
logger.info("topic=jysb_sbxx,data2es={},deviceCode={}","success",deviceBo.getDeviceCode());
}else{
logger.info("topic=jysb_sbxx,data2es={}",response.getMsg());
}
}
}

View File

@ -7,6 +7,8 @@ import org.apache.dubbo.config.annotation.DubboReference;
import org.dromara.data2es.api.RemoteDataToEsService;
import org.dromara.data2es.api.domain.RemoteGpsInfo;
import org.dromara.kafka.consumer.entity.EsGpsInfo;
import org.dromara.system.api.RemoteDeviceService;
import org.dromara.system.api.domain.bo.RemoteDeviceBo;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Configuration;
@ -31,21 +33,31 @@ public class DataInsertBatchHandler implements CommandLineRunner {
@DubboReference
private RemoteDataToEsService gpsService;
@DubboReference
private RemoteDeviceService deviceService;
@Override
public void run(String... args) throws Exception {
ExecutorService singleThreadExecutor = Executors.newSingleThreadExecutor();
LinkedBlockingDeque linkedBlockingDeque = ConsumerWorker.linkedBlockingDeque;
LinkedBlockingDeque linkedBlockingDeque = ConsumerWorker.linkedBlockingDeque; //定位信息队列
// LinkedBlockingDeque baseDataDeque = ConsumerWorker.basedataDeque; //基础信息队列
singleThreadExecutor.execute(new Runnable() {
@Override
public void run() {
while (true) {
try {
List<RemoteGpsInfo> list = new ArrayList<>();
List<RemoteDeviceBo> bases = new ArrayList<>();
Queues.drain(linkedBlockingDeque, list, 200, 5, TimeUnit.SECONDS);
// Queues.drain(baseDataDeque, bases, 100, 5, TimeUnit.SECONDS);
log.info("batch size={}", list.size());
// log.info("basedata size={}", bases.size());
if(CollectionUtil.isNotEmpty(list)) {
gpsService.saveDataBatch(list);
}
/*if(CollectionUtil.isNotEmpty(bases)) {
deviceService.batchSaveDevice(bases);
}*/
} catch (Exception e) {
log.error("缓存队列批量消费异常:{}", e.getMessage());
}

Some files were not shown because too many files have changed in this diff Show More