1.设备注册接入优化

2.波形文件接收、解析功能调整
This commit is contained in:
2023-10-13 10:49:43 +08:00
parent 340e7dc75f
commit 6e12027e69
27 changed files with 894 additions and 662 deletions

View File

@@ -6,9 +6,13 @@ import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.utils.PubUtils;
import com.njcn.csdevice.api.CsLineFeignClient;
import com.njcn.csdevice.api.DataArrayFeignClient;
import com.njcn.csdevice.api.EquipmentFeignClient;
import com.njcn.csdevice.pojo.param.DataArrayParam;
import com.njcn.csdevice.pojo.po.CsDataArray;
import com.njcn.csdevice.pojo.po.CsEquipmentDeliveryPO;
import com.njcn.csdevice.pojo.po.CsLinePO;
import com.njcn.csdevice.pojo.vo.CsEquipmentDeliveryVO;
import com.njcn.cswarn.api.CsEquipmentAlarmFeignClient;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.utils.InfluxDbUtils;
import com.njcn.mq.message.AppAutoDataMessage;
@@ -56,6 +60,8 @@ public class StatServiceImpl implements IStatService {
private final RedisUtil redisUtil;
private final EquipmentFeignClient equipmentFeignClient;
@Override
@Transactional(rollbackFor = Exception.class)
public void analysis(AppAutoDataMessage appAutoDataMessage) {
@@ -86,6 +92,8 @@ public class StatServiceImpl implements IStatService {
if(Objects.isNull(object2)) {
saveData();
}
//获取当前设备信息
CsEquipmentDeliveryPO po = equipmentFeignClient.findDevByNDid(appAutoDataMessage.getId()).getData();
if (CollectionUtil.isNotEmpty(list)){
List<String> recordList = new ArrayList<>();
for (AppAutoDataMessage.DataArray item : list) {
@@ -116,7 +124,7 @@ public class StatServiceImpl implements IStatService {
} else {
dataArrayList = objectToList(object);
}
List<String> result = assembleData(lineId,dataArrayList,item,appAutoDataMessage.getMsg().getClDid(),dataArrayParam.getStatMethod());
List<String> result = assembleData(lineId,dataArrayList,item,appAutoDataMessage.getMsg().getClDid(),dataArrayParam.getStatMethod(),po.getProcess());
recordList.addAll(result);
}
if (CollectionUtil.isNotEmpty(recordList)){
@@ -183,7 +191,7 @@ public class StatServiceImpl implements IStatService {
/**
* influxDB数据组装
*/
public List<String> assembleData(String lineId,List<CsDataArray> dataArrayList,AppAutoDataMessage.DataArray item,Integer clDid,String statMethod) {
public List<String> assembleData(String lineId,List<CsDataArray> dataArrayList,AppAutoDataMessage.DataArray item,Integer clDid,String statMethod,Integer process) {
List<String> records = new ArrayList<String>();
//解码
List<Float> floats = PubUtils.byteArrayToFloatList(Base64.getDecoder().decode(item.getData()));
@@ -206,6 +214,7 @@ public class StatServiceImpl implements IStatService {
tags.put(InfluxDBTableConstant.PHASIC_TYPE,dataArrayList.get(i).getPhase());
tags.put(InfluxDBTableConstant.VALUE_TYPE,statMethod);
tags.put(InfluxDBTableConstant.CL_DID,clDid.toString());
tags.put(InfluxDBTableConstant.PROCESS,process.toString());
Map<String,Object> fields = new HashMap<>();
fields.put(dataArrayList.get(i).getName(),floats.get(i));
fields.put(InfluxDBTableConstant.IS_ABNORMAL,item.getDataTag());

View File

@@ -1,29 +1,33 @@
package com.njcn;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import com.alibaba.nacos.shaded.com.google.gson.Gson;
import com.njcn.common.utils.PubUtils;
import com.njcn.csdevice.pojo.po.CsDataArray;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.mq.message.AppAutoDataMessage;
import com.njcn.redis.pojo.enums.AppRedisKey;
import com.njcn.influx.utils.InfluxDbUtils;
import com.njcn.redis.utils.RedisUtil;
import com.njcn.stat.StatBootApplication;
import org.influxdb.InfluxDB;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.junit.Test;
import org.springframework.web.bind.annotation.ResponseBody;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertTrue;
/**
* Unit test for simple App.
*/
@RunWith(SpringRunner.class)
@WebAppConfiguration
@SpringBootTest(classes = StatBootApplication.class)
public class AppTest
{
@@ -33,6 +37,7 @@ public class AppTest
@Resource
private InfluxDbUtils influxDbUtils;
/**
* Rigorous Test :-)
*/
@@ -42,4 +47,39 @@ public class AppTest
assertTrue( true );
}
@Test
public void addRedis() {
    // Demo: write two measurement points to InfluxDB in one batch via line protocol.
    // NOTE: in production do not use System.currentTimeMillis() as the point
    // timestamp — under high volume duplicate timestamps overwrite each other
    // and data is lost; use the timestamp carried by the data itself.

    // Point for line 4, phase A, with four RMS field values.
    Map<String, String> lineFourTags = new HashMap<>();
    lineFourTags.put("LineID", "4");
    lineFourTags.put("Phasic_Type", "A");
    Map<String, Object> lineFourFields = new HashMap<>();
    lineFourFields.put("RMS", 4.1111);
    lineFourFields.put("RMS_AB", 4.1111);
    lineFourFields.put("RMS_BC", 4.1111);
    lineFourFields.put("RMS_CA", 4.1111);
    Point lineFourPoint = influxDbUtils.pointBuilder(
            "Data_v", System.currentTimeMillis(), TimeUnit.MILLISECONDS, lineFourTags, lineFourFields);
    BatchPoints lineFourBatch = BatchPoints.database("test")
            .tag("LineID", "4")
            .tag("Phasic_Type", "A")
            .retentionPolicy("")
            .consistency(InfluxDB.ConsistencyLevel.ALL)
            .build();
    lineFourBatch.point(lineFourPoint);

    // Point for line 5, phase A (three RMS field values, matching the original demo).
    Map<String, String> lineFiveTags = new HashMap<>();
    lineFiveTags.put("LineID", "5");
    lineFiveTags.put("Phasic_Type", "A");
    Map<String, Object> lineFiveFields = new HashMap<>();
    lineFiveFields.put("RMS", 5.1111);
    lineFiveFields.put("RMS_AB", 5.1111);
    lineFiveFields.put("RMS_BC", 5.1111);
    Point lineFivePoint = influxDbUtils.pointBuilder(
            "Data_v", System.currentTimeMillis(), TimeUnit.MILLISECONDS, lineFiveTags, lineFiveFields);
    BatchPoints lineFiveBatch = BatchPoints.database("test")
            .tag("LineID", "5")
            .tag("Phasic_Type", "A")
            .retentionPolicy("")
            .consistency(InfluxDB.ConsistencyLevel.ALL)
            .build();
    lineFiveBatch.point(lineFivePoint);

    // Serialize both batches to line protocol and insert them in a single
    // call — batching improves write throughput.
    List<String> records = new ArrayList<>();
    records.add(lineFourBatch.lineProtocol());
    records.add(lineFiveBatch.lineProtocol());
    influxDbUtils.batchInsert("test", "", InfluxDB.ConsistencyLevel.ALL, TimeUnit.MILLISECONDS, records);
}
}