1. Optimize device registration and access

2. Adjust waveform file receiving and parsing
This commit is contained in:
2023-10-13 10:49:43 +08:00
parent 340e7dc75f
commit 6e12027e69
27 changed files with 894 additions and 662 deletions


@@ -1,29 +1,33 @@
package com.njcn;
import com.alibaba.nacos.shaded.com.google.gson.Gson;
import com.njcn.common.utils.PubUtils;
import com.njcn.csdevice.pojo.po.CsDataArray;
import com.njcn.influx.pojo.constant.InfluxDBTableConstant;
import com.njcn.influx.utils.InfluxDbUtils;
import com.njcn.mq.message.AppAutoDataMessage;
import com.njcn.redis.pojo.enums.AppRedisKey;
import com.njcn.redis.utils.RedisUtil;
import com.njcn.stat.StatBootApplication;
import org.influxdb.InfluxDB;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.*;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Unit test for simple App.
*/
@RunWith(SpringRunner.class)
@WebAppConfiguration
@SpringBootTest(classes = StatBootApplication.class)
public class AppTest
{
@@ -33,6 +37,7 @@ public class AppTest
@Resource
private InfluxDbUtils influxDbUtils;
/**
* Rigorous Test :-)
*/
@@ -42,4 +47,39 @@ public class AppTest
assertTrue( true );
}
@Test
public void addRedis() {
    Map<String, String> tags1 = new HashMap<>();
    tags1.put("LineID", "4");
    tags1.put("Phasic_Type", "A");
    Map<String, Object> fields1 = new HashMap<>();
    fields1.put("RMS", 4.1111);
    fields1.put("RMS_AB", 4.1111);
    fields1.put("RMS_BC", 4.1111);
    fields1.put("RMS_CA", 4.1111);
    Map<String, String> tags2 = new HashMap<>();
    tags2.put("LineID", "5");
    tags2.put("Phasic_Type", "A");
    Map<String, Object> fields2 = new HashMap<>();
    fields2.put("RMS", 5.1111);
    fields2.put("RMS_AB", 5.1111);
    fields2.put("RMS_BC", 5.1111);
    // Build one point per record. Do not use System.currentTimeMillis() in production:
    // with large data volumes it produces duplicate timestamps and records get lost.
    // Use each record's own timestamp instead; this is for demonstration only.
    Point point1 = influxDbUtils.pointBuilder("Data_v", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags1, fields1);
    Point point2 = influxDbUtils.pointBuilder("Data_v", System.currentTimeMillis(), TimeUnit.MILLISECONDS, tags2, fields2);
    // BatchPoints batchPoints1 = BatchPoints.database("Data_v").tag("LineID", "4").tag("Phasic_Type","A").retentionPolicy("").consistency(ConsistencyLevel.ALL).precision(TimeUnit.MILLISECONDS).build();
    BatchPoints batchPoints1 = BatchPoints.database("test").tag("LineID", "4").tag("Phasic_Type", "A").retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
    batchPoints1.point(point1);
    BatchPoints batchPoints2 = BatchPoints.database("test").tag("LineID", "5").tag("Phasic_Type", "A").retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
    // Add each record to its own BatchPoints
    batchPoints2.point(point2);
    // Serialize the different BatchPoints to line protocol so they can be written in one call, which speeds up writes
    List<String> records = new ArrayList<String>();
    records.add(batchPoints1.lineProtocol());
    records.add(batchPoints2.lineProtocol());
    // Batch-insert both records into the database
    influxDbUtils.batchInsert("test", "", InfluxDB.ConsistencyLevel.ALL, TimeUnit.MILLISECONDS, records);
}
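
// A minimal sketch of the timestamp advice above: when each record carries its own
// acquisition time, pass that value to pointBuilder() so high-volume writes do not
// collide on System.currentTimeMillis(). The recordTime parameter is a hypothetical
// stand-in for the data's own timestamp; the measurement name reuses "Data_v" from the test.
private Point buildPointWithOwnTimestamp(long recordTime, Map<String, String> tags, Map<String, Object> fields) {
    // recordTime comes from the measurement itself (e.g. the device acquisition time in milliseconds)
    return influxDbUtils.pointBuilder("Data_v", recordTime, TimeUnit.MILLISECONDS, tags, fields);
}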
}