Test parsing the data and writing it into the influx database

2023-10-11 10:37:57 +08:00
parent d627337c84
commit 7e85dac3ff
5 changed files with 124 additions and 106 deletions

View File

@@ -45,7 +45,7 @@
         <maven.compiler.source>8</maven.compiler.source>
         <maven.compiler.target>8</maven.compiler.target>
         <commons-io.version>2.8.0</commons-io.version>
-        <hutool.version>5.7.9</hutool.version>
+        <hutool.version>5.8.11</hutool.version>
         <influxdb-java.version>2.22</influxdb-java.version>
     </properties>
@@ -207,6 +207,13 @@
             <version>1.0.0</version>
         </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-collections4</artifactId>
+            <version>4.4</version>
+        </dependency>
     </dependencies>
     <build>
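The dependency changes above back the batching logic introduced later in this commit: hutool moves from 5.7.9 to 5.8.11, and commons-collections4 4.4 comes in so that BusinessServiceImpl can split the customer list into chunks of 500 via ListUtils.partition. A minimal, self-contained sketch of that call (the IDs below are made up; only commons-collections4 is assumed on the classpath):

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

import org.apache.commons.collections4.ListUtils;

public class PartitionDemo {
    public static void main(String[] args) {
        // 1,203 fake customer IDs stand in for the rows read from the Excel file
        List<String> userIds = IntStream.rangeClosed(1, 1203)
                .mapToObj(i -> "160" + i)
                .collect(Collectors.toList());
        // split into chunks of at most 500, as BusinessServiceImpl does per request
        List<List<String>> batches = ListUtils.partition(userIds, 500);
        batches.forEach(batch -> System.out.println("batch size: " + batch.size()));
        // prints: 500, 500, 203
    }
}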

View File

@@ -39,7 +39,7 @@ public class TokenComponent {
     public TokenResult getTokenWithRestTemplate() {
         RestTemplateUtil restTemplateUtil = new RestTemplateUtil();
         ResponseEntity<TokenResult> userEntity = restTemplateUtil.post(url.concat("/psr-auth/oauth/accessToken?client_id=" + clientId + "&client_secret=" + clientSecret + "&grant_type=" + grantType), TokenResult.class);
-        //return null if the returned status is abnormal
+        //if the status is abnormal, return null
         log.info("getTokenWithRestTemplate获取token结束结果为:{}", userEntity);
         if (userEntity.getStatusCodeValue() == 200 && userEntity.getBody().getStatus().equalsIgnoreCase("000000")) {
             return userEntity.getBody();

View File

@@ -39,7 +39,7 @@ public class DisPhotovoltaicController {
     @ApiOperation(value = "获取10kv分布式光伏接入情况")
     @PostMapping("/import10")
-    public void importTakeOrder(MultipartFile file) throws Exception {
+    public void importTakeOrder(MultipartFile file, String startTime, String endTime) throws Exception {
         List<ExcelData> list = EasyExcel.read(file.getInputStream())
                 .head(ExcelData.class)
                 .headRowNumber(2)
@@ -49,14 +49,14 @@ public class DisPhotovoltaicController {
                 .filter(t -> StrUtil.isNotBlank(t.getGenerationUserID()))
                 .filter(StreamUtil.distinctByKey(ExcelData::getGenerationUserID))
                 .collect(Collectors.toList());
-        businessService.testInterfaceByUserId(list);
+        businessService.testInterfaceByUserId(list, startTime, endTime);
         System.out.println();
     }
     @ApiOperation(value = "获取380kv分布式光伏接入情况")
     @PostMapping("/import380")
-    public void import380(MultipartFile file) throws Exception {
+    public void import380(MultipartFile file, String startTime, String endTime) throws Exception {
         List<ExcelData> list = EasyExcel.read(file.getInputStream())
                 .head(ExcelData.class)
                 .headRowNumber(2)
@@ -66,7 +66,7 @@ public class DisPhotovoltaicController {
                 .filter(t -> StrUtil.isNotBlank(t.getGenerationUserID()))
                 .filter(StreamUtil.distinctByKey(ExcelData::getGenerationUserID))
                 .collect(Collectors.toList());
-        businessService.testInterfaceByUserId(list);
+        businessService.testInterfaceByUserId(list, startTime, endTime);
         System.out.println();
     }
@@ -86,6 +86,7 @@ public class DisPhotovoltaicController {
         disPhotovoltaicService.SavaPmsPowerGenerationUser10KV(list, response);
     }
     @ApiOperation(value = "导入380kv分布式光伏接入情况", produces = MediaType.APPLICATION_OCTET_STREAM_VALUE)
     @PostMapping("/import380KV")
     public void import380KV(MultipartFile file, HttpServletResponse response) throws Exception {
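With the signature changes above, /import10 and /import380 now expect startTime and endTime form fields next to the uploaded spreadsheet. A hedged sketch of one way a client could call the 10 kV endpoint; the host, port, file name, and the absence of a class-level request-mapping prefix are assumptions, and the time format is taken from the hard-coded values removed in BusinessServiceImpl below:

import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.util.LinkedMultiValueMap;
import org.springframework.util.MultiValueMap;
import org.springframework.web.client.RestTemplate;

public class Import10Client {
    public static void main(String[] args) {
        RestTemplate restTemplate = new RestTemplate();

        // multipart body: the Excel file plus the new time-range parameters
        MultiValueMap<String, Object> body = new LinkedMultiValueMap<>();
        body.add("file", new FileSystemResource("photovoltaic-10kv.xlsx")); // hypothetical file name
        body.add("startTime", "2023-10-07 00:00:00"); // format assumed from the removed hard-coded values
        body.add("endTime", "2023-10-07 23:59:59");

        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.MULTIPART_FORM_DATA);

        // host and port are placeholders; the real service address is not in the diff
        restTemplate.postForEntity("http://localhost:8080/import10",
                new HttpEntity<>(body, headers), Void.class);
    }
}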

View File

@@ -5,7 +5,7 @@ import com.njcn.jbsyncdata.pojo.ExcelData;
 import java.util.List;
 public interface IBusinessService {
-    void testInterface(List<ExcelData> list);
-    void testInterfaceByUserId(List<ExcelData> list);
+    void testInterfaceByUserId(List<ExcelData> list, String startTime, String endTime);
 }

View File

@@ -2,13 +2,12 @@ package com.njcn.jbsyncdata.service.impl;
 import cn.hutool.core.collection.CollectionUtil;
 import cn.hutool.core.date.DatePattern;
-import cn.hutool.core.date.DateTime;
 import cn.hutool.core.date.DateUtil;
-import cn.hutool.core.io.file.FileReader;
 import cn.hutool.core.text.StrPool;
 import cn.hutool.core.util.StrUtil;
 import cn.hutool.json.JSONObject;
 import cn.hutool.json.JSONUtil;
+import com.njcn.influx.utils.InfluxDbUtils;
 import com.njcn.jbsyncdata.component.TokenComponent;
 import com.njcn.jbsyncdata.enums.MeasTypeEnum;
 import com.njcn.jbsyncdata.pojo.ExcelData;
@@ -16,11 +15,16 @@ import com.njcn.jbsyncdata.pojo.result.*;
 import com.njcn.jbsyncdata.service.IBusinessService;
 import com.njcn.jbsyncdata.util.RestTemplateUtil;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections4.ListUtils;
+import org.influxdb.InfluxDB;
+import org.influxdb.dto.BatchPoints;
+import org.influxdb.dto.Point;
 import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Service;
 import javax.annotation.Resource;
 import java.util.*;
+import java.util.concurrent.TimeUnit;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -28,43 +32,31 @@ import java.util.stream.Stream;
 @Service
 public class BusinessServiceImpl implements IBusinessService {
     @Resource
     private TokenComponent tokenComponent;
-    @Override
-    public void testInterface(List<ExcelData> list) {
-        RestTemplateUtil restTemplateUtil = new RestTemplateUtil();
-        TokenResult tokenWithRestTemplate = tokenComponent.getTokenWithRestTemplate();
-        if (null == tokenWithRestTemplate) {
-            log.error("token信息没有获取到");
-            return;
-        }
-        JSONObject jsonObject;
-        JSONObject jsonObjectSub;
-        for (ExcelData excelData : list) {
-            jsonObject = JSONUtil.createObj();
-            jsonObjectSub = JSONUtil.createObj();
-            jsonObject.set("page", 1);
-            jsonObject.set("perPage", 50);
-            List<String> psrIds = Stream.of(excelData.getStageID()).collect(Collectors.toList());
-            jsonObjectSub.set("psrIds", psrIds);
-            jsonObjectSub.set("psrType", "0401004");
-            jsonObjectSub.set("astIds", new ArrayList<>());
-            jsonObjectSub.set("astType", "");
-            jsonObjectSub.set("termIds", new ArrayList<>());
-            jsonObjectSub.set("termType", "");
-            jsonObjectSub.set("measPointIds", new ArrayList<>());
-            jsonObject.set("filter", jsonObjectSub);
-            //json assembled, send the request
-            Map<String, String> headers = new HashMap<>();
-            headers.put("x-token", tokenWithRestTemplate.getAccess_token());
-            ResponseEntity<String> response = restTemplateUtil.post(tokenComponent.getUrl().concat("/realMeasCenter/measPoint/commonQuery"), headers, jsonObject, String.class);
-            log.error("请求接口,台区号为:{},结果为:{}", excelData.getStageID(), response);
-        }
-    }
+    @Resource
+    private InfluxDbUtils influxDbUtils;
+    /**
+     * This method queries data by power-generation customer ID. Known issues:
+     * Issue 1: one customer ID plus one metric can return several records, though at most 2 measurement points have been seen so far.
+     *          Solution: match the first record and discard the rest.
+     * Issue 2: one customer ID has at most 2 measurement points and one query covers 8 metrics, i.e. up to 16 records per customer; with roughly 160,000 customer IDs in total, how can the data be queried and synced efficiently?
+     *          Solution: for now, send 500 customer IDs per request and raise the page size to 500 * 20 = 10,000, so the telemetry rows matching a customer ID never end up on another page.
+     * Issue 3: how is the time range controlled?
+     *          Solution: for now, a scheduled job (e.g. at 2 a.m. every day) queries the data of the day before yesterday and writes it in.
+     * Issue 4: querying by customer ID & metric can return empty data in several ways:
+     *          1. GeneralResult.result is null ---------------------------------- skip, no handling
+     *          2. PageResult.records is null ------------------------------------ skip, no handling
+     *          3. CommonTelemetry.telemetryValue (telemetry list) is null ------- skip, no handling
+     *          4. StatisticsData.measValue (actual value) is null --------------- write 0 for that time and metric
+     *
+     * @param excelDataList list of customer IDs
+     */
     @Override
-    public void testInterfaceByUserId(List<ExcelData> list) {
+    public void testInterfaceByUserId(List<ExcelData> excelDataList, String startTime, String endTime) {
         RestTemplateUtil restTemplateUtil = new RestTemplateUtil();
         TokenResult tokenWithRestTemplate = tokenComponent.getTokenWithRestTemplate();
         if (null == tokenWithRestTemplate) {
@@ -73,15 +65,21 @@ public class BusinessServiceImpl implements IBusinessService {
         }
         JSONObject jsonObject;
         JSONObject jsonObjectSub;
-        for (ExcelData excelData : list) {
+        //partition the generation customer IDs into chunks of 500
+        List<List<ExcelData>> partitionList = ListUtils.partition(excelDataList, 500);
+        for (List<ExcelData> excelData : partitionList) {
+            Map</*table name*/String, List<Map</*field name*/String, /*value*/String>>> typeData = new HashMap<>();
+            //handle the customer IDs batch by batch
             jsonObject = JSONUtil.createObj();
             jsonObjectSub = JSONUtil.createObj();
             jsonObject.set("page", 1);
-            jsonObject.set("perPage", 50);
-            jsonObject.set("startTime", "2023-10-07 00:00:00");
-            jsonObject.set("endTime", "2023-10-07 23:59:59");
-            List<String> userId = Stream.of("160".concat(excelData.getGenerationUserID())).collect(Collectors.toList());
-            jsonObjectSub.set("consNos", userId);
+            jsonObject.set("perPage", 10000);
+            jsonObject.set("startTime", startTime);
+            jsonObject.set("endTime", endTime);
+            //build the customer-ID list
+            List<String> generationUserIDList = excelData.stream().map(t -> "160".concat(t.getGenerationUserID())).collect(Collectors.toList());
+            jsonObjectSub.set("consNos", generationUserIDList);
+            //consType: 1 = public/dedicated transformer, 2 = low-voltage customer, 3 = photovoltaic
             jsonObjectSub.set("consType", 3);
             jsonObjectSub.set("astIds", new ArrayList<>());
             jsonObjectSub.set("astType", "");
@@ -97,25 +95,32 @@ public class BusinessServiceImpl implements IBusinessService {
             headers.put("x-token", tokenWithRestTemplate.getAccess_token());
             ResponseEntity<GeneralResult> response = restTemplateUtil.post(tokenComponent.getUrl().concat("/realMeasCenter/telemetry/commonQuery"), headers, jsonObject, GeneralResult.class);
             if (response.getStatusCodeValue() == 200 && response.getBody().getStatus().equalsIgnoreCase("000000")) {
-                PageResult result = response.getBody().getResult();
+                GeneralResult generalResult = response.getBody();
+                PageResult result = generalResult.getResult();
                 List<CommonTelemetry> records = result.getRecords();
                 if (Objects.isNull(result) || CollectionUtil.isEmpty(result.getRecords())) {
                     //log output:
-                    log.error("用户编号为{},无遥测数据;", excelData.getGenerationUserID());
+                    log.error("起始时间:{},截止时间{},无遥测数据;", startTime, endTime);
                     continue;
                 }
+                //combine metric + customer ID so the first returned record with that key can be matched: userId@measType
+                List<String> userIdConcatMeasType = new ArrayList<>();
+                for (String measType : typeList) {
+                    List<String> temp = generationUserIDList.stream().map(t -> t.concat(StrPool.AT).concat(measType)).collect(Collectors.toList());
+                    userIdConcatMeasType.addAll(temp);
+                }
                 //process each record; a customer may have several measurement points, so by default only the first match per metric is used
-                Map</*table name*/String, List<Map</*field name*/String, /*value*/String>>> typeData = new HashMap<>();
-                for (String type : typeList) {
-                    MeasTypeEnum measTypeEnumByMeasType = MeasTypeEnum.getMeasTypeEnumByMeasType(type);
                 for (CommonTelemetry commonTelemetry : records) {
-                    if (type.equalsIgnoreCase(commonTelemetry.getMeasTypeCode())) {
+                    String dataIdentify = commonTelemetry.getConsNo().concat(StrPool.AT).concat(commonTelemetry.getMeasTypeCode());
+                    if (userIdConcatMeasType.contains(dataIdentify)) {
+                        //process the first record that carries this identifier
+                        MeasTypeEnum measTypeEnumByMeasType = MeasTypeEnum.getMeasTypeEnumByMeasType(commonTelemetry.getMeasTypeCode());
                         List<StatisticsData> statisticsDataList = commonTelemetry.getTelemetryValue();
                         List<Map</*field name*/String, /*value*/String>> influxData = new ArrayList<>();
-                        for (StatisticsData statisticsData : statisticsDataList) {
+                        for (StatisticsData statisticsData : statisticsDataList) { //on a match, this loop runs 96 times
                             Map<String, String> tempInfluxData = new HashMap<>();
                             tempInfluxData.put("phasic_type", measTypeEnumByMeasType.getPhaseType());
-                            tempInfluxData.put("line_id", "160".concat(excelData.getGenerationUserID()));
+                            tempInfluxData.put("line_id", commonTelemetry.getConsNo());
                             tempInfluxData.put("quality_flag", "0");
                             tempInfluxData.put("value_type", "AVG");
                             tempInfluxData.put("time", statisticsData.getDataTime());
@@ -125,51 +130,56 @@ public class BusinessServiceImpl implements IBusinessService {
                         }
                         //measType@tableName: several metrics can share one table name; keeping both in the key avoids overwriting data
                         typeData.put(measTypeEnumByMeasType.getMeasType().concat(StrPool.AT).concat(measTypeEnumByMeasType.getTableName()), influxData);
+                        //once processed, drop the identifier to shrink the list and speed up later lookups
+                        userIdConcatMeasType.remove(dataIdentify);
                         break;
                     }
                 }
             }
+            //after each partition has been fetched, write its data into the influxdb database
+            batchInsertData(typeData);
         }
-            log.error("请求接口,台区号为:{},结果为:{}", excelData.getStageID(), response);
-        }
     }
-    public static void main(String[] args) {
-        String path = "C:\\Users\\83944\\Desktop\\test\\test.txt";
-        FileReader fileReader = new FileReader(path);
-        String jsonStr = fileReader.readString();
-        GeneralResult result = JSONUtil.toBean(jsonStr, GeneralResult.class);
-        List<CommonTelemetry> records = result.getResult().getRecords();
-        //process each record; a customer may have several measurement points, so by default only the first match per metric is used
-        Map</*table name*/String, List<Map</*field name*/String, /*value*/String>>> typeData = new HashMap<>();
-        List<String> typeList = Stream.of("PhV_phsA", "PhV_phsB", "PhV_phsC").collect(Collectors.toList());
-        for (String type : typeList) {
-            MeasTypeEnum measTypeEnumByMeasType = MeasTypeEnum.getMeasTypeEnumByMeasType(type);
-            for (CommonTelemetry commonTelemetry : records) {
-                if (type.equalsIgnoreCase(commonTelemetry.getMeasTypeCode())) {
-                    List<StatisticsData> statisticsDataList = commonTelemetry.getTelemetryValue();
-                    List<Map</*field name*/String, /*value*/String>> influxData = new ArrayList<>();
-                    for (StatisticsData statisticsData : statisticsDataList) {
-                        Map<String, String> tempInfluxData = new HashMap<>();
-                        tempInfluxData.put("phasic_type", measTypeEnumByMeasType.getPhaseType());
-                        tempInfluxData.put("line_id", "1602514341899");
-                        tempInfluxData.put("quality_flag", "0");
-                        tempInfluxData.put("value_type", "AVG");
-                        tempInfluxData.put("time", statisticsData.getDataTime());
-                        //if blank, set the value to 0; the other values in the table are all 0 as well
-                        tempInfluxData.put(measTypeEnumByMeasType.getFieldName(), StrUtil.isBlank(statisticsData.getMeasValue()) ? "0" : statisticsData.getMeasValue());
-                        influxData.add(tempInfluxData);
-                    }
-                    typeData.put(measTypeEnumByMeasType.getMeasType().concat("@").concat(measTypeEnumByMeasType.getTableName()), influxData);
-                    break;
-                }
-            }
-        }
-        System.out.println(11);
-    }
+    /**
+     * Batch insert into influxDB
+     * @param typeData data fetched remotely by customer ID: Map</table name/String, List<Map</field name/String, /value/String>>>
+     */
+    private void batchInsertData(Map<String, List<Map<String, String>>> typeData) {
+        List<String> sqlList = new ArrayList<>();
+        Set<String> tableNames = typeData.keySet();
+        for (String tableName : tableNames) {
+            List<Map<String, String>> data = typeData.get(tableName);
+            tableName = tableName.substring(tableName.indexOf(StrPool.AT) + 1);
+            //entity class the data needs to be converted into
+            for (Map<String, String> datum : data) {
+                //tag data
+                Map<String, String> tags = new HashMap<>();
+                tags.put("phasic_type", datum.get("phasic_type"));
+                datum.remove("phasic_type");
+                tags.put("line_id", datum.get("line_id"));
+                datum.remove("line_id");
+                tags.put("quality_flag", datum.get("quality_flag"));
+                datum.remove("quality_flag");
+                tags.put("value_type", datum.get("value_type"));
+                datum.remove("value_type");
+                String time = datum.get("time");
+                datum.remove("time");
+                //after the tag entries are removed, everything left is field data; field names are not fixed, so loop over them
+                Map<String, Object> fields = new HashMap<>();
+                Set<String> fieldNames = datum.keySet();
+                for (String fieldName : fieldNames) {
+                    fields.put(fieldName, Double.parseDouble(datum.get(fieldName)));
+                }
+                Point point = influxDbUtils.pointBuilder(tableName, DateUtil.parse(time, DatePattern.NORM_DATETIME_FORMATTER).getTime() + 8 * 3600 * 1000, TimeUnit.MILLISECONDS, tags, fields);
+                BatchPoints batchPoints = BatchPoints.database(influxDbUtils.getDbName()).retentionPolicy("").consistency(InfluxDB.ConsistencyLevel.ALL).build();
+                batchPoints.point(point);
+                sqlList.add(batchPoints.lineProtocol());
+            }
+        }
+        influxDbUtils.batchInsert(influxDbUtils.getDbName(), "", InfluxDB.ConsistencyLevel.ALL, TimeUnit.MILLISECONDS, sqlList);
+    }
 }
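The new batchInsertData method goes through the project's own InfluxDbUtils wrapper (pointBuilder, getDbName, batchInsert), whose exact signatures are only visible from this diff, and it shifts the parsed timestamp by 8 * 3600 * 1000 ms before writing, apparently as a UTC+8 offset adjustment. For orientation, a minimal sketch of the same kind of write using the plain influxdb-java 2.22 API pinned in the pom; the connection details, database, measurement, and field names below are placeholders, not values taken from the project:

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;

import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.BatchPoints;
import org.influxdb.dto.Point;

public class InfluxWriteSketch {
    public static void main(String[] args) {
        // connection details are placeholders, not taken from the project
        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "admin", "admin");
        String dbName = "pv_data"; // hypothetical database name

        // tags mirror the ones built in BusinessServiceImpl
        Map<String, String> tags = new HashMap<>();
        tags.put("phasic_type", "A");
        tags.put("line_id", "1602514341899");
        tags.put("quality_flag", "0");
        tags.put("value_type", "AVG");

        // field name is illustrative; the real one comes from MeasTypeEnum.getFieldName()
        Map<String, Object> fields = new HashMap<>();
        fields.put("voltage_avg", 231.5);

        // one point per telemetry sample; measurement name is illustrative
        Point point = Point.measurement("data_voltage")
                .time(System.currentTimeMillis(), TimeUnit.MILLISECONDS)
                .tag(tags)
                .fields(fields)
                .build();

        BatchPoints batchPoints = BatchPoints.database(dbName)
                .consistency(InfluxDB.ConsistencyLevel.ALL)
                .build();
        batchPoints.point(point);

        // write the batch and release the connection
        influxDB.write(batchPoints);
        influxDB.close();
    }
}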