Data cleaning algorithms

xy
2025-02-26 11:07:34 +08:00
parent 92d540e497
commit f1ddff6bcb
99 changed files with 3937 additions and 224 deletions

View File

@@ -99,6 +99,11 @@
<artifactId>liteflow-rule-nacos</artifactId>
<version>2.11.2</version>
</dependency>
<dependency>
<groupId>com.njcn</groupId>
<artifactId>pq-device-api</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
<build>

View File

@@ -29,7 +29,8 @@ public class MeasurementExecutor extends BaseExecutor {
private IDataCleanService dataCleanService;
/**
* Data cleaning dataV
* Data cleaning - voltage table
* dataV table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataVClean", nodeType = NodeTypeEnum.COMMON)
@@ -41,6 +42,120 @@ public class MeasurementExecutor extends BaseExecutor {
dataCleanService.dataVCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - current table
* dataI table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataIClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataICleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataIClean", nodeType = NodeTypeEnum.COMMON)
public void dataICleanProcess(NodeComponent bindCmp) {
dataCleanService.dataICleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - long-term flicker table
* dataPlt table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataPltClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataPltCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataPltClean", nodeType = NodeTypeEnum.COMMON)
public void dataPltCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataPltCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - interharmonic voltage table
* DataInHarmV table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataInHarmVClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataInHarmVCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataInHarmVClean", nodeType = NodeTypeEnum.COMMON)
public void dataInHarmVCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataInHarmVCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - harmonic voltage content ratio table
* DataHarmRateV table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataHarmRateVClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataHarmRateVCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataHarmRateVClean", nodeType = NodeTypeEnum.COMMON)
public void dataHarmRateVCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataHarmRateVCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - active power table
* DataHarmPowerP table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataHarmPowerPClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataHarmPowerPCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataHarmPowerPClean", nodeType = NodeTypeEnum.COMMON)
public void dataHarmPowerPCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataHarmPowerPCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - harmonic voltage phase angle table
* DataHarmPhasicV table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataHarmPhasicVClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataHarmPhasicVCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataHarmPhasicVClean", nodeType = NodeTypeEnum.COMMON)
public void dataHarmPhasicVCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataHarmPhasicVCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - voltage fluctuation table
* DataFluc table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataFlucClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataFlucCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataFlucClean", nodeType = NodeTypeEnum.COMMON)
public void dataFlucCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataFlucCleanHandler(bindCmp.getRequestData());
}
/**
* Data cleaning - flicker table
* DataFlicker table
* @author xy
*/
@LiteflowMethod(value = LiteFlowMethodEnum.IS_ACCESS, nodeId = "dataFlickerClean", nodeType = NodeTypeEnum.COMMON)
public boolean dataFlickerCleanAccess(NodeComponent bindCmp) {
return isAccess(bindCmp);
}
@LiteflowMethod(value = LiteFlowMethodEnum.PROCESS, nodeId = "dataFlickerClean", nodeType = NodeTypeEnum.COMMON)
public void dataFlickerCleanProcess(NodeComponent bindCmp) {
dataCleanService.dataFlickerCleanHandler(bindCmp.getRequestData());
}
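// Note (an assumption, not shown in this commit): each nodeId registered above (dataVClean, dataIClean,
// dataPltClean, dataInHarmVClean, dataHarmRateVClean, dataHarmPowerPClean, dataHarmPhasicVClean,
// dataFlucClean, dataFlickerClean) still has to be referenced from a LiteFlow rule loaded through
// liteflow-rule-nacos, e.g. an EL expression along the lines of WHEN(dataVClean, dataIClean, ...);
// the actual chain definition lives outside this diff.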
/**
* Monitoring point report - daily table (r_stat_data_*_d)
* @author xy

View File

@@ -9,9 +9,73 @@ public interface IDataCleanService {
/***
* dataV data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataVCleanHandler(CalculatedParam calculatedParam);
/***
* dataI data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataICleanHandler(CalculatedParam calculatedParam);
/***
* dataPlt data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataPltCleanHandler(CalculatedParam calculatedParam);
/***
* dataInHarmV data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataInHarmVCleanHandler(CalculatedParam calculatedParam);
/***
* dataHarmRateV data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataHarmRateVCleanHandler(CalculatedParam calculatedParam);
/***
* dataHarmPowerP data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataHarmPowerPCleanHandler(CalculatedParam calculatedParam);
/***
* dataHarmPhasicV data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataHarmPhasicVCleanHandler(CalculatedParam calculatedParam);
/***
* dataFluc data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataFlucCleanHandler(CalculatedParam calculatedParam);
/***
* dataFlicker data cleaning
* The raw data is not flagged; abnormal records are queried out and stored separately in the detail table
* @author xy
* @param calculatedParam query conditions
*/
void dataFlickerCleanHandler(CalculatedParam calculatedParam);
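/*
* Caller-side sketch (illustrative assumption; only getIdList() and getDataDate() are visible in this
* commit, so the setter names and the concrete type of the data date are not guaranteed):
*
*   CalculatedParam param = new CalculatedParam();
*   param.setIdList(lineIds);     // ids of the monitoring points whose raw data should be checked
*   param.setDataDate(dataDate);  // the day to clean
*   dataCleanService.dataVCleanHandler(param);
*
* Each handler queries that day's raw rows, compares them against the configured reasonable ranges,
* and stores only the out-of-range rows into the verify table.
*/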
}

View File

@@ -3,10 +3,16 @@ package com.njcn.algorithm.serviceimpl.line;
import cn.hutool.core.collection.CollUtil;
import com.njcn.algorithm.pojo.bo.CalculatedParam;
import com.njcn.algorithm.service.line.IDataCleanService;
import com.njcn.dataProcess.api.DataVFeignClient;
import com.njcn.dataProcess.api.*;
import com.njcn.dataProcess.enums.DataCleanEnum;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import com.njcn.dataProcess.pojo.dto.*;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.util.DataCommonUtils;
import com.njcn.dataProcess.util.TimeUtils;
import com.njcn.device.pq.api.LineFeignClient;
import com.njcn.device.pq.pojo.vo.LineDetailVO;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.ListUtils;
@@ -15,8 +21,11 @@ import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.ArrayList;
import java.util.List;
import java.lang.reflect.Method;
import java.time.LocalDateTime;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
* @author xy
@@ -31,26 +40,833 @@ public class DataCleanServiceImpl implements IDataCleanService {
@Resource
private DataVFeignClient dataVFeignClient;
@Resource
private DataIFeignClient dataIFeignClient;
@Resource
private DataPltFeignClient dataPltFeignClient;
@Resource
private DataInharmVFeignClient dataInharmVFeignClient;
@Resource
private DataHarmRateVFeignClient dataHarmRateVFeignClient;
@Resource
private DataHarmpowerPFeignClient dataHarmpowerPFeignClient;
@Resource
private DataHarmphasicVFeignClient dataHarmphasicVFeignClient;
@Resource
private DataFlucFeignClient dataFlucFeignClient;
@Resource
private DataFlickerFeignClient dataFlickerFeignClient;
@Resource
private PqDataVerifyFeignClient pqDataVerifyFeignClient;
@Resource
private PqReasonableRangeFeignClient pqReasonableRangeFeignClient;
@Resource
private LineFeignClient lineFeignClient;
@Override
public void dataVCleanHandler(CalculatedParam calculatedParam) {
List<DataVDto> result = new ArrayList<>();
logger.info("{},dataV表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataV.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
List<LineDetailVO.Detail> lineDetail = lineFeignClient.getLineDetailByIds(lineList).getData();
if (CollUtil.isEmpty(lineDetail)) {
logger.error("监测点集合为空,无法计算!");
return;
}
Map<String,LineDetailVO.Detail> lineMap = lineDetail.stream().collect(Collectors.toMap(LineDetailVO.Detail::getLineId, Function.identity()));
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(calculatedParam.getIdList(),NUM);
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataVDto> partList = dataVFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
item.setAbnormalFlag(0);
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataV(map, lineMap.get(item.getLineId()),item);
result.addAll(pqDataVerifies);
});
result.addAll(partList);
}
});
if (CollUtil.isNotEmpty(result)) {
dataVFeignClient.addInfluxDbList(result);
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataICleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataI表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataI.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
List<LineDetailVO.Detail> lineDetail = lineFeignClient.getLineDetailByIds(lineList).getData();
if (CollUtil.isEmpty(lineDetail)) {
logger.error("监测点集合为空,无法计算!");
return;
}
Map<String,LineDetailVO.Detail> lineMap = lineDetail.stream().collect(Collectors.toMap(LineDetailVO.Detail::getLineId, Function.identity()));
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataIDto> partList = dataIFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataI(map, lineMap.get(item.getLineId()),item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataPltCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataPlt表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataPlt.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataPltDto> partList = dataPltFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataPlt(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataInHarmVCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataInHarmV表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataInHarmV.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataHarmDto> partList = dataInharmVFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataInHarmV(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataHarmRateVCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataHarmRateV表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataHarmRateV.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataHarmDto> partList = dataHarmRateVFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataHarmRateV(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataHarmPowerPCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataHarmPowerP表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataHarmPowerP.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataPowerPDto> partList = dataHarmpowerPFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataHarmPowerP(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataHarmPhasicVCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataHarmPhasicV表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataHarmPhasicV.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataHarmDto> partList = dataHarmphasicVFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataHarmPhasicV(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataFlucCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataFluc表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataFluc.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataFlucDto> partList = dataFlucFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataFluc(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
@Override
public void dataFlickerCleanHandler(CalculatedParam calculatedParam) {
logger.info("{},dataFlicker表异常数据算法执行=====》", LocalDateTime.now());
List<PqDataVerify> result = new ArrayList<>();
// Fetch the configured reasonable-range standards
Map<String, PqReasonableRangeDto> map = getStandardData(DataCleanEnum.DataFlicker.getCode());
// Fetch the monitoring point ledger information
List<String> lineList = calculatedParam.getIdList();
// Process the data in partitions
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
lineParam.setStartTime(TimeUtils.getBeginOfDay(calculatedParam.getDataDate()));
lineParam.setEndTime(TimeUtils.getEndOfDay(calculatedParam.getDataDate()));
List<List<String>> pendingIds = ListUtils.partition(lineList,NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
List<DataFlickerDto> partList = dataFlickerFeignClient.getRawData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
// Data cleaning
List<PqDataVerify> pqDataVerifies = judgeDataFlicker(map,item);
result.addAll(pqDataVerifies);
});
}
});
if (CollUtil.isNotEmpty(result)) {
pqDataVerifyFeignClient.insertData(result);
}
}
/**
* Fetch the reasonable ranges configured for the given table
*/
public Map<String, PqReasonableRangeDto> getStandardData(String tableName) {
Map<String, PqReasonableRangeDto> pqReasonableRangeDtoMap = new HashMap<>();
DataCleanParam param = new DataCleanParam();
param.setSystemType(DataCleanEnum.Pqs.getCode());
param.setDataSource(DataCleanEnum.InfluxDB.getCode());
param.setTableName(tableName);
List<PqReasonableRangeDto> list = pqReasonableRangeFeignClient.getData(param).getData();
if (CollUtil.isNotEmpty(list)) {
pqReasonableRangeDtoMap = list.stream().collect(Collectors.toMap(PqReasonableRangeDto::getIndexCode, Function.identity()));
}
return pqReasonableRangeDtoMap;
}
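// The returned map is keyed by index code (the DataCleanEnum codes such as Freq, RmsV or VThd used below),
// each entry carrying the configured min/max of the reasonable range. As the judge* methods show, the
// voltage-magnitude indexes treat min/max as per-unit factors scaled by DataCommonUtils.getVoltageData(...),
// while frequency and ratio indexes use them as absolute bounds.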
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataV(Map<String, PqReasonableRangeDto> map, LineDetailVO.Detail line, DataVDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto;
List<String> phaseList;
// Frequency check
pqReasonableRangeDto = map.get(DataCleanEnum.Freq.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getFreq() < pqReasonableRangeDto.getMinValue() || dto.getFreq() > pqReasonableRangeDto.getMaxValue()) {
//log.info("dataV-频率数据异常,已清洗!数据值:{},数据时间:{}", dto.getFreq(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getFreq()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
// Frequency deviation check
pqReasonableRangeDto = map.get(DataCleanEnum.FreqDev.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getFreqDev() < pqReasonableRangeDto.getMinValue() || dto.getFreqDev() > pqReasonableRangeDto.getMaxValue()) {
//log.info("dataV-频率偏差数据异常,已清洗!数据值:{},数据时间:{}", dto.getFreqDev(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getFreqDev()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
// Phase voltage RMS
pqReasonableRangeDto = map.get(DataCleanEnum.RmsV.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getRms() < (pqReasonableRangeDto.getMinValue()* DataCommonUtils.getVoltageData(line.getVoltageLevel()))
|| dto.getRms() > (pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))) {
//log.info("dataV-相电压有效值数据异常,已清洗!数据值:{},数据时间:{}", dto.getRms(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getRms()
,pqReasonableRangeDto.getMinValue()* DataCommonUtils.getVoltageData(line.getVoltageLevel())
,pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()));
list.add(pqDataVerify);
}
}
// Positive-sequence voltage
pqReasonableRangeDto = map.get(DataCleanEnum.VPos.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVPos() < (pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))
|| dto.getVPos() > (pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))) {
//log.info("dataV-正序电压数据异常,已清洗!数据值:{},数据时间:{}", dto.getVPos(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVPos()
,pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel())
,pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()));
list.add(pqDataVerify);
}
}
// Negative-sequence voltage
pqReasonableRangeDto = map.get(DataCleanEnum.VNeg.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVNeg() < (pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))
|| dto.getVNeg() > (pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))) {
//log.info("dataV-负序电压数据异常,已清洗!数据值:{},数据时间:{}", dto.getVNeg(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVNeg()
,pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel())
,pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()));
list.add(pqDataVerify);
}
}
// Zero-sequence voltage
pqReasonableRangeDto = map.get(DataCleanEnum.VZero.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVZero() < (pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))
|| dto.getVZero() > (pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))) {
//log.info("dataV-零序电压数据异常,已清洗!数据值:{},数据时间:{}", dto.getVZero(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVZero()
,pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel())
,pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()));
list.add(pqDataVerify);
}
}
// Voltage unbalance factor
pqReasonableRangeDto = map.get(DataCleanEnum.VUnbalance.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVUnbalance() < pqReasonableRangeDto.getMinValue() || dto.getVUnbalance() > pqReasonableRangeDto.getMaxValue()) {
//log.info("dataV-电压不平衡度数据异常,已清洗!数据值:{},数据时间:{}", dto.getVUnbalance(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVUnbalance()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
// Line (phase-to-phase) voltage RMS
pqReasonableRangeDto = map.get(DataCleanEnum.RmsLvr.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getRmsLvr() < (pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))
|| dto.getRmsLvr() > (pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))) {
//log.info("dataV-线电压有效值数据异常,已清洗!数据值:{},数据时间:{}", dto.getRmsLvr(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getRmsLvr()
,pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel())
,pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()));
list.add(pqDataVerify);
}
}
// Line voltage positive deviation
pqReasonableRangeDto = map.get(DataCleanEnum.VuDev.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVuDev() < pqReasonableRangeDto.getMinValue() || dto.getVuDev() > pqReasonableRangeDto.getMaxValue()) {
//log.info("dataV-线电压正偏差数据异常,已清洗!数据值:{},数据时间:{}", dto.getVuDev(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVuDev()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
// Line voltage negative deviation
pqReasonableRangeDto = map.get(DataCleanEnum.VlDev.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVlDev() < pqReasonableRangeDto.getMinValue() || dto.getVlDev() > pqReasonableRangeDto.getMaxValue()) {
//log.info("dataV-线电压负偏差数据异常,已清洗!数据值:{},数据时间:{}", dto.getVlDev(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVlDev()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
// Total harmonic distortion of voltage (THDu)
pqReasonableRangeDto = map.get(DataCleanEnum.VThd.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getVThd() < pqReasonableRangeDto.getMinValue() || dto.getVThd() > pqReasonableRangeDto.getMaxValue()) {
//log.info("dataV-电压总谐波畸变率数据异常,已清洗!数据值:{},数据时间:{}", dto.getVThd(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getVThd()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
// Fundamental RMS of the phase (line) voltage
pqReasonableRangeDto = map.get(DataCleanEnum.V_Data.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getV1() < (pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))
|| dto.getV1() > (pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()))) {
//log.info("dataV-基波电压数据异常,已清洗!数据值:{},数据时间:{}", dto.getV1(), dto.getMinTime());
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getV1()
,pqReasonableRangeDto.getMinValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel())
,pqReasonableRangeDto.getMaxValue()*DataCommonUtils.getVoltageData(line.getVoltageLevel()));
list.add(pqDataVerify);
}
}
return list;
}
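// The eleven range checks above repeat the same pattern; a minimal refactoring sketch that each block could
// delegate to (the helper name checkRange and the factor parameter are hypothetical, not part of this commit):
private void checkRange(Map<String, PqReasonableRangeDto> map, String indexCode, String lineId, String time,
String valueType, String phasicType, Double value, double factor, List<PqDataVerify> out) {
PqReasonableRangeDto range = map.get(indexCode);
if (range == null || value == null) {
// no standard configured for this index, or the value is missing
return;
}
List<String> phases = Arrays.asList(range.getPhaseType().split(","));
if (!phases.contains(phasicType)) {
return;
}
double min = range.getMinValue() * factor;
double max = range.getMaxValue() * factor;
if (value < min || value > max) {
out.add(getPqDataVerify(lineId, time, valueType, phasicType, range.getIndexCode(),
range.getIndexName(), range.getInfluxdbTableName(), value, min, max));
}
}
// Example, equivalent to the frequency block above:
// checkRange(map, DataCleanEnum.Freq.getCode(), dto.getLineId(), dto.getMinTime(), dto.getValueType(),
//         dto.getPhasicType(), dto.getFreq(), 1.0, list);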
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataI(Map<String, PqReasonableRangeDto> map, LineDetailVO.Detail line, DataIDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto;
List<String> phaseList;
// Current RMS, checked against the CT rated primary current
pqReasonableRangeDto = map.get(DataCleanEnum.RmsI.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getRms() > line.getCT1()) {
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getRms()
,pqReasonableRangeDto.getMinValue()
,line.getCT1());
list.add(pqDataVerify);
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataPlt(Map<String, PqReasonableRangeDto> map, DataPltDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto;
List<String> phaseList;
// Long-term flicker severity (Plt)
pqReasonableRangeDto = map.get(DataCleanEnum.Plt.getCode());
phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getPlt() < pqReasonableRangeDto.getMinValue() || dto.getPlt() > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getPlt()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataInHarmV(Map<String, PqReasonableRangeDto> map, DataHarmDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto = map.get(DataCleanEnum.V_InHarm.getCode());
List<String> phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
// Orders 1 through 50
for (int i = 1; i <= 50; i++) {
// Read the value of dto.getV{i} via reflection
double vValue = getVValue(dto, i);
if (vValue < pqReasonableRangeDto.getMinValue() || vValue > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(
dto.getLineId(),
dto.getMinTime(),
dto.getValueType(),
dto.getPhasicType(),
pqReasonableRangeDto.getIndexCode(),
(i - 0.5) + pqReasonableRangeDto.getIndexName(),
pqReasonableRangeDto.getInfluxdbTableName(),
vValue,
pqReasonableRangeDto.getMinValue(),
pqReasonableRangeDto.getMaxValue()
);
list.add(pqDataVerify);
}
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataHarmRateV(Map<String, PqReasonableRangeDto> map, DataHarmDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto = map.get(DataCleanEnum.V_Rate.getCode());
List<String> phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
// Orders 2 through 50
for (int i = 2; i <= 50; i++) {
// Read the value of dto.getV{i} via reflection
double vValue = getVValue(dto, i);
if (vValue < pqReasonableRangeDto.getMinValue() || vValue > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(
dto.getLineId(),
dto.getMinTime(),
dto.getValueType(),
dto.getPhasicType(),
pqReasonableRangeDto.getIndexCode(),
i + pqReasonableRangeDto.getIndexName(),
pqReasonableRangeDto.getInfluxdbTableName(),
vValue,
pqReasonableRangeDto.getMinValue(),
pqReasonableRangeDto.getMaxValue()
);
list.add(pqDataVerify);
}
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataHarmPowerP(Map<String, PqReasonableRangeDto> map, DataPowerPDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto = map.get(DataCleanEnum.Pf.getCode());
List<String> phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getPf() < pqReasonableRangeDto.getMinValue() || dto.getPf() > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getPf()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataHarmPhasicV(Map<String, PqReasonableRangeDto> map, DataHarmDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto = map.get(DataCleanEnum.V.getCode());
List<String> phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
// Orders 1 through 50
for (int i = 1; i <= 50; i++) {
// Read the value of dto.getV{i} via reflection
double vValue = getVValue(dto, i);
if (vValue < pqReasonableRangeDto.getMinValue() || vValue > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(
dto.getLineId(),
dto.getMinTime(),
dto.getValueType(),
dto.getPhasicType(),
pqReasonableRangeDto.getIndexCode(),
i + pqReasonableRangeDto.getIndexName(),
pqReasonableRangeDto.getInfluxdbTableName(),
vValue,
pqReasonableRangeDto.getMinValue(),
pqReasonableRangeDto.getMaxValue()
);
list.add(pqDataVerify);
}
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataFluc(Map<String, PqReasonableRangeDto> map, DataFlucDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto = map.get(DataCleanEnum.Fluc.getCode());
List<String> phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getFluc() < pqReasonableRangeDto.getMinValue() || dto.getFluc() > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getFluc()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
return list;
}
/**
* Look up the standard range for each metric and compare the data against it
*/
public List<PqDataVerify> judgeDataFlicker(Map<String, PqReasonableRangeDto> map, DataFlickerDto dto) {
List<PqDataVerify> list = new ArrayList<>();
PqReasonableRangeDto pqReasonableRangeDto = map.get(DataCleanEnum.Pst.getCode());
List<String> phaseList = Arrays.asList(pqReasonableRangeDto.getPhaseType().split(","));
if (phaseList.contains(dto.getPhasicType())) {
if (dto.getPst() < pqReasonableRangeDto.getMinValue() || dto.getPst() > pqReasonableRangeDto.getMaxValue()) {
PqDataVerify pqDataVerify = getPqDataVerify(dto.getLineId()
,dto.getMinTime()
,dto.getValueType()
,dto.getPhasicType()
,pqReasonableRangeDto.getIndexCode()
,pqReasonableRangeDto.getIndexName()
,pqReasonableRangeDto.getInfluxdbTableName()
,dto.getPst()
,pqReasonableRangeDto.getMinValue()
,pqReasonableRangeDto.getMaxValue());
list.add(pqDataVerify);
}
}
return list;
}
public PqDataVerify getPqDataVerify(String lineId,String time,String valueType, String phasicType, String indexCode, String indexName, String indexTable, Double data, Double minValue, Double maxValue) {
PqDataVerify pqDataVerify = new PqDataVerify();
pqDataVerify.setLineId(lineId);
pqDataVerify.setTime(TimeUtils.StringToLocalDateTime(time));
pqDataVerify.setValueType(valueType);
pqDataVerify.setPhasicType(phasicType);
pqDataVerify.setIndexCode(indexCode);
pqDataVerify.setIndexName(indexName);
pqDataVerify.setIndexTable(indexTable);
// Clamp the value here: magnitudes beyond the database column range would make the insert fail, so the upper and lower bounds are capped at ten million
if (data > 10000000.0) {
pqDataVerify.setAbnormalValue(9999999.0);
} else if (data < -10000000.0) {
pqDataVerify.setAbnormalValue(-9999999.0);
} else {
pqDataVerify.setAbnormalValue(data);
}
pqDataVerify.setMinValue(minValue);
pqDataVerify.setMaxValue(maxValue);
return pqDataVerify;
}
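// For example, an abnormal reading of 3.2e9 is stored as 9999999.0 (and -3.2e9 as -9999999.0), so the row
// can still be inserted into the verify table instead of failing on a column-range error.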
private double getVValue(DataHarmDto dto, int index) {
try {
Method method = DataHarmDto.class.getMethod("getV" + index);
return (double) method.invoke(dto);
} catch (Exception e) {
throw new RuntimeException("Failed to get V value for index: " + index, e);
}
}
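// Optional optimization sketch (not part of this commit): getMethod() is resolved again for every row and
// every harmonic order. Caching the resolved getters keeps the reflective lookup to one per order; it would
// need an extra import of java.util.concurrent.ConcurrentHashMap, and the field/method names here are hypothetical.
private static final Map<Integer, Method> V_GETTERS = new ConcurrentHashMap<>();
private double getVValueCached(DataHarmDto dto, int index) {
Method method = V_GETTERS.computeIfAbsent(index, i -> {
try {
return DataHarmDto.class.getMethod("getV" + i);
} catch (NoSuchMethodException e) {
throw new IllegalStateException("No getter for V" + i, e);
}
});
try {
return (double) method.invoke(dto);
} catch (Exception e) {
throw new RuntimeException("Failed to get V value for index: " + index, e);
}
}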
}

View File

@@ -4,9 +4,12 @@ import cn.hutool.core.collection.CollUtil;
import com.njcn.algorithm.pojo.bo.CalculatedParam;
import com.njcn.algorithm.service.line.IDayDataService;
import com.njcn.dataProcess.api.DataVFeignClient;
import com.njcn.dataProcess.api.PqDataVerifyFeignClient;
import com.njcn.dataProcess.enums.DataCleanEnum;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.util.TimeUtils;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import lombok.RequiredArgsConstructor;
@@ -19,6 +22,7 @@ import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author xy
@@ -32,10 +36,12 @@ public class DayDataServiceImpl implements IDayDataService {
private final static Integer NUM = 100;
@Resource
private DataVFeignClient dataVFeignClient;
@Resource
private PqDataVerifyFeignClient pqDataVerifyFeignClient;
@Override
public void dataVHandler(CalculatedParam calculatedParam) {
logger.info("{},dataV表转r_stat_data_v_d开始=====》", LocalDateTime.now());
logger.info("{},dataV表转r_stat_data_v_d算法开始=====》", LocalDateTime.now());
List<DataVDto> result = new ArrayList<>();
// Fetch minute-level data through the remote interface
LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
@@ -45,6 +51,9 @@ public class DayDataServiceImpl implements IDayDataService {
List<List<String>> pendingIds = ListUtils.partition(calculatedParam.getIdList(),NUM);
pendingIds.forEach(list->{
lineParam.setLineId(list);
// Collect the time points of abnormal (cleaned-out) data
getAbnormalData(lineParam);
// Fetch the raw data
List<CommonMinuteDto> partList = dataVFeignClient.getBaseData(lineParam).getData();
if (CollUtil.isNotEmpty(partList)) {
partList.forEach(item->{
@@ -61,7 +70,7 @@ public class DayDataServiceImpl implements IDayDataService {
dto.setLineId(item.getLineId());
dto.setPhasicType(item2.getPhasicType());
dto.setValueType(item3.getValueType());
dto.setQualityFlag("0");
dto.setQualityFlag(item.getQualityFlag());
channelDataVHandler(item3,valueTypes,dto,true);
result.add(dto);
});
@@ -75,87 +84,103 @@ public class DayDataServiceImpl implements IDayDataService {
}
}
// Collect the set of abnormal data
public void getAbnormalData(LineCountEvaluateParam lineParam) {
lineParam.setTableName(DataCleanEnum.DataV.getCode());
// Query the abnormal data so that it can be excluded from the aggregation
List<PqDataVerify> pqDataVerifies = pqDataVerifyFeignClient.queryData(lineParam).getData();
if (CollUtil.isNotEmpty(pqDataVerifies)) {
Map<String, List<String>> timeMap = pqDataVerifies.stream()
.collect(Collectors.groupingBy(
PqDataVerify::getLineId,
Collectors.mapping(item->TimeUtils.LocalDateTimeToString(item.getTime()), Collectors.toList())
));
lineParam.setAbnormalTime(timeMap);
}
}
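// The resulting map associates each lineId with the timestamps of its cleaned-out abnormal rows
// (hypothetical example: "line-001" -> ["2025-02-25 00:01:00", "2025-02-25 00:02:00"]), so that the
// downstream minute-data query can skip exactly those points when building the daily statistics.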
// Metric handling
// pojo1 holds the normal value set
// pojo2 holds the average value set, used in some cases to compute CP95
public void channelDataVHandler(CommonMinuteDto.ValueType pojo1, CommonMinuteDto.ValueType pojo2, DataVDto dto, boolean scheme) {
CommonMinuteDto.ValueType valueType;
String type;
if (dto.getValueType().equalsIgnoreCase(InfluxDbSqlConstant.CP95) && !scheme) {
valueType = pojo2;
type = "CP95";
} else {
valueType = pojo1;
type = pojo1.getValueType();
}
// Values are taken in the fixed order of the metric list
dto.setFreq(getData(valueType.getValueType(),valueType.getValueList().get(0),scheme));
dto.setFreqDev(getData(valueType.getValueType(),valueType.getValueList().get(1),scheme));
dto.setRms(getData(valueType.getValueType(),valueType.getValueList().get(2),scheme));
dto.setRmsLvr(getData(valueType.getValueType(),valueType.getValueList().get(3),scheme));
dto.setVNeg(getData(valueType.getValueType(),valueType.getValueList().get(4),scheme));
dto.setVPos(getData(valueType.getValueType(),valueType.getValueList().get(5),scheme));
dto.setVThd(getData(valueType.getValueType(),valueType.getValueList().get(6),scheme));
dto.setVUnbalance(getData(valueType.getValueType(),valueType.getValueList().get(7),scheme));
dto.setVZero(getData(valueType.getValueType(),valueType.getValueList().get(8),scheme));
dto.setVlDev(getData(valueType.getValueType(),valueType.getValueList().get(9),scheme));
dto.setVuDev(getData(valueType.getValueType(),valueType.getValueList().get(10),scheme));
dto.setFreq(getData(type,valueType.getValueList().get(0),scheme));
dto.setFreqDev(getData(type,valueType.getValueList().get(1),scheme));
dto.setRms(getData(type,valueType.getValueList().get(2),scheme));
dto.setRmsLvr(getData(type,valueType.getValueList().get(3),scheme));
dto.setVNeg(getData(type,valueType.getValueList().get(4),scheme));
dto.setVPos(getData(type,valueType.getValueList().get(5),scheme));
dto.setVThd(getData(type,valueType.getValueList().get(6),scheme));
dto.setVUnbalance(getData(type,valueType.getValueList().get(7),scheme));
dto.setVZero(getData(type,valueType.getValueList().get(8),scheme));
dto.setVlDev(getData(type,valueType.getValueList().get(9),scheme));
dto.setVuDev(getData(type,valueType.getValueList().get(10),scheme));
dto.setV1(getData(valueType.getValueType(),valueType.getValueList().get(11),scheme));
dto.setV2(getData(valueType.getValueType(),valueType.getValueList().get(12),scheme));
dto.setV3(getData(valueType.getValueType(),valueType.getValueList().get(13),scheme));
dto.setV4(getData(valueType.getValueType(),valueType.getValueList().get(14),scheme));
dto.setV5(getData(valueType.getValueType(),valueType.getValueList().get(15),scheme));
dto.setV6(getData(valueType.getValueType(),valueType.getValueList().get(16),scheme));
dto.setV7(getData(valueType.getValueType(),valueType.getValueList().get(17),scheme));
dto.setV8(getData(valueType.getValueType(),valueType.getValueList().get(18),scheme));
dto.setV9(getData(valueType.getValueType(),valueType.getValueList().get(19),scheme));
dto.setV10(getData(valueType.getValueType(),valueType.getValueList().get(20),scheme));
dto.setV1(getData(type,valueType.getValueList().get(11),scheme));
dto.setV2(getData(type,valueType.getValueList().get(12),scheme));
dto.setV3(getData(type,valueType.getValueList().get(13),scheme));
dto.setV4(getData(type,valueType.getValueList().get(14),scheme));
dto.setV5(getData(type,valueType.getValueList().get(15),scheme));
dto.setV6(getData(type,valueType.getValueList().get(16),scheme));
dto.setV7(getData(type,valueType.getValueList().get(17),scheme));
dto.setV8(getData(type,valueType.getValueList().get(18),scheme));
dto.setV9(getData(type,valueType.getValueList().get(19),scheme));
dto.setV10(getData(type,valueType.getValueList().get(20),scheme));
dto.setV11(getData(valueType.getValueType(),valueType.getValueList().get(21),scheme));
dto.setV12(getData(valueType.getValueType(),valueType.getValueList().get(22),scheme));
dto.setV13(getData(valueType.getValueType(),valueType.getValueList().get(23),scheme));
dto.setV14(getData(valueType.getValueType(),valueType.getValueList().get(24),scheme));
dto.setV15(getData(valueType.getValueType(),valueType.getValueList().get(25),scheme));
dto.setV16(getData(valueType.getValueType(),valueType.getValueList().get(26),scheme));
dto.setV17(getData(valueType.getValueType(),valueType.getValueList().get(27),scheme));
dto.setV18(getData(valueType.getValueType(),valueType.getValueList().get(28),scheme));
dto.setV19(getData(valueType.getValueType(),valueType.getValueList().get(29),scheme));
dto.setV20(getData(valueType.getValueType(),valueType.getValueList().get(30),scheme));
dto.setV11(getData(type,valueType.getValueList().get(21),scheme));
dto.setV12(getData(type,valueType.getValueList().get(22),scheme));
dto.setV13(getData(type,valueType.getValueList().get(23),scheme));
dto.setV14(getData(type,valueType.getValueList().get(24),scheme));
dto.setV15(getData(type,valueType.getValueList().get(25),scheme));
dto.setV16(getData(type,valueType.getValueList().get(26),scheme));
dto.setV17(getData(type,valueType.getValueList().get(27),scheme));
dto.setV18(getData(type,valueType.getValueList().get(28),scheme));
dto.setV19(getData(type,valueType.getValueList().get(29),scheme));
dto.setV20(getData(type,valueType.getValueList().get(30),scheme));
dto.setV21(getData(valueType.getValueType(),valueType.getValueList().get(31),scheme));
dto.setV22(getData(valueType.getValueType(),valueType.getValueList().get(32),scheme));
dto.setV23(getData(valueType.getValueType(),valueType.getValueList().get(33),scheme));
dto.setV24(getData(valueType.getValueType(),valueType.getValueList().get(34),scheme));
dto.setV25(getData(valueType.getValueType(),valueType.getValueList().get(35),scheme));
dto.setV26(getData(valueType.getValueType(),valueType.getValueList().get(36),scheme));
dto.setV27(getData(valueType.getValueType(),valueType.getValueList().get(37),scheme));
dto.setV28(getData(valueType.getValueType(),valueType.getValueList().get(38),scheme));
dto.setV29(getData(valueType.getValueType(),valueType.getValueList().get(39),scheme));
dto.setV30(getData(valueType.getValueType(),valueType.getValueList().get(40),scheme));
dto.setV21(getData(type,valueType.getValueList().get(31),scheme));
dto.setV22(getData(type,valueType.getValueList().get(32),scheme));
dto.setV23(getData(type,valueType.getValueList().get(33),scheme));
dto.setV24(getData(type,valueType.getValueList().get(34),scheme));
dto.setV25(getData(type,valueType.getValueList().get(35),scheme));
dto.setV26(getData(type,valueType.getValueList().get(36),scheme));
dto.setV27(getData(type,valueType.getValueList().get(37),scheme));
dto.setV28(getData(type,valueType.getValueList().get(38),scheme));
dto.setV29(getData(type,valueType.getValueList().get(39),scheme));
dto.setV30(getData(type,valueType.getValueList().get(40),scheme));
dto.setV31(getData(valueType.getValueType(),valueType.getValueList().get(41),scheme));
dto.setV32(getData(valueType.getValueType(),valueType.getValueList().get(42),scheme));
dto.setV33(getData(valueType.getValueType(),valueType.getValueList().get(43),scheme));
dto.setV34(getData(valueType.getValueType(),valueType.getValueList().get(44),scheme));
dto.setV35(getData(valueType.getValueType(),valueType.getValueList().get(45),scheme));
dto.setV36(getData(valueType.getValueType(),valueType.getValueList().get(46),scheme));
dto.setV37(getData(valueType.getValueType(),valueType.getValueList().get(47),scheme));
dto.setV38(getData(valueType.getValueType(),valueType.getValueList().get(48),scheme));
dto.setV39(getData(valueType.getValueType(),valueType.getValueList().get(49),scheme));
dto.setV40(getData(valueType.getValueType(),valueType.getValueList().get(50),scheme));
dto.setV31(getData(type,valueType.getValueList().get(41),scheme));
dto.setV32(getData(type,valueType.getValueList().get(42),scheme));
dto.setV33(getData(type,valueType.getValueList().get(43),scheme));
dto.setV34(getData(type,valueType.getValueList().get(44),scheme));
dto.setV35(getData(type,valueType.getValueList().get(45),scheme));
dto.setV36(getData(type,valueType.getValueList().get(46),scheme));
dto.setV37(getData(type,valueType.getValueList().get(47),scheme));
dto.setV38(getData(type,valueType.getValueList().get(48),scheme));
dto.setV39(getData(type,valueType.getValueList().get(49),scheme));
dto.setV40(getData(type,valueType.getValueList().get(50),scheme));
dto.setV41(getData(valueType.getValueType(),valueType.getValueList().get(51),scheme));
dto.setV42(getData(valueType.getValueType(),valueType.getValueList().get(52),scheme));
dto.setV43(getData(valueType.getValueType(),valueType.getValueList().get(53),scheme));
dto.setV44(getData(valueType.getValueType(),valueType.getValueList().get(54),scheme));
dto.setV45(getData(valueType.getValueType(),valueType.getValueList().get(55),scheme));
dto.setV46(getData(valueType.getValueType(),valueType.getValueList().get(56),scheme));
dto.setV47(getData(valueType.getValueType(),valueType.getValueList().get(57),scheme));
dto.setV48(getData(valueType.getValueType(),valueType.getValueList().get(58),scheme));
dto.setV49(getData(valueType.getValueType(),valueType.getValueList().get(59),scheme));
dto.setV50(getData(valueType.getValueType(),valueType.getValueList().get(60),scheme));
dto.setV41(getData(type,valueType.getValueList().get(51),scheme));
dto.setV42(getData(type,valueType.getValueList().get(52),scheme));
dto.setV43(getData(type,valueType.getValueList().get(53),scheme));
dto.setV44(getData(type,valueType.getValueList().get(54),scheme));
dto.setV45(getData(type,valueType.getValueList().get(55),scheme));
dto.setV46(getData(type,valueType.getValueList().get(56),scheme));
dto.setV47(getData(type,valueType.getValueList().get(57),scheme));
dto.setV48(getData(type,valueType.getValueList().get(58),scheme));
dto.setV49(getData(type,valueType.getValueList().get(59),scheme));
dto.setV50(getData(type,valueType.getValueList().get(60),scheme));
}
// Value-type handling
// CP95 is computed slightly differently: it uses either the CP95 value set or the average value set
public Double getData(String valueType, List<Double> list, boolean scheme) {

View File

@@ -2,9 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataFlickerFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -24,4 +25,6 @@ public interface DataFlickerFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataFlickerDTO> dataFlickerDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataFlickerDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -2,9 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataFlucFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -24,4 +25,6 @@ public interface DataFlucFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataFlucDTO> dataFlucDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataFlucDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -0,0 +1,23 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataHarmRateVFeignClientFallbackFactory;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.List;
/**
* @author xy
*/
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/dataHarmRateV", fallbackFactory = DataHarmRateVFeignClientFallbackFactory.class, contextId = "dataHarmRateV")
public interface DataHarmRateVFeignClient {
@PostMapping("/getRawData")
HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -2,9 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataIFeignClientFallbackFactory;
import com.njcn.dataProcess.api.fallback.DataHarmphasicVFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -16,7 +17,7 @@ import java.util.List;
* @version 1.0.0
* @date 2022年01月05日 15:11
*/
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/dataHarmphasicV", fallbackFactory = DataIFeignClientFallbackFactory.class, contextId = "dataHarmphasicV")
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/dataHarmphasicV", fallbackFactory = DataHarmphasicVFeignClientFallbackFactory.class, contextId = "dataHarmphasicV")
public interface DataHarmphasicVFeignClient {
@@ -24,4 +25,7 @@ public interface DataHarmphasicVFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataHarmphasicVDTO> dataHarmphasicVDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -2,9 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataHarmpowerPFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -19,9 +20,9 @@ import java.util.List;
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/dataHarmpowerP", fallbackFactory = DataHarmpowerPFeignClientFallbackFactory.class, contextId = "dataHarmpowerP")
public interface DataHarmpowerPFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataHarmpowerPDTO> dataHarmpowerPDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataPowerPDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -2,9 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataIFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -19,9 +20,10 @@ import java.util.List;
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/dataI", fallbackFactory = DataIFeignClientFallbackFactory.class, contextId = "dataI")
public interface DataIFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataIDTO> dataIDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -5,6 +5,8 @@ import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataInharmVFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -24,4 +26,7 @@ public interface DataInharmVFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataInharmVDTO> dataInharmVDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -2,9 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataPltFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -24,4 +25,7 @@ public interface DataPltFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataPltDTO> dataPltDTOList);
@PostMapping("/getRawData")
HttpResult<List<DataPltDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -0,0 +1,34 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.PqDataVerifyFeignClientFallbackFactory;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.List;
/**
* @author xy
* @version 1.0.0
* @date 2025年02月13日 20:11
*/
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/pqDataVerify", fallbackFactory = PqDataVerifyFeignClientFallbackFactory.class, contextId = "pqDataVerify")
public interface PqDataVerifyFeignClient {
@PostMapping("/insertData")
@ApiOperation("存储清洗的异常数据")
HttpResult<List<PqDataVerify>> insertData(@RequestBody List<PqDataVerify> list);
@PostMapping("/queryData")
@ApiOperation("查询清洗的异常数据")
HttpResult<List<PqDataVerify>> queryData(@RequestBody LineCountEvaluateParam param);
}

View File

@@ -0,0 +1,27 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.PqReasonableRangeFeignClientFallbackFactory;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.List;
/**
* @author xy
* @version 1.0.0
* @date 2025年02月13日 20:11
*/
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/pqReasonableRange", fallbackFactory = PqReasonableRangeFeignClientFallbackFactory.class, contextId = "pqReasonableRange")
public interface PqReasonableRangeFeignClient {
@PostMapping("/getData")
@ApiOperation("按条件获取数据合理范围")
HttpResult<List<PqReasonableRangeDto>> getData(@RequestBody DataCleanParam param);
}
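
For orientation, a minimal sketch of how a cleaning step might consume this client. Only the Feign interface, DataCleanParam and PqReasonableRangeDto come from this commit; the sample class, its wiring, and the assumption that HttpResult exposes a getData() accessor are illustrative.

import com.njcn.dataProcess.api.PqReasonableRangeFeignClient;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

// Hypothetical helper, not part of the commit.
public class ReasonableRangeLookupSample {

    private final PqReasonableRangeFeignClient rangeClient;

    public ReasonableRangeLookupSample(PqReasonableRangeFeignClient rangeClient) {
        this.rangeClient = rangeClient;
    }

    /**
     * Loads the configured ranges for one table and indexes them by influxdb column name,
     * so a cleaning pass can look up the [min, max] bounds of each field in O(1).
     */
    public Map<String, PqReasonableRangeDto> loadRanges(String systemType, String dataSource, String tableName) {
        DataCleanParam param = new DataCleanParam();
        param.setSystemType(systemType);
        param.setDataSource(dataSource);
        param.setTableName(tableName);
        // Assumes HttpResult exposes a getData() accessor for its payload.
        List<PqReasonableRangeDto> ranges = rangeClient.getData(param).getData();
        return ranges.stream()
                .collect(Collectors.toMap(PqReasonableRangeDto::getInfluxdbColumnName, Function.identity(), (a, b) -> a));
    }
}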

View File

@@ -6,6 +6,8 @@ import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataFlickerFeignClient;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
@@ -44,6 +46,12 @@ public class DataFlickerFeignClientFallbackFactory implements FallbackFactory<Da
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataFlickerDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -3,9 +3,10 @@ package com.njcn.dataProcess.api.fallback;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataFlucFeignClient;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
@@ -40,10 +41,17 @@ public class DataFlucFeignClientFallbackFactory implements FallbackFactory<DataF
@Override
public HttpResult<String> batchInsertion(List<DataFlucDTO> dataFlucDTOList) {
log.error("{}异常,降级处理,异常为:{}","批量插入数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataFlucDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.api.fallback;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataHarmRateVFeignClient;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author xy
*/
@Slf4j
@Component
public class DataHarmRateVFeignClientFallbackFactory implements FallbackFactory<DataHarmRateVFeignClient> {
/**
* 输出远程请求接口异常日志
* @param cause RPC请求异常
*/
@Override
public DataHarmRateVFeignClient create(Throwable cause) {
//判断抛出异常是否为解码器抛出的业务异常
Enum<?> exceptionEnum = CommonResponseEnum.SERVICE_FALLBACK;
if(cause.getCause() instanceof BusinessException){
BusinessException businessException = (BusinessException) cause.getCause();
exceptionEnum = DataProcessingEnumUtil.getExceptionEnum(businessException.getResult());
}
Enum<?> finalExceptionEnum = exceptionEnum;
return new DataHarmRateVFeignClient() {
@Override
public HttpResult<List<DataHarmDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -0,0 +1,56 @@
package com.njcn.dataProcess.api.fallback;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataHarmphasicVFeignClient;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author denghuajun
* @version 1.0.0
* @date 2022年01月05日 15:08
*/
@Slf4j
@Component
public class DataHarmphasicVFeignClientFallbackFactory implements FallbackFactory<DataHarmphasicVFeignClient> {
/**
* 输出远程请求接口异常日志
* @param cause RPC请求异常
*/
@Override
public DataHarmphasicVFeignClient create(Throwable cause) {
//判断抛出异常是否为解码器抛出的业务异常
Enum<?> exceptionEnum = CommonResponseEnum.SERVICE_FALLBACK;
if(cause.getCause() instanceof BusinessException){
BusinessException businessException = (BusinessException) cause.getCause();
exceptionEnum = DataProcessingEnumUtil.getExceptionEnum(businessException.getResult());
}
Enum<?> finalExceptionEnum = exceptionEnum;
return new DataHarmphasicVFeignClient() {
@Override
public HttpResult<String> batchInsertion(List<DataHarmphasicVDTO> dataHarmphasicVDTOList) {
log.error("{}异常,降级处理,异常为:{}","批量插入数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataHarmDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -5,6 +5,8 @@ import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataHarmpowerPFeignClient;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
@@ -43,6 +45,12 @@ public class DataHarmpowerPFeignClientFallbackFactory implements FallbackFactory
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataPowerPDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -5,6 +5,8 @@ import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataIFeignClient;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import com.njcn.system.utils.SystemEnumUtil;
import feign.hystrix.FallbackFactory;
@@ -44,6 +46,12 @@ public class DataIFeignClientFallbackFactory implements FallbackFactory<DataIFei
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataIDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -5,8 +5,9 @@ import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataInharmVFeignClient;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import com.njcn.system.utils.SystemEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -44,6 +45,12 @@ public class DataInharmVFeignClientFallbackFactory implements FallbackFactory<Da
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataHarmDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -5,8 +5,9 @@ import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataPltFeignClient;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import com.njcn.system.utils.SystemEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
@@ -44,6 +45,12 @@ public class DataPltFeignClientFallbackFactory implements FallbackFactory<DataPl
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataPltDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -0,0 +1,53 @@
package com.njcn.dataProcess.api.fallback;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.PqDataVerifyFeignClient;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author xy
* @version 1.0.0
* @date 2025年02月13日 20:13
*/
@Slf4j
@Component
public class PqDataVerifyFeignClientFallbackFactory implements FallbackFactory<PqDataVerifyFeignClient> {
/**
* 输出远程请求接口异常日志
* @param cause RPC请求异常
*/
@Override
public PqDataVerifyFeignClient create(Throwable cause) {
//判断抛出异常是否为解码器抛出的业务异常
Enum<?> exceptionEnum = CommonResponseEnum.SERVICE_FALLBACK;
if(cause.getCause() instanceof BusinessException){
BusinessException businessException = (BusinessException) cause.getCause();
exceptionEnum = DataProcessingEnumUtil.getExceptionEnum(businessException.getResult());
}
Enum<?> finalExceptionEnum = exceptionEnum;
return new PqDataVerifyFeignClient() {
@Override
public HttpResult<List<PqDataVerify>> insertData(List<PqDataVerify> list) {
log.error("{}异常,降级处理,异常为:{}","存储清洗的异常数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<PqDataVerify>> queryData(LineCountEvaluateParam param) {
log.error("{}异常,降级处理,异常为:{}","查询清洗的异常数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -0,0 +1,47 @@
package com.njcn.dataProcess.api.fallback;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.PqReasonableRangeFeignClient;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.util.DataProcessingEnumUtil;
import feign.hystrix.FallbackFactory;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* @author xy
* @version 1.0.0
* @date 2025年02月13日 20:13
*/
@Slf4j
@Component
public class PqReasonableRangeFeignClientFallbackFactory implements FallbackFactory<PqReasonableRangeFeignClient> {
/**
* 输出远程请求接口异常日志
* @param cause RPC请求异常
*/
@Override
public PqReasonableRangeFeignClient create(Throwable cause) {
//判断抛出异常是否为解码器抛出的业务异常
Enum<?> exceptionEnum = CommonResponseEnum.SERVICE_FALLBACK;
if(cause.getCause() instanceof BusinessException){
BusinessException businessException = (BusinessException) cause.getCause();
exceptionEnum = DataProcessingEnumUtil.getExceptionEnum(businessException.getResult());
}
Enum<?> finalExceptionEnum = exceptionEnum;
return new PqReasonableRangeFeignClient() {
@Override
public HttpResult<List<PqReasonableRangeDto>> getData(DataCleanParam param) {
log.error("{}异常,降级处理,异常为:{}","按条件获取数据合理范围",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -0,0 +1,94 @@
package com.njcn.dataProcess.enums;
import lombok.Getter;
/**
* 数据清洗枚举值
*
* @author xy
* @version 1.0
* @date 2025/2/13 15:08
*/
@Getter
public enum DataCleanEnum {
/**
* 所属系统
*/
Pqs("pqs","数据中心"),
Govern("govern","治理"),
/**
* 数据来源
*/
InfluxDB("InfluxDB","时序数据库"),
MySql("MySql","关系型数据库"),
/**
* 数据表名
*/
DataV("DataV","电压表"),
DataHarmPhasicV("DataHarmPhasicV","谐波电压相角表"),
DataI("DataI","电路表"),
DataFlicker("DataFlicker","短闪表"),
DataPlt("DataPlt","长闪表"),
DataFluc("DataFluc","电压波动"),
DataInHarmV("DataInHarmV","间谐波电压含有率表"),
DataHarmRateV("DataHarmRateV","谐波电压含有率表"),
DataHarmPowerP("DataHarmPowerP","功率因数表"),
RMpEventDetail("r_mp_event_detail","暂降表"),
/**
* 指标名称
*/
//DataFlicker
Pst("pst","短时闪变"),
//DataFluc
Fluc("fluc","电压波动"),
//DataHarmPhasicV
V("v","次谐波电压基波相角"),
//DataHarmRateV
V_Rate("v","次谐波电压含有率"),
//DataHarmPowerP
Pf("pf","视在功率因素"),
//DataI
RmsI("rms","电流有效值"),
//DataInHarmV
V_InHarm("v","次间谐波电压含有率"),
//DataPlt
Plt("plt","长时闪变"),
//DataV
FreqDev("freq_dev","频率偏差"),
Freq("freq","频率"),
RmsV("rms","相电压有效值"),
VPos("v_pos","正序电压"),
VNeg("v_neg","负序电压"),
VZero("v_zero","零序电压"),
VUnbalance("v_unbalance","电压不平衡度"),
RmsLvr("rms_lvr","线电压有效值"),
VuDev("vu_dev","线电压正偏差"),
VlDev("vl_dev","线电压负偏差"),
VThd("v_thd","电压总谐波畸变率"),
V_Data("v_1","相(线)电压基波有效值"),
//r_mp_event_detail
VoltageDip("Voltage_Dip","电压暂降"),
VoltageRise("Voltage_Rise","电压暂升");
private final String code;
private final String desc;
DataCleanEnum(String code, String desc) {
this.code = code;
this.desc = desc;
}
}
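
An illustrative helper (not part of this commit) showing one way the code values above could be resolved back to constants; it relies only on the getCode() accessor generated by @Getter. Note that several indicator constants deliberately share the code "v", so a lookup by code alone is only unambiguous within one table.

import com.njcn.dataProcess.enums.DataCleanEnum;
import java.util.Arrays;
import java.util.Optional;

// Hypothetical utility, for illustration only.
public final class DataCleanEnumLookup {

    private DataCleanEnumLookup() {
    }

    /** Returns the first constant whose code matches the given string, if any. */
    public static Optional<DataCleanEnum> byCode(String code) {
        return Arrays.stream(DataCleanEnum.values())
                .filter(e -> e.getCode().equals(code))
                .findFirst();
    }
}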

View File

@@ -0,0 +1,23 @@
package com.njcn.dataProcess.param;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataCleanParam implements Serializable {
@ApiModelProperty("系统类型")
private String systemType;
@ApiModelProperty("数据来源")
private String dataSource;
@ApiModelProperty("表名")
private String tableName;
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.param;
import lombok.Data;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Map;
/**
* 监测点有效数值统计数据评估入参
@@ -19,4 +21,15 @@ public class LineCountEvaluateParam extends BaseParam implements Serializable {
*/
private List<String> lineId;
/**
* 表名
*/
private String tableName;
/**
* 异常数据时间集合
* Map<String,List<String>> key:监测点id value:异常时间集合
*/
private Map<String,List<String>> abnormalTime;
}
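
A sketch of how the new fields might be populated when re-querying around flagged rows. The line id and timestamps are made up, and the setters are assumed to be the usual Lombok-generated ones, as in the other parameter classes.

import com.njcn.dataProcess.param.LineCountEvaluateParam;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical builder, for illustration only.
public final class LineCountEvaluateParamSample {

    private LineCountEvaluateParamSample() {
    }

    public static LineCountEvaluateParam build() {
        Map<String, List<String>> abnormalTime = new HashMap<>();
        // key: monitoring point id, value: timestamps of the abnormal records (both hypothetical)
        abnormalTime.put("line-0001", Arrays.asList("2025-02-13 20:11:00", "2025-02-13 20:12:00"));
        LineCountEvaluateParam param = new LineCountEvaluateParam();
        param.setLineId(Arrays.asList("line-0001"));
        param.setTableName("DataV");
        param.setAbnormalTime(abnormalTime);
        return param;
    }
}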

View File

@@ -31,7 +31,7 @@ public class DataFlicker {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "fluc")
private Double fluc=0.00;
@@ -56,7 +56,7 @@ public class DataFlicker {
influxDBDataFlicker.setTime(instant);
influxDBDataFlicker.setLineId(dataFlicker.getLineid());
influxDBDataFlicker.setPhaseType(dataFlicker.getPhasicType());
influxDBDataFlicker.setPhasicType(dataFlicker.getPhasicType());
influxDBDataFlicker.setFluc(Objects.isNull(dataFlicker.getFluc())?0.00:dataFlicker.getFluc());
influxDBDataFlicker.setPlt(Objects.isNull(dataFlicker.getPst())?0.00:dataFlicker.getPst());
influxDBDataFlicker.setPst(Objects.isNull(dataFlicker.getPlt())?0.00:dataFlicker.getPlt());

View File

@@ -31,7 +31,7 @@ public class DataFluc {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "fluc")
private Double fluc=0.00;
@@ -53,7 +53,7 @@ public class DataFluc {
influxDBDataFluc.setTime(instant);
influxDBDataFluc.setLineId(dataFluc.getLineid());
influxDBDataFluc.setPhaseType(dataFluc.getPhasicType());
influxDBDataFluc.setPhasicType(dataFluc.getPhasicType());
influxDBDataFluc.setFluc(Objects.isNull(dataFluc.getFluc())?0.00:dataFluc.getFluc());
influxDBDataFluc.setFluccf(Objects.isNull(dataFluc.getFluccf())?0.00:dataFluc.getFluccf());
influxDBDataFluc.setQualityFlag(dataFluc.getQualityflag()+"");

View File

@@ -35,7 +35,7 @@ public class DataHarmphasicI {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -206,7 +206,7 @@ public class DataHarmphasicI {
influxDBDataHarmPhasicI.setTime(instant);
influxDBDataHarmPhasicI.setLineId(dataHarmphasicI.getLineid());
influxDBDataHarmPhasicI.setPhaseType(dataHarmphasicI.getPhasicType());
influxDBDataHarmPhasicI.setPhasicType(dataHarmphasicI.getPhasicType());
influxDBDataHarmPhasicI.setQualityFlag(dataHarmphasicI.getQualityflag()+"");
influxDBDataHarmPhasicI.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataHarmphasicV {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -206,7 +206,7 @@ public class DataHarmphasicV {
InfluxDBDataHarmphasicV.setTime(instant);
InfluxDBDataHarmphasicV.setLineId(dataHarmphasicV.getLineid());
InfluxDBDataHarmphasicV.setPhaseType(dataHarmphasicV.getPhasicType());
InfluxDBDataHarmphasicV.setPhasicType(dataHarmphasicV.getPhasicType());
InfluxDBDataHarmphasicV.setQualityFlag(dataHarmphasicV.getQualityflag()+"");
InfluxDBDataHarmphasicV.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataHarmpowerP {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -215,7 +215,7 @@ public class DataHarmpowerP {
influxDBDataHarmpowerP.setTime(instant);
influxDBDataHarmpowerP.setLineId(dataHarmpowerP.getLineid());
influxDBDataHarmpowerP.setPhaseType(dataHarmpowerP.getPhasicType());
influxDBDataHarmpowerP.setPhasicType(dataHarmpowerP.getPhasicType());
influxDBDataHarmpowerP.setQualityFlag(dataHarmpowerP.getQualityflag()+"");
influxDBDataHarmpowerP.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataHarmpowerQ {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -209,7 +209,7 @@ public class DataHarmpowerQ {
influxDBDataHarmpowerQ.setTime(instant);
influxDBDataHarmpowerQ.setLineId(dataHarmpowerQ.getLineid());
influxDBDataHarmpowerQ.setPhaseType(dataHarmpowerQ.getPhasicType());
influxDBDataHarmpowerQ.setPhasicType(dataHarmpowerQ.getPhasicType());
influxDBDataHarmpowerQ.setQualityFlag(dataHarmpowerQ.getQualityflag()+"");
influxDBDataHarmpowerQ.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataHarmpowerS {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -209,7 +209,7 @@ public class DataHarmpowerS {
influxDBDataHarmpowerS.setTime(instant);
influxDBDataHarmpowerS.setLineId(dataHarmpowerS.getLineid());
influxDBDataHarmpowerS.setPhaseType(dataHarmpowerS.getPhasicType());
influxDBDataHarmpowerS.setPhasicType(dataHarmpowerS.getPhasicType());
influxDBDataHarmpowerS.setQualityFlag(dataHarmpowerS.getQualityflag()+"");
influxDBDataHarmpowerS.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -35,7 +35,7 @@ public class DataHarmrateI {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -205,7 +205,7 @@ public class DataHarmrateI {
influxDBDataHarmRateI.setTime(instant);
influxDBDataHarmRateI.setLineId(dataHarmrateI.getLineid());
influxDBDataHarmRateI.setPhaseType(dataHarmrateI.getPhasicType());
influxDBDataHarmRateI.setPhasicType(dataHarmrateI.getPhasicType());
influxDBDataHarmRateI.setQualityFlag(dataHarmrateI.getQualityflag()+"");
influxDBDataHarmRateI.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -35,7 +35,7 @@ public class DataHarmrateV {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -205,7 +205,7 @@ public class DataHarmrateV {
influxDBDataHarmRateV.setTime(instant);
influxDBDataHarmRateV.setLineId(dataHarmrateV.getLineid());
influxDBDataHarmRateV.setPhaseType(dataHarmrateV.getPhasicType());
influxDBDataHarmRateV.setPhasicType(dataHarmrateV.getPhasicType());
influxDBDataHarmRateV.setQualityFlag(dataHarmrateV.getQualityflag()+"");
influxDBDataHarmRateV.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataI {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -224,7 +224,7 @@ public class DataI {
influxDBDataI.setTime(instant);
influxDBDataI.setLineId(dataI.getLineid());
influxDBDataI.setPhaseType(dataI.getPhasicType());
influxDBDataI.setPhasicType(dataI.getPhasicType());
influxDBDataI.setQualityFlag(dataI.getQualityflag()+"");
influxDBDataI.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataInharmI {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -207,7 +207,7 @@ public class DataInharmI {
influxDBDataInHarmI.setTime(instant);
influxDBDataInHarmI.setLineId(dataInharmI.getLineid());
influxDBDataInHarmI.setPhaseType(dataInharmI.getPhasicType());
influxDBDataInHarmI.setPhasicType(dataInharmI.getPhasicType());
influxDBDataInHarmI.setQualityFlag(dataInharmI.getQualityflag()+"");
influxDBDataInHarmI.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -36,7 +36,7 @@ public class DataInharmV {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -206,7 +206,7 @@ public class DataInharmV {
influxDBDataInHarmV.setTime(instant);
influxDBDataInHarmV.setLineId(dataInharmV.getLineid());
influxDBDataInHarmV.setPhaseType(dataInharmV.getPhasicType());
influxDBDataInHarmV.setPhasicType(dataInharmV.getPhasicType());
influxDBDataInHarmV.setQualityFlag(dataInharmV.getQualityflag()+"");
influxDBDataInHarmV.setValueType(valueType);
if (valueType.equals("AVG")) {

View File

@@ -32,7 +32,7 @@ public class DataPlt {
private String lineId;
@Column(name = "phasic_type",tag = true)
private String phaseType;
private String phasicType;
@Column(name = "quality_flag",tag = true)
private String qualityFlag;
@@ -50,7 +50,7 @@ public class DataPlt {
influxDBDataPlt.setTime(instant);
influxDBDataPlt.setLineId(dataPlt.getLineid());
influxDBDataPlt.setPhaseType(dataPlt.getPhasicType());
influxDBDataPlt.setPhasicType(dataPlt.getPhasicType());
influxDBDataPlt.setPlt(Objects.isNull(dataPlt.getPlt())?0.00:dataPlt.getPlt());
influxDBDataPlt.setQualityFlag(dataPlt.getQualityflag()+"");

View File

@@ -20,6 +20,9 @@ public class CommonMinuteDto implements Serializable {
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("数据质量 0:正常 1:异常")
private String qualityFlag;
private List<PhasicType> phasicTypeList;
@Data

View File

@@ -0,0 +1,45 @@
package com.njcn.dataProcess.pojo.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataFlickerDto implements Serializable {
@JsonFormat(pattern = "yyyy-MM-dd")
private String time;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private String minTime;
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("相别,'A'表示A相'B'表示B相,'C'表示C相,'T'表示总, 'M'表示无相别")
private String phasicType;
@ApiModelProperty("数据类型 最大值max、最小值min、平均值avg、95值cp95")
private String valueType;
@ApiModelProperty("数据质量标志0-表示是正常数据、1-表示是错误数据、2-表示是有事件的数据数据库默认是0污染数据不参与报表统计")
private String qualityFlag;
@ApiModelProperty("数据清洗标识 0:正常 1:异常")
private Integer abnormalFlag;
@ApiModelProperty("电压波动")
private Double fluc;
@ApiModelProperty("短时闪变")
private Double pst;
@ApiModelProperty("长时闪变")
private Double plt;
}

View File

@@ -0,0 +1,42 @@
package com.njcn.dataProcess.pojo.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataFlucDto implements Serializable {
@JsonFormat(pattern = "yyyy-MM-dd")
private String time;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private String minTime;
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("相别,'A'表示A相'B'表示B相,'C'表示C相,'T'表示总, 'M'表示无相别")
private String phasicType;
@ApiModelProperty("数据类型 最大值max、最小值min、平均值avg、95值cp95")
private String valueType;
@ApiModelProperty("数据质量标志0-表示是正常数据、1-表示是错误数据、2-表示是有事件的数据数据库默认是0污染数据不参与报表统计")
private String qualityFlag;
@ApiModelProperty("数据清洗标识 0:正常 1:异常")
private Integer abnormalFlag;
@ApiModelProperty("电压波动")
private Double fluc;
@ApiModelProperty("电压波动频度")
private Double fluccf;
}

View File

@@ -0,0 +1,145 @@
package com.njcn.dataProcess.pojo.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataHarmDto implements Serializable {
@JsonFormat(pattern = "yyyy-MM-dd")
private String time;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private String minTime;
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("相别,'A'表示A相'B'表示B相,'C'表示C相,'T'表示总, 'M'表示无相别")
private String phasicType;
@ApiModelProperty("数据类型 最大值max、最小值min、平均值avg、95值cp95")
private String valueType;
@ApiModelProperty("数据质量标志0-表示是正常数据、1-表示是错误数据、2-表示是有事件的数据数据库默认是0污染数据不参与报表统计")
private String qualityFlag;
@ApiModelProperty("数据清洗标识 0:正常 1:异常")
private Integer abnormalFlag;
@ApiModelProperty("0.5次间谐波电压含有率")
private Double v1;
@ApiModelProperty("1.5次间谐波电压含有率(2次谐波电压含有率)")
private Double v2;
@ApiModelProperty("2.5次间谐波电压含有率(3次谐波电压含有率)")
private Double v3;
@ApiModelProperty("3.5次间谐波电压含有率(4次谐波电压含有率)")
private Double v4;
@ApiModelProperty("4.5次间谐波电压含有率(5次谐波电压含有率)")
private Double v5;
@ApiModelProperty("5.5次间谐波电压含有率(6次谐波电压含有率)")
private Double v6;
@ApiModelProperty("6.5次间谐波电压含有率(7次谐波电压含有率)")
private Double v7;
@ApiModelProperty("7.5次间谐波电压含有率(8次谐波电压含有率)")
private Double v8;
private Double v9;
private Double v10;
private Double v11;
private Double v12;
private Double v13;
private Double v14;
private Double v15;
private Double v16;
private Double v17;
private Double v18;
private Double v19;
private Double v20;
private Double v21;
private Double v22;
private Double v23;
private Double v24;
private Double v25;
private Double v26;
private Double v27;
private Double v28;
private Double v29;
private Double v30;
private Double v31;
private Double v32;
private Double v33;
private Double v34;
private Double v35;
private Double v36;
private Double v37;
private Double v38;
private Double v39;
private Double v40;
private Double v41;
private Double v42;
private Double v43;
private Double v44;
private Double v45;
private Double v46;
private Double v47;
private Double v48;
private Double v49;
@ApiModelProperty("49.5次间谐波电压含有率(50次谐波电压含有率)")
private Double v50;
}
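
Since PqReasonableRangeDto carries harmStart/harmEnd together with a column-name prefix, one plausible way for a cleaning pass to sweep the v1..v50 fields of this DTO is reflection. The helper below is only a sketch under that assumption; the commit does not prescribe this approach.

import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.List;

// Hypothetical helper, for illustration only.
public final class HarmRangeCheckSample {

    private HarmRangeCheckSample() {
    }

    /** Collects the harmonic orders whose value lies outside the [min, max] bounds. */
    public static List<Integer> outOfRangeOrders(DataHarmDto dto, int harmStart, int harmEnd,
                                                 double min, double max) throws ReflectiveOperationException {
        List<Integer> abnormal = new ArrayList<>();
        for (int order = harmStart; order <= harmEnd; order++) {
            Field field = DataHarmDto.class.getDeclaredField("v" + order);
            field.setAccessible(true);
            Double value = (Double) field.get(dto);
            if (value != null && (value < min || value > max)) {
                abnormal.add(order);
            }
        }
        return abnormal;
    }
}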

View File

@@ -0,0 +1,163 @@
package com.njcn.dataProcess.pojo.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataIDto implements Serializable {
@JsonFormat(pattern = "yyyy-MM-dd")
private String time;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private String minTime;
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("相别,'A'表示A相'B'表示B相,'C'表示C相,'T'表示总, 'M'表示无相别")
private String phasicType;
@ApiModelProperty("数据类型 最大值max、最小值min、平均值avg、95值cp95")
private String valueType;
@ApiModelProperty("数据质量标志0-表示是正常数据、1-表示是错误数据、2-表示是有事件的数据数据库默认是0污染数据不参与报表统计")
private String qualityFlag;
@ApiModelProperty("数据清洗标识 0:正常 1:异常")
private Integer abnormalFlag;
@ApiModelProperty("负序电流")
private Double iNeg;
@ApiModelProperty("正序电流")
private Double iPos;
@ApiModelProperty("电流总谐波畸变率")
private Double iThd;
@ApiModelProperty("三相电流不平衡度")
private Double iUnbalance;
@ApiModelProperty("零序电流")
private Double iZero;
@ApiModelProperty("电流有效值")
private Double rms;
@ApiModelProperty("基波电流幅值")
private Double i1;
@ApiModelProperty("2次谐波电流幅值")
private Double i2;
@ApiModelProperty("3次谐波电流幅值")
private Double i3;
@ApiModelProperty("4次谐波电流幅值")
private Double i4;
@ApiModelProperty("5次谐波电流幅值")
private Double i5;
@ApiModelProperty("6次谐波电流幅值")
private Double i6;
@ApiModelProperty("7次谐波电流幅值")
private Double i7;
@ApiModelProperty("8次谐波电流幅值")
private Double i8;
private Double i9;
private Double i10;
private Double i11;
private Double i12;
private Double i13;
private Double i14;
private Double i15;
private Double i16;
private Double i17;
private Double i18;
private Double i19;
private Double i20;
private Double i21;
private Double i22;
private Double i23;
private Double i24;
private Double i25;
private Double i26;
private Double i27;
private Double i28;
private Double i29;
private Double i30;
private Double i31;
private Double i32;
private Double i33;
private Double i34;
private Double i35;
private Double i36;
private Double i37;
private Double i38;
private Double i39;
private Double i40;
private Double i41;
private Double i42;
private Double i43;
private Double i44;
private Double i45;
private Double i46;
private Double i47;
private Double i48;
private Double i49;
@ApiModelProperty("50次谐波电流幅值")
private Double i50;
}

View File

@@ -0,0 +1,39 @@
package com.njcn.dataProcess.pojo.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataPltDto implements Serializable {
@JsonFormat(pattern = "yyyy-MM-dd")
private String time;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private String minTime;
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("相别,'A'表示A相'B'表示B相,'C'表示C相,'T'表示总, 'M'表示无相别")
private String phasicType;
@ApiModelProperty("数据类型 最大值max、最小值min、平均值avg、95值cp95")
private String valueType;
@ApiModelProperty("数据质量标志0-表示是正常数据、1-表示是错误数据、2-表示是有事件的数据数据库默认是0污染数据不参与报表统计")
private String qualityFlag;
@ApiModelProperty("数据清洗标识 0:正常 1:异常")
private Integer abnormalFlag;
@ApiModelProperty("长时闪变值")
private Double plt;
}

View File

@@ -0,0 +1,154 @@
package com.njcn.dataProcess.pojo.dto;
import com.fasterxml.jackson.annotation.JsonFormat;
import io.swagger.annotations.ApiModelProperty;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class DataPowerPDto implements Serializable {
@JsonFormat(pattern = "yyyy-MM-dd")
private String time;
@JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
private String minTime;
@ApiModelProperty("监测点Id")
private String lineId;
@ApiModelProperty("相别,'A'表示A相'B'表示B相,'C'表示C相,'T'表示总, 'M'表示无相别")
private String phasicType;
@ApiModelProperty("数据类型 最大值max、最小值min、平均值avg、95值cp95")
private String valueType;
@ApiModelProperty("数据质量标志0-表示是正常数据、1-表示是错误数据、2-表示是有事件的数据数据库默认是0污染数据不参与报表统计")
private String qualityFlag;
@ApiModelProperty("数据清洗标识 0:正常 1:异常")
private Integer abnormalFlag;
@ApiModelProperty("位移功率因数")
private Double df;
@ApiModelProperty("视在功率因素")
private Double pf;
@ApiModelProperty("总功功率")
private Double p;
@ApiModelProperty("基波有功功率")
private Double p1;
@ApiModelProperty("2次谐波有功功率")
private Double p2;
@ApiModelProperty("3次谐波有功功率")
private Double p3;
@ApiModelProperty("4次谐波有功功率")
private Double p4;
@ApiModelProperty("5次谐波有功功率")
private Double p5;
@ApiModelProperty("6次谐波有功功率")
private Double p6;
@ApiModelProperty("7次谐波有功功率")
private Double p7;
@ApiModelProperty("8次谐波有功功率")
private Double p8;
private Double p9;
private Double p10;
private Double p11;
private Double p12;
private Double p13;
private Double p14;
private Double p15;
private Double p16;
private Double p17;
private Double p18;
private Double p19;
private Double p20;
private Double p21;
private Double p22;
private Double p23;
private Double p24;
private Double p25;
private Double p26;
private Double p27;
private Double p28;
private Double p29;
private Double p30;
private Double p31;
private Double p32;
private Double p33;
private Double p34;
private Double p35;
private Double p36;
private Double p37;
private Double p38;
private Double p39;
private Double p40;
private Double p41;
private Double p42;
private Double p43;
private Double p44;
private Double p45;
private Double p46;
private Double p47;
private Double p48;
private Double p49;
@ApiModelProperty("50次谐波有功功率")
private Double p50;
}

View File

@@ -0,0 +1,100 @@
package com.njcn.dataProcess.pojo.dto;
import lombok.Data;
import java.io.Serializable;
/**
* @author xy
*/
@Data
public class PqReasonableRangeDto implements Serializable {
private static final long serialVersionUID = 1L;
/**
* 主键ID
*/
private String id;
/**
* 指标code
*/
private String indexCode;
/**
* 指标名称
*/
private String indexName;
/**
* 指标开始
*/
private Integer harmStart;
/**
* 指标结束
*/
private Integer harmEnd;
/**
* 指标所属相别
*/
private String phaseType;
/**
* 指标所属influxdb表的实体类
*/
private String influxdbTableName;
/**
* 指标所属类的属性
*/
private String influxdbColumnName;
/**
* 指标下限
*/
private Double minValue;
/**
* 指标上限
*/
private Double maxValue;
/**
* 电压等级是否参与计算(0:不参与,1:参与)
*/
private Integer isVoltage;
/**
* Ct变比是否参与计算
*/
private Integer ctAttendFlag;
/**
* 指标数据源:influxdb、oracle、mysql
*/
private String dataSource;
/**
* 无具体范围,判断的条件描述
*/
private String otherAlgorithm;
/**
* 备注
*/
private String remark;
/**
* 所属系统:pqs、govern,目前就这两个系统
*/
private String belongingSystem;
/**
* 状态(0:删除 1:正常)
*/
private Integer state;
}

View File

@@ -0,0 +1,96 @@
package com.njcn.dataProcess.pojo.po;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.github.jeffreyning.mybatisplus.anno.MppMultiId;
import lombok.Getter;
import lombok.Setter;
import java.io.Serializable;
import java.math.BigDecimal;
import java.time.LocalDateTime;
/**
* <p>
*
* </p>
*
* @author xy
* @since 2025-02-17
*/
@Getter
@Setter
@TableName("pq_data_verify")
public class PqDataVerify implements Serializable {
private static final long serialVersionUID = 1L;
/**
* 监测点id
*/
@MppMultiId
@TableField(value = "line_id")
private String lineId;
/**
* 异常数据时间
*/
@MppMultiId
@TableField(value = "time")
private LocalDateTime time;
/**
* 数据类型(最大值max、最小值min、平均值avg、95值cp95)
*/
@MppMultiId
@TableField(value = "value_type")
private String valueType;
/**
* 相别:"A" "B" "C" "ABC" "T"
*/
@MppMultiId
@TableField(value = "phasic_type")
private String phasicType;
/**
* 指标code
*/
@MppMultiId
@TableField(value = "index_code")
private String indexCode;
/**
* 指标名称
*/
@MppMultiId
@TableField(value = "index_name")
private String indexName;
/**
* 指标表名
*/
@MppMultiId
@TableField(value = "index_table")
private String indexTable;
/**
* 异常值
*/
@TableField(value = "abnormal_value")
private Double abnormalValue;
/**
* 指标下限
*/
@TableField(value = "min_value")
private Double minValue;
/**
* 指标上限
*/
@TableField(value = "max_value")
private Double maxValue;
}
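
A minimal sketch of turning a flagged measurement into one of these rows before it is pushed through PqDataVerifyFeignClient#insertData. Only the PqDataVerify entity and PqReasonableRangeDto come from the commit; the sample factory method and its arguments are illustrative.

import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import java.time.LocalDateTime;

// Hypothetical factory method, for illustration only.
public final class PqDataVerifySample {

    private PqDataVerifySample() {
    }

    public static PqDataVerify of(String lineId, LocalDateTime time, String valueType, String phasicType,
                                  PqReasonableRangeDto range, double abnormalValue) {
        PqDataVerify verify = new PqDataVerify();
        verify.setLineId(lineId);
        verify.setTime(time);
        verify.setValueType(valueType);
        verify.setPhasicType(phasicType);
        verify.setIndexCode(range.getIndexCode());
        verify.setIndexName(range.getIndexName());
        verify.setIndexTable(range.getInfluxdbTableName());
        verify.setAbnormalValue(abnormalValue);
        verify.setMinValue(range.getMinValue());
        verify.setMaxValue(range.getMaxValue());
        return verify;
    }
}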

View File

@@ -0,0 +1,110 @@
package com.njcn.dataProcess.pojo.po;
import com.baomidou.mybatisplus.annotation.TableName;
import com.njcn.db.bo.BaseEntity;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.io.Serializable;
/**
* <p>
*
* </p>
*
* @author xy
* @since 2025-02-13
*/
@Data
@EqualsAndHashCode(callSuper = true)
@TableName("pq_reasonable_range")
public class PqReasonableRange extends BaseEntity implements Serializable {
private static final long serialVersionUID = 1L;
/**
* 主键ID
*/
private String id;
/**
* 指标code
*/
private String indexCode;
/**
* 指标名称
*/
private String indexName;
/**
* 指标开始
*/
private Integer harmStart;
/**
* 指标结束
*/
private Integer harmEnd;
/**
* 指标所属相别
*/
private String phaseType;
/**
* 指标所属influxdb表的实体类
*/
private String influxdbTableName;
/**
* 指标所属类的属性
*/
private String influxdbColumnName;
/**
* 指标下限
*/
private Double minValue;
/**
* 指标上限
*/
private Double maxValue;
/**
* 电压等级是否参与计算(0:不参与,1:参与)
*/
private Integer isVoltage;
/**
* Ct变比是否参与计算
*/
private Integer ctAttendFlag;
/**
* 指标数据源:influxdb、oracle、mysql
*/
private String dataSource;
/**
* 无具体范围,判断的条件描述
*/
private String otherAlgorithm;
/**
* 备注
*/
private String remark;
/**
* 所属系统:pqs、govern,目前就这两个系统
*/
private String belongingSystem;
/**
* 状态(0:删除 1:正常)
*/
private Integer state;
}

View File

@@ -1,7 +1,6 @@
package com.njcn.dataProcess.pojo.po;
import com.baomidou.mybatisplus.annotation.TableField;
import com.baomidou.mybatisplus.annotation.TableName;
import com.baomidou.mybatisplus.annotation.*;
import com.github.jeffreyning.mybatisplus.anno.MppMultiId;
import lombok.Data;
@@ -37,187 +36,187 @@ public class RStatDataVD implements Serializable {
@TableField(value = "quality_flag")
private Integer qualityFlag;
@TableField(value = "freq")
@TableField(value = "freq",updateStrategy = FieldStrategy.IGNORED)
private Double freq;
@TableField(value = "freq_dev")
@TableField(value = "freq_dev",updateStrategy = FieldStrategy.IGNORED)
private Double freqDev;
@TableField(value = "rms")
@TableField(value = "rms",updateStrategy = FieldStrategy.IGNORED)
private Double rms;
@TableField(value = "rms_lvr")
@TableField(value = "rms_lvr",updateStrategy = FieldStrategy.IGNORED)
private Double rmsLvr;
@TableField(value = "v_neg")
@TableField(value = "v_neg",updateStrategy = FieldStrategy.IGNORED)
private Double vNeg;
@TableField(value = "v_pos")
@TableField(value = "v_pos",updateStrategy = FieldStrategy.IGNORED)
private Double vPos;
@TableField(value = "v_thd")
@TableField(value = "v_thd",updateStrategy = FieldStrategy.IGNORED)
private Double vThd;
@TableField(value = "v_unbalance")
@TableField(value = "v_unbalance",updateStrategy = FieldStrategy.IGNORED)
private Double vUnbalance;
@TableField(value = "v_zero")
@TableField(value = "v_zero",updateStrategy = FieldStrategy.IGNORED)
private Double vZero;
@TableField(value = "vl_dev")
@TableField(value = "vl_dev",updateStrategy = FieldStrategy.IGNORED)
private Double vlDev;
@TableField(value = "vu_dev")
@TableField(value = "vu_dev",updateStrategy = FieldStrategy.IGNORED)
private Double vuDev;
@TableField(value = "v_1")
@TableField(value = "v_1",updateStrategy = FieldStrategy.IGNORED)
private Double v1;
@TableField(value = "v_2")
@TableField(value = "v_2",updateStrategy = FieldStrategy.IGNORED)
private Double v2;
@TableField(value = "v_3")
@TableField(value = "v_3",updateStrategy = FieldStrategy.IGNORED)
private Double v3;
@TableField(value = "v_4")
@TableField(value = "v_4",updateStrategy = FieldStrategy.IGNORED)
private Double v4;
@TableField(value = "v_5")
@TableField(value = "v_5",updateStrategy = FieldStrategy.IGNORED)
private Double v5;
@TableField(value = "v_6")
@TableField(value = "v_6",updateStrategy = FieldStrategy.IGNORED)
private Double v6;
@TableField(value = "v_7")
@TableField(value = "v_7",updateStrategy = FieldStrategy.IGNORED)
private Double v7;
@TableField(value = "v_8")
@TableField(value = "v_8",updateStrategy = FieldStrategy.IGNORED)
private Double v8;
@TableField(value = "v_9")
@TableField(value = "v_9",updateStrategy = FieldStrategy.IGNORED)
private Double v9;
@TableField(value = "v_10")
@TableField(value = "v_10",updateStrategy = FieldStrategy.IGNORED)
private Double v10;
@TableField(value = "v_11")
@TableField(value = "v_11",updateStrategy = FieldStrategy.IGNORED)
private Double v11;
@TableField(value = "v_12")
@TableField(value = "v_12",updateStrategy = FieldStrategy.IGNORED)
private Double v12;
@TableField(value = "v_13")
@TableField(value = "v_13",updateStrategy = FieldStrategy.IGNORED)
private Double v13;
@TableField(value = "v_14")
@TableField(value = "v_14",updateStrategy = FieldStrategy.IGNORED)
private Double v14;
@TableField(value = "v_15")
@TableField(value = "v_15",updateStrategy = FieldStrategy.IGNORED)
private Double v15;
@TableField(value = "v_16")
@TableField(value = "v_16",updateStrategy = FieldStrategy.IGNORED)
private Double v16;
@TableField(value = "v_17")
@TableField(value = "v_17",updateStrategy = FieldStrategy.IGNORED)
private Double v17;
@TableField(value = "v_18")
@TableField(value = "v_18",updateStrategy = FieldStrategy.IGNORED)
private Double v18;
@TableField(value = "v_19")
@TableField(value = "v_19",updateStrategy = FieldStrategy.IGNORED)
private Double v19;
@TableField(value = "v_20")
@TableField(value = "v_20",updateStrategy = FieldStrategy.IGNORED)
private Double v20;
@TableField(value = "v_21")
@TableField(value = "v_21",updateStrategy = FieldStrategy.IGNORED)
private Double v21;
@TableField(value = "v_22")
@TableField(value = "v_22",updateStrategy = FieldStrategy.IGNORED)
private Double v22;
@TableField(value = "v_23")
@TableField(value = "v_23",updateStrategy = FieldStrategy.IGNORED)
private Double v23;
@TableField(value = "v_24")
@TableField(value = "v_24",updateStrategy = FieldStrategy.IGNORED)
private Double v24;
@TableField(value = "v_25")
@TableField(value = "v_25",updateStrategy = FieldStrategy.IGNORED)
private Double v25;
@TableField(value = "v_26")
@TableField(value = "v_26",updateStrategy = FieldStrategy.IGNORED)
private Double v26;
@TableField(value = "v_27")
@TableField(value = "v_27",updateStrategy = FieldStrategy.IGNORED)
private Double v27;
@TableField(value = "v_28")
@TableField(value = "v_28",updateStrategy = FieldStrategy.IGNORED)
private Double v28;
@TableField(value = "v_29")
@TableField(value = "v_29",updateStrategy = FieldStrategy.IGNORED)
private Double v29;
@TableField(value = "v_30")
@TableField(value = "v_30",updateStrategy = FieldStrategy.IGNORED)
private Double v30;
@TableField(value = "v_31")
@TableField(value = "v_31",updateStrategy = FieldStrategy.IGNORED)
private Double v31;
@TableField(value = "v_32")
@TableField(value = "v_32",updateStrategy = FieldStrategy.IGNORED)
private Double v32;
@TableField(value = "v_33")
@TableField(value = "v_33",updateStrategy = FieldStrategy.IGNORED)
private Double v33;
@TableField(value = "v_34")
@TableField(value = "v_34",updateStrategy = FieldStrategy.IGNORED)
private Double v34;
@TableField(value = "v_35")
@TableField(value = "v_35",updateStrategy = FieldStrategy.IGNORED)
private Double v35;
@TableField(value = "v_36")
@TableField(value = "v_36",updateStrategy = FieldStrategy.IGNORED)
private Double v36;
@TableField(value = "v_37")
@TableField(value = "v_37",updateStrategy = FieldStrategy.IGNORED)
private Double v37;
@TableField(value = "v_38")
@TableField(value = "v_38",updateStrategy = FieldStrategy.IGNORED)
private Double v38;
@TableField(value = "v_39")
@TableField(value = "v_39",updateStrategy = FieldStrategy.IGNORED)
private Double v39;
@TableField(value = "v_40")
@TableField(value = "v_40",updateStrategy = FieldStrategy.IGNORED)
private Double v40;
@TableField(value = "v_41")
@TableField(value = "v_41",updateStrategy = FieldStrategy.IGNORED)
private Double v41;
@TableField(value = "v_42")
@TableField(value = "v_42",updateStrategy = FieldStrategy.IGNORED)
private Double v42;
@TableField(value = "v_43")
@TableField(value = "v_43",updateStrategy = FieldStrategy.IGNORED)
private Double v43;
@TableField(value = "v_44")
@TableField(value = "v_44",updateStrategy = FieldStrategy.IGNORED)
private Double v44;
@TableField(value = "v_45")
@TableField(value = "v_45",updateStrategy = FieldStrategy.IGNORED)
private Double v45;
@TableField(value = "v_46")
@TableField(value = "v_46",updateStrategy = FieldStrategy.IGNORED)
private Double v46;
@TableField(value = "v_47")
@TableField(value = "v_47",updateStrategy = FieldStrategy.IGNORED)
private Double v47;
@TableField(value = "v_48")
@TableField(value = "v_48",updateStrategy = FieldStrategy.IGNORED)
private Double v48;
@TableField(value = "v_49")
@TableField(value = "v_49",updateStrategy = FieldStrategy.IGNORED)
private Double v49;
@TableField(value = "v_50")
@TableField(value = "v_50",updateStrategy = FieldStrategy.IGNORED)
private Double v50;
}
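
A note on the updateStrategy change above: by default MyBatis-Plus skips null fields when building UPDATE statements, whereas FieldStrategy.IGNORED forces the column into the SQL, so a cleaning pass can overwrite an out-of-range measurement with NULL. A minimal sketch, assuming a plain BaseMapper for RStatDataVD exists somewhere in the module and the entity has the usual Lombok setters:

import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.njcn.dataProcess.pojo.po.RStatDataVD;

// Hypothetical mapper and call site, for illustration only.
interface RStatDataVDMapperSample extends BaseMapper<RStatDataVD> {
}

public class RStatDataVDCleanSample {

    private final RStatDataVDMapperSample mapper;

    public RStatDataVDCleanSample(RStatDataVDMapperSample mapper) {
        this.mapper = mapper;
    }

    public void invalidateThirdHarmonic(RStatDataVD record) {
        record.setV3(null);        // blank out the rejected value
        mapper.updateById(record); // with IGNORED, v_3 is still included in the SET clause even though it is null
    }
}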

View File

@@ -0,0 +1,20 @@
package com.njcn.dataProcess.util;
/**
* @author xy
* @description 公共方法
*/
public class DataCommonUtils {
/**
* 传入字典类型的电压等级,获取可以计算的数据
* 例如 35kV->35.0
* 0.38kV->0.38
*/
public static Double getVoltageData(String lineVoltage) {
String numberStr = lineVoltage.replace("kV", "");
return Double.parseDouble(numberStr);
}
}
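
A quick usage note, illustrative only: the helper simply strips the literal "kV" suffix and parses the remainder, so inputs are expected to end with exactly that unit string.

import com.njcn.dataProcess.util.DataCommonUtils;

public class DataCommonUtilsSample {

    public static void main(String[] args) {
        System.out.println(DataCommonUtils.getVoltageData("35kV"));   // 35.0
        System.out.println(DataCommonUtils.getVoltageData("0.38kV")); // 0.38
    }
}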

View File

@@ -27,6 +27,24 @@ public class TimeUtils {
return dateTime.toLocalDate();
}
/**
* String类型的yyyy-MM-dd HH:mm:ss转成LocalDateTime yyyy-MM-dd HH:mm:ss
* @param time
* @return LocalDateTime
*/
public static LocalDateTime StringToLocalDateTime(String time) {
return LocalDateTime.parse(time, DATE_TIME_FORMATTER);
}
/**
* LocalDateTime类型的yyyy-MM-dd HH:mm:ss转成String yyyy-MM-dd HH:mm:ss
* @param time
* @return String
*/
public static String LocalDateTimeToString(LocalDateTime time) {
return time.format(DATE_TIME_FORMATTER);
}
/**
* String类型的yyyy-MM-dd转成yyyy-MM-dd
* @param time

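A short round-trip example of the two new helpers, assuming TimeUtils lives alongside DataCommonUtils and its DATE_TIME_FORMATTER uses the yyyy-MM-dd HH:mm:ss pattern described in the comments:

import com.njcn.dataProcess.util.TimeUtils;
import java.time.LocalDateTime;

public class TimeUtilsSample {

    public static void main(String[] args) {
        // Illustrative only: String -> LocalDateTime -> String round trip.
        LocalDateTime parsed = TimeUtils.StringToLocalDateTime("2025-02-13 20:11:00");
        String formatted = TimeUtils.LocalDateTimeToString(parsed);
        System.out.println(formatted); // 2025-02-13 20:11:00
    }
}
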
View File

@@ -6,10 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -54,7 +55,14 @@ public class DataFlickerController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataFlickerDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataFlickerDto> data = dataFlickerQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}

View File

@@ -6,9 +6,12 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -42,7 +45,6 @@ public class DataFlucController extends BaseController {
@InsertBean
private IDataFluc dataFlucInsert;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/batchInsertion")
@ApiOperation("批量插入")
@@ -53,9 +55,13 @@ public class DataFlucController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataFlucDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataFlucDto> data = dataFlucQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -0,0 +1,55 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmRateV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author hongawen
* @version 1.0
* @date 2024/11/6 19:48
*/
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmRateV")
@Api(tags = "谐波电压含有率")
public class DataHarmRateVController extends BaseController {
@QueryBean
private IDataHarmRateV dataHarmRateVQuery;
@InsertBean
private IDataHarmRateV dataHarmRateVInsert;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmDto> data = dataHarmRateVQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -54,7 +56,14 @@ public class DataHarmphasicVController extends BaseController {
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmDto> data = dataHarmphasicVQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,13 @@ public class DataHarmpowerPController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataPowerPDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataPowerPDto> data = dataHarmpowerPQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,13 @@ public class DataIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataIDto> data = dataIQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -9,6 +9,8 @@ import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,14 @@ public class DataInharmVController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmDto> data = dataInharmVQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,14 @@ public class DataPltController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataPltDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataPltDto> data = dataPltQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -105,9 +105,10 @@ public class DataVController extends BaseController {
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/addInfluxDbList")
@ApiOperation("时序数据库插入数据")
@Deprecated
public HttpResult<String> addInfluxDbList(@RequestBody List<DataVDto> dataVList) {
String methodDescribe = getMethodDescribe("addInfluxDbList");
dataVInsert.addInfluxDbList(dataVList);
dataVQuery.addInfluxDbList(dataVList);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
}

View File

@@ -0,0 +1,57 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.service.IPqDataVerifyService;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* Front-end controller
* </p>
*
* @author xy
* @since 2025-02-17
*/
@RestController
@RequestMapping("/pqDataVerify")
public class PqDataVerifyController extends BaseController {
@Resource
private IPqDataVerifyService pqDataVerifyService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/insertData")
@ApiOperation("存储清洗的异常数据")
public HttpResult<List<PqDataVerify>> insertData(@RequestBody List<PqDataVerify> list) {
String methodDescribe = getMethodDescribe("insertData");
pqDataVerifyService.insertData(list);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/queryData")
@ApiOperation("查询清洗的异常数据")
public HttpResult<List<PqDataVerify>> queryData(@RequestBody LineCountEvaluateParam param) {
String methodDescribe = getMethodDescribe("queryData");
List<PqDataVerify> list = pqDataVerifyService.queryData(param);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
}
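A service-level sketch of how another component might read back the stored abnormal records; the setter names on LineCountEvaluateParam are assumed to mirror the getters read in PqDataVerifyServiceImpl.queryData, and the literal values are placeholders.

import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.service.IPqDataVerifyService;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.Arrays;
import java.util.List;

@Component
public class PqDataVerifyQuerySketch {
    @Resource
    private IPqDataVerifyService pqDataVerifyService;

    public List<PqDataVerify> loadAbnormalRecords() {
        LineCountEvaluateParam param = new LineCountEvaluateParam();
        // Setter names are assumed from the getters used in PqDataVerifyServiceImpl.queryData.
        param.setStartTime("2025-02-17 00:00:00");
        param.setEndTime("2025-02-17 23:59:59");
        param.setLineId(Arrays.asList("L0001"));  // optional filter: monitoring points
        param.setTableName("dataV");              // optional filter: indicator table (value is illustrative)
        return pqDataVerifyService.queryData(param);
    }
}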

View File

@@ -0,0 +1,56 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.service.IPqReasonableRangeService;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* Front-end controller
* </p>
*
* @author xy
* @since 2025-02-13
*/
@Validated
@Slf4j
@RestController
@RequestMapping("/pqReasonableRange")
@Api(tags = "数据清洗标准库")
public class PqReasonableRangeController extends BaseController {
@Resource
private IPqReasonableRangeService pqReasonableRangeService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getData")
@ApiOperation("按条件获取数据合理范围")
public HttpResult<List<PqReasonableRangeDto>> getData(@RequestBody DataCleanParam param) {
String methodDescribe = getMethodDescribe("getData");
List<PqReasonableRangeDto> list = pqReasonableRangeService.getReasonableRangeList(param);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
}
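A sketch of how a cleaning node could pull its thresholds from this standard library; the DataCleanParam setters and the literal values are assumptions mirroring the getters used in PqReasonableRangeServiceImpl.getReasonableRangeList.

import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.service.IPqReasonableRangeService;
import org.springframework.stereotype.Component;
import javax.annotation.Resource;
import java.util.List;

@Component
public class ReasonableRangeLookupSketch {
    @Resource
    private IPqReasonableRangeService pqReasonableRangeService;

    public List<PqReasonableRangeDto> loadRangesForDataV() {
        DataCleanParam param = new DataCleanParam();
        // Setter names are assumed from the getters used in getReasonableRangeList; values are placeholders.
        param.setSystemType("PQ");        // belonging system
        param.setDataSource("influxdb");  // data source
        param.setTableName("dataV");      // InfluxDB table whose indicators are being cleaned
        return pqReasonableRangeService.getReasonableRangeList(param);
    }
}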

View File

@@ -0,0 +1,14 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
* @author xy
*/
public interface DataHarmRateVMapper extends InfluxDbBaseMapper<DataHarmrateV> {
}
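A usage sketch for this mapper that mirrors the InfluxQueryWrapper calls (regular/select/between/eq/selectByQueryWrapper) already used by the service implementations added in this commit; the monitoring-point id is a placeholder, and the harmonic-column expansion (samePrefixAndSuffix) is omitted for brevity.

import com.njcn.dataProcess.dao.imapper.DataHarmRateVMapper;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Component;
import java.util.Arrays;
import java.util.List;

@Component
@RequiredArgsConstructor
public class DataHarmRateVQuerySketch {
    private final DataHarmRateVMapper dataHarmRateVMapper;

    public List<DataHarmrateV> loadGoodQualityRows(String startTime, String endTime) {
        // Same wrapper pattern as the getMinuteData helper of InfluxdbDataHarmRateVImpl in this commit.
        InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmrateV.class);
        influxQueryWrapper.regular(DataHarmrateV::getLineId, Arrays.asList("L0001"))
                .select(DataHarmrateV::getLineId)
                .select(DataHarmrateV::getQualityFlag)
                .between(DataHarmrateV::getTime, startTime, endTime)
                .eq(DataHarmrateV::getQualityFlag, "0");
        return dataHarmRateVMapper.selectByQueryWrapper(influxQueryWrapper);
    }
}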

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.github.jeffreyning.mybatisplus.base.MppBaseMapper;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
/**
* <p>
* Mapper interface
* </p>
*
* @author xy
* @since 2025-02-17
*/
public interface PqDataVerifyMapper extends MppBaseMapper<PqDataVerify> {
}
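MppBaseMapper and the saveOrUpdateBatchByMultiId call in PqDataVerifyServiceImpl depend on mybatisplus-plus composite-key support. Purely as an illustration (the real PqDataVerify PO is outside this diff, and the annotation and package below reflect my understanding of that library rather than anything in this change), a composite key is typically declared like this:

import com.baomidou.mybatisplus.annotation.TableName;
import com.github.jeffreyning.mybatisplus.anno.MppMultiId;
import lombok.Data;

// Illustrative only: the field set and table name are assumptions, not the actual PqDataVerify definition.
@Data
@TableName("pq_data_verify")
public class CompositeKeySketch {
    @MppMultiId
    private String lineId;     // monitoring point, part of the composite key
    @MppMultiId
    private String time;       // sample time, part of the composite key
    private String indexTable; // which indicator table the abnormal value came from
}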

View File

@@ -0,0 +1,18 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqReasonableRange;
import java.util.List;
/**
* <p>
* Mapper interface
* </p>
*
* @author xy
* @since 2025-02-13
*/
public interface PqReasonableRangeMapper extends BaseMapper<PqReasonableRange> {
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import java.util.List;
@@ -13,4 +15,11 @@ import java.util.List;
*/
public interface IDataFlicker {
void batchInsertion(List<DataFlickerDTO> dataIDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataFlickerDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import java.util.List;
@@ -13,4 +15,11 @@ import java.util.List;
*/
public interface IDataFluc {
void batchInsertion(List<DataFlucDTO> dataIDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -0,0 +1,19 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.util.List;
/**
* @author xy
*/
public interface IDataHarmRateV {
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.util.List;
@@ -13,4 +15,11 @@ import java.util.List;
*/
public interface IDataHarmphasicV {
void batchInsertion(List<DataHarmphasicVDTO> dataIDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,9 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import java.util.List;
@@ -13,4 +16,11 @@ import java.util.List;
*/
public interface IDataHarmpowerP {
void batchInsertion(List<DataHarmpowerPDTO> dataIDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,10 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import java.util.List;
@@ -12,5 +16,13 @@ import java.util.List;
* @version V1.0.0
*/
public interface IDataI {
void batchInsertion(List<DataIDTO> dataIDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataIDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.util.List;
@@ -14,4 +16,11 @@ import java.util.List;
public interface IDataInharmV {
void batchInsertion(List<DataInharmVDTO> dataIDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,9 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import java.util.List;
@@ -13,4 +16,11 @@ import java.util.List;
*/
public interface IDataPlt {
void batchInsertion(List<DataPltDTO> dataPltDTOList);
/**
* Fetch the raw data
* @param lineParam
* @return
*/
List<DataPltDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -4,7 +4,6 @@ import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.dto.DataVDTO;
import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import com.njcn.dataProcess.pojo.po.RStatDataVD;

View File

@@ -0,0 +1,30 @@
package com.njcn.dataProcess.service;
import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import java.util.List;
/**
* <p>
* Service interface
* </p>
*
* @author xy
* @since 2025-02-17
*/
public interface IPqDataVerifyService extends IMppService<PqDataVerify> {
/**
* Insert abnormal data records
* @param list
*/
void insertData(List<PqDataVerify> list);
/**
* Query abnormal data records
* @param param
*/
List<PqDataVerify> queryData(LineCountEvaluateParam param);
}

View File

@@ -0,0 +1,27 @@
package com.njcn.dataProcess.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqReasonableRange;
import java.util.List;
/**
* <p>
* Service interface
* </p>
*
* @author xy
* @since 2025-02-13
*/
public interface IPqReasonableRangeService extends IService<PqReasonableRange> {
/**
* Get the reasonable value ranges of steady-state indicators by the given conditions
* @param param
* @return
*/
List<PqReasonableRangeDto> getReasonableRangeList(DataCleanParam param);
}

View File

@@ -1,15 +1,23 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.dataProcess.dao.imapper.DataFlickerMapper;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataFlicker;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +31,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataFlickerImpl implements IDataFlicker {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataFlickerMapper dataFlickerMapper;
@@ -44,4 +54,69 @@ public class InfluxdbDataFlickerImpl implements IDataFlicker {
}
}
@Override
public List<DataFlickerDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataFlickerDto> result = new ArrayList<>();
List<DataFlicker> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataFlickerDto dto = new DataFlickerDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataFlicker> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataFlicker> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataFlicker.class);
influxQueryWrapper.regular(DataFlicker::getLineId, lineList)
.select(DataFlicker::getLineId)
.select(DataFlicker::getPhasicType)
.select(DataFlicker::getFluc)
.select(DataFlicker::getPst)
.select(DataFlicker::getPlt)
.select(DataFlicker::getQualityFlag)
.between(DataFlicker::getTime, startTime, endTime)
.eq(DataFlicker::getQualityFlag,"0");
List<DataFlicker> list = dataFlickerMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataFlicker>> lineMap = list.stream().collect(Collectors.groupingBy(DataFlicker::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFlicker> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}
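The same three-case rule recurs in every getMinuteData* helper added in this commit; as a reference for reviewers, a self-contained sketch of just that branching, using plain strings instead of the project classes (Sample and its fields are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class AbnormalFilterSketch {
    /** One minute-level sample: its monitoring point, timestamp and quality flag. */
    static class Sample {
        final String lineId;
        final String minTime;
        String qualityFlag = "0";
        Sample(String lineId, String minTime) { this.lineId = lineId; this.minTime = minTime; }
    }

    /**
     * Re-implementation of the recurring rule:
     * 1. no abnormal timestamps for a point -> keep its samples as-is;
     * 2. some samples abnormal -> keep only the normal ones;
     * 3. all samples abnormal -> keep them all but mark qualityFlag = "1".
     */
    static List<Sample> filter(List<Sample> samples, Map<String, List<String>> abnormalTimes) {
        List<Sample> result = new ArrayList<>();
        if (abnormalTimes == null || abnormalTimes.isEmpty()) {
            result.addAll(samples);                       // nothing to clean at all
            return result;
        }
        samples.stream().collect(Collectors.groupingBy(s -> s.lineId)).forEach((lineId, group) -> {
            List<String> bad = abnormalTimes.get(lineId);
            if (bad == null || bad.isEmpty()) {           // case 1
                result.addAll(group);
                return;
            }
            List<Sample> normal = group.stream()
                    .filter(s -> !bad.contains(s.minTime))
                    .collect(Collectors.toList());
            if (!normal.isEmpty()) {                      // case 2
                result.addAll(normal);
            } else {                                      // case 3
                group.forEach(s -> s.qualityFlag = "1");
                result.addAll(group);
            }
        });
        return result;
    }
}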

View File

@@ -1,15 +1,23 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataFlucDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.dataProcess.dao.imapper.DataFlucMapper;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataFluc;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +31,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataFlucImpl implements IDataFluc {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataFlucMapper dataFlucMapper;
@@ -44,4 +54,68 @@ public class InfluxdbDataFlucImpl implements IDataFluc {
}
}
@Override
public List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataFlucDto> result = new ArrayList<>();
List<DataFluc> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataFlucDto dto = new DataFlucDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataFluc> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataFluc> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataFluc.class);
influxQueryWrapper.regular(DataFluc::getLineId, lineList)
.select(DataFluc::getLineId)
.select(DataFluc::getPhasicType)
.select(DataFluc::getFluc)
.select(DataFluc::getFluccf)
.select(DataFluc::getQualityFlag)
.between(DataFluc::getTime, startTime, endTime)
.eq(DataFluc::getQualityFlag,"0");
List<DataFluc> list = dataFlucMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataFluc>> lineMap = list.stream().collect(Collectors.groupingBy(DataFluc::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFluc> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -0,0 +1,98 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataHarmRateVMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmRateV;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author xy
*/
@Service("InfluxdbDataHarmRateVImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmRateVImpl implements IDataHarmRateV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataHarmRateVMapper dataHarmRateVMapper;
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataHarmrateV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level dataHarmRateV data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataHarmrateV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataHarmrateV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmrateV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(2, 50, 1));
influxQueryWrapper.regular(DataHarmrateV::getLineId, lineList)
.select(DataHarmrateV::getLineId)
.select(DataHarmrateV::getPhasicType)
.select(DataHarmrateV::getValueType)
.select(DataHarmrateV::getQualityFlag)
.between(DataHarmrateV::getTime, startTime, endTime)
.eq(DataHarmrateV::getQualityFlag,"0");
List<DataHarmrateV> list = dataHarmRateVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmrateV>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmrateV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmrateV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,25 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataHarmphasicVMapper;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +33,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataHarmphasicVImpl implements IDataHarmphasicV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataHarmphasicVMapper dataHarmphasicVMapper;
@@ -44,4 +56,68 @@ public class InfluxdbDataHarmphasicVImpl implements IDataHarmphasicV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataHarmphasicV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataHarmphasicV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataHarmphasicV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmphasicV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataHarmphasicV::getLineId, lineList)
.select(DataHarmphasicV::getLineId)
.select(DataHarmphasicV::getPhasicType)
.select(DataHarmphasicV::getValueType)
.select(DataHarmphasicV::getQualityFlag)
.between(DataHarmphasicV::getTime, startTime, endTime)
.eq(DataHarmphasicV::getQualityFlag,"0");
List<DataHarmphasicV> list = dataHarmphasicVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmphasicV>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmphasicV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmphasicV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,28 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmpowerPMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmpowerP;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +36,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataHarmpowerPImpl implements IDataHarmpowerP {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataHarmpowerPMapper dataHarmpowerPMapper;
@@ -43,4 +58,71 @@ public class InfluxdbDataHarmpowerPImpl implements IDataHarmpowerP {
}
}
@Override
public List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataPowerPDto> result = new ArrayList<>();
List<DataHarmpowerP> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataPowerPDto dto = new DataPowerPDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level dataHarmpowerP data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataHarmpowerP> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataHarmpowerP> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmpowerP.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.P, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataHarmpowerP::getLineId, lineList)
.select(DataHarmpowerP::getLineId)
.select(DataHarmpowerP::getPhasicType)
.select(DataHarmpowerP::getValueType)
.select(DataHarmpowerP::getP)
.select(DataHarmpowerP::getDf)
.select(DataHarmpowerP::getPf)
.select(DataHarmpowerP::getQualityFlag)
.between(DataHarmpowerP::getTime, startTime, endTime)
.eq(DataHarmpowerP::getQualityFlag,"0");
List<DataHarmpowerP> list = dataHarmpowerPMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmpowerP>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmpowerP::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerP> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,25 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataIDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataIMapper;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +33,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataIImpl implements IDataI {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataIMapper dataIMapper;
@@ -43,4 +55,75 @@ public class InfluxdbDataIImpl implements IDataI {
}
}
@Override
public List<DataIDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataIDto> result = new ArrayList<>();
List<DataI> list = getMinuteDataI(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataIDto dto = new DataIDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level dataI data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataI> getMinuteDataI(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataI> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataI.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.I, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataI::getLineId, lineList)
.select(DataI::getLineId)
.select(DataI::getPhasicType)
.select(DataI::getValueType)
.select(DataI::getINeg)
.select(DataI::getIPos)
.select(DataI::getIThd)
.select(DataI::getIUnbalance)
.select(DataI::getIZero)
.select(DataI::getRms)
.select(DataI::getQualityFlag)
.between(DataI::getTime, startTime, endTime)
.eq(DataI::getQualityFlag,"0");
List<DataI> list = dataIMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataI>> lineMap = list.stream().collect(Collectors.groupingBy(DataI::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,17 +1,29 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataInharmVMapper;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataInharmV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 14:33 [requirement ID]
@@ -23,6 +35,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataInharmVImpl implements IDataInharmV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataInharmVMapper dataInharmVMapper;
@@ -43,4 +57,68 @@ public class InfluxdbDataInharmVImpl implements IDataInharmV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataInharmV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level dataInharmV data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataInharmV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataInharmV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataInharmV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataInharmV::getLineId, lineList)
.select(DataInharmV::getLineId)
.select(DataInharmV::getPhasicType)
.select(DataInharmV::getValueType)
.select(DataInharmV::getQualityFlag)
.between(DataInharmV::getTime, startTime, endTime)
.eq(DataInharmV::getQualityFlag,"0");
List<DataInharmV> list = dataInharmVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataInharmV>> lineMap = list.stream().collect(Collectors.groupingBy(DataInharmV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataInharmV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,26 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataPltDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataPltMapper;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataPlt;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,8 +34,9 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataPltImpl implements IDataPlt {
private final DataPltMapper dataPltMapper;
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataPltMapper dataPltMapper;
@Override
public void batchInsertion(List<DataPltDTO> dataPltDTOList) {
@@ -44,4 +56,68 @@ public class InfluxdbDataPltImpl implements IDataPlt {
}
}
@Override
public List<DataPltDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataPltDto> result = new ArrayList<>();
List<DataPlt> list = getMinuteDataPlt(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataPltDto dto = new DataPltDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level dataPlt data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataPlt> getMinuteDataPlt(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataPlt> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataPlt.class);
influxQueryWrapper.regular(DataPlt::getLineId, lineList)
.select(DataPlt::getLineId)
.select(DataPlt::getPhasicType)
.select(DataPlt::getPlt)
.select(DataPlt::getQualityFlag)
.between(DataPlt::getTime, startTime, endTime)
.eq(DataPlt::getQualityFlag,"0");
List<DataPlt> list = dataPltMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataPlt>> lineMap = list.stream().collect(Collectors.groupingBy(DataPlt::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataPlt> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -29,6 +29,7 @@ import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
@@ -39,7 +40,7 @@ import java.util.stream.Collectors;
@Service("InfluxdbDataVImpl")
public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper, RStatDataVD> implements IDataV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());;
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
@Resource
private DataVMapper dataVMapper;
@@ -119,7 +120,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
@Override
public List<DataVDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataVDto> result = new ArrayList<>();
List<DataV> list = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), false);
List<DataV> list = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataVDto dto = new DataVDto();
BeanUtils.copyProperties(item,dto);
@@ -132,7 +133,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataV> dataVList = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),true);
List<DataV> dataVList = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
if (CollectionUtil.isNotEmpty(dataVList)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
// group by monitoring point
@@ -141,6 +142,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
CommonMinuteDto dto = new CommonMinuteDto();
dto.setLineId(line);
dto.setTime(time);
dto.setQualityFlag(lineList.get(0).getQualityFlag());
// group by phase type
Map<String,List<DataV>> phasicTypeMap = lineList.stream().collect(Collectors.groupingBy(DataV::getPhasicType));
List<CommonMinuteDto.PhasicType> phasicTypes = new ArrayList<>();
@@ -256,8 +258,14 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
/**
* Fetch minute-level dataV data by monitoring-point list and time range.
* The timeMap parameter decides whether post-processing is applied; if timeMap is empty, no processing is done.
* When abnormal records have to be removed, three cases are distinguished:
* 1. no abnormal data: return the collection directly;
* 2. abnormal and normal data mixed: drop the abnormal records and compute with the normal data only;
* 3. all data abnormal: compute with the abnormal data, but mark it so the daily table shows the data is abnormal.
*/
public List<DataV> getMinuteDataV(List<String> lineList, String startTime, String endTime, boolean clean) {
public List<DataV> getMinuteDataV(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataV::getLineId, lineList)
@@ -276,11 +284,38 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
.select(DataV::getVlDev)
.select(DataV::getVuDev)
.select(DataV::getQualityFlag)
.between(DataV::getTime, startTime, endTime);
if (clean) {
influxQueryWrapper.eq(DataV::getAbnormalFlag,0);
.between(DataV::getTime, startTime, endTime)
.eq(DataV::getQualityFlag,"0");
List<DataV> list = dataVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataV>> lineMap = list.stream().collect(Collectors.groupingBy(DataV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// abnormal records exist for this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after filtering out the abnormal records, so compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains after filtering, so compute with all the abnormal records but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data, use the original data
else {
result.addAll(v);
}
});
}
return dataVMapper.selectByQueryWrapper(influxQueryWrapper);
// no abnormal data, use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.relation;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.dataProcess.dao.relation.mapper.PqDataVerifyMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.service.IPqDataVerifyService;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Objects;
/**
* <p>
* Service implementation
* </p>
*
* @author xy
* @since 2025-02-17
*/
@Service
public class PqDataVerifyServiceImpl extends MppServiceImpl<PqDataVerifyMapper, PqDataVerify> implements IPqDataVerifyService {
@Override
@Transactional(rollbackFor = Exception.class)
public void insertData(List<PqDataVerify> list) {
this.saveOrUpdateBatchByMultiId(list);
}
@Override
public List<PqDataVerify> queryData(LineCountEvaluateParam param) {
LambdaQueryWrapper<PqDataVerify> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.between(PqDataVerify::getTime,param.getStartTime(),param.getEndTime());
if (CollUtil.isNotEmpty(param.getLineId())) {
queryWrapper.in(PqDataVerify::getLineId,param.getLineId());
}
if (!Objects.isNull(param.getTableName())) {
queryWrapper.eq(PqDataVerify::getIndexTable, param.getTableName());
}
return this.list(queryWrapper);
}
}
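The getMinuteData* helpers above consume the abnormal timestamps as Map&lt;lineId, List&lt;minuteTime&gt;&gt;; a sketch of deriving that map from the stored PqDataVerify rows, assuming getTime() yields (or can be formatted to) the same yyyy-MM-dd HH:mm:ss strings the filters compare against, and that LineCountEvaluateParam exposes a matching setAbnormalTime setter.

import com.njcn.dataProcess.pojo.po.PqDataVerify;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class AbnormalTimeMapSketch {
    /**
     * Groups the stored abnormal records by monitoring point so the result can be handed
     * to the getMinuteData* helpers (read via LineCountEvaluateParam.getAbnormalTime()).
     */
    public static Map<String, List<String>> toAbnormalTimeMap(List<PqDataVerify> verifies) {
        return verifies.stream().collect(Collectors.groupingBy(
                PqDataVerify::getLineId,
                // getTime is assumed to format to "yyyy-MM-dd HH:mm:ss", matching DATE_TIME_FORMATTER above.
                Collectors.mapping(v -> String.valueOf(v.getTime()), Collectors.toList())));
    }
}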

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.relation;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.dataProcess.dao.relation.mapper.PqReasonableRangeMapper;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqReasonableRange;
import com.njcn.dataProcess.service.IPqReasonableRangeService;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
* <p>
* Service implementation
* </p>
*
* @author xy
* @since 2025-02-13
*/
@Service
public class PqReasonableRangeServiceImpl extends ServiceImpl<PqReasonableRangeMapper, PqReasonableRange> implements IPqReasonableRangeService {
@Override
public List<PqReasonableRangeDto> getReasonableRangeList(DataCleanParam param) {
List<PqReasonableRangeDto> result = new ArrayList<>();
LambdaQueryWrapper<PqReasonableRange> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(PqReasonableRange::getBelongingSystem,param.getSystemType())
.eq(PqReasonableRange::getDataSource,param.getDataSource())
.eq(PqReasonableRange::getInfluxdbTableName,param.getTableName())
.eq(PqReasonableRange::getState,1);
List<PqReasonableRange> list = this.list(queryWrapper);
if (CollUtil.isNotEmpty(list)) {
list.forEach(item->{
PqReasonableRangeDto dto = new PqReasonableRangeDto();
BeanUtils.copyProperties(item,dto);
result.add(dto);
});
}
return result;
}
}
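How a cleaning node applies these ranges is outside this diff; purely as an illustration, and assuming each PqReasonableRangeDto ultimately yields a numeric lower and upper bound for one indicator (its field names are not visible here), the per-value check reduces to:

public final class RangeCheckSketch {
    private RangeCheckSketch() {
    }

    /**
     * Returns true when the measured value lies inside [lowerBound, upperBound].
     * The bounds are assumed to come from one PqReasonableRangeDto entry.
     */
    public static boolean withinReasonableRange(double value, double lowerBound, double upperBound) {
        return value >= lowerBound && value <= upperBound;
    }
}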

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataFlickerRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataFlicker;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -53,4 +56,9 @@ public class RelationDataFlickerImpl implements IDataFlicker {
}
}
@Override
public List<DataFlickerDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataFlucRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataFluc;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -43,5 +46,12 @@ public class RelationDataFlucImpl implements IDataFluc {
dataFlucRelationMapper.insertBatchSomeColumn(dataFlucList);
}
};
}
@Override
public List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -0,0 +1,28 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dao.relation.mapper.DataIRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmRateV;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Collections;
import java.util.List;
/**
* @author xy
*/
@Service("RelationDataHarmRateVImpl")
@RequiredArgsConstructor
public class RelationDataHarmRateVImpl implements IDataHarmRateV {
@Resource
private DataIRelationMapper dataIRelationMapper;
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -2,7 +2,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmphasicVRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataHarmphasicV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -10,6 +12,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -50,5 +53,10 @@ public class RelationDataHarmphasicVImpl implements IDataHarmphasicV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmpowerPRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataHarmpowerP;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -53,4 +56,9 @@ public class RelationDataHarmpowerPImpl implements IDataHarmpowerP {
}
}
@Override
public List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataIRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -12,6 +14,7 @@ import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -51,4 +54,9 @@ public class RelationDataIImpl implements IDataI {
dataIRelationMapper.insertBatchSomeColumn(dataIList);
}
}
@Override
public List<DataIDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataVInharmVRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataInharmV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -51,4 +54,9 @@ public class RelationDataInharmVImpl implements IDataInharmV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataPltRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataPlt;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -51,4 +54,9 @@ public class RelationDataPltImpl implements IDataPlt {
dataPltRelationMapper.insertBatchSomeColumn(dataPltList);
}
}
@Override
public List<DataPltDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -2,6 +2,7 @@ package com.njcn.dataProcess.service.impl.relation;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.dao.relation.mapper.DataVRelationMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataVRelationMapper;
import com.njcn.dataProcess.dto.DataVDTO;
@@ -20,6 +21,7 @@ import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
@@ -37,8 +39,11 @@ import java.util.stream.Collectors;
public class RelationDataVImpl extends MppServiceImpl<RStatDataVRelationMapper, RStatDataVD> implements IDataV {
@Resource
private DataVRelationMapper dataVRelationMapper;
@Resource
@InsertBean
private IDataV iDataV;
@Override
public Map<String, List<DataVFiveItemDTO>> getLineCountEvaluate(LineCountEvaluateParam lineParam) {
@@ -100,10 +105,11 @@ public class RelationDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
RStatDataVD dataV = new RStatDataVD();
dataV.setTime(TimeUtils.LocalDataTimeToLocalDate2(item.getTime()));
dataV.setPhasicType(item.getPhasicType());
dataV.setQualityFlag(Integer.valueOf(item.getQualityFlag()));
BeanUtils.copyProperties(item, dataV);
result.add(dataV);
});
iDataV.saveOrUpdateBatch(result);
iDataV.saveOrUpdateBatchByMultiId(result);
}
@Override