Merge remote-tracking branch 'origin/master'

Lee
2023-03-30 16:22:31 +08:00
79 changed files with 656 additions and 312 deletions

View File

@@ -50,5 +50,10 @@ public class PollutionDTO {
*/
private Double vInharm = 0.0;
/**
* Pollution value of harmonic voltage content ratio
*/
private Double vHarmonic = 0.0;
}

View File

@@ -0,0 +1,49 @@
package com.njcn.prepare.harmonic.pojo.influxdb.po;
import lombok.Data;
import org.influxdb.annotation.Column;
import org.influxdb.annotation.Measurement;
import java.time.Instant;
/**
* <p>
*
* </p>
*
* @author cdf
* @since 2022-01-07
*/
@Data
@Measurement(name = "pqs_communicate")
public class Communicate {
private static final long serialVersionUID = 1L;
/**
 * Update time
 */
@Column(name = "time")
private Instant updateTime;
/**
 * Terminal ID
 */
@Column(name = "dev_id")
private String devId;
/**
* Event type (0 = interrupted, 1 = normal, 2 = exited)
*/
@Column(name = "type")
private Integer type;
/**
* Remark
*/
@Column(name = "description")
private String remark;
}
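The @Measurement and @Column mappings above are consumed later in this commit through influxdb-java's InfluxDBResultMapper (see DeviceAbnormaStatisticsServiceImpl below). A minimal, self-contained sketch of that mapping, using a placeholder connection instead of the project's InfluxDbUtils wrapper — URL, credentials, database name, and the query string are illustrative only:

// Minimal sketch, not the project's code: query the pqs_communicate
// measurement and map rows onto the Communicate POJO defined above.
import org.influxdb.InfluxDB;
import org.influxdb.InfluxDBFactory;
import org.influxdb.dto.Query;
import org.influxdb.dto.QueryResult;
import org.influxdb.impl.InfluxDBResultMapper;
import java.util.List;

public class CommunicateQuerySketch {
    public static void main(String[] args) {
        // Connection details are placeholders, not taken from this commit.
        InfluxDB influxDB = InfluxDBFactory.connect("http://localhost:8086", "user", "password");
        QueryResult result = influxDB.query(
                new Query("select * from pqs_communicate where type = 0", "pqs"));
        // toPOJO matches the @Column names ("time", "dev_id", "type", "description")
        // in the result against the annotated fields of Communicate.
        List<Communicate> rows = new InfluxDBResultMapper().toPOJO(result, Communicate.class);
        rows.forEach(r -> System.out.println(r.getDevId() + " interrupted at " + r.getUpdateTime()));
        influxDB.close();
    }
}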

View File

@@ -22,6 +22,7 @@ public class AlarmPO extends BaseEntity {
private String deviceId;
private LocalDateTime updateTime;
/**
* Alarm description
@@ -69,9 +70,9 @@ public class AlarmPO extends BaseEntity {
private Float flowProportion;
/**
* Communication terminal count
* Communication interruption count
*/
private Integer comOutNum = 0;
private Integer comOutNum;
/**
* Interruption time period description

View File

@@ -4,6 +4,8 @@ import com.njcn.db.bo.BaseEntity;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.List;
@Data
@EqualsAndHashCode(callSuper = true)
public class TopMsgPO extends BaseEntity {
@@ -26,7 +28,7 @@ public class TopMsgPO extends BaseEntity {
/**
* Communication interruption time period description
*/
private String comOutDesc;
private List<String> comOutDesc;
/**
* Data integrity flag: 0 = not up to standard, 1 = up to standard

View File

@@ -57,19 +57,19 @@ public class RStatComassesDPO implements Serializable {
@TableField(value = "vu_dev5")
private Double vuDev5;
@TableField(value = "data_pst1")
@TableField(value = "data_plt1")
private Double dataPst1;
@TableField(value = "data_pst2")
@TableField(value = "data_plt2")
private Double dataPst2;
@TableField(value = "data_pst3")
@TableField(value = "data_plt3")
private Double dataPst3;
@TableField(value = "data_pst4")
@TableField(value = "data_plt4")
private Double dataPst4;
@TableField(value = "data_pst5")
@TableField(value = "data_plt5")
private Double dataPst5;
@TableField(value = "v_unbalance1")

View File

@@ -45,6 +45,7 @@ public class DayDataController extends BaseController {
private final DayDataService dayDataService;
@Deprecated
@ApiOperation("day表定时任务")
@ApiImplicitParam(value = "jobParam",name = "jobParam",required = true)
@PostMapping("dayDataHanlder")

View File

@@ -37,6 +37,7 @@ public class HarmonicGeneralController extends BaseController {
private final HarmonicGeneralService harmonicGeneralService;
@Deprecated
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/generalData")
@ApiOperation("稳态综合评估日数据计算")

View File

@@ -38,6 +38,7 @@ public class HarmonicMetricController extends BaseController {
private final HarmonicMetricService harmonicMetricService;
@Deprecated
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/metricData")
@ApiOperation("稳态指标评估日数据计算")

View File

@@ -38,6 +38,7 @@ public class IntegrityController extends BaseController {
private final IntegrityService integrityService;
@Deprecated
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/computeDataIntegrity")
@ApiOperation("数据完整性统计")

View File

@@ -38,6 +38,7 @@ public class LimitTargetController extends BaseController {
private final LimitTargetService limitTargetService;
@Deprecated
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/getLimitTargetData")
@ApiOperation("生成limit_target表")

View File

@@ -34,7 +34,7 @@ import java.util.List;
* @version V1.0.0
*/
@Slf4j
@Api(tags = "监测点数据超限处理")
@Api(tags = "监测点数据超限处理limitRate")
@RestController
@RequestMapping("limitrate")
@RequiredArgsConstructor
@@ -45,7 +45,7 @@ public class LimitrateController extends BaseController {
private final LineFeignClient lineFeignClient;
@Deprecated
@ApiOperation("limitrate表定时任务")
@ApiImplicitParam(value = "limitRateHanlderParam",name = "limitRateHanlderParam",required = true)
@PostMapping("LimitRateHanlder")

View File

@@ -40,6 +40,7 @@ public class OnlineRateController extends BaseController {
private final OnlineRateService onlineRateService;
@Deprecated
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/getOnlineRateData")
@ApiOperation("生成在线率数据")

View File

@@ -73,34 +73,35 @@
<select id="getAlarmByDevice" resultType="com.njcn.prepare.harmonic.pojo.mysql.po.line.AlarmPO">
SELECT t.*,
(t.statisValue)/t.flowMeal flowProportion
FROM
(
SELECT device.Id AS Id,
device.id AS deviceId,
COUNT(ca.Id) AS alarmCount,
GROUP_CONCAT(ca.Remark) AS alarmDesc,
IFNULL(d.flow, (select flow from cld_flow_meal where type = 0 and flag = 1)) + ifnull(d1.flow, 0) flowMeal,
IFNULL(pmf.Actual_Value,0) statisValue
FROM pq_line line
INNER JOIN pq_line vol ON line.pid=vol.id
INNER JOIN pq_line device ON vol.pid=device.id
LEFT JOIN pq_device pd ON device.id=pd.id
LEFT JOIN pq_line_detail pld ON pld.id=line.id
LEFT JOIN cld_alarm ca ON ca.Line_Id=device.id
LEFT JOIN pqs_month_flow pmf ON pmf.Dev_Id=device.id
LEFT JOIN cld_dev_meal c ON device.id = c.line_id
LEFT JOIN cld_flow_meal d ON c.Base_Meal_Id = d.id
LEFT JOIN cld_flow_meal d1 ON c.Ream_Meal_Id = d1.id
WHERE
pd.Dev_Model = 1
AND
pd.Run_Flag = 0
AND
pld.Line_Grade is NOT NULL
AND
ca.Occurred_Time between #{startTime} and #{endTime}
GROUP BY deviceId
) t
FROM
(
SELECT device.Id AS Id,
device.id AS deviceId,
COUNT(ca.Id) AS alarmCount,
GROUP_CONCAT(ca.Remark) AS alarmDesc,
IFNULL(d.flow, (select flow from cld_flow_meal where type = 0 and flag = 1)) + ifnull(d1.flow, 0) flowMeal,
IFNULL(ANY_VALUE(pmf.Actual_Value),0) statisValue,
ANY_VALUE(device.Update_Time) AS updateTime
FROM pq_line line
INNER JOIN pq_line vol ON line.pid=vol.id
INNER JOIN pq_line device ON vol.pid=device.id
LEFT JOIN pq_device pd ON device.id=pd.id
LEFT JOIN pq_line_detail pld ON pld.id=line.id
LEFT JOIN cld_alarm ca ON ca.Line_Id=device.id
LEFT JOIN pqs_month_flow pmf ON pmf.Dev_Id=device.id
LEFT JOIN cld_dev_meal c ON device.id = c.line_id
LEFT JOIN cld_flow_meal d ON c.Base_Meal_Id = d.id
LEFT JOIN cld_flow_meal d1 ON c.Ream_Meal_Id = d1.id
WHERE
pd.Dev_Model = 1
AND
pd.Run_Flag = 0
AND
pld.Line_Grade is NOT NULL
AND
ca.Occurred_Time between #{startTime} and #{endTime}
GROUP BY deviceId
) t
ORDER BY flowProportion DESC
</select>

View File

@@ -2,23 +2,20 @@ package com.njcn.prepare.harmonic.service.mysql.Impl.device;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import com.njcn.device.pq.pojo.po.Communicate;
import com.njcn.influxdb.param.InfluxDBPublicParam;
import com.njcn.influxdb.utils.InfluxDbUtils;
import com.njcn.prepare.harmonic.mapper.mysql.device.DeviceAbnormaStatisticsMapper;
import com.njcn.prepare.harmonic.pojo.influxdb.po.Communicate;
import com.njcn.prepare.harmonic.pojo.mysql.po.line.AlarmPO;
import com.njcn.prepare.harmonic.pojo.mysql.po.line.AlarmStrategyVO;
import com.njcn.prepare.harmonic.pojo.mysql.po.line.LinePO;
import com.njcn.prepare.harmonic.pojo.mysql.po.line.TopMsgPO;
import com.njcn.prepare.harmonic.pojo.param.DeviceAbnormaStatisticsParam;
import com.njcn.prepare.harmonic.service.mysql.device.DeviceAbnormalStatisticsService;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.influxdb.dto.QueryResult;
import org.influxdb.impl.InfluxDBResultMapper;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import java.math.BigDecimal;
import java.util.*;
@@ -37,33 +34,31 @@ public class DeviceAbnormaStatisticsServiceImpl implements DeviceAbnormalStatist
private final InfluxDbUtils influxDbUtils;
@Override
public boolean dailyDeviceAbnormaStatistics(DeviceAbnormaStatisticsParam param) {
Map<String, List<TopMsgPO>> comMap = new HashMap<>();Map<String, List<AlarmPO>> devLineMap = new HashMap<>();
// 获取监测点告警
List<AlarmPO> lineAlarms = deviceAbnormaStatisticsMapper.getAlarmByLines(
DateUtil.beginOfDay(DateUtil.parse(param.getBeginTime())),
DateUtil.beginOfDay(DateUtil.parse(param.getEndTime())));
Map<String, List<AlarmPO>> lineMap = new HashMap<>();
DateUtil.endOfDay(DateUtil.parse(param.getEndTime())));
if (CollectionUtil.isNotEmpty(lineAlarms)){
// Group by device
lineMap = lineAlarms.stream().collect(Collectors.groupingBy(AlarmPO::getDeviceId));
devLineMap = lineAlarms.stream().collect(Collectors.groupingBy(AlarmPO::getDeviceId));
}
// Get device traffic
List<AlarmPO> deviceAlarms = deviceAbnormaStatisticsMapper.getAlarmByDevice(
DateUtil.beginOfDay(DateUtil.parse(param.getBeginTime())),
DateUtil.beginOfDay(DateUtil.parse(param.getEndTime())));
DateUtil.endOfDay(DateUtil.parse(param.getEndTime())));
List<String> devs = deviceAlarms.stream().map(AlarmPO::getId).collect(Collectors.toList());
devs.add("6469e77fda42db12c7ca6620a092f03c");
Map<String, List<Communicate>> comMap = new HashMap<>();
// Get communication interruption info
List<Communicate> communicate = getCommunicate(devs, param.getBeginTime(), param.getEndTime());
if (CollectionUtil.isNotEmpty(communicate)){
comMap = communicate.stream().collect(Collectors.groupingBy(Communicate::getId));
List<TopMsgPO> comMsgs = getCommunicate(devs, param.getBeginTime(), param.getEndTime());
if (CollectionUtil.isNotEmpty(comMsgs)){
comMap = comMsgs.stream().collect(Collectors.groupingBy(TopMsgPO::getDevId));
}
// Merge monitoring point alarm info, data integrity, and monitoring point level into the device info
for (AlarmPO deviceAlarm : deviceAlarms) {
if (!lineMap.containsKey(deviceAlarm.getId())) {
if (!devLineMap.containsKey(deviceAlarm.getId())) {
continue;
}
AlarmPO lineAlarm = lineMap.get(deviceAlarm.getId()).get(0);
AlarmPO lineAlarm = devLineMap.get(deviceAlarm.getId()).get(0);
deviceAlarm.setAlarmCount(deviceAlarm.getAlarmCount() + lineAlarm.getAlarmCount());
if (lineAlarm.getDue().compareTo(BigDecimal.ZERO) == 0) {
deviceAlarm.setIntegrity(BigDecimal.ZERO);
@@ -73,15 +68,14 @@ public class DeviceAbnormaStatisticsServiceImpl implements DeviceAbnormalStatist
if (lineAlarm.getLevel() != null) {
deviceAlarm.setLevel(lineAlarm.getLevel());
}
if (CollectionUtil.isNotEmpty(comMap)) {
List<Communicate> communicates = comMap.get(deviceAlarm.getId());
int comOut = communicates.stream().filter(t -> t.getType() == 0).collect(Collectors.toList()).size();
deviceAlarm.setComOutNum(comOut);
}
TopMsgPO topMsgPO = comMap.get(deviceAlarm.getId()).get(0);
deviceAlarm.setComOutNum(topMsgPO.getComOutCount());
deviceAlarm.setComOutDesc(String.join(",",topMsgPO.getComOutDesc()));
}
Map<Integer, List<AlarmPO>> levelMap = deviceAlarms.stream().collect(Collectors.groupingBy(AlarmPO::getLevel));
// Compare against alarm strategies and write the results to the table
List<AlarmStrategyVO> alarmStrategyVOS = deviceAbnormaStatisticsMapper.selectAlarmStrategy();
List<TopMsgPO> alarmExceptions = new ArrayList<>();
Map<Integer, List<AlarmPO>> levelMap = deviceAlarms.stream().collect(Collectors.groupingBy(AlarmPO::getLevel));
for (AlarmStrategyVO strategyVO : alarmStrategyVOS) {
if (!levelMap.containsKey(strategyVO.getAlgoDesc())){
continue;
@@ -91,7 +85,7 @@ public class DeviceAbnormaStatisticsServiceImpl implements DeviceAbnormalStatist
TopMsgPO topMsg = new TopMsgPO();
topMsg.setDevId(alarmPO.getId());
topMsg.setFlowFlag(1);
topMsg.setComOutCount(alarmPO.getComOutNum() == 0 ? 0 : alarmPO.getComOutNum());
topMsg.setComOutCount(alarmPO.getComOutNum() == null ? 0 : alarmPO.getComOutNum());
if (alarmPO.getIntegrity().intValue()<strategyVO.getIntegrityValue()) {
topMsg.setIntegrityFlag("0");
topMsg.setIntegrityValue(alarmPO.getIntegrity().intValue());
@@ -111,7 +105,7 @@ public class DeviceAbnormaStatisticsServiceImpl implements DeviceAbnormalStatist
}
public List<Communicate> getCommunicate(List<String> devs, String startTime, String endTime) {
public List<TopMsgPO> getCommunicate(List<String> devs, String startTime, String endTime) {
//Assemble the SQL statement
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("time >= '").append(DateUtil.beginOfDay(DateUtil.parse(startTime))).append("' and ").append("time <= '").append(DateUtil.endOfDay(DateUtil.parse(endTime))).append("' and ");
@@ -126,7 +120,55 @@ public class DeviceAbnormaStatisticsServiceImpl implements DeviceAbnormalStatist
//Query the communication records
QueryResult result = influxDbUtils.query(sql);
InfluxDBResultMapper influxDBResultMapper = new InfluxDBResultMapper();
List<Communicate> communicateList = influxDBResultMapper.toPOJO(result, Communicate.class);
return communicateList;
List<Communicate> communicates = influxDBResultMapper.toPOJO(result, Communicate.class);
List<TopMsgPO> comMsgs = new ArrayList<>();
if (CollectionUtil.isEmpty(communicates)) {
return comMsgs;
}
Map<String, List<Communicate>> comMap = communicates.stream().collect(Collectors.groupingBy(Communicate::getDevId));
for (Map.Entry<String, List<Communicate>> comEntry : comMap.entrySet()) {
TopMsgPO topMsgPO = new TopMsgPO();
List<String> comOutDesc = new ArrayList<>();
topMsgPO.setDevId(comEntry.getKey());
// Sort by date
List<Communicate> sortedList = comEntry.getValue().stream().sorted(Comparator.comparing(Communicate::getUpdateTime)).collect(Collectors.toList());
if (sortedList.size() == 1) {
if (sortedList.get(0).getType() == 0) {
comOutDesc.add(sortedList.get(0).getUpdateTime()+""+DateUtil.endOfDay(DateUtil.parse(endTime)));
} else {
comOutDesc.add((DateUtil.beginOfDay(DateUtil.parse(startTime)))+""+sortedList.get(0).getUpdateTime());
}
} else {
switch (sortedList.get(0).getType()) {
case 0:
for (int i = 0; i <sortedList.size() ; i++) {
if (sortedList.get(i).getType()==1) {
String comOutTime = sortedList.get(i-1).getUpdateTime() +""+ sortedList.get(i).getUpdateTime();
comOutDesc.add(comOutTime);
}
}
break;
case 1:
comOutDesc.add((DateUtil.beginOfDay(DateUtil.parse(startTime)))+""+sortedList.get(0).getUpdateTime());
for (int i = 0; i <sortedList.size();i++) {
if (sortedList.get(i).getType()==0) {
StringBuilder sb = new StringBuilder();
sb.append(sortedList.get(i).getUpdateTime());
sb.append("");
sb.append(i==sortedList.size()-1?DateUtil.endOfDay(DateUtil.parse(endTime)):sortedList.get(i+1).getUpdateTime());
comOutDesc.add(sb.toString());
}
}
break;
default:
break;
}
}
topMsgPO.setComOutDesc(comOutDesc);
// Number of communication interruptions
topMsgPO.setComOutCount(sortedList.stream().filter(a -> a.getType() == 0).collect(Collectors.toList()).size());
comMsgs.add(topMsgPO);
}
return comMsgs;
}
}
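For readers following the rewritten getCommunicate above: the interruption intervals come from pairing status-change events per device. A stripped-down sketch of that pairing idea, with illustrative names (Event, interruptionRanges, the " ~ " separator) that are not part of the project — a type-0 record opens an interruption, the next type-1 record (or the end of the day) closes it, and an interruption already open when the day starts is counted from the day boundary:

import java.time.Instant;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class ComOutSketch {

    /** One communication status change; type 0 = interrupted, 1 = back to normal. */
    static final class Event {
        final Instant time;
        final int type;
        Event(Instant time, int type) { this.time = time; this.type = type; }
        Instant getTime() { return time; }
    }

    /** Pair type-0/type-1 events into "start ~ end" interruption ranges for one device and one day. */
    static List<String> interruptionRanges(List<Event> events, Instant dayStart, Instant dayEnd) {
        List<Event> sorted = new ArrayList<>(events);
        sorted.sort(Comparator.comparing(Event::getTime));
        List<String> ranges = new ArrayList<>();
        Instant open = null;                         // start of the currently open interruption
        for (Event e : sorted) {
            if (e.type == 0 && open == null) {
                open = e.time;                       // interruption starts
            } else if (e.type == 1 && open != null) {
                ranges.add(open + " ~ " + e.time);   // interruption ends
                open = null;
            }
        }
        if (open != null) {                          // still interrupted at the end of the day
            ranges.add(open + " ~ " + dayEnd);
        }
        // If the first event of the day is "back to normal", the interruption started
        // before the day began, so it is counted from the day boundary.
        if (!sorted.isEmpty() && sorted.get(0).type == 1) {
            ranges.add(0, dayStart + " ~ " + sorted.get(0).time);
        }
        return ranges;
    }
}

In the committed code the per-device interruption count (ComOutCount) is simply the number of type-0 records, which matches the number of ranges produced by a pairing like this.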

View File

@@ -30,6 +30,7 @@ import org.springframework.util.CollectionUtils;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.LocalDateTime;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@@ -532,6 +533,7 @@ public class DayDataServiceImpl implements DayDataService {
}
log.info(LocalDateTime.now()+"dayDataJobHandler execution completed!");
}
@Override

View File

@@ -152,6 +152,12 @@ public class HarmonicGeneralServiceImpl extends MppServiceImpl<RStatComassesDMap
outMap.put("vu_dev3",vuDev3.multiply(hundred).divide(vuDevAll,3, RoundingMode.HALF_UP));
outMap.put("vu_dev4",vuDev4.multiply(hundred).divide(vuDevAll,3, RoundingMode.HALF_UP));
outMap.put("vu_dev5",vuDev5.multiply(hundred).divide(vuDevAll,3, RoundingMode.HALF_UP));
}else {
outMap.put("vu_dev1",0.0);
outMap.put("vu_dev2",0.0);
outMap.put("vu_dev3",0.0);
outMap.put("vu_dev4",0.0);
outMap.put("vu_dev5",0.0);
}
String freqDevSql = "select count(freq_dev) from data_v where phasic_type != 'T' and quality_flag = '0' and value_type = 'AVG' and ";
@@ -172,6 +178,12 @@ public class HarmonicGeneralServiceImpl extends MppServiceImpl<RStatComassesDMap
outMap.put("freq_dev3",freqDev3.multiply(hundred).divide(freqDevAll,3, RoundingMode.HALF_UP));
outMap.put("freq_dev4",freqDev4.multiply(hundred).divide(freqDevAll,3, RoundingMode.HALF_UP));
outMap.put("freq_dev5",freqDev5.multiply(hundred).divide(freqDevAll,3, RoundingMode.HALF_UP));
}else {
outMap.put("freq_dev1",0.0);
outMap.put("freq_dev2",0.0);
outMap.put("freq_dev3",0.0);
outMap.put("freq_dev4",0.0);
outMap.put("freq_dev5",0.0);
}
String vThdSql = "select count(v_thd) from data_v where phasic_type != 'T' and quality_flag = '0' and value_type = 'CP95' and ";
@@ -192,6 +204,12 @@ public class HarmonicGeneralServiceImpl extends MppServiceImpl<RStatComassesDMap
outMap.put("v_thd3",vThd3.multiply(hundred).divide(vThdAll,3, RoundingMode.HALF_UP));
outMap.put("v_thd4",vThd4.multiply(hundred).divide(vThdAll,3, RoundingMode.HALF_UP));
outMap.put("v_thd5",vThd5.multiply(hundred).divide(vThdAll,3, RoundingMode.HALF_UP));
}else {
outMap.put("v_thd1",0.0);
outMap.put("v_thd2",0.0);
outMap.put("v_thd3",0.0);
outMap.put("v_thd4",0.0);
outMap.put("v_thd5",0.0);
}
String vUnbalanceSql="select count(v_unbalance) from data_v where phasic_type != 'T' and quality_flag = '0' and value_type = 'CP95' and ";
@@ -212,6 +230,12 @@ public class HarmonicGeneralServiceImpl extends MppServiceImpl<RStatComassesDMap
outMap.put("v_unbalance3",vUnbalance3.multiply(hundred).divide(vUnbalanceAll,3, RoundingMode.HALF_UP));
outMap.put("v_unbalance4",vUnbalance4.multiply(hundred).divide(vUnbalanceAll,3, RoundingMode.HALF_UP));
outMap.put("v_unbalance5",vUnbalance5.multiply(hundred).divide(vUnbalanceAll,3, RoundingMode.HALF_UP));
}else {
outMap.put("v_unbalance1",0.0);
outMap.put("v_unbalance2",0.0);
outMap.put("v_unbalance3",0.0);
outMap.put("v_unbalance4",0.0);
outMap.put("v_unbalance5",0.0);
}
String pstSql="select count(pst) from data_flicker where phasic_type != 'T' and quality_flag = '0' and ";
@@ -227,11 +251,17 @@ public class HarmonicGeneralServiceImpl extends MppServiceImpl<RStatComassesDMap
BigDecimal pst5 = BigDecimal.valueOf(mapper.toPOJO(pstResult5, DataFlickerPO.class).size());
BigDecimal pstAll = pst1.add(pst2).add(pst3).add(pst4).add(pst5);
if (pstAll.compareTo(BigDecimal.ZERO)!=0){
outMap.put("data_pst1",pst1.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_pst2",pst2.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_pst3",pst3.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_pst4",pst4.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_pst5",pst5.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_plt1",pst1.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_plt2",pst2.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_plt3",pst3.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_plt4",pst4.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
outMap.put("data_plt5",pst5.multiply(hundred).divide(pstAll,3, RoundingMode.HALF_UP));
}else {
outMap.put("data_plt1",0.0);
outMap.put("data_plt2",0.0);
outMap.put("data_plt3",0.0);
outMap.put("data_plt4",0.0);
outMap.put("data_plt5",0.0);
}
if (!CollUtil.isEmpty(outMap)){
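
Worth noting about the else-branches added throughout HarmonicGeneralServiceImpl: BigDecimal.divide throws ArithmeticException when the divisor is zero, so when a category has no counted samples the ratios are presumably defaulted to 0.0 rather than computed. A minimal sketch of that guard (the map key and values are illustrative, not the project's data):

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.util.HashMap;
import java.util.Map;

public class ZeroGuardSketch {
    public static void main(String[] args) {
        BigDecimal hundred = BigDecimal.valueOf(100);
        BigDecimal pst1 = BigDecimal.ZERO;
        BigDecimal pstAll = BigDecimal.ZERO;   // no flicker samples counted for the day
        Map<String, Object> outMap = new HashMap<>();
        if (pstAll.compareTo(BigDecimal.ZERO) != 0) {
            outMap.put("data_plt1", pst1.multiply(hundred).divide(pstAll, 3, RoundingMode.HALF_UP));
        } else {
            // an unguarded divide(pstAll, ...) would throw ArithmeticException: / by zero
            outMap.put("data_plt1", 0.0);
        }
        System.out.println(outMap);            // {data_plt1=0.0}
    }
}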

View File

@@ -117,16 +117,12 @@ public class PollutionServiceImpl implements PollutionService {
lineIdList.add(pollutionDTO);
});
pollutionList = processPollutionList(lineIdList,harmonicVoltageList,harmonicCurrentList,frequencyDeviationList,voltageDeviationList,threePhaseVoltageList,negativeSequenceList,interHarmonicVoltageList,voltageFlickerList);
//Logic changed: daily data is no longer stored in InfluxDB; it is written directly to MySQL
/*insertPolluction(pollutionList,dateOut.getTime());*/
//Insert into MySQL table r_mp_pollution_d
insertPollutionDayMySql(pollutionList, dictData, local);
LogUtil.njcnDebug(log, "MySQL insert of monitoring point pollution metric data took {}", timer.intervalRestart());
}
//Logic changed: daily data is no longer stored in InfluxDB; it is written directly to MySQL
/*if (Integer.valueOf(BizParamConstant.STAT_BIZ_DAY).equals(lineParam.getType())){
insertLinePollution(pollutionList,local);
}*/
Date dateOut = DateUtil.parse(lineParam.getDataDate());
LocalDateTime localEnd = LocalDateTimeUtil.now();
LambdaQueryWrapper<RMpPollutionDPO> lambdaQuery = new LambdaQueryWrapper<>();
@@ -279,6 +275,10 @@ public class PollutionServiceImpl implements PollutionService {
map.put("pollutionType",dictData.getId());
map.put("value",pollution.getVInharm());
}
if ("V_Harmonic".equals(dictData.getCode())){
map.put("pollutionType",dictData.getId());
map.put("value",pollution.getVHarmonic());
}
}
private void processData(DictData dictData, List<RMpPollutionDPO> processList, Map<String, Object> map){