Add data quality cleaning algorithm

xy
2025-05-22 16:18:24 +08:00
parent 228b694322
commit 412a67f6fd
77 changed files with 1217 additions and 385 deletions

View File

@@ -1,11 +1,17 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.api.fallback.DataHarmRateIFeignClientFallbackFactory;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -24,4 +30,10 @@ public interface DataHarmRateIFeignClient {
@PostMapping("/addList")
HttpResult<String> addList(@RequestBody List<DataHarmRateIDto> list);
@PostMapping("/getRawData")
HttpResult<List<DataHarmRateIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
@PostMapping("/addInfluxDbList")
HttpResult<String> addInfluxDbList(@RequestBody List<DataHarmRateIDto> dataIList);
}
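
The two endpoints added here let a consuming service pull the cleaned minute-level series and push corrected rows back into InfluxDB through this Feign client. A minimal sketch of a hypothetical caller follows; the class name, the Lombok-style setters on LineCountEvaluateParam and the getData() accessor on HttpResult are assumptions for illustration, not part of this commit.

import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataHarmRateIFeignClient;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.List;

@Service
public class HarmRateICleaningTask {

    @Resource
    private DataHarmRateIFeignClient dataHarmRateIFeignClient;

    public void recomputeWindow(List<String> lineIds, String start, String end) {
        LineCountEvaluateParam param = new LineCountEvaluateParam();
        param.setLineId(lineIds);
        param.setStartTime(start);
        param.setEndTime(end);
        // true (the default) excludes minutes flagged as transient-abnormal
        param.setDataType(Boolean.TRUE);

        // pull the cleaned raw minutes, then write them back to the time-series store
        HttpResult<List<DataHarmRateIDto>> raw = dataHarmRateIFeignClient.getRawData(param);
        List<DataHarmRateIDto> rows = raw.getData();
        if (rows != null && !rows.isEmpty()) {
            dataHarmRateIFeignClient.addInfluxDbList(rows);
        }
    }
}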

View File

@@ -1,12 +1,19 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.api.fallback.DataHarmRateVFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataHarmrateVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateVDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -33,4 +40,7 @@ public interface DataHarmRateVFeignClient {
@PostMapping("/getHarmRateVData")
HttpResult<List<DataHarmDto>> getHarmRateVData(@RequestBody LineCountEvaluateParam lineParam);
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataHarmrateVDTO> dataHarmrateVDTOList);
}

View File

@@ -1,14 +1,20 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.api.fallback.DataHarmphasicIFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPhasicIDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -32,4 +38,7 @@ public interface DataHarmphasicIFeignClient {
@PostMapping("/addList")
HttpResult<String> addList(@RequestBody List<DataHarmPhasicIDto> data);
@PostMapping("/getRawData")
HttpResult<List<DataHarmPhasicIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -1,12 +1,18 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.api.fallback.DataHarmpowerQFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerQDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -32,4 +38,7 @@ public interface DataHarmpowerQFeignClient {
@PostMapping("/addList")
HttpResult<String> addList(@RequestBody List<DataHarmPowerQDto> data);
@PostMapping("/getRawData")
HttpResult<List<DataHarmPowerQDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -1,12 +1,18 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.api.fallback.DataIFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerSDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -32,4 +38,7 @@ public interface DataHarmpowerSFeignClient {
@PostMapping("/addList")
HttpResult<String> addList(@RequestBody List<DataHarmPowerSDto> data);
@PostMapping("/getRawData")
HttpResult<List<DataHarmPowerSDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -1,12 +1,18 @@
package com.njcn.dataProcess.api;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.api.fallback.DataIFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import io.swagger.annotations.ApiOperation;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
@@ -35,4 +41,7 @@ public interface DataIFeignClient {
@PostMapping("/getDataI")
HttpResult<List<DataIDto>> getDataI(@RequestBody LineCountEvaluateParam lineParam);
@PostMapping("/addInfluxDbList")
HttpResult<String> addInfluxDbList(@RequestBody List<DataIDto> dataIList);
}

View File

@@ -2,12 +2,10 @@ package com.njcn.dataProcess.api;
import com.njcn.common.pojo.constant.ServerInfo;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.fallback.DataInharmIFeignClientFallbackFactory;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateVDto;
import com.njcn.dataProcess.pojo.dto.DataInHarmIDto;
import org.springframework.cloud.openfeign.FeignClient;
import org.springframework.web.bind.annotation.PostMapping;
@@ -32,4 +30,7 @@ public interface DataInharmIFeignClient {
@PostMapping("/addList")
HttpResult<String> addList(@RequestBody List<DataInHarmIDto> list);
@PostMapping("/getRawData")
HttpResult<List<DataInHarmIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam);
}

View File

@@ -24,8 +24,6 @@ import java.util.List;
@FeignClient(value = ServerInfo.PLATFORM_DATA_PROCESSING_BOOT, path = "/dataInharmV", fallbackFactory = DataInharmVFeignClientFallbackFactory.class, contextId = "dataInharmV")
public interface DataInharmVFeignClient {
@PostMapping("/batchInsertion")
HttpResult<String> batchInsertion(@RequestBody List<DataInharmVDTO> dataInharmVDTOList);

View File

@@ -50,6 +50,18 @@ public class DataHarmRateIFeignClientFallbackFactory implements FallbackFactory<
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataHarmRateIDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<String> addInfluxDbList(List<DataHarmRateIDto> dataIList) {
log.error("{}异常,降级处理,异常为:{}","influxdb数据插入",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -4,6 +4,7 @@ import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.exception.BusinessException;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.dataProcess.api.DataHarmRateVFeignClient;
import com.njcn.dataProcess.dto.DataHarmrateVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
@@ -67,6 +68,12 @@ public class DataHarmRateVFeignClientFallbackFactory implements FallbackFactory<
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<String> batchInsertion(List<DataHarmrateVDTO> dataHarmrateVDTOList) {
log.error("{}异常,降级处理,异常为:{}","谐波含有率数据批量插入",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -59,6 +59,12 @@ public class DataHarmphasicIFeignClientFallbackFactory implements FallbackFactor
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataHarmPhasicIDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -58,6 +58,12 @@ public class DataHarmpowerQFeignClientFallbackFactory implements FallbackFactory
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataHarmPowerQDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -59,6 +59,12 @@ public class DataHarmpowerSFeignClientFallbackFactory implements FallbackFactory
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataHarmPowerSDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","harmPowerS获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -70,6 +70,12 @@ public class DataIFeignClientFallbackFactory implements FallbackFactory<DataIFei
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<String> addInfluxDbList(List<DataIDto> dataIList) {
log.error("{}异常,降级处理,异常为:{}","dataI时序数据库插入数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -59,6 +59,12 @@ public class DataInharmIFeignClientFallbackFactory implements FallbackFactory<Da
throw new BusinessException(finalExceptionEnum);
}
@Override
public HttpResult<List<DataInHarmIDto>> getRawData(LineCountEvaluateParam lineParam) {
log.error("{}异常,降级处理,异常为:{}","获取原始数据",cause.toString());
throw new BusinessException(finalExceptionEnum);
}
};
}
}

View File

@@ -35,6 +35,7 @@ public class DataFlickerDTO implements Serializable{
private Double pst;
private Double plt;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -34,6 +34,6 @@ public class DataFlucDTO implements Serializable{
private Double fluc;
private Double fluccf;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -232,6 +232,6 @@ public class DataHarmphasicIDTO implements Serializable{
private Double i49Cp95;
private Double i50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -232,6 +232,7 @@ public class DataHarmphasicVDTO implements Serializable{
private Double v49Cp95;
private Double v50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -243,6 +243,6 @@ public class DataHarmpowerPDTO implements Serializable {
private Double p49Cp95;
private Double p50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -236,6 +236,6 @@ public class DataHarmpowerQDTO implements Serializable {
private Double q49Cp95;
private Double q50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -237,6 +237,6 @@ public class DataHarmpowerSDTO implements Serializable {
private Double s49Cp95;
private Double s50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -232,6 +232,6 @@ public class DataHarmrateVDTO implements Serializable{
private Double v49Cp95;
private Double v50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -232,6 +232,6 @@ public class DataInharmVDTO implements Serializable{
private Double v49Cp95;
private Double v50Cp95;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -32,6 +32,6 @@ public class DataPltDTO implements Serializable {
private String phasicType;
private Double plt;
private Integer qualityflag;
private Integer abnormalFlag;
}

View File

@@ -57,5 +57,10 @@ public class LineCountEvaluateParam extends BaseParam implements Serializable {
*/
private String lt;
/**
* Data type flag: whether the fetched data should exclude transient-abnormal records
*/
private Boolean dataType = true;
}
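
Because the flag defaults to true, existing callers of the data-process service automatically get the series with transient-abnormal minutes excluded; only callers that need the uncleaned series have to opt out. A short sketch, assuming Lombok-style setters and purely illustrative values:

LineCountEvaluateParam param = new LineCountEvaluateParam();
param.setStartTime("2025-05-01 00:00:00");
param.setEndTime("2025-05-02 00:00:00");
// false = keep the minutes that carry abnormal_flag (transient events) in the result
param.setDataType(Boolean.FALSE);

Note that the implementations unbox the flag directly (if (dataType) / if (lineParam.getDataType())), so a caller should never set it to null explicitly.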

View File

@@ -45,6 +45,9 @@ public class DataFlicker {
@Column(name = "quality_flag",tag = true)
private String qualityFlag ="0";
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
public static DataFlicker relationToInfluxDB(DataFlickerDTO dataFlicker) {
if (dataFlicker == null) {
@@ -61,7 +64,7 @@ public class DataFlicker {
influxDBDataFlicker.setPlt(Objects.isNull(dataFlicker.getPlt())?0.00:dataFlicker.getPlt());
influxDBDataFlicker.setPst(Objects.isNull(dataFlicker.getPst())?0.00:dataFlicker.getPst());
influxDBDataFlicker.setQualityFlag(dataFlicker.getQualityflag()+"");
influxDBDataFlicker.setAbnormalFlag(dataFlicker.getAbnormalFlag());
return influxDBDataFlicker;
}

View File

@@ -39,10 +39,13 @@ public class DataFluc {
@Column(name = "fluccf")
private Double fluccf=0.00;
@Column(name = "quality_flag",tag = true)
private String qualityFlag="0";
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
public static DataFluc relationToInfluxDB(DataFlucDTO dataFluc) {
if (dataFluc == null) {
return null;
@@ -57,7 +60,7 @@ public class DataFluc {
influxDBDataFluc.setFluc(Objects.isNull(dataFluc.getFluc())?0.00:dataFluc.getFluc());
influxDBDataFluc.setFluccf(Objects.isNull(dataFluc.getFluccf())?0.00:dataFluc.getFluccf());
influxDBDataFluc.setQualityFlag(dataFluc.getQualityflag()+"");
influxDBDataFluc.setAbnormalFlag(dataFluc.getAbnormalFlag());
return influxDBDataFluc;
}

View File

@@ -43,6 +43,10 @@ public class DataHarmphasicI {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "i_1")
private Double i1;
@@ -209,6 +213,7 @@ public class DataHarmphasicI {
influxDBDataHarmPhasicI.setPhasicType(dataHarmphasicI.getPhasicType());
influxDBDataHarmPhasicI.setQualityFlag(dataHarmphasicI.getQualityflag()+"");
influxDBDataHarmPhasicI.setValueType(valueType);
influxDBDataHarmPhasicI.setAbnormalFlag(dataHarmphasicI.getAbnormalFlag());
if (valueType.equals("AVG")) {
influxDBDataHarmPhasicI.setI1(Objects.isNull(dataHarmphasicI.getI1())?0.00:dataHarmphasicI.getI1());
influxDBDataHarmPhasicI.setI2(Objects.isNull(dataHarmphasicI.getI2())?0.00:dataHarmphasicI.getI2());

View File

@@ -44,6 +44,10 @@ public class DataHarmphasicV {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "v_1")
private Double v1;
@@ -209,6 +213,7 @@ public class DataHarmphasicV {
InfluxDBDataHarmphasicV.setPhasicType(dataHarmphasicV.getPhasicType());
InfluxDBDataHarmphasicV.setQualityFlag(dataHarmphasicV.getQualityflag()+"");
InfluxDBDataHarmphasicV.setValueType(valueType);
InfluxDBDataHarmphasicV.setAbnormalFlag(dataHarmphasicV.getAbnormalFlag());
if (valueType.equals("AVG")) {
InfluxDBDataHarmphasicV.setV1(Objects.isNull(dataHarmphasicV.getV1())?0.00:dataHarmphasicV.getV1());
InfluxDBDataHarmphasicV.setV2(Objects.isNull(dataHarmphasicV.getV2())?0.00:dataHarmphasicV.getV2());

View File

@@ -44,6 +44,10 @@ public class DataHarmpowerP {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "df")
private Double df;
@@ -218,6 +222,7 @@ public class DataHarmpowerP {
influxDBDataHarmpowerP.setPhasicType(dataHarmpowerP.getPhasicType());
influxDBDataHarmpowerP.setQualityFlag(dataHarmpowerP.getQualityflag()+"");
influxDBDataHarmpowerP.setValueType(valueType);
influxDBDataHarmpowerP.setAbnormalFlag(dataHarmpowerP.getAbnormalFlag());
if (valueType.equals("AVG")) {
influxDBDataHarmpowerP.setDf(Objects.isNull(dataHarmpowerP.getDf())?0.00:dataHarmpowerP.getDf());
influxDBDataHarmpowerP.setPf(Objects.isNull(dataHarmpowerP.getPf())?0.00:dataHarmpowerP.getPf());

View File

@@ -44,6 +44,10 @@ public class DataHarmpowerQ {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "q")
private Double q;
@@ -212,6 +216,7 @@ public class DataHarmpowerQ {
influxDBDataHarmpowerQ.setPhasicType(dataHarmpowerQ.getPhasicType());
influxDBDataHarmpowerQ.setQualityFlag(dataHarmpowerQ.getQualityflag()+"");
influxDBDataHarmpowerQ.setValueType(valueType);
influxDBDataHarmpowerQ.setAbnormalFlag(dataHarmpowerQ.getAbnormalFlag());
if (valueType.equals("AVG")) {
influxDBDataHarmpowerQ.setQ(Objects.isNull(dataHarmpowerQ.getQ())?0.00:dataHarmpowerQ.getQ());

View File

@@ -44,6 +44,10 @@ public class DataHarmpowerS {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "s")
private Double s;
@@ -212,6 +216,7 @@ public class DataHarmpowerS {
influxDBDataHarmpowerS.setPhasicType(dataHarmpowerS.getPhasicType());
influxDBDataHarmpowerS.setQualityFlag(dataHarmpowerS.getQualityflag()+"");
influxDBDataHarmpowerS.setValueType(valueType);
influxDBDataHarmpowerS.setAbnormalFlag(dataHarmpowerS.getAbnormalFlag());
if (valueType.equals("AVG")) {
influxDBDataHarmpowerS.setS(Objects.isNull(dataHarmpowerS.getS())?0.00:dataHarmpowerS.getS());

View File

@@ -43,6 +43,10 @@ public class DataHarmrateI {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "i_1")
private Double i1;

View File

@@ -43,6 +43,10 @@ public class DataHarmrateV {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "v_1")
private Double v1;
@@ -208,6 +212,7 @@ public class DataHarmrateV {
influxDBDataHarmRateV.setPhasicType(dataHarmrateV.getPhasicType());
influxDBDataHarmRateV.setQualityFlag(dataHarmrateV.getQualityflag()+"");
influxDBDataHarmRateV.setValueType(valueType);
influxDBDataHarmRateV.setAbnormalFlag(dataHarmrateV.getAbnormalFlag());
if (valueType.equals("AVG")) {
influxDBDataHarmRateV.setV1(Objects.isNull(dataHarmrateV.getV1())?0.00:dataHarmrateV.getV1());
influxDBDataHarmRateV.setV2(Objects.isNull(dataHarmrateV.getV2())?0.00:dataHarmrateV.getV2());

View File

@@ -44,6 +44,10 @@ public class DataI {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "i_neg")
private Double iNeg;

View File

@@ -44,6 +44,10 @@ public class DataInharmI {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "i_1")
private Double i1;

View File

@@ -44,6 +44,10 @@ public class DataInharmV {
@Column(name = "value_type",tag = true)
private String valueType;
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "v_1")
private Double v1;
@@ -209,6 +213,7 @@ public class DataInharmV {
influxDBDataInHarmV.setPhasicType(dataInharmV.getPhasicType());
influxDBDataInHarmV.setQualityFlag(dataInharmV.getQualityflag()+"");
influxDBDataInHarmV.setValueType(valueType);
influxDBDataInHarmV.setAbnormalFlag(dataInharmV.getAbnormalFlag());
if (valueType.equals("AVG")) {
influxDBDataInHarmV.setV1(Objects.isNull(dataInharmV.getV1())?0.00:dataInharmV.getV1());
influxDBDataInHarmV.setV2(Objects.isNull(dataInharmV.getV2())?0.00:dataInharmV.getV2());

View File

@@ -37,6 +37,10 @@ public class DataPlt {
@Column(name = "quality_flag",tag = true)
private String qualityFlag="0";
//Whether this is abnormal indicator data: 0 = no, 1 = yes
@Column(name = "abnormal_flag")
private Integer abnormalFlag;
@Column(name = "plt")
private Double plt;
@@ -53,7 +57,7 @@ public class DataPlt {
influxDBDataPlt.setPhasicType(dataPlt.getPhasicType());
influxDBDataPlt.setPlt(Objects.isNull(dataPlt.getPlt())?0.00:dataPlt.getPlt());
influxDBDataPlt.setQualityFlag(dataPlt.getQualityflag()+"");
influxDBDataPlt.setAbnormalFlag(dataPlt.getAbnormalFlag());
return influxDBDataPlt;
}

View File

@@ -12,6 +12,7 @@ import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerSDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataHarmRateI;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -45,6 +46,15 @@ public class DataHarmRateIController extends BaseController {
@InsertBean
private IDataHarmRateI dataHarmRateIInsert;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmRateIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmRateIDto> data = dataHarmRateIQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getBaseData")
@ApiOperation("获取算法基础数据")
@@ -63,4 +73,13 @@ public class DataHarmRateIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/addInfluxDbList")
@ApiOperation("时序数据库插入数据")
public HttpResult<String> addInfluxDbList(@RequestBody List<DataHarmRateIDto> dataIList) {
String methodDescribe = getMethodDescribe("addInfluxDbList");
dataHarmRateIQuery.addInfluxDbList(dataIList);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
}

View File

@@ -11,6 +11,7 @@ import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPhasicIDto;
import com.njcn.dataProcess.service.IDataHarmphasicI;
import com.njcn.web.controller.BaseController;
@@ -56,6 +57,15 @@ public class DataHarmphasicIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmPhasicIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmPhasicIDto> data = dataHarmphasicIQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getBaseData")
@ApiOperation("获取算法基础数据")

View File

@@ -6,12 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerPDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerQDto;
import com.njcn.dataProcess.service.IDataHarmpowerQ;
import com.njcn.web.controller.BaseController;
@@ -57,6 +56,15 @@ public class DataHarmpowerQController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmPowerQDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmPowerQDto> data = dataHarmpowerQQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getBaseData")
@ApiOperation("获取算法基础数据")

View File

@@ -56,6 +56,15 @@ public class DataHarmpowerSController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmPowerSDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmPowerSDto> data = dataHarmpowerSQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getBaseData")
@ApiOperation("获取算法基础数据")

View File

@@ -83,6 +83,15 @@ public class DataIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/addInfluxDbList")
@ApiOperation("时序数据库插入数据")
public HttpResult<String> addInfluxDbList(@RequestBody List<DataIDto> dataIList) {
String methodDescribe = getMethodDescribe("addInfluxDbList");
dataIQuery.addInfluxDbList(dataIList);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/getDataI")
@ApiOperation("获取谐波电流")
@@ -92,6 +101,4 @@ public class DataIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, dataIDtoList, methodDescribe);
}
}

View File

@@ -11,6 +11,7 @@ import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import com.njcn.dataProcess.pojo.dto.DataInHarmIDto;
import com.njcn.dataProcess.service.IDataInharmI;
import com.njcn.web.controller.BaseController;
@@ -56,6 +57,15 @@ public class DataInharmIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataInHarmIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataInHarmIDto> data = dataInharmIQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getBaseData")
@ApiOperation("获取算法基础数据")

View File

@@ -52,7 +52,7 @@ public class DataPltController extends BaseController {
public HttpResult<String> batchInsertion(@RequestBody List<DataPltDTO> dataPltDTOList) {
String methodDescribe = getMethodDescribe("batchInsertion");
dataPltInsert.batchInsertion(dataPltDTOList);
dataPltQuery.batchInsertion(dataPltDTOList);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}

View File

@@ -136,15 +136,12 @@ public class DataVController extends BaseController {
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/addInfluxDbList")
@ApiOperation("时序数据库插入数据")
@Deprecated
public HttpResult<String> addInfluxDbList(@RequestBody List<DataVDto> dataVList) {
String methodDescribe = getMethodDescribe("addInfluxDbList");
dataVQuery.addInfluxDbList(dataVList);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON)
@PostMapping("/getMeasurementCount")
@ApiOperation("获取算法基础数据")

View File

@@ -4,6 +4,7 @@ import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmRateID;
import java.util.List;
@@ -13,6 +14,13 @@ import java.util.List;
*/
public interface IDataHarmRateI extends IMppService<RStatDataHarmRateID> {
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmRateIDto> getRawData(LineCountEvaluateParam lineParam);
/**
* Fetch preprocessed data
* @param lineParam monitoring-point parameters
@@ -24,4 +32,6 @@ public interface IDataHarmRateI extends IMppService<RStatDataHarmRateID> {
*/
void addList(List<DataHarmRateIDto> list);
void addInfluxDbList(List<DataHarmRateIDto> dataIList);
}

View File

@@ -4,6 +4,7 @@ import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPhasicIDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmPhasicID;
@@ -17,8 +18,16 @@ import java.util.List;
* @version V1.0.0
*/
public interface IDataHarmphasicI extends IMppService<RStatDataHarmPhasicID> {
void batchInsertion(List<DataHarmphasicIDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmPhasicIDto> getRawData(LineCountEvaluateParam lineParam);
/**
* Fetch raw data for the monitoring points
* @param lineParam monitoring-point parameters

View File

@@ -17,8 +17,16 @@ import java.util.List;
* @version V1.0.0
*/
public interface IDataHarmpowerQ extends IMppService<RStatDataHarmPowerQD> {
void batchInsertion(List<DataHarmpowerQDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmPowerQDto> getRawData(LineCountEvaluateParam lineParam);
/**
* Fetch preprocessed data
* @param lineParam monitoring-point parameters

View File

@@ -4,6 +4,7 @@ import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerQDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerSDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmPowerSD;
@@ -17,8 +18,16 @@ import java.util.List;
* @version V1.0.0
*/
public interface IDataHarmpowerS extends IMppService<RStatDataHarmPowerSD> {
void batchInsertion(List<DataHarmpowerSDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmPowerSDto> getRawData(LineCountEvaluateParam lineParam);
/**
* Fetch preprocessed data
* @param lineParam monitoring-point parameters

View File

@@ -5,6 +5,7 @@ import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import com.njcn.dataProcess.pojo.po.RStatDataID;
import com.njcn.dataProcess.pojo.po.RStatDataVD;
@@ -39,6 +40,7 @@ public interface IDataI extends IMppService<RStatDataID> {
*/
void addList(List<DataIDto> dataIDtoList);
void addInfluxDbList(List<DataIDto> dataIList);
/**
* Fetch harmonic current

View File

@@ -4,6 +4,7 @@ import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import com.njcn.dataProcess.pojo.dto.DataInHarmIDto;
import com.njcn.dataProcess.pojo.po.RStatDataInHarmID;
@@ -20,6 +21,13 @@ public interface IDataInharmI extends IMppService<RStatDataInHarmID> {
void batchInsertion(List<DataInharmIDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataInHarmIDto> getRawData(LineCountEvaluateParam lineParam);
/**
* Fetch preprocessed data
* @param lineParam monitoring-point parameters

View File

@@ -9,6 +9,7 @@ import com.njcn.dataProcess.dao.relation.mapper.RStatDataFlickerRelationMapper;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataFlicker;
import com.njcn.dataProcess.po.influx.DataHarmphasicI;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
@@ -23,10 +24,7 @@ import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
@@ -163,35 +161,44 @@ public class InfluxdbDataFlickerImpl extends MppServiceImpl<RStatDataFlickerRela
}
private void quality(List<DataFlicker> result, InfluxQueryWrapper influxQueryWrapper, LineCountEvaluateParam lineParam) {
List<DataFlicker> dataList;
List<DataFlicker> list = dataFlickerMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String, List<DataFlicker>> lineMap = list.stream().collect(Collectors.groupingBy(DataFlicker::getLineId));
//There is abnormal data
Map<String, List<String>> timeMap = lineParam.getAbnormalTime();
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k, v) -> {
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFlicker> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if (CollUtil.isNotEmpty(list)) {
//Filter out data affected by transient events: true = filter, false = keep
if (lineParam.getDataType()) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String, List<DataFlicker>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataFlicker::getLineId));
//There is abnormal data
Map<String, List<String>> timeMap = lineParam.getAbnormalTime();
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k, v) -> {
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFlicker> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
//No abnormal data: use the original data
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//No abnormal data: use the original data
else {
result.addAll(v);
}
});
}
//No abnormal data: use the original data
else {
result.addAll(list);
});
}
//No abnormal data: use the original data
else {
result.addAll(dataList);
}
}
}
@@ -213,6 +220,7 @@ public class InfluxdbDataFlickerImpl extends MppServiceImpl<RStatDataFlickerRela
.select(DataFlicker::getPst)
.select(DataFlicker::getPlt)
.select(DataFlicker::getQualityFlag)
.select(DataFlicker::getAbnormalFlag)
.between(DataFlicker::getTime, lineParam.getStartTime(), lineParam.getEndTime())
.eq(DataFlicker::getQualityFlag, "0");
quality(result, influxQueryWrapper, lineParam);
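
The quality() block above is repeated, with only the persistence type changing, in each Influxdb*Impl touched by this commit. Distilled, the cleaning flow is: query only quality_flag = "0" rows, optionally drop rows whose abnormal_flag marks them as influenced by a transient event, then, per monitoring point, drop rows falling inside that point's abnormal-minute list; if nothing survives for a point, keep its rows but force qualityFlag to "1" so downstream daily tables can see the data is suspect. A generic sketch of that shared shape follows; the helper class and its functional-interface parameters are illustrative, not part of the commit.

import cn.hutool.core.collection.CollectionUtil;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

public final class MinuteDataCleaner {

    private MinuteDataCleaner() {
    }

    // rows are assumed to be pre-filtered to quality_flag = "0" by the Influx query
    public static <T> List<T> clean(List<T> rows,
                                    boolean excludeTransient,
                                    Function<T, Integer> abnormalFlag,
                                    Function<T, String> lineId,
                                    Function<T, String> minuteKey,
                                    Map<String, List<String>> abnormalTime,
                                    Consumer<T> markSuspect) {
        List<T> result = new ArrayList<>();
        if (rows == null || rows.isEmpty()) {
            return result;
        }
        // 1. optionally drop rows influenced by transient events (abnormal_flag set)
        List<T> data = excludeTransient
                ? rows.stream().filter(r -> Objects.isNull(abnormalFlag.apply(r))).collect(Collectors.toList())
                : rows;
        // no abnormal minutes reported at all: keep whatever is left
        if (abnormalTime == null || abnormalTime.isEmpty()) {
            result.addAll(data);
            return result;
        }
        // 2. per monitoring point, drop rows that fall inside that point's abnormal minutes
        data.stream().collect(Collectors.groupingBy(lineId)).forEach((line, perLine) -> {
            List<String> badMinutes = abnormalTime.get(line);
            if (badMinutes == null || badMinutes.isEmpty()) {
                result.addAll(perLine);
                return;
            }
            List<T> kept = perLine.stream()
                    .filter(r -> !badMinutes.contains(minuteKey.apply(r)))
                    .collect(Collectors.toList());
            if (CollectionUtil.isNotEmpty(kept)) {
                result.addAll(kept);
            } else {
                // 3. the whole window is abnormal: keep it, but mark every row as suspect
                perLine.forEach(markSuspect);
                result.addAll(perLine);
            }
        });
        return result;
    }
}

Each implementation in the commit inlines this flow against its own PO (DataFlicker, DataFluc, DataHarmrateI, DataHarmrateV, DataHarmphasicI, ...) rather than sharing a helper; the sketch only restates the common shape.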

View File

@@ -20,10 +20,7 @@ import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
@@ -64,7 +61,7 @@ public class InfluxdbDataFlucImpl extends MppServiceImpl<RStatDataFlucRelationMa
@Override
public List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataFlucDto> result = new ArrayList<>();
List<DataFluc> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
List<DataFluc> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataFlucDto dto = new DataFlucDto();
BeanUtils.copyProperties(item,dto);
@@ -77,7 +74,7 @@ public class InfluxdbDataFlucImpl extends MppServiceImpl<RStatDataFlucRelationMa
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataFluc> dataIList = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataFluc> dataIList = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(dataIList)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//Group by monitoring point
@@ -129,7 +126,8 @@ public class InfluxdbDataFlucImpl extends MppServiceImpl<RStatDataFlucRelationMa
* 2. When abnormal and normal data are mixed, drop the abnormal data and compute only with the normal data;
* 3. When all the data is abnormal, compute with it anyway, but flag the result in the daily table as abnormal
*/
public List<DataFluc> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataFluc> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap, Boolean dataType) {
List<DataFluc> dataList;
List<DataFluc> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataFluc.class);
influxQueryWrapper.regular(DataFluc::getLineId, lineList)
@@ -138,36 +136,45 @@ public class InfluxdbDataFlucImpl extends MppServiceImpl<RStatDataFlucRelationMa
.select(DataFluc::getFluc)
.select(DataFluc::getFluccf)
.select(DataFluc::getQualityFlag)
.select(DataFluc::getAbnormalFlag)
.between(DataFluc::getTime, startTime, endTime)
.eq(DataFluc::getQualityFlag,"0");
List<DataFluc> list = dataFlucMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataFluc>> lineMap = list.stream().collect(Collectors.groupingBy(DataFluc::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFluc> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if (CollectionUtil.isNotEmpty(list)) {
//Filter out data affected by transient events: true = filter, false = keep
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataFluc>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataFluc::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFluc> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
//No abnormal data: use the original data
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//No abnormal data: use the original data
else {
result.addAll(v);
}
});
}
//No abnormal data: use the original data
else {
result.addAll(list);
});
}
//No abnormal data: use the original data
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -7,22 +8,24 @@ import com.njcn.dataProcess.dao.imapper.DataHarmRateIMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataHarmRateIRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmrateI;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateIDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmRateID;
import com.njcn.dataProcess.service.IDataHarmRateI;
import com.njcn.dataProcess.util.TimeUtils;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
@@ -35,10 +38,23 @@ public class InfluxdbDataHarmRateIImpl extends MppServiceImpl<RStatDataHarmRateI
@Resource
private DataHarmRateIMapper dataHarmRateIMapper;
@Override
public List<DataHarmRateIDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmRateIDto> result = new ArrayList<>();
List<DataHarmrateI> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataHarmRateIDto dto = new DataHarmRateIDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataHarmrateI> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataHarmrateI> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//Group by monitoring point
@@ -136,6 +152,18 @@ public class InfluxdbDataHarmRateIImpl extends MppServiceImpl<RStatDataHarmRateI
}
@Override
public void addInfluxDbList(List<DataHarmRateIDto> dataIList) {
List<DataHarmrateI> result = new ArrayList<>();
dataIList.forEach(item -> {
DataHarmrateI dataI = new DataHarmrateI();
BeanUtils.copyProperties(item, dataI);
dataI.setTime(LocalDateTime.parse(item.getMinTime(), DATE_TIME_FORMATTER).atZone(ZoneId.systemDefault()).toInstant());
result.add(dataI);
});
dataHarmRateIMapper.insertBatch(result);
}
/**
* Fetch minute-level data by monitoring-point set and time range
* The timeMap parameter decides whether cleaning is applied; an empty timeMap skips it
@@ -144,7 +172,8 @@ public class InfluxdbDataHarmRateIImpl extends MppServiceImpl<RStatDataHarmRateI
* 2. When abnormal and normal data are mixed, drop the abnormal data and compute only with the normal data;
* 3. When all the data is abnormal, compute with it anyway, but flag the result in the daily table as abnormal
*/
public List<DataHarmrateI> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataHarmrateI> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap, Boolean dataType) {
List<DataHarmrateI> dataList;
List<DataHarmrateI> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmrateI.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.I, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -153,36 +182,45 @@ public class InfluxdbDataHarmRateIImpl extends MppServiceImpl<RStatDataHarmRateI
.select(DataHarmrateI::getPhasicType)
.select(DataHarmrateI::getValueType)
.select(DataHarmrateI::getQualityFlag)
.select(DataHarmrateI::getAbnormalFlag)
.between(DataHarmrateI::getTime, startTime, endTime)
.eq(DataHarmrateI::getQualityFlag,"0");
List<DataHarmrateI> list = dataHarmRateIMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmrateI>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmrateI::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmrateI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//Filter out data affected by transient events: true = filter, false = keep
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmrateI>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmrateI::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmrateI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
//No abnormal data: use the original data
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//No abnormal data: use the original data
else {
result.addAll(v);
}
});
}
//No abnormal data: use the original data
else {
result.addAll(list);
});
}
//No abnormal data: use the original data
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -9,6 +9,7 @@ import com.njcn.dataProcess.dao.relation.mapper.RStatDataHarmRateVRelationMapper
import com.njcn.dataProcess.dto.DataHarmrateVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmRateVDto;
@@ -203,6 +204,7 @@ public class InfluxdbDataHarmRateVImpl extends MppServiceImpl<RStatDataHarmRateV
* 3. When all the data is abnormal, compute with it anyway, but flag the result in the daily table as abnormal
*/
public List<DataHarmrateV> getMinuteData(LineCountEvaluateParam lineParam) {
List<DataHarmrateV> dataList;
List<DataHarmrateV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmrateV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -211,39 +213,48 @@ public class InfluxdbDataHarmRateVImpl extends MppServiceImpl<RStatDataHarmRateV
.select(DataHarmrateV::getPhasicType)
.select(DataHarmrateV::getValueType)
.select(DataHarmrateV::getQualityFlag)
.select(DataHarmrateV::getAbnormalFlag)
.between(DataHarmrateV::getTime, lineParam.getStartTime(), lineParam.getEndTime())
.eq(DataHarmrateV::getQualityFlag,"0");
if(CollUtil.isNotEmpty(lineParam.getPhasicType())){
influxQueryWrapper.regular(DataHarmrateV::getPhasicType,lineParam.getPhasicType());
}
List<DataHarmrateV> list = dataHarmRateVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmrateV>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmrateV::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmrateV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//Filter out data affected by transient events: true = filter, false = keep
if (lineParam.getDataType()) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmrateV>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmrateV::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmrateV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2. If no normal data remains after filtering, compute with all the abnormal data but mark it as abnormal
//No abnormal data: use the original data
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//No abnormal data: use the original data
else {
result.addAll(v);
}
});
}
//No abnormal data: use the original data
else {
result.addAll(list);
});
}
//No abnormal data: use the original data
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -8,7 +9,9 @@ import com.njcn.dataProcess.dao.relation.mapper.RStatDataHarmPhasicIRelationMapp
import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmphasicI;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPhasicIDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmPhasicID;
import com.njcn.dataProcess.service.IDataHarmphasicI;
@@ -17,14 +20,12 @@ import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
@@ -60,10 +61,23 @@ public class InfluxdbDataHarmphasicIImpl extends MppServiceImpl<RStatDataHarmPha
}
}
@Override
public List<DataHarmPhasicIDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmPhasicIDto> result = new ArrayList<>();
List<DataHarmphasicI> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataHarmPhasicIDto dto = new DataHarmPhasicIDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataHarmphasicI> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataHarmphasicI> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//Group by monitoring point
@@ -169,7 +183,8 @@ public class InfluxdbDataHarmphasicIImpl extends MppServiceImpl<RStatDataHarmPha
* 2. When abnormal and normal data are mixed, drop the abnormal data and compute only with the normal data;
* 3. When all the data is abnormal, compute with it anyway, but flag the result in the daily table as abnormal
*/
public List<DataHarmphasicI> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataHarmphasicI> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap,Boolean dataType) {
List<DataHarmphasicI> dataList;
List<DataHarmphasicI> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmphasicI.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.I, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -178,36 +193,45 @@ public class InfluxdbDataHarmphasicIImpl extends MppServiceImpl<RStatDataHarmPha
.select(DataHarmphasicI::getPhasicType)
.select(DataHarmphasicI::getValueType)
.select(DataHarmphasicI::getQualityFlag)
.select(DataHarmphasicI::getAbnormalFlag)
.between(DataHarmphasicI::getTime, startTime, endTime)
.eq(DataHarmphasicI::getQualityFlag,"0");
List<DataHarmphasicI> list = dataHarmphasicIMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmphasicI>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmphasicI::getLineId));
//There is abnormal data
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//There is abnormal data: abnormal times belonging to this monitoring point itself
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmphasicI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1. If normal data remains after filtering out the abnormal data, compute with the normal data only
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)) {
//Filter out data affected by transient events: true = filter, false = keep
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmphasicI>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmphasicI::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmphasicI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}
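
Every getMinuteData overload touched by this commit repeats the same cleaning rule: optionally drop minutes hit by transient events, remove the minutes the quality check flagged per monitoring point, and fall back to the flagged minutes (re-marked with qualityFlag "1") only when nothing normal remains. The condensed sketch below restates that rule in one place; Row, MinuteCleaner and clean are illustrative names inferred from the diff, not project classes.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

// Condensed sketch of the per-minute cleaning rule; "Row" stands in for the per-measurement
// PO classes (DataHarmphasicI, DataHarmphasicV, ...) handled by the getMinuteData overloads.
public class MinuteCleaner {

    static class Row {
        String lineId;       // monitoring point id
        String minuteKey;    // minute formatted as "yyyy-MM-dd HH:mm:ss"
        String qualityFlag;  // "0" normal, "1" marked abnormal for the day table
        String abnormalFlag; // non-null when a transient event touched this minute
    }

    /**
     * @param dataType     true: drop rows influenced by transient events (abnormalFlag != null)
     * @param abnormalTime per lineId, the minutes the quality check flagged as abnormal
     */
    static List<Row> clean(List<Row> rows, Map<String, List<String>> abnormalTime, boolean dataType) {
        List<Row> result = new ArrayList<>();
        if (rows == null || rows.isEmpty()) {
            return result;
        }
        List<Row> base = dataType
                ? rows.stream().filter(r -> Objects.isNull(r.abnormalFlag)).collect(Collectors.toList())
                : rows;
        if (abnormalTime == null || abnormalTime.isEmpty()) {
            return base; // no abnormal minutes anywhere: use the queried data as-is
        }
        Map<String, List<Row>> byLine = base.stream().collect(Collectors.groupingBy(r -> r.lineId));
        byLine.forEach((lineId, group) -> {
            List<String> badMinutes = abnormalTime.get(lineId);
            if (badMinutes == null || badMinutes.isEmpty()) {
                result.addAll(group); // this point has no abnormal minutes
                return;
            }
            List<Row> normal = group.stream()
                    .filter(r -> !badMinutes.contains(r.minuteKey))
                    .collect(Collectors.toList());
            if (!normal.isEmpty()) {
                result.addAll(normal);                    // cases 1/2: keep only the normal minutes
            } else {
                group.forEach(r -> r.qualityFlag = "1");  // case 3: all abnormal, keep but mark
                result.addAll(group);
            }
        });
        return result;
    }
}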

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -9,6 +10,7 @@ import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmphasicI;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.po.influx.DataHarmpowerS;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPhasicVDto;
@@ -65,7 +67,7 @@ public class InfluxdbDataHarmphasicVImpl extends MppServiceImpl<RStatDataHarmPha
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataHarmphasicV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
List<DataHarmphasicV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
@@ -78,7 +80,7 @@ public class InfluxdbDataHarmphasicVImpl extends MppServiceImpl<RStatDataHarmPha
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataHarmphasicV> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataHarmphasicV> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//以监测点分组
@@ -184,7 +186,8 @@ public class InfluxdbDataHarmphasicVImpl extends MppServiceImpl<RStatDataHarmPha
* 2.异常数据和无异常数据参杂,剔除异常数据,只计算正常数据;
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataHarmphasicV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataHarmphasicV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap, Boolean dataType) {
List<DataHarmphasicV> dataList;
List<DataHarmphasicV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmphasicV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -193,36 +196,45 @@ public class InfluxdbDataHarmphasicVImpl extends MppServiceImpl<RStatDataHarmPha
.select(DataHarmphasicV::getPhasicType)
.select(DataHarmphasicV::getValueType)
.select(DataHarmphasicV::getQualityFlag)
.select(DataHarmphasicV::getAbnormalFlag)
.between(DataHarmphasicV::getTime, startTime, endTime)
.eq(DataHarmphasicV::getQualityFlag,"0");
List<DataHarmphasicV> list = dataHarmphasicVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmphasicV>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmphasicV::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmphasicV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)) {
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmphasicV>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmphasicV::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmphasicV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}
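
Callers drive the new behaviour through the extra Boolean on LineCountEvaluateParam. A hedged caller-side sketch follows; the setter names are assumed to mirror the getters used throughout this diff (getLineId, getStartTime, getEndTime, getAbnormalTime, getDataType) and may differ in the real class.

import java.util.Arrays;
import java.util.Collections;

import com.njcn.dataProcess.param.LineCountEvaluateParam;

// Hedged usage sketch for the new dataType switch; setter names are assumptions.
public class DataTypeUsageSketch {
    static LineCountEvaluateParam cleanedWindow(String lineId, String start, String end) {
        LineCountEvaluateParam param = new LineCountEvaluateParam();
        param.setLineId(Arrays.asList(lineId));         // monitoring points to evaluate
        param.setStartTime(start);                      // "yyyy-MM-dd HH:mm:ss"
        param.setEndTime(end);
        param.setAbnormalTime(Collections.emptyMap());  // no quality-check exclusions for this run
        param.setDataType(true);                        // true: drop minutes hit by transient events
                                                        // false: keep them in the statistics
        return param;
    }
}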

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -9,6 +10,7 @@ import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.po.influx.DataHarmpowerP;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerPDto;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
@@ -64,7 +66,7 @@ public class InfluxdbDataHarmpowerPImpl extends MppServiceImpl<RStatDataHarmPowe
@Override
public List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataPowerPDto> result = new ArrayList<>();
List<DataHarmpowerP> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
List<DataHarmpowerP> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime(), lineParam.getDataType());
list.forEach(item->{
DataPowerPDto dto = new DataPowerPDto();
BeanUtils.copyProperties(item,dto);
@@ -77,7 +79,7 @@ public class InfluxdbDataHarmpowerPImpl extends MppServiceImpl<RStatDataHarmPowe
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataHarmpowerP> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataHarmpowerP> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(), lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//以监测点分组
@@ -187,7 +189,8 @@ public class InfluxdbDataHarmpowerPImpl extends MppServiceImpl<RStatDataHarmPowe
* 2.异常数据和无异常数据参杂,剔除异常数据,只计算正常数据;
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataHarmpowerP> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataHarmpowerP> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap, Boolean dataType) {
List<DataHarmpowerP> dataList;
List<DataHarmpowerP> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmpowerP.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.P, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -199,36 +202,45 @@ public class InfluxdbDataHarmpowerPImpl extends MppServiceImpl<RStatDataHarmPowe
.select(DataHarmpowerP::getDf)
.select(DataHarmpowerP::getPf)
.select(DataHarmpowerP::getQualityFlag)
.select(DataHarmpowerP::getAbnormalFlag)
.between(DataHarmpowerP::getTime, startTime, endTime)
.eq(DataHarmpowerP::getQualityFlag,"0");
List<DataHarmpowerP> list = dataHarmpowerPMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmpowerP>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmpowerP::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerP> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if (CollUtil.isNotEmpty(list)) {
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmpowerP>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmpowerP::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerP> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}
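
The query setup above selects one field per harmonic order via samePrefixAndSuffix(InfluxDbSqlConstant.P, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1)). The project helpers themselves are not shown in this commit, so the snippet below is only a plain-Java approximation of the field-name expansion they appear to perform (prefix + order + suffix for orders 1..50); the actual constants and behaviour may differ.

import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

// Approximation only: builds the per-order field names (e.g. P1..P50) that the wrapper selects.
public class HarmonicFields {
    static List<String> fields(String prefix, String suffix, int from, int to, int step) {
        return IntStream.iterate(from, n -> n <= to, n -> n + step)
                .mapToObj(n -> prefix + n + suffix)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        // Prints [P1, P2, ..., P50] for the per-harmonic active power measurement.
        System.out.println(fields("P", "", 1, 50, 1));
    }
}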

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -7,8 +8,9 @@ import com.njcn.dataProcess.dao.imapper.DataHarmpowerQMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataHarmPowerQRelationMapper;
import com.njcn.dataProcess.dto.DataHarmpowerQDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmpowerP;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.po.influx.DataHarmpowerQ;
import com.njcn.dataProcess.po.influx.DataHarmrateI;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerQDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmPowerQD;
@@ -18,6 +20,7 @@ import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
@@ -58,10 +61,23 @@ public class InfluxdbDataHarmpowerQImpl extends MppServiceImpl<RStatDataHarmPowe
}
}
@Override
public List<DataHarmPowerQDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmPowerQDto> result = new ArrayList<>();
List<DataHarmpowerQ> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataHarmPowerQDto dto = new DataHarmPowerQDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataHarmpowerQ> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataHarmpowerQ> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//以监测点分组
@@ -169,7 +185,8 @@ public class InfluxdbDataHarmpowerQImpl extends MppServiceImpl<RStatDataHarmPowe
* 2.异常数据和无异常数据参杂,剔除异常数据,只计算正常数据;
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataHarmpowerQ> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataHarmpowerQ> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap, Boolean dataType) {
List<DataHarmpowerQ> dataList;
List<DataHarmpowerQ> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmpowerQ.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.Q, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -179,36 +196,45 @@ public class InfluxdbDataHarmpowerQImpl extends MppServiceImpl<RStatDataHarmPowe
.select(DataHarmpowerQ::getValueType)
.select(DataHarmpowerQ::getQ)
.select(DataHarmpowerQ::getQualityFlag)
.select(DataHarmpowerQ::getAbnormalFlag)
.between(DataHarmpowerQ::getTime, startTime, endTime)
.eq(DataHarmpowerQ::getQualityFlag,"0");
List<DataHarmpowerQ> list = dataHarmpowerQMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmpowerQ>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmpowerQ::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerQ> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmpowerQ>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmpowerQ::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerQ> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}
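
The abnormal-minute exclusion in every getMinuteData relies on a plain string match: timeList.contains(DATE_TIME_FORMATTER.format(item.getTime())). The keys stored in abnormalTime therefore have to use exactly the same pattern and zone, or the filter silently matches nothing. A small standalone demo of the key format, assuming the "yyyy-MM-dd HH:mm:ss" system-zone formatter seen elsewhere in this commit:

import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class MinuteKeyDemo {
    private static final DateTimeFormatter DATE_TIME_FORMATTER =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());

    public static void main(String[] args) {
        Instant sample = Instant.parse("2025-05-01T02:03:00Z");
        String key = DATE_TIME_FORMATTER.format(sample); // e.g. "2025-05-01 10:03:00" on a UTC+8 server
        // An abnormalTime entry like "2025-05-01T10:03:00" or "2025/05/01 10:03:00" would never
        // equal this key, so the corresponding row would not be excluded.
        System.out.println(key);
    }
}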

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -7,8 +8,10 @@ import com.njcn.dataProcess.dao.imapper.DataHarmpowerSMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataHarmPowerSRelationMapper;
import com.njcn.dataProcess.dto.DataHarmpowerSDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmpowerQ;
import com.njcn.dataProcess.po.influx.DataHarmpowerS;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerQDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPowerSDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmPowerSD;
import com.njcn.dataProcess.service.IDataHarmpowerS;
@@ -17,14 +20,12 @@ import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
@@ -60,10 +61,23 @@ public class InfluxdbDataHarmpowerSImpl extends MppServiceImpl<RStatDataHarmPowe
}
}
@Override
public List<DataHarmPowerSDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmPowerSDto> result = new ArrayList<>();
List<DataHarmpowerS> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataHarmPowerSDto dto = new DataHarmPowerSDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataHarmpowerS> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataHarmpowerS> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//以监测点分组
@@ -171,7 +185,8 @@ public class InfluxdbDataHarmpowerSImpl extends MppServiceImpl<RStatDataHarmPowe
* 2.异常数据和无异常数据参杂,剔除异常数据,只计算正常数据;
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataHarmpowerS> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataHarmpowerS> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap,Boolean dataType) {
List<DataHarmpowerS> dataList;
List<DataHarmpowerS> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmpowerS.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.S, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -181,36 +196,45 @@ public class InfluxdbDataHarmpowerSImpl extends MppServiceImpl<RStatDataHarmPowe
.select(DataHarmpowerS::getValueType)
.select(DataHarmpowerS::getS)
.select(DataHarmpowerS::getQualityFlag)
.select(DataHarmpowerS::getAbnormalFlag)
.between(DataHarmpowerS::getTime, startTime, endTime)
.eq(DataHarmpowerS::getQualityFlag,"0");
List<DataHarmpowerS> list = dataHarmpowerSMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmpowerS>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmpowerS::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerS> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)) {
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataHarmpowerS>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataHarmpowerS::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerS> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -9,6 +9,7 @@ import com.njcn.dataProcess.dao.relation.mapper.RStatDataIRelationMapper;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.po.RStatDataID;
@@ -21,6 +22,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
@@ -182,6 +184,18 @@ public class InfluxdbDataIImpl extends MppServiceImpl<RStatDataIRelationMapper,
}
@Override
public void addInfluxDbList(List<DataIDto> dataIList) {
List<DataI> result = new ArrayList<>();
dataIList.forEach(item -> {
DataI dataI = new DataI();
BeanUtils.copyProperties(item, dataI);
dataI.setTime(LocalDateTime.parse(item.getMinTime(), DATE_TIME_FORMATTER).atZone(ZoneId.systemDefault()).toInstant());
result.add(dataI);
});
dataIMapper.insertBatch(result);
}
@Override
public List<DataIDto> getDataI(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
@@ -196,6 +210,7 @@ public class InfluxdbDataIImpl extends MppServiceImpl<RStatDataIRelationMapper,
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataI> getMinuteDataI(LineCountEvaluateParam lineParam) {
List<DataI> dataList;
List<DataI> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataI.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.I, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -210,39 +225,48 @@ public class InfluxdbDataIImpl extends MppServiceImpl<RStatDataIRelationMapper,
.select(DataI::getIZero)
.select(DataI::getRms)
.select(DataI::getQualityFlag)
.select(DataI::getAbnormalFlag)
.between(DataI::getTime, lineParam.getStartTime(), lineParam.getEndTime())
.eq(DataI::getQualityFlag,"0");
if(CollUtil.isNotEmpty(lineParam.getPhasicType())){
influxQueryWrapper.regular(DataI::getPhasicType,lineParam.getPhasicType());
}
List<DataI> list = dataIMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataI>> lineMap = list.stream().collect(Collectors.groupingBy(DataI::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (lineParam.getDataType()) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataI>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataI::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}
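
addInfluxDbList above converts the DTO minute string back into the Influx point timestamp with LocalDateTime.parse(...).atZone(ZoneId.systemDefault()).toInstant(). That conversion is isolated below as a runnable demo; the pattern and zone mirror DATE_TIME_FORMATTER in this commit, while the sample value is made up.

import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

public class MinTimeToInstant {
    public static void main(String[] args) {
        DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss");
        String minTime = "2025-05-01 10:03:00";           // DataIDto.minTime as produced by getRawData
        Instant time = LocalDateTime.parse(minTime, fmt)  // wall-clock minute...
                .atZone(ZoneId.systemDefault())           // ...interpreted in the server's zone...
                .toInstant();                             // ...stored as the Influx point timestamp
        System.out.println(time);
    }
}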

View File

@@ -1,5 +1,6 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.collection.CollectionUtil;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.common.utils.HarmonicTimesUtil;
@@ -8,6 +9,7 @@ import com.njcn.dataProcess.dao.relation.mapper.RStatDataInHarmIRelationMapper;
import com.njcn.dataProcess.dto.DataInharmIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataInharmI;
import com.njcn.dataProcess.po.influx.DataInharmV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataInHarmIDto;
import com.njcn.dataProcess.pojo.po.RStatDataInHarmID;
@@ -17,14 +19,12 @@ import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
/**
@@ -60,10 +60,23 @@ public class InfluxdbDataInharmIImpl extends MppServiceImpl<RStatDataInHarmIRela
}
}
@Override
public List<DataInHarmIDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataInHarmIDto> result = new ArrayList<>();
List<DataInharmI> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
list.forEach(item->{
DataInHarmIDto dto = new DataInHarmIDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataInharmI> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
List<DataInharmI> data = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime(),lineParam.getDataType());
if (CollectionUtil.isNotEmpty(data)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
//以监测点分组
@@ -169,7 +182,8 @@ public class InfluxdbDataInharmIImpl extends MppServiceImpl<RStatDataInHarmIRela
* 2.异常数据和无异常数据参杂,剔除异常数据,只计算正常数据;
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataInharmI> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
public List<DataInharmI> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap, Boolean dataType) {
List<DataInharmI> dataList;
List<DataInharmI> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataInharmI.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.I, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -178,36 +192,45 @@ public class InfluxdbDataInharmIImpl extends MppServiceImpl<RStatDataInHarmIRela
.select(DataInharmI::getPhasicType)
.select(DataInharmI::getValueType)
.select(DataInharmI::getQualityFlag)
.select(DataInharmI::getAbnormalFlag)
.between(DataInharmI::getTime, startTime, endTime)
.eq(DataInharmI::getQualityFlag,"0");
List<DataInharmI> list = dataInharmIMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataInharmI>> lineMap = list.stream().collect(Collectors.groupingBy(DataInharmI::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataInharmI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (dataType) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataInharmI>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataInharmI::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataInharmI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -10,6 +10,7 @@ import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataInharmV;
import com.njcn.dataProcess.po.influx.DataPlt;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataInHarmVDto;
@@ -25,10 +26,7 @@ import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.*;
import java.util.stream.Collectors;
@@ -189,6 +187,7 @@ public class InfluxdbDataInharmVImpl extends MppServiceImpl<RStatDataInHarmVRela
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataInharmV> getMinuteData(LineCountEvaluateParam lineParam) {
List<DataInharmV> dataList;
List<DataInharmV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataInharmV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
@@ -197,39 +196,48 @@ public class InfluxdbDataInharmVImpl extends MppServiceImpl<RStatDataInHarmVRela
.select(DataInharmV::getPhasicType)
.select(DataInharmV::getValueType)
.select(DataInharmV::getQualityFlag)
.select(DataInharmV::getAbnormalFlag)
.between(DataInharmV::getTime, lineParam.getStartTime(), lineParam.getEndTime())
.eq(DataI::getQualityFlag,"0");
.eq(DataInharmV::getQualityFlag,"0");
if(CollUtil.isNotEmpty(lineParam.getPhasicType())){
influxQueryWrapper.regular(DataInharmV::getPhasicType,lineParam.getPhasicType());
}
List<DataInharmV> list = dataInharmVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataInharmV>> lineMap = list.stream().collect(Collectors.groupingBy(DataInharmV::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataInharmV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (lineParam.getDataType()) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataInharmV>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataInharmV::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataInharmV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -7,6 +7,7 @@ import com.njcn.dataProcess.dao.imapper.DataPltMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataPltRelationMapper;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataPlt;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
@@ -132,6 +133,7 @@ public class InfluxdbDataPltImpl extends MppServiceImpl<RStatDataPltRelationMapp
* 3.全是异常数据,则使用异常数据进行计算,但是日表中需要标记出来,此数据有异常
*/
public List<DataPlt> getMinuteDataPlt(LineCountEvaluateParam lineParam) {
List<DataPlt> dataList;
List<DataPlt> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataPlt.class);
influxQueryWrapper.regular(DataPlt::getLineId, lineParam.getLineId())
@@ -139,39 +141,48 @@ public class InfluxdbDataPltImpl extends MppServiceImpl<RStatDataPltRelationMapp
.select(DataPlt::getPhasicType)
.select(DataPlt::getPlt)
.select(DataPlt::getQualityFlag)
.select(DataPlt::getAbnormalFlag)
.between(DataPlt::getTime, lineParam.getStartTime(), lineParam.getEndTime())
.eq(DataPlt::getQualityFlag,"0");
if(CollUtil.isNotEmpty(lineParam.getPhasicType())){
influxQueryWrapper.regular(DataV::getPhasicType,lineParam.getPhasicType());
}
List<DataPlt> list = dataPltMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataPlt>> lineMap = list.stream().collect(Collectors.groupingBy(DataPlt::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataPlt> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
if(CollUtil.isNotEmpty(list)){
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (lineParam.getDataType()) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String,List<DataPlt>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataPlt::getLineId));
//有异常数据
if (CollectionUtil.isNotEmpty(lineParam.getAbnormalTime())) {
lineMap.forEach((k,v)->{
List<String> timeList = lineParam.getAbnormalTime().get(k);
//有异常数据,当前监测点自身的异常数据
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataPlt> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
//1.过滤掉异常数据后还有正常数据,则用正常数据计算
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//2.过滤掉异常数据后没有正常数据,则用所有异常数据计算,但是需要标记数据为异常的
//没有异常数据,则使用原数据
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
//没有异常数据,则使用原数据
else {
result.addAll(v);
}
});
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
});
}
//没有异常数据,则使用原数据
else {
result.addAll(dataList);
}
}
return result;
}

View File

@@ -15,7 +15,6 @@ import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.dto.LineDataVFiveItemDTO;
import com.njcn.dataProcess.dto.MeasurementCountDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataFlicker;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataVCvtDto;
@@ -25,6 +24,7 @@ import com.njcn.dataProcess.service.IDataV;
import com.njcn.dataProcess.util.TimeUtils;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
@@ -41,6 +41,7 @@ import java.util.stream.Collectors;
* @version 1.0
* @data 2024/11/7 11:02
*/
@Slf4j
@Service("InfluxdbDataVImpl")
public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper, RStatDataVD> implements IDataV {
@@ -128,6 +129,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
@@ -408,6 +410,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
.select(DataV::getVlDev)
.select(DataV::getVuDev)
.select(DataV::getQualityFlag)
.select(DataV::getAbnormalFlag)
.between(DataV::getTime, lineParam.getStartTime(), lineParam.getEndTime())
.eq(DataV::getQualityFlag, "0");
if (CollUtil.isNotEmpty(lineParam.getPhasicType())) {
@@ -417,12 +420,17 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
return result;
}
private void quality(List<DataV> result, InfluxQueryWrapper influxQueryWrapper, LineCountEvaluateParam
lineParam) {
private void quality(List<DataV> result, InfluxQueryWrapper influxQueryWrapper, LineCountEvaluateParam lineParam) {
List<DataV> dataList;
List<DataV> list = dataVMapper.selectByQueryWrapper(influxQueryWrapper);
if (CollUtil.isNotEmpty(list)) {
Map<String, List<DataV>> lineMap = list.stream().collect(Collectors.groupingBy(DataV::getLineId));
//过滤掉暂态事件影响的数据 true过滤 false不过滤
if (lineParam.getDataType()) {
dataList = list.stream().filter(item -> Objects.isNull(item.getAbnormalFlag())).collect(Collectors.toList());
} else {
dataList = list;
}
Map<String, List<DataV>> lineMap = dataList.stream().collect(Collectors.groupingBy(DataV::getLineId));
//有异常数据
Map<String, List<String>> timeMap = lineParam.getAbnormalTime();
if (CollectionUtil.isNotEmpty(timeMap)) {
@@ -449,7 +457,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
}
//没有异常数据,则使用原数据
else {
result.addAll(list);
result.addAll(dataList);
}
}
}
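
The transient-event filter is now copy-pasted into roughly ten implementations, and because dataType arrives as a boxed Boolean, the bare if (dataType) checks would throw a NullPointerException if a caller ever leaves it unset. One optional way to centralise both concerns is a small generic helper; this is a suggestion with illustrative names, not part of the commit.

import java.util.List;
import java.util.Objects;
import java.util.function.Function;
import java.util.stream.Collectors;

// Optional refactoring sketch: one shared transient-event filter instead of per-class copies.
public final class TransientFilter {
    private TransientFilter() {}

    /** dataType == true drops rows whose abnormalFlag is set; null or false returns the list unchanged. */
    public static <T> List<T> apply(List<T> rows, Function<T, ?> abnormalFlag, Boolean dataType) {
        if (rows == null || rows.isEmpty() || !Boolean.TRUE.equals(dataType)) {
            return rows;
        }
        return rows.stream()
                .filter(r -> Objects.isNull(abnormalFlag.apply(r)))
                .collect(Collectors.toList());
    }
}

A call site inside quality(...) would then read, for example, dataList = TransientFilter.apply(list, DataV::getAbnormalFlag, lineParam.getDataType());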

View File

@@ -27,6 +27,11 @@ public class RelationDataHarmRateIImpl extends MppServiceImpl<RStatDataHarmRateI
@Resource
private IDataHarmRateI dataHarmRateI;
@Override
public List<DataHarmRateIDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
@@ -45,4 +50,9 @@ public class RelationDataHarmRateIImpl extends MppServiceImpl<RStatDataHarmRateI
});
dataHarmRateI.saveOrUpdateBatchByMultiId(result,1000);
}
@Override
public void addInfluxDbList(List<DataHarmRateIDto> dataIList) {
}
}
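
The relation-side writers cap their batches (saveOrUpdateBatchByMultiId(result, 1000)), while the Influx-side addInfluxDbList implementations push the whole list into a single insertBatch call. Whether the Influx mapper needs chunking depends on its implementation, but if large replay windows ever become a problem, the same partitioning idea can be reused with ListUtils.partition, which the project already imports. A hedged sketch, with ChunkedWriter as an illustrative name:

import java.util.List;
import java.util.function.Consumer;

import org.apache.commons.collections4.ListUtils;

// Sketch only: split a large row list into fixed-size chunks and hand each chunk to a batch writer.
public final class ChunkedWriter {
    private static final int BATCH_SIZE = 1000;

    private ChunkedWriter() {}

    public static <T> void write(List<T> rows, Consumer<List<T>> writer) {
        if (rows == null || rows.isEmpty()) {
            return;
        }
        // ListUtils.partition creates fixed-size sublist views; each chunk is one insert round-trip.
        ListUtils.partition(rows, BATCH_SIZE).forEach(writer);
    }
}

Usage would look like ChunkedWriter.write(result, dataIMapper::insertBatch);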

View File

@@ -7,6 +7,7 @@ import com.njcn.dataProcess.dto.DataHarmphasicIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataHarmphasicI;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataHarmPhasicIDto;
import com.njcn.dataProcess.pojo.po.RStatDataHarmPhasicID;
import com.njcn.dataProcess.service.IDataHarmphasicI;
@@ -67,6 +68,11 @@ public class RelationDataHarmphasicIImpl extends MppServiceImpl<RStatDataHarmPha
}
}
@Override
public List<DataHarmPhasicIDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();

View File

@@ -69,6 +69,11 @@ public class RelationDataHarmpowerQImpl extends MppServiceImpl<RStatDataHarmPowe
}
@Override
public List<DataHarmPowerQDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();

View File

@@ -64,6 +64,11 @@ public class RelationDataHarmpowerSImpl extends MppServiceImpl<RStatDataHarmPowe
}
@Override
public List<DataHarmPowerSDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();

View File

@@ -5,16 +5,15 @@ import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DatePattern;
import cn.hutool.core.util.ObjectUtil;
import cn.hutool.core.util.StrUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.dataProcess.dao.relation.mapper.DataIRelationMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataIRelationMapper;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.po.relation.DataI;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.po.RStatDataID;
import com.njcn.dataProcess.service.IDataI;
@@ -26,8 +25,13 @@ import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -40,9 +44,9 @@ import java.util.stream.Collectors;
@Service("RelationDataIImpl")
@RequiredArgsConstructor
public class RelationDataIImpl extends MppServiceImpl<RStatDataIRelationMapper, RStatDataID> implements IDataI {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
@Resource
private DataIRelationMapper dataIRelationMapper;
@Resource
private IDataI iDataI;
@@ -96,6 +100,10 @@ public class RelationDataIImpl extends MppServiceImpl<RStatDataIRelationMapper,
iDataI.saveOrUpdateBatchByMultiId(result,1000);
}
@Override
public void addInfluxDbList(List<DataIDto> dataIList) {
}
@Override
public List<DataIDto> getDataI(LineCountEvaluateParam lineParam) {
List<DataIDto> result = new ArrayList<>();

View File

@@ -62,6 +62,11 @@ public class RelationDataInharmIImpl extends MppServiceImpl<RStatDataInHarmIRela
}
}
@Override
public List<DataInHarmIDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();