Data cleaning algorithm

This commit is contained in:
xy
2025-02-26 11:07:34 +08:00
parent 92d540e497
commit f1ddff6bcb
99 changed files with 3937 additions and 224 deletions

View File

@@ -6,10 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -54,7 +55,14 @@ public class DataFlickerController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataFlickerDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataFlickerDto> data = dataFlickerQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
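Each of the new getRawData endpoints in this commit (this controller and the ones that follow) accepts the same LineCountEvaluateParam request body. A minimal caller sketch is shown below; only the getters (getLineId, getStartTime, getEndTime, getAbnormalTime) appear in the commit, so the setter names and the no-arg constructor are assumptions.

import java.util.*;
import com.njcn.dataProcess.param.LineCountEvaluateParam;

public class GetRawDataCallSketch {
    public static LineCountEvaluateParam buildParam() {
        // Hypothetical usage: setter names are assumed counterparts of the getters used in this commit.
        LineCountEvaluateParam lineParam = new LineCountEvaluateParam();
        lineParam.setLineId(Arrays.asList("line-001", "line-002"));   // monitoring point ids
        lineParam.setStartTime("2025-02-01 00:00:00");                // same pattern as DATE_TIME_FORMATTER
        lineParam.setEndTime("2025-02-01 23:59:59");
        // minute timestamps already flagged as abnormal, keyed by line id; an empty map skips cleaning
        Map<String, List<String>> abnormalTime = new HashMap<>();
        abnormalTime.put("line-001", Collections.singletonList("2025-02-01 10:03:00"));
        lineParam.setAbnormalTime(abnormalTime);
        return lineParam;
    }
}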

View File

@@ -6,9 +6,12 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -42,7 +45,6 @@ public class DataFlucController extends BaseController {
@InsertBean
private IDataFluc dataFlucInsert;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/batchInsertion")
@ApiOperation("批量插入")
@@ -53,9 +55,13 @@ public class DataFlucController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataFlucDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataFlucDto> data = dataFlucQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -0,0 +1,55 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmRateV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author hongawen
* @version 1.0
* @date 2024/11/6 19:48
*/
@Validated
@Slf4j
@RestController
@RequestMapping("/dataHarmRateV")
@Api(tags = "谐波电压含有率")
public class DataHarmRateVController extends BaseController {
@QueryBean
private IDataHarmRateV dataHarmRateVQuery;
@InsertBean
private IDataHarmRateV dataHarmRateVInsert;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmDto> data = dataHarmRateVQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -54,7 +56,14 @@ public class DataHarmphasicVController extends BaseController {
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmDto> data = dataHarmphasicVQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,13 @@ public class DataHarmpowerPController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataPowerPDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataPowerPDto> data = dataHarmpowerPQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,13 @@ public class DataIController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataIDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataIDto> data = dataIQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -9,6 +9,8 @@ import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,14 @@ public class DataInharmVController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataHarmDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataHarmDto> data = dataInharmVQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -6,9 +6,11 @@ import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.annotation.QueryBean;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
@@ -53,9 +55,14 @@ public class DataPltController extends BaseController {
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getRawData")
@ApiOperation("获取原始数据")
public HttpResult<List<DataPltDto>> getRawData(@RequestBody LineCountEvaluateParam lineParam) {
String methodDescribe = getMethodDescribe("getRawData");
List<DataPltDto> data = dataPltQuery.getRawData(lineParam);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, data, methodDescribe);
}
}

View File

@@ -105,9 +105,10 @@ public class DataVController extends BaseController {
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/addInfluxDbList")
@ApiOperation("时序数据库插入数据")
@Deprecated
public HttpResult<String> addInfluxDbList(@RequestBody List<DataVDto> dataVList) {
String methodDescribe = getMethodDescribe("addInfluxDbList");
dataVInsert.addInfluxDbList(dataVList);
dataVQuery.addInfluxDbList(dataVList);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, "", methodDescribe);
}
}

View File

@@ -0,0 +1,57 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.service.IPqDataVerifyService;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.ApiOperation;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* Front-end controller
* </p>
*
* @author xy
* @since 2025-02-17
*/
@RestController
@RequestMapping("/pqDataVerify")
public class PqDataVerifyController extends BaseController {
@Resource
private IPqDataVerifyService pqDataVerifyService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.ADD)
@PostMapping("/insertData")
@ApiOperation("存储清洗的异常数据")
public HttpResult<List<PqDataVerify>> insertData(@RequestBody List<PqDataVerify> list) {
String methodDescribe = getMethodDescribe("insertData");
pqDataVerifyService.insertData(list);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, null, methodDescribe);
}
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/queryData")
@ApiOperation("查询清洗的异常数据")
public HttpResult<List<PqDataVerify>> queryData(@RequestBody LineCountEvaluateParam param) {
String methodDescribe = getMethodDescribe("queryData");
List<PqDataVerify> list = pqDataVerifyService.queryData(param);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
}

View File

@@ -0,0 +1,56 @@
package com.njcn.dataProcess.controller;
import com.njcn.common.pojo.annotation.OperateInfo;
import com.njcn.common.pojo.constant.OperateType;
import com.njcn.common.pojo.enums.common.LogEnum;
import com.njcn.common.pojo.enums.response.CommonResponseEnum;
import com.njcn.common.pojo.response.HttpResult;
import com.njcn.common.utils.HttpResultUtil;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.service.IPqReasonableRangeService;
import com.njcn.web.controller.BaseController;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Controller;
import org.springframework.validation.annotation.Validated;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
import java.util.List;
/**
* <p>
* Front-end controller
* </p>
*
* @author xy
* @since 2025-02-13
*/
@Validated
@Slf4j
@RestController
@RequestMapping("/pqReasonableRange")
@Api(tags = "数据清洗标准库")
public class PqReasonableRangeController extends BaseController {
@Resource
private IPqReasonableRangeService pqReasonableRangeService;
@OperateInfo(info = LogEnum.BUSINESS_COMMON,operateType = OperateType.QUERY)
@PostMapping("/getData")
@ApiOperation("按条件获取数据合理范围")
public HttpResult<List<PqReasonableRangeDto>> getData(@RequestBody DataCleanParam param) {
String methodDescribe = getMethodDescribe("getData");
List<PqReasonableRangeDto> list = pqReasonableRangeService.getReasonableRangeList(param);
return HttpResultUtil.assembleCommonResponseResult(CommonResponseEnum.SUCCESS, list, methodDescribe);
}
}

View File

@@ -0,0 +1,14 @@
package com.njcn.dataProcess.dao.imapper;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.influx.base.InfluxDbBaseMapper;
/**
* @author xy
*/
public interface DataHarmRateVMapper extends InfluxDbBaseMapper<DataHarmrateV> {
}

View File

@@ -0,0 +1,16 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.github.jeffreyning.mybatisplus.base.MppBaseMapper;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
/**
* <p>
* Mapper interface
* </p>
*
* @author xy
* @since 2025-02-17
*/
public interface PqDataVerifyMapper extends MppBaseMapper<PqDataVerify> {
}

View File

@@ -0,0 +1,18 @@
package com.njcn.dataProcess.dao.relation.mapper;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqReasonableRange;
import java.util.List;
/**
* <p>
* Mapper interface
* </p>
*
* @author xy
* @since 2025-02-13
*/
public interface PqReasonableRangeMapper extends BaseMapper<PqReasonableRange> {
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import java.util.List;
@@ -13,4 +15,11 @@ import java.util.List;
*/
public interface IDataFlicker {
void batchInsertion(List<DataFlickerDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataFlickerDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import java.util.List;
@@ -13,4 +15,11 @@ import java.util.List;
*/
public interface IDataFluc {
void batchInsertion(List<DataFlucDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -0,0 +1,19 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.util.List;
/**
* @author xy
*/
public interface IDataHarmRateV {
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.util.List;
@@ -13,4 +15,11 @@ import java.util.List;
*/
public interface IDataHarmphasicV {
void batchInsertion(List<DataHarmphasicVDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,9 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import java.util.List;
@@ -13,4 +16,11 @@ import java.util.List;
*/
public interface IDataHarmpowerP {
void batchInsertion(List<DataHarmpowerPDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,10 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import java.util.List;
@@ -12,5 +16,13 @@ import java.util.List;
* @version V1.0.0
*/
public interface IDataI {
void batchInsertion(List<DataIDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataIDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,8 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import java.util.List;
@@ -14,4 +16,11 @@ import java.util.List;
public interface IDataInharmV {
void batchInsertion(List<DataInharmVDTO> dataIDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -1,6 +1,9 @@
package com.njcn.dataProcess.service;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import java.util.List;
@@ -13,4 +16,11 @@ import java.util.List;
*/
public interface IDataPlt {
void batchInsertion(List<DataPltDTO> dataPltDTOList);
/**
* Fetch raw data
* @param lineParam
* @return
*/
List<DataPltDto> getRawData(LineCountEvaluateParam lineParam);
}

View File

@@ -4,7 +4,6 @@ import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.dto.DataVDTO;
import com.njcn.dataProcess.dto.DataVFiveItemDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataV;
import com.njcn.dataProcess.pojo.dto.CommonMinuteDto;
import com.njcn.dataProcess.pojo.dto.DataVDto;
import com.njcn.dataProcess.pojo.po.RStatDataVD;

View File

@@ -0,0 +1,30 @@
package com.njcn.dataProcess.service;
import com.github.jeffreyning.mybatisplus.service.IMppService;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import java.util.List;
/**
* <p>
* Service class
* </p>
*
* @author xy
* @since 2025-02-17
*/
public interface IPqDataVerifyService extends IMppService<PqDataVerify> {
/**
* Insert abnormal data
* @param list
*/
void insertData(List<PqDataVerify> list);
/**
* Query abnormal data
* @param param
*/
List<PqDataVerify> queryData(LineCountEvaluateParam param);
}

View File

@@ -0,0 +1,27 @@
package com.njcn.dataProcess.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqReasonableRange;
import java.util.List;
/**
* <p>
* Service class
* </p>
*
* @author xy
* @since 2025-02-13
*/
public interface IPqReasonableRangeService extends IService<PqReasonableRange> {
/**
* Fetch the normal-range data for steady-state indicators by condition
* @param param
* @return
*/
List<PqReasonableRangeDto> getReasonableRangeList(DataCleanParam param);
}

View File

@@ -1,15 +1,23 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.dataProcess.dao.imapper.DataFlickerMapper;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataFlicker;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +31,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataFlickerImpl implements IDataFlicker {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataFlickerMapper dataFlickerMapper;
@@ -44,4 +54,69 @@ public class InfluxdbDataFlickerImpl implements IDataFlicker {
}
}
@Override
public List<DataFlickerDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataFlickerDto> result = new ArrayList<>();
List<DataFlicker> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataFlickerDto dto = new DataFlickerDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataFlicker> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataFlicker> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataFlicker.class);
influxQueryWrapper.regular(DataFlicker::getLineId, lineList)
.select(DataFlicker::getLineId)
.select(DataFlicker::getPhasicType)
.select(DataFlicker::getFluc)
.select(DataFlicker::getPst)
.select(DataFlicker::getPlt)
.select(DataFlicker::getQualityFlag)
.between(DataFlicker::getTime, startTime, endTime)
.eq(DataFlicker::getQualityFlag,"0");
List<DataFlicker> list = dataFlickerMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataFlicker>> lineMap = list.stream().collect(Collectors.groupingBy(DataFlicker::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFlicker> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}
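The getMinuteData method above, and the corresponding methods in the Fluc, HarmRateV, HarmphasicV, HarmpowerP, DataI, InharmV, Plt and DataV implementations further down, all repeat the same three-case filtering. A generic sketch of that shared logic, factored out purely for illustration (this helper is not part of the commit; entity accessors are passed in as functions):

import java.util.*;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;

public final class AbnormalDataFilterSketch {

    // Generic version of the per-line-id filtering used by the getMinuteData* methods in this commit.
    public static <T> List<T> applyAbnormalFilter(List<T> rows,
                                                  Map<String, List<String>> timeMap,
                                                  Function<T, String> lineIdOf,
                                                  Function<T, String> minuteOf,
                                                  Consumer<T> markAbnormal) {
        List<T> result = new ArrayList<>();
        // no abnormal timestamps supplied: return the raw rows unchanged
        if (timeMap == null || timeMap.isEmpty()) {
            result.addAll(rows);
            return result;
        }
        Map<String, List<T>> byLine = rows.stream().collect(Collectors.groupingBy(lineIdOf));
        byLine.forEach((lineId, lineRows) -> {
            List<String> abnormalMinutes = timeMap.get(lineId);
            // this monitoring point has no abnormal minutes: keep its rows
            if (abnormalMinutes == null || abnormalMinutes.isEmpty()) {
                result.addAll(lineRows);
                return;
            }
            List<T> normal = lineRows.stream()
                    .filter(r -> !abnormalMinutes.contains(minuteOf.apply(r)))
                    .collect(Collectors.toList());
            if (!normal.isEmpty()) {
                // normal rows remain after dropping abnormal minutes: use only those
                result.addAll(normal);
            } else {
                // every row is abnormal: keep them, but mark each one so the daily table can flag it
                lineRows.forEach(markAbnormal);
                result.addAll(lineRows);
            }
        });
        return result;
    }
}

For DataFlicker, for example, it could be invoked as applyAbnormalFilter(list, timeMap, DataFlicker::getLineId, item -> DATE_TIME_FORMATTER.format(item.getTime()), item -> item.setQualityFlag("1")).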

View File

@@ -1,15 +1,23 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataFlucDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.dataProcess.dao.imapper.DataFlucMapper;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataFluc;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +31,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataFlucImpl implements IDataFluc {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataFlucMapper dataFlucMapper;
@@ -44,4 +54,68 @@ public class InfluxdbDataFlucImpl implements IDataFluc {
}
}
@Override
public List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataFlucDto> result = new ArrayList<>();
List<DataFluc> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataFlucDto dto = new DataFlucDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataFluc> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataFluc> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataFluc.class);
influxQueryWrapper.regular(DataFluc::getLineId, lineList)
.select(DataFluc::getLineId)
.select(DataFluc::getPhasicType)
.select(DataFluc::getFluc)
.select(DataFluc::getFluccf)
.select(DataFluc::getQualityFlag)
.between(DataFluc::getTime, startTime, endTime)
.eq(DataFluc::getQualityFlag,"0");
List<DataFluc> list = dataFlucMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataFluc>> lineMap = list.stream().collect(Collectors.groupingBy(DataFluc::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataFluc> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -0,0 +1,98 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataHarmRateVMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmrateV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmRateV;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* @author xy
*/
@Service("InfluxdbDataHarmRateVImpl")
@RequiredArgsConstructor
public class InfluxdbDataHarmRateVImpl implements IDataHarmRateV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataHarmRateVMapper dataHarmRateVMapper;
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataHarmrateV> list = getMinuteDataI(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataHarmrateV> getMinuteDataI(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataHarmrateV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmrateV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(2, 50, 1));
influxQueryWrapper.regular(DataHarmrateV::getLineId, lineList)
.select(DataHarmrateV::getLineId)
.select(DataHarmrateV::getPhasicType)
.select(DataHarmrateV::getValueType)
.select(DataHarmrateV::getQualityFlag)
.between(DataHarmrateV::getTime, startTime, endTime)
.eq(DataHarmrateV::getQualityFlag,"0");
List<DataHarmrateV> list = dataHarmRateVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmrateV>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmrateV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmrateV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,25 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataHarmphasicVMapper;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmphasicV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +33,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataHarmphasicVImpl implements IDataHarmphasicV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataHarmphasicVMapper dataHarmphasicVMapper;
@@ -44,4 +56,68 @@ public class InfluxdbDataHarmphasicVImpl implements IDataHarmphasicV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataHarmphasicV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataHarmphasicV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataHarmphasicV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmphasicV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataHarmphasicV::getLineId, lineList)
.select(DataHarmphasicV::getLineId)
.select(DataHarmphasicV::getPhasicType)
.select(DataHarmphasicV::getValueType)
.select(DataHarmphasicV::getQualityFlag)
.between(DataHarmphasicV::getTime, startTime, endTime)
.eq(DataHarmphasicV::getQualityFlag,"0");
List<DataHarmphasicV> list = dataHarmphasicVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmphasicV>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmphasicV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmphasicV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,28 @@
package com.njcn.dataProcess.service.impl.influxdb;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.dao.imapper.DataHarmpowerPMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataHarmpowerP;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +36,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataHarmpowerPImpl implements IDataHarmpowerP {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataHarmpowerPMapper dataHarmpowerPMapper;
@@ -43,4 +58,71 @@ public class InfluxdbDataHarmpowerPImpl implements IDataHarmpowerP {
}
}
@Override
public List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataPowerPDto> result = new ArrayList<>();
List<DataHarmpowerP> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataPowerPDto dto = new DataPowerPDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataHarmpowerP> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataHarmpowerP> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataHarmpowerP.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.P, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataHarmpowerP::getLineId, lineList)
.select(DataHarmpowerP::getLineId)
.select(DataHarmpowerP::getPhasicType)
.select(DataHarmpowerP::getValueType)
.select(DataHarmpowerP::getP)
.select(DataHarmpowerP::getDf)
.select(DataHarmpowerP::getPf)
.select(DataHarmpowerP::getQualityFlag)
.between(DataHarmpowerP::getTime, startTime, endTime)
.eq(DataHarmpowerP::getQualityFlag,"0");
List<DataHarmpowerP> list = dataHarmpowerPMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataHarmpowerP>> lineMap = list.stream().collect(Collectors.groupingBy(DataHarmpowerP::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataHarmpowerP> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,25 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataIDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataIMapper;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,6 +33,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataIImpl implements IDataI {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataIMapper dataIMapper;
@@ -43,4 +55,75 @@ public class InfluxdbDataIImpl implements IDataI {
}
}
@Override
public List<DataIDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataIDto> result = new ArrayList<>();
List<DataI> list = getMinuteDataI(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataIDto dto = new DataIDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch dataI minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataI> getMinuteDataI(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataI> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataI.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.I, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataI::getLineId, lineList)
.select(DataI::getLineId)
.select(DataI::getPhasicType)
.select(DataI::getValueType)
.select(DataI::getINeg)
.select(DataI::getIPos)
.select(DataI::getIThd)
.select(DataI::getIUnbalance)
.select(DataI::getIZero)
.select(DataI::getRms)
.select(DataI::getQualityFlag)
.between(DataI::getTime, startTime, endTime)
.eq(DataI::getQualityFlag,"0");
List<DataI> list = dataIMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataI>> lineMap = list.stream().collect(Collectors.groupingBy(DataI::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataI> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,17 +1,29 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataInharmVMapper;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataInharmV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
* Description:
* Date: 2024/11/18 14:33 [requirement ID]
@@ -23,6 +35,8 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataInharmVImpl implements IDataInharmV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataInharmVMapper dataInharmVMapper;
@@ -43,4 +57,68 @@ public class InfluxdbDataInharmVImpl implements IDataInharmV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataHarmDto> result = new ArrayList<>();
List<DataInharmV> list = getMinuteData(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataHarmDto dto = new DataHarmDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataInharmV> getMinuteData(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataInharmV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataInharmV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataInharmV::getLineId, lineList)
.select(DataInharmV::getLineId)
.select(DataInharmV::getPhasicType)
.select(DataInharmV::getValueType)
.select(DataInharmV::getQualityFlag)
.between(DataInharmV::getTime, startTime, endTime)
.eq(DataInharmV::getQualityFlag,"0");
List<DataInharmV> list = dataInharmVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataInharmV>> lineMap = list.stream().collect(Collectors.groupingBy(DataInharmV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataInharmV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -1,15 +1,26 @@
package com.njcn.dataProcess.service.impl.influxdb;
import com.njcn.dataProcess.dto.DataPltDTO;
import cn.hutool.core.collection.CollectionUtil;
import com.njcn.common.utils.HarmonicTimesUtil;
import com.njcn.dataProcess.dao.imapper.DataPltMapper;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.influx.DataI;
import com.njcn.dataProcess.po.influx.DataPlt;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.influx.constant.InfluxDbSqlConstant;
import com.njcn.influx.query.InfluxQueryWrapper;
import lombok.RequiredArgsConstructor;
import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
/**
@@ -23,8 +34,9 @@ import java.util.stream.Collectors;
@RequiredArgsConstructor
public class InfluxdbDataPltImpl implements IDataPlt {
private final DataPltMapper dataPltMapper;
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
private final DataPltMapper dataPltMapper;
@Override
public void batchInsertion(List<DataPltDTO> dataPltDTOList) {
@@ -44,4 +56,68 @@ public class InfluxdbDataPltImpl implements IDataPlt {
}
}
@Override
public List<DataPltDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataPltDto> result = new ArrayList<>();
List<DataPlt> list = getMinuteDataPlt(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataPltDto dto = new DataPltDto();
BeanUtils.copyProperties(item,dto);
dto.setMinTime(DATE_TIME_FORMATTER.format(item.getTime()));
result.add(dto);
});
return result;
}
/**
* Fetch minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When abnormal data has to be removed, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataPlt> getMinuteDataPlt(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataPlt> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataPlt.class);
influxQueryWrapper.regular(DataPlt::getLineId, lineList)
.select(DataPlt::getLineId)
.select(DataPlt::getPhasicType)
.select(DataPlt::getPlt)
.select(DataPlt::getQualityFlag)
.between(DataPlt::getTime, startTime, endTime)
.eq(DataPlt::getQualityFlag,"0");
List<DataPlt> list = dataPltMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataPlt>> lineMap = list.stream().collect(Collectors.groupingBy(DataPlt::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataPlt> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -29,6 +29,7 @@ import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
@@ -39,7 +40,7 @@ import java.util.stream.Collectors;
@Service("InfluxdbDataVImpl")
public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper, RStatDataVD> implements IDataV {
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());;
private final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());
@Resource
private DataVMapper dataVMapper;
@@ -119,7 +120,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
@Override
public List<DataVDto> getRawData(LineCountEvaluateParam lineParam) {
List<DataVDto> result = new ArrayList<>();
List<DataV> list = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), false);
List<DataV> list = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(), lineParam.getAbnormalTime());
list.forEach(item->{
DataVDto dto = new DataVDto();
BeanUtils.copyProperties(item,dto);
@@ -132,7 +133,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
@Override
public List<CommonMinuteDto> getBaseData(LineCountEvaluateParam lineParam) {
List<CommonMinuteDto> result = new ArrayList<>();
List<DataV> dataVList = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),true);
List<DataV> dataVList = getMinuteDataV(lineParam.getLineId(), lineParam.getStartTime(), lineParam.getEndTime(),lineParam.getAbnormalTime());
if (CollectionUtil.isNotEmpty(dataVList)) {
String time = TimeUtils.StringTimeToString(lineParam.getStartTime());
// group by monitoring point
@@ -141,6 +142,7 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
CommonMinuteDto dto = new CommonMinuteDto();
dto.setLineId(line);
dto.setTime(time);
dto.setQualityFlag(lineList.get(0).getQualityFlag());
// group by phase
Map<String,List<DataV>> phasicTypeMap = lineList.stream().collect(Collectors.groupingBy(DataV::getPhasicType));
List<CommonMinuteDto.PhasicType> phasicTypes = new ArrayList<>();
@@ -256,8 +258,14 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
/**
* Fetch dataV minute-level data by monitoring-point list and time range.
* The timeMap parameter decides whether cleaning is applied; an empty timeMap means no processing.
* When removing abnormal data, three cases are distinguished:
* 1. No abnormal data: return the list as-is;
* 2. Abnormal and normal data are mixed: drop the abnormal records and compute with the normal data only;
* 3. All data is abnormal: compute with the abnormal data, but flag it in the daily table as abnormal.
*/
public List<DataV> getMinuteDataV(List<String> lineList, String startTime, String endTime, boolean clean) {
public List<DataV> getMinuteDataV(List<String> lineList, String startTime, String endTime, Map<String,List<String>> timeMap) {
List<DataV> result = new ArrayList<>();
InfluxQueryWrapper influxQueryWrapper = new InfluxQueryWrapper(DataV.class);
influxQueryWrapper.samePrefixAndSuffix(InfluxDbSqlConstant.V, "", HarmonicTimesUtil.harmonicTimesList(1, 50, 1));
influxQueryWrapper.regular(DataV::getLineId, lineList)
@@ -276,11 +284,38 @@ public class InfluxdbDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
.select(DataV::getVlDev)
.select(DataV::getVuDev)
.select(DataV::getQualityFlag)
.between(DataV::getTime, startTime, endTime);
if (clean) {
influxQueryWrapper.eq(DataV::getAbnormalFlag,0);
.between(DataV::getTime, startTime, endTime)
.eq(DataV::getQualityFlag,"0");
List<DataV> list = dataVMapper.selectByQueryWrapper(influxQueryWrapper);
Map<String,List<DataV>> lineMap = list.stream().collect(Collectors.groupingBy(DataV::getLineId));
// abnormal data present
if (CollectionUtil.isNotEmpty(timeMap)) {
lineMap.forEach((k,v)->{
List<String> timeList = timeMap.get(k);
// this monitoring point has its own abnormal timestamps
if (CollectionUtil.isNotEmpty(timeList)) {
List<DataV> filterList = v.stream().filter(item -> !timeList.contains(DATE_TIME_FORMATTER.format(item.getTime()))).collect(Collectors.toList());
// 1. normal data remains after removing the abnormal records: compute with the normal data
if (CollectionUtil.isNotEmpty(filterList)) {
result.addAll(filterList);
}
// 2. no normal data remains: compute with all abnormal records, but mark them as abnormal
else {
v.parallelStream().forEach(item -> item.setQualityFlag("1"));
result.addAll(v);
}
}
// no abnormal data: use the original data
else {
result.addAll(v);
}
});
}
return dataVMapper.selectByQueryWrapper(influxQueryWrapper);
// no abnormal data: use the original data
else {
result.addAll(list);
}
return result;
}
}

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.relation;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.dataProcess.dao.relation.mapper.PqDataVerifyMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.po.PqDataVerify;
import com.njcn.dataProcess.service.IPqDataVerifyService;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Objects;
/**
* <p>
* Service implementation class
* </p>
*
* @author xy
* @since 2025-02-17
*/
@Service
public class PqDataVerifyServiceImpl extends MppServiceImpl<PqDataVerifyMapper, PqDataVerify> implements IPqDataVerifyService {
@Override
@Transactional(rollbackFor = Exception.class)
public void insertData(List<PqDataVerify> list) {
this.saveOrUpdateBatchByMultiId(list);
}
@Override
public List<PqDataVerify> queryData(LineCountEvaluateParam param) {
LambdaQueryWrapper<PqDataVerify> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.between(PqDataVerify::getTime,param.getStartTime(),param.getEndTime());
if (CollUtil.isNotEmpty(param.getLineId())) {
queryWrapper.in(PqDataVerify::getLineId,param.getLineId());
}
if (!Objects.isNull(param.getTableName())) {
queryWrapper.eq(PqDataVerify::getIndexTable, param.getTableName());
}
return this.list(queryWrapper);
}
}
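
queryData always bounds the result by time and adds the lineId / indexTable filters only when they are supplied. A hedged call sketch, assuming LineCountEvaluateParam exposes setters matching the getters used above (the setter names and the date-string format are assumptions):

//assumed setters on LineCountEvaluateParam; only startTime/endTime are mandatory for the query
LineCountEvaluateParam param = new LineCountEvaluateParam();
param.setStartTime("2025-02-01 00:00:00");
param.setEndTime("2025-02-02 00:00:00");
param.setLineId(java.util.Arrays.asList("line-001", "line-002")); //optional filter
param.setTableName("data_v");                                      //optional filter
List<PqDataVerify> rows = pqDataVerifyService.queryData(param);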

View File

@@ -0,0 +1,46 @@
package com.njcn.dataProcess.service.impl.relation;
import cn.hutool.core.collection.CollUtil;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.njcn.dataProcess.dao.relation.mapper.PqReasonableRangeMapper;
import com.njcn.dataProcess.param.DataCleanParam;
import com.njcn.dataProcess.pojo.dto.PqReasonableRangeDto;
import com.njcn.dataProcess.pojo.po.PqReasonableRange;
import com.njcn.dataProcess.service.IPqReasonableRangeService;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
/**
* <p>
* Service implementation class
* </p>
*
* @author xy
* @since 2025-02-13
*/
@Service
public class PqReasonableRangeServiceImpl extends ServiceImpl<PqReasonableRangeMapper, PqReasonableRange> implements IPqReasonableRangeService {
@Override
public List<PqReasonableRangeDto> getReasonableRangeList(DataCleanParam param) {
List<PqReasonableRangeDto> result = new ArrayList<>();
LambdaQueryWrapper<PqReasonableRange> queryWrapper = new LambdaQueryWrapper<>();
queryWrapper.eq(PqReasonableRange::getBelongingSystem,param.getSystemType())
.eq(PqReasonableRange::getDataSource,param.getDataSource())
.eq(PqReasonableRange::getInfluxdbTableName,param.getTableName())
.eq(PqReasonableRange::getState,1);
List<PqReasonableRange> list = this.list(queryWrapper);
if (CollUtil.isNotEmpty(list)) {
list.forEach(item->{
PqReasonableRangeDto dto = new PqReasonableRangeDto();
BeanUtils.copyProperties(item,dto);
result.add(dto);
});
}
return result;
}
}
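
getReasonableRangeList only loads the enabled range rows for one system/source/table combination; how a sample is checked against a range is not shown in this diff. A possible check, where getLowerLimit() and getUpperLimit() are hypothetical accessor names on PqReasonableRangeDto:

//hypothetical accessors; PqReasonableRangeDto's real fields are defined elsewhere in the project
boolean withinReasonableRange(double sampleValue, PqReasonableRangeDto range) {
    return sampleValue >= range.getLowerLimit() && sampleValue <= range.getUpperLimit();
}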

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataFlickerDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataFlickerRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataFlicker;
import com.njcn.dataProcess.pojo.dto.DataFlickerDto;
import com.njcn.dataProcess.service.IDataFlicker;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -53,4 +56,9 @@ public class RelationDataFlickerImpl implements IDataFlicker {
}
}
@Override
public List<DataFlickerDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataFlucDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataFlucRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataFluc;
import com.njcn.dataProcess.pojo.dto.DataFlucDto;
import com.njcn.dataProcess.service.IDataFluc;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -43,5 +46,12 @@ public class RelationDataFlucImpl implements IDataFluc {
dataFlucRelationMapper.insertBatchSomeColumn(dataFlucList);
}
};
}
@Override
public List<DataFlucDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
;
}

View File

@@ -0,0 +1,28 @@
package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dao.relation.mapper.DataIRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmRateV;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Collections;
import java.util.List;
/**
* @author xy
*/
@Service("RelationDataHarmRateVImpl")
@RequiredArgsConstructor
public class RelationDataHarmRateVImpl implements IDataHarmRateV {
@Resource
private DataIRelationMapper dataIRelationMapper;
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -2,7 +2,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmphasicVDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmphasicVRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataHarmphasicV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataHarmphasicV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -10,6 +12,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -50,5 +53,10 @@ public class RelationDataHarmphasicVImpl implements IDataHarmphasicV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataHarmpowerPDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataHarmpowerPRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataHarmpowerP;
import com.njcn.dataProcess.pojo.dto.DataPowerPDto;
import com.njcn.dataProcess.service.IDataHarmpowerP;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -53,4 +56,9 @@ public class RelationDataHarmpowerPImpl implements IDataHarmpowerP {
}
}
@Override
public List<DataPowerPDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataIDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataIRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataI;
import com.njcn.dataProcess.pojo.dto.DataIDto;
import com.njcn.dataProcess.service.IDataI;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -12,6 +14,7 @@ import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -51,4 +54,9 @@ public class RelationDataIImpl implements IDataI {
dataIRelationMapper.insertBatchSomeColumn(dataIList);
}
}
@Override
public List<DataIDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataInharmVDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataVInharmVRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataInharmV;
import com.njcn.dataProcess.pojo.dto.DataHarmDto;
import com.njcn.dataProcess.service.IDataInharmV;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -51,4 +54,9 @@ public class RelationDataInharmVImpl implements IDataInharmV {
}
}
@Override
public List<DataHarmDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -3,7 +3,9 @@ package com.njcn.dataProcess.service.impl.relation;
import com.njcn.dataProcess.dto.DataPltDTO;
import com.njcn.dataProcess.dao.relation.mapper.DataPltRelationMapper;
import com.njcn.dataProcess.param.LineCountEvaluateParam;
import com.njcn.dataProcess.po.relation.DataPlt;
import com.njcn.dataProcess.pojo.dto.DataPltDto;
import com.njcn.dataProcess.service.IDataPlt;
import com.njcn.dataProcess.util.BeanFeildUtils;
import lombok.RequiredArgsConstructor;
@@ -11,6 +13,7 @@ import org.apache.commons.collections4.ListUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
@@ -51,4 +54,9 @@ public class RelationDataPltImpl implements IDataPlt {
dataPltRelationMapper.insertBatchSomeColumn(dataPltList);
}
}
@Override
public List<DataPltDto> getRawData(LineCountEvaluateParam lineParam) {
return Collections.emptyList();
}
}

View File

@@ -2,6 +2,7 @@ package com.njcn.dataProcess.service.impl.relation;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.github.jeffreyning.mybatisplus.service.MppServiceImpl;
import com.njcn.dataProcess.annotation.InsertBean;
import com.njcn.dataProcess.dao.relation.mapper.DataVRelationMapper;
import com.njcn.dataProcess.dao.relation.mapper.RStatDataVRelationMapper;
import com.njcn.dataProcess.dto.DataVDTO;
@@ -20,6 +21,7 @@ import org.springframework.beans.BeanUtils;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.lang.reflect.Field;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Collections;
@@ -37,8 +39,11 @@ import java.util.stream.Collectors;
public class RelationDataVImpl extends MppServiceImpl<RStatDataVRelationMapper, RStatDataVD> implements IDataV {
@Resource
private DataVRelationMapper dataVRelationMapper;
@Resource
@InsertBean
private IDataV iDataV;
@Override
public Map<String, List<DataVFiveItemDTO>> getLineCountEvaluate(LineCountEvaluateParam lineParam) {
@@ -100,10 +105,11 @@ public class RelationDataVImpl extends MppServiceImpl<RStatDataVRelationMapper,
RStatDataVD dataV = new RStatDataVD();
dataV.setTime(TimeUtils.LocalDataTimeToLocalDate2(item.getTime()));
dataV.setPhasicType(item.getPhasicType());
dataV.setQualityFlag(Integer.valueOf(item.getQualityFlag()));
BeanUtils.copyProperties(item, dataV);
result.add(dataV);
});
iDataV.saveOrUpdateBatch(result);
iDataV.saveOrUpdateBatchByMultiId(result);
}
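
Switching from saveOrUpdateBatch to saveOrUpdateBatchByMultiId relies on the mybatisplus-plus extension behind MppServiceImpl, which upserts by a composite key declared with @MppMultiId. A sketch of what the key declaration on RStatDataVD might look like (the field names and types are guesses inferred from the setters above; the real definition is not part of this diff):

//assumed composite-key mapping, illustration only
public class RStatDataVD {
    @MppMultiId
    private String time;        //daily bucket produced by TimeUtils.LocalDataTimeToLocalDate2
    @MppMultiId
    private String lineId;
    @MppMultiId
    private String phasicType;
    private Integer qualityFlag;
    //... remaining measurement columns copied from DataV via BeanUtils.copyProperties
}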
@Override